Columns: focal_method (string, lengths 13 to 60.9k); test_case (string, lengths 25 to 109k)
public synchronized void resetExecutionProgressCheckIntervalMs() { _executionProgressCheckIntervalMs = _requestedExecutionProgressCheckIntervalMs == null ? _defaultExecutionProgressCheckIntervalMs : _requestedExecutionProgressCheckIntervalMs; LOG.info("ExecutionProgressCheckInterval has been reset to {} (the requested value, or the default when no value was requested)", _executionProgressCheckIntervalMs); }
@Test public void testResetExecutionProgressCheckIntervalMs() { KafkaCruiseControlConfig config = new KafkaCruiseControlConfig(getExecutorProperties()); Executor executor = new Executor(config, null, new MetricRegistry(), EasyMock.mock(MetadataClient.class), null, EasyMock.mock(AnomalyDetectorManager.class)); long defaultExecutionProgressCheckIntervalMs = config.getLong(ExecutorConfig.EXECUTION_PROGRESS_CHECK_INTERVAL_MS_CONFIG); executor.resetExecutionProgressCheckIntervalMs(); assertEquals(defaultExecutionProgressCheckIntervalMs, executor.executionProgressCheckIntervalMs()); /* Set requestedExecutionProgressCheckIntervalMs */ long requestedExecutionProgressCheckIntervalMs = 2 * defaultExecutionProgressCheckIntervalMs; executor.setRequestedExecutionProgressCheckIntervalMs(requestedExecutionProgressCheckIntervalMs); assertEquals(requestedExecutionProgressCheckIntervalMs, executor.executionProgressCheckIntervalMs()); }
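For context, the test drives the reset through a requested-interval setter and an interval getter on Executor. A minimal sketch of how those members could behave, with names taken from the test and bodies assumed (not from the Cruise Control source):

public synchronized void setRequestedExecutionProgressCheckIntervalMs(long requestedMs) {
    // assumed: record the requested value and apply it immediately
    _requestedExecutionProgressCheckIntervalMs = requestedMs;
    _executionProgressCheckIntervalMs = requestedMs;
}

public synchronized long executionProgressCheckIntervalMs() {
    return _executionProgressCheckIntervalMs; // assumed plain accessor
}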
public void init() { this.scheduledExecutorService = Executors.newSingleThreadScheduledExecutor( ApolloThreadFactory .create("DatabaseDiscoveryWithCache", true) ); scheduledExecutorService.scheduleAtFixedRate(this::updateCacheTask, SYNC_TASK_PERIOD_IN_SECOND, SYNC_TASK_PERIOD_IN_SECOND, TimeUnit.SECONDS); /* load them for init */ try { this.getInstances(ServiceNameConsts.APOLLO_CONFIGSERVICE); } catch (Throwable t) { log.error("fail to get instances of service name {}", ServiceNameConsts.APOLLO_CONFIGSERVICE, t); } try { this.getInstances(ServiceNameConsts.APOLLO_ADMINSERVICE); } catch (Throwable t) { log.error("fail to get instances of service name {}", ServiceNameConsts.APOLLO_ADMINSERVICE, t); } }
@Test void init() { DatabaseDiscoveryClient client = Mockito.mock(DatabaseDiscoveryClient.class); DatabaseDiscoveryClientMemoryCacheDecoratorImpl decorator = new DatabaseDiscoveryClientMemoryCacheDecoratorImpl(client); /* smoke test: init() must schedule the cache-refresh task without throwing */ decorator.init(); }
public abstract Map<String, String> properties(final Map<String, String> defaultProperties, final long additionalRetentionMs);
@Test public void shouldUseSuppliedConfigsForUnwindowedUnversionedChangelogConfig() { final Map<String, String> configs = new HashMap<>(); configs.put("retention.ms", "1000"); configs.put("retention.bytes", "10000"); configs.put("message.timestamp.type", "LogAppendTime"); final UnwindowedUnversionedChangelogTopicConfig topicConfig = new UnwindowedUnversionedChangelogTopicConfig("name", configs); final Map<String, String> properties = topicConfig.properties(Collections.emptyMap(), 0); assertEquals("1000", properties.get(TopicConfig.RETENTION_MS_CONFIG)); assertEquals("10000", properties.get(TopicConfig.RETENTION_BYTES_CONFIG)); assertEquals("LogAppendTime", properties.get(TopicConfig.MESSAGE_TIMESTAMP_TYPE_CONFIG)); }
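The focal method above is abstract. Sketched below is one plausible override for the unwindowed, unversioned changelog case, inferred from the test's expectation that supplied configs survive into the result; the actual Kafka Streams implementation may differ, and topicConfigs is assumed to be the map passed to the constructor:

@Override
public Map<String, String> properties(final Map<String, String> defaultProperties, final long additionalRetentionMs) {
    final Map<String, String> result = new HashMap<>(defaultProperties);
    result.putAll(topicConfigs); // assumed: user-supplied configs take precedence over defaults
    // additionalRetentionMs is assumed relevant only for windowed changelogs, so it is ignored here
    return result;
}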
public void deleteInstanceMetadata(Service service, String metadataId) { MetadataOperation<InstanceMetadata> operation = buildMetadataOperation(service); operation.setTag(metadataId); WriteRequest operationLog = WriteRequest.newBuilder().setGroup(Constants.INSTANCE_METADATA) .setOperation(DataOperation.DELETE.name()).setData(ByteString.copyFrom(serializer.serialize(operation))) .build(); submitMetadataOperation(operationLog); }
@Test void testDeleteInstanceMetadata() { String metadataId = "metadataId"; assertThrows(NacosRuntimeException.class, () -> namingMetadataOperateService.deleteInstanceMetadata(service, metadataId)); Mockito.verify(service).getNamespace(); Mockito.verify(service).getGroup(); Mockito.verify(service).getName(); }
@Override public CompletableFuture<SendPushNotificationResult> sendNotification(PushNotification pushNotification) { Message.Builder builder = Message.builder() .setToken(pushNotification.deviceToken()) .setAndroidConfig(AndroidConfig.builder() .setPriority(pushNotification.urgent() ? AndroidConfig.Priority.HIGH : AndroidConfig.Priority.NORMAL) .build()); final String key = switch (pushNotification.notificationType()) { case NOTIFICATION -> "newMessageAlert"; case ATTEMPT_LOGIN_NOTIFICATION_HIGH_PRIORITY -> "attemptLoginContext"; case CHALLENGE -> "challenge"; case RATE_LIMIT_CHALLENGE -> "rateLimitChallenge"; }; builder.putData(key, pushNotification.data() != null ? pushNotification.data() : ""); final Timer.Sample sample = Timer.start(); return GoogleApiUtil.toCompletableFuture(firebaseMessagingClient.sendAsync(builder.build()), executor) .whenComplete((ignored, throwable) -> sample.stop(SEND_NOTIFICATION_TIMER)) .thenApply(ignored -> new SendPushNotificationResult(true, Optional.empty(), false, Optional.empty())) .exceptionally(throwable -> { if (ExceptionUtils.unwrap(throwable) instanceof final FirebaseMessagingException firebaseMessagingException) { final String errorCode; if (firebaseMessagingException.getMessagingErrorCode() != null) { errorCode = firebaseMessagingException.getMessagingErrorCode().name(); } else { logger.warn("Received an FCM exception with no error code", firebaseMessagingException); errorCode = "unknown"; } final boolean unregistered = firebaseMessagingException.getMessagingErrorCode() == MessagingErrorCode.UNREGISTERED; return new SendPushNotificationResult(false, Optional.of(errorCode), unregistered, Optional.empty()); } else { throw ExceptionUtils.wrap(throwable); } }); }
@Test void testSendMessage() { final PushNotification pushNotification = new PushNotification("foo", PushNotification.TokenType.FCM, PushNotification.NotificationType.NOTIFICATION, null, null, null, true); final SettableApiFuture<String> sendFuture = SettableApiFuture.create(); sendFuture.set("message-id"); when(firebaseMessaging.sendAsync(any())).thenReturn(sendFuture); final SendPushNotificationResult result = fcmSender.sendNotification(pushNotification).join(); verify(firebaseMessaging).sendAsync(any(Message.class)); assertTrue(result.accepted()); assertTrue(result.errorCode().isEmpty()); assertFalse(result.unregistered()); }
UriEndpoint createUriEndpoint(String url, boolean isWs) { return createUriEndpoint(url, isWs, connectAddress); }
@Test void createUriEndpointIPv6Address() { String test1 = this.builder.host("::1") .port(8080) .build() .createUriEndpoint("/foo", false) .toExternalForm(); String test2 = this.builder.host("::1") .port(8080) .build() .createUriEndpoint("/foo", true) .toExternalForm(); assertThat(test1).isEqualTo("http://[::1]:8080/foo"); assertThat(test2).isEqualTo("ws://[::1]:8080/foo"); }
static Properties adminClientConfiguration(String bootstrapHostnames, PemTrustSet kafkaCaTrustSet, PemAuthIdentity authIdentity, Properties config) { if (config == null) { throw new InvalidConfigurationException("The config parameter should not be null"); } config.setProperty(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapHostnames); /* configuring TLS encryption if requested */ if (kafkaCaTrustSet != null) { config.putIfAbsent(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SSL"); config.setProperty(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG, "PEM"); config.setProperty(SslConfigs.SSL_TRUSTSTORE_CERTIFICATES_CONFIG, kafkaCaTrustSet.trustedCertificatesString()); } /* configuring TLS client authentication */ if (authIdentity != null) { config.putIfAbsent(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SSL"); config.setProperty(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, "PEM"); config.setProperty(SslConfigs.SSL_KEYSTORE_CERTIFICATE_CHAIN_CONFIG, authIdentity.certificateChainAsPem()); config.setProperty(SslConfigs.SSL_KEYSTORE_KEY_CONFIG, authIdentity.privateKeyAsPem()); } config.putIfAbsent(AdminClientConfig.METADATA_MAX_AGE_CONFIG, "30000"); config.putIfAbsent(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, "10000"); config.putIfAbsent(AdminClientConfig.RETRIES_CONFIG, "3"); config.putIfAbsent(AdminClientConfig.DEFAULT_API_TIMEOUT_MS_CONFIG, "40000"); return config; }
@Test public void testTlsConnection() { Properties config = DefaultAdminClientProvider.adminClientConfiguration("my-kafka:9092", mockPemTrustSet(), null, new Properties()); assertThat(config.size(), is(8)); assertDefaultConfigs(config); assertThat(config.get(AdminClientConfig.SECURITY_PROTOCOL_CONFIG), is("SSL")); assertThat(config.get(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG), is("PEM")); assertThat(config.get(SslConfigs.SSL_TRUSTSTORE_CERTIFICATES_CONFIG).toString(), containsString("ca1")); /* The order is not deterministic, so we check both certificates are present */ assertThat(config.get(SslConfigs.SSL_TRUSTSTORE_CERTIFICATES_CONFIG).toString(), containsString("ca2")); }
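A usage sketch for the mutual-TLS case, combining a trust set with a client identity; pemTrustSet and pemAuthIdentity are illustrative placeholders, not values from the excerpt:

// Illustrative only; in practice the PEM material would come from cluster secrets.
Properties config = DefaultAdminClientProvider.adminClientConfiguration(
        "my-kafka:9092", pemTrustSet, pemAuthIdentity, new Properties());
// security.protocol resolves to SSL once either branch runs, both the truststore and
// the keystore are PEM-typed, and the four putIfAbsent defaults are always present.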
@Override public void validate(final String methodName, final Class<?>[] parameterTypes, final Object[] arguments) throws Exception { List<Class<?>> groups = new ArrayList<>(); Class<?> methodClass = methodClass(methodName); if (Objects.nonNull(methodClass)) { groups.add(methodClass); } Set<ConstraintViolation<?>> violations = new HashSet<>(); Method method = clazz.getMethod(methodName, parameterTypes); Class<?>[] methodClasses; if (method.isAnnotationPresent(MethodValidated.class)) { methodClasses = method.getAnnotation(MethodValidated.class).value(); groups.addAll(Arrays.asList(methodClasses)); } /* add into default group */ groups.add(0, Default.class); groups.add(1, clazz); /* convert list to array */ Class<?>[] classGroups = new Class<?>[groups.size()]; classGroups = groups.toArray(classGroups); Object parameterBean = getMethodParameterBean(clazz, method, arguments); if (parameterBean != null) { violations.addAll(validator.validate(parameterBean, classGroups)); } for (Object arg : arguments) { validate(violations, arg, classGroups); } if (!violations.isEmpty()) { LOG.error("Failed to validate service: {}, method: {}, cause: {}", clazz.getName(), methodName, violations); StringBuilder validateError = new StringBuilder(); violations.forEach(each -> validateError.append(each.getMessage()).append(",")); throw new ValidationException(validateError.substring(0, validateError.length() - 1)); } }
@Test public void testItWithMapArg() throws Exception { final Map<String, String> map = new HashMap<>(); map.put("key", "value"); /* passes when validation raises no exception for a Map argument */ apacheDubboClientValidatorUnderTest.validate( "methodFive", new Class<?>[]{Map.class}, new Object[]{map}); }
@SuppressWarnings("unchecked") @SneakyThrows(ReflectiveOperationException.class) public static <T extends ShardingAlgorithm> T newInstance(final String shardingAlgorithmClassName, final Class<T> superShardingAlgorithmClass, final Properties props) { Class<?> algorithmClass = Class.forName(shardingAlgorithmClassName); if (!superShardingAlgorithmClass.isAssignableFrom(algorithmClass)) { throw new ShardingAlgorithmClassImplementationException(shardingAlgorithmClassName, superShardingAlgorithmClass); } T result = (T) algorithmClass.getDeclaredConstructor().newInstance(); result.init(convertToStringTypedProperties(props)); return result; }
@Test void assertNewInstance() { assertThat(ClassBasedShardingAlgorithmFactory.newInstance(ClassBasedStandardShardingAlgorithmFixture.class.getName(), StandardShardingAlgorithm.class, new Properties()), instanceOf(ClassBasedStandardShardingAlgorithmFixture.class)); }
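A minimal fixture that newInstance would accept needs only a no-arg constructor and the target interface. A hypothetical sketch follows; the fixture used by the real test suite may differ, and depending on the ShardingSphere version getType() may also need an override:

public final class ClassBasedStandardShardingAlgorithmFixture implements StandardShardingAlgorithm<Integer> {

    @Override
    public void init(final Properties props) {
        // no-op; newInstance calls init with the converted properties
    }

    @Override
    public String doSharding(final Collection<String> availableTargetNames, final PreciseShardingValue<Integer> shardingValue) {
        return availableTargetNames.iterator().next(); // placeholder routing
    }

    @Override
    public Collection<String> doSharding(final Collection<String> availableTargetNames, final RangeShardingValue<Integer> shardingValue) {
        return availableTargetNames; // placeholder routing
    }
}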
@Override public void rollback() throws SQLException { for (TransactionHook each : transactionHooks) { each.beforeRollback(connection.getCachedConnections().values(), getTransactionContext()); } if (connection.getConnectionSession().getTransactionStatus().isInTransaction()) { try { if (TransactionType.LOCAL == TransactionUtils.getTransactionType(getTransactionContext()) || null == distributionTransactionManager) { localTransactionManager.rollback(); } else { distributionTransactionManager.rollback(); } } finally { for (TransactionHook each : transactionHooks) { each.afterRollback(connection.getCachedConnections().values(), getTransactionContext()); } for (Connection each : connection.getCachedConnections().values()) { ConnectionSavepointManager.getInstance().transactionFinished(each); } connection.getConnectionSession().getTransactionStatus().setInTransaction(false); connection.getConnectionSession().getConnectionContext().close(); } } }
@Test void assertRollbackForDistributedTransaction() throws SQLException { ContextManager contextManager = mockContextManager(TransactionType.XA); when(ProxyContext.getInstance().getContextManager()).thenReturn(contextManager); newBackendTransactionManager(TransactionType.XA, true); backendTransactionManager.rollback(); verify(transactionStatus).setInTransaction(false); verify(distributionTransactionManager).rollback(); }
@Override public ConsumeMessageDirectlyResult consumeMessageDirectly(MessageExt msg, String brokerName) { ConsumeMessageDirectlyResult result = new ConsumeMessageDirectlyResult(); result.setOrder(true); List<MessageExt> msgs = new ArrayList<>(); msgs.add(msg); MessageQueue mq = new MessageQueue(); mq.setBrokerName(brokerName); mq.setTopic(msg.getTopic()); mq.setQueueId(msg.getQueueId()); ConsumeOrderlyContext context = new ConsumeOrderlyContext(mq); this.defaultMQPushConsumerImpl.resetRetryAndNamespace(msgs, this.consumerGroup); final long beginTime = System.currentTimeMillis(); log.info("consumeMessageDirectly receive new message: {}", msg); try { ConsumeOrderlyStatus status = this.messageListener.consumeMessage(msgs, context); if (status != null) { switch (status) { case COMMIT: result.setConsumeResult(CMResult.CR_COMMIT); break; case ROLLBACK: result.setConsumeResult(CMResult.CR_ROLLBACK); break; case SUCCESS: result.setConsumeResult(CMResult.CR_SUCCESS); break; case SUSPEND_CURRENT_QUEUE_A_MOMENT: result.setConsumeResult(CMResult.CR_LATER); break; default: break; } } else { result.setConsumeResult(CMResult.CR_RETURN_NULL); } } catch (Throwable e) { result.setConsumeResult(CMResult.CR_THROW_EXCEPTION); result.setRemark(UtilAll.exceptionSimpleDesc(e)); log.warn("consumeMessageDirectly exception: {} Group: {} Msgs: {} MQ: {}", UtilAll.exceptionSimpleDesc(e), ConsumeMessageOrderlyService.this.consumerGroup, msgs, mq, e); } result.setAutoCommit(context.isAutoCommit()); result.setSpentTimeMills(System.currentTimeMillis() - beginTime); log.info("consumeMessageDirectly Result: {}", result); return result; }
@Test public void testConsumeMessageDirectly_WithNoException() { Map<ConsumeOrderlyStatus, CMResult> map = new HashMap<>(); map.put(ConsumeOrderlyStatus.SUCCESS, CMResult.CR_SUCCESS); map.put(ConsumeOrderlyStatus.SUSPEND_CURRENT_QUEUE_A_MOMENT, CMResult.CR_LATER); map.put(ConsumeOrderlyStatus.COMMIT, CMResult.CR_COMMIT); map.put(ConsumeOrderlyStatus.ROLLBACK, CMResult.CR_ROLLBACK); map.put(null, CMResult.CR_RETURN_NULL); for (ConsumeOrderlyStatus consumeOrderlyStatus : map.keySet()) { final ConsumeOrderlyStatus status = consumeOrderlyStatus; MessageListenerOrderly listenerOrderly = new MessageListenerOrderly() { @Override public ConsumeOrderlyStatus consumeMessage(List<MessageExt> msgs, ConsumeOrderlyContext context) { return status; } }; ConsumeMessageOrderlyService consumeMessageOrderlyService = new ConsumeMessageOrderlyService(pushConsumer.getDefaultMQPushConsumerImpl(), listenerOrderly); MessageExt msg = new MessageExt(); msg.setTopic(topic); assertEquals(map.get(consumeOrderlyStatus), consumeMessageOrderlyService.consumeMessageDirectly(msg, brokerName).getConsumeResult()); } }
@Override public void execute(CommandLine commandLine, Options options, RPCHook rpcHook) throws SubCommandException { DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook); defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis())); if (commandLine.hasOption('n')) { defaultMQAdminExt.setNamesrvAddr(commandLine.getOptionValue('n').trim()); } try { defaultMQAdminExt.start(); String group = commandLine.getOptionValue('g').trim(); String topic = commandLine.getOptionValue('t').trim(); ProducerConnection pc = defaultMQAdminExt.examineProducerConnectionInfo(group, topic); int i = 1; for (Connection conn : pc.getConnectionSet()) { System.out.printf("%04d %-32s %-22s %-8s %s%n", i++, conn.getClientId(), conn.getClientAddr(), conn.getLanguage(), MQVersion.getVersionDesc(conn.getVersion()) ); } } catch (Exception e) { throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e); } finally { defaultMQAdminExt.shutdown(); } }
@Test public void testExecute() throws SubCommandException { ProducerConnectionSubCommand cmd = new ProducerConnectionSubCommand(); Options options = ServerUtil.buildCommandlineOptions(new Options()); String[] subargs = new String[] {"-g default-producer-group", "-t unit-test", String.format("-n localhost:%d", nameServerMocker.listenPort())}; final CommandLine commandLine = ServerUtil.parseCmdLine("mqadmin " + cmd.commandName(), subargs, cmd.buildCommandlineOptions(options), new DefaultParser()); cmd.execute(commandLine, options, null); }
@Override public boolean equals(@Nullable Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } CeTask ceTask = (CeTask) o; return uuid.equals(ceTask.uuid); }
@Test public void equals_and_hashCode_on_uuid() { underTest.setType("TYPE_1").setUuid("UUID_1"); CeTask task1 = underTest.build(); CeTask task1bis = underTest.build(); CeTask task2 = new CeTask.Builder().setType("TYPE_1").setUuid("UUID_2").build(); assertThat(task1.equals(task1)).isTrue(); assertThat(task1.equals(task1bis)).isTrue(); assertThat(task1.equals(task2)).isFalse(); assertThat(task1) .hasSameHashCodeAs(task1) .hasSameHashCodeAs(task1bis); }
@Override public Map<String, List<TopicPartition>> assign(Map<String, Integer> partitionsPerTopic, Map<String, Subscription> subscriptions) { Map<String, List<TopicPartition>> assignment = new HashMap<>(); List<MemberInfo> memberInfoList = new ArrayList<>(); for (Map.Entry<String, Subscription> memberSubscription : subscriptions.entrySet()) { assignment.put(memberSubscription.getKey(), new ArrayList<>()); memberInfoList.add(new MemberInfo(memberSubscription.getKey(), memberSubscription.getValue().groupInstanceId())); } CircularIterator<MemberInfo> assigner = new CircularIterator<>(Utils.sorted(memberInfoList)); for (TopicPartition partition : allPartitionsSorted(partitionsPerTopic, subscriptions)) { final String topic = partition.topic(); while (!subscriptions.get(assigner.peek().memberId).topics().contains(topic)) assigner.next(); assignment.get(assigner.next().memberId).add(partition); } return assignment; }
@Test public void testStaticMemberRoundRobinAssignmentPersistent() { /* Have 3 static members instance1, instance2, instance3 to be persistent across generations. Their assignment shall be the same. */ String consumer1 = "consumer1"; String instance1 = "instance1"; String consumer2 = "consumer2"; String instance2 = "instance2"; String consumer3 = "consumer3"; String instance3 = "instance3"; List<MemberInfo> staticMemberInfos = new ArrayList<>(); staticMemberInfos.add(new MemberInfo(consumer1, Optional.of(instance1))); staticMemberInfos.add(new MemberInfo(consumer2, Optional.of(instance2))); staticMemberInfos.add(new MemberInfo(consumer3, Optional.of(instance3))); /* Consumer 4 is a dynamic member. */ String consumer4 = "consumer4"; Map<String, Integer> partitionsPerTopic = setupPartitionsPerTopicWithTwoTopics(3, 3); Map<String, Subscription> consumers = new HashMap<>(); for (MemberInfo m : staticMemberInfos) { Subscription subscription = new Subscription(topics(topic1, topic2), null); subscription.setGroupInstanceId(m.groupInstanceId); consumers.put(m.memberId, subscription); } consumers.put(consumer4, new Subscription(topics(topic1, topic2))); Map<String, List<TopicPartition>> expectedAssignment = new HashMap<>(); expectedAssignment.put(consumer1, partitions(tp(topic1, 0), tp(topic2, 1))); expectedAssignment.put(consumer2, partitions(tp(topic1, 1), tp(topic2, 2))); expectedAssignment.put(consumer3, partitions(tp(topic1, 2))); expectedAssignment.put(consumer4, partitions(tp(topic2, 0))); Map<String, List<TopicPartition>> assignment = assignor.assign(partitionsPerTopic, consumers); assertEquals(expectedAssignment, assignment); /* Replace dynamic member 4 with a new dynamic member 5. */ consumers.remove(consumer4); String consumer5 = "consumer5"; consumers.put(consumer5, new Subscription(topics(topic1, topic2))); expectedAssignment.remove(consumer4); expectedAssignment.put(consumer5, partitions(tp(topic2, 0))); assignment = assignor.assign(partitionsPerTopic, consumers); assertEquals(expectedAssignment, assignment); }
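A worked trace clarifies the expected assignment (members are iterated in sorted order; partitions are ordered topic1-0..2 then topic2-0..2, an ordering assumed from the expectations above):

// consumer1 -> t1-0   consumer2 -> t1-1   consumer3 -> t1-2
// consumer4 -> t2-0   consumer1 -> t2-1   consumer2 -> t2-2
// Static members keep their sorted position across generations, so replacing the
// dynamic consumer4 with consumer5 only hands t2-0 to consumer5; all else is stable.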
public static QueryBuilder query(final String query) { return new QueryBuilder() { protected Query makeQueryObject(EntityManager entityManager) { return entityManager.createQuery(query); } @Override public String toString() { return "Query: " + query + getParameterDescription(); } }; }
@Test public void testQueryBuilder() { QueryBuilder q = QueryBuilder.query("select x from SendEmail x"); assertNotNull(q); assertEquals("Query: select x from SendEmail x", q.toString()); }
public static JClass resolveType(JClassContainer _package, String typeDefinition) { try { FieldDeclaration fieldDeclaration = (FieldDeclaration) JavaParser.parseBodyDeclaration(typeDefinition + " foo;"); ClassOrInterfaceType c = (ClassOrInterfaceType) ((ReferenceType) fieldDeclaration.getType()).getType(); return buildClass(_package, c, 0); } catch (ParseException e) { throw new GenerationException("Couldn't parse type: " + typeDefinition, e); } }
@Test public void testResolveTypeCanHandleExtendsWildcard() { final JCodeModel codeModel = new JCodeModel(); final JClass _class = TypeUtil.resolveType(codeModel.rootPackage(), "java.util.List<? extends java.lang.Number>"); assertThat(_class.erasure(), equalTo(codeModel.ref(List.class))); assertThat(_class.typeParams(), emptyArray()); assertThat(_class.isParameterized(), is(Boolean.TRUE)); assertThat(_class.getTypeParameters(), hasSize(1)); assertThat(_class.getTypeParameters().get(0)._extends(), is(equalTo(codeModel.ref(Number.class)))); }
@Override public void batchDeregisterService(String serviceName, String groupName, List<Instance> instances) throws NacosException { synchronized (redoService.getRegisteredInstances()) { List<Instance> retainInstance = getRetainInstance(serviceName, groupName, instances); batchRegisterService(serviceName, groupName, retainInstance); } }
@Test void testBatchDeregisterServiceWithEmptyInstances() throws NacosException { assertThrows(NacosException.class, () -> { client.batchDeregisterService(SERVICE_NAME, GROUP_NAME, Collections.emptyList()); }); }
public boolean putMessage(MessageExtBrokerInner messageInner) { PutMessageResult putMessageResult = store.putMessage(messageInner); if (putMessageResult != null && putMessageResult.getPutMessageStatus() == PutMessageStatus.PUT_OK) { return true; } else { LOGGER.error("Put message failed, topic: {}, queueId: {}, msgId: {}", messageInner.getTopic(), messageInner.getQueueId(), messageInner.getMsgId()); return false; } }
@Test public void testPutMessage() { when(messageStore.putMessage(any(MessageExtBrokerInner.class))) .thenReturn(new PutMessageResult(PutMessageStatus.PUT_OK, new AppendMessageResult(AppendMessageStatus.PUT_OK))); boolean success = transactionBridge.putMessage(createMessageBrokerInner()); assertThat(success).isTrue(); }
@Override public boolean localMember() { return localMember; }
@Test public void testConstructor_withLocalMember_isFalse() { MemberImpl member = new MemberImpl(address, MemberVersion.of("3.8.0"), false); assertBasicMemberImplFields(member); assertFalse(member.localMember()); }
public PlanNodeStatsEstimate subtractSubsetStats(PlanNodeStatsEstimate superset, PlanNodeStatsEstimate subset) { if (superset.isOutputRowCountUnknown() || subset.isOutputRowCountUnknown()) { return PlanNodeStatsEstimate.unknown(); } double supersetRowCount = superset.getOutputRowCount(); double subsetRowCount = subset.getOutputRowCount(); double outputRowCount = max(supersetRowCount - subsetRowCount, 0); /* everything will be filtered out after applying negation */ if (outputRowCount == 0) { return createZeroStats(superset); } PlanNodeStatsEstimate.Builder result = PlanNodeStatsEstimate.builder(); result.setOutputRowCount(outputRowCount); superset.getVariablesWithKnownStatistics().forEach(symbol -> { VariableStatsEstimate supersetSymbolStats = superset.getVariableStatistics(symbol); VariableStatsEstimate subsetSymbolStats = subset.getVariableStatistics(symbol); VariableStatsEstimate.Builder newSymbolStats = VariableStatsEstimate.builder(); /* for simplicity keep the average row size the same as in the input; in most cases the average row size doesn't change after applying filters */ newSymbolStats.setAverageRowSize(supersetSymbolStats.getAverageRowSize()); /* nullsCount */ double supersetNullsCount = supersetSymbolStats.getNullsFraction() * supersetRowCount; double subsetNullsCount = subsetSymbolStats.getNullsFraction() * subsetRowCount; double newNullsCount = max(supersetNullsCount - subsetNullsCount, 0); newSymbolStats.setNullsFraction(min(newNullsCount, outputRowCount) / outputRowCount); /* distinctValuesCount */ double supersetDistinctValues = supersetSymbolStats.getDistinctValuesCount(); double subsetDistinctValues = subsetSymbolStats.getDistinctValuesCount(); double newDistinctValuesCount; if (isNaN(supersetDistinctValues) || isNaN(subsetDistinctValues)) { newDistinctValuesCount = NaN; } else if (supersetDistinctValues == 0) { newDistinctValuesCount = 0; } else if (subsetDistinctValues == 0) { newDistinctValuesCount = supersetDistinctValues; } else { double supersetNonNullsCount = supersetRowCount - supersetNullsCount; double subsetNonNullsCount = subsetRowCount - subsetNullsCount; double supersetValuesPerDistinctValue = supersetNonNullsCount / supersetDistinctValues; double subsetValuesPerDistinctValue = subsetNonNullsCount / subsetDistinctValues; if (supersetValuesPerDistinctValue <= subsetValuesPerDistinctValue) { newDistinctValuesCount = max(supersetDistinctValues - subsetDistinctValues, 0); } else { newDistinctValuesCount = supersetDistinctValues; } } newSymbolStats.setDistinctValuesCount(newDistinctValuesCount); /* range */ newSymbolStats.setLowValue(supersetSymbolStats.getLowValue()); newSymbolStats.setHighValue(supersetSymbolStats.getHighValue()); result.addVariableStatistics(symbol, newSymbolStats.build()); }); return result.build(); }
@Test public void testSubtractRowCount() { PlanNodeStatsEstimate unknownStats = statistics(NaN, NaN, NaN, NaN, StatisticRange.empty()); PlanNodeStatsEstimate first = statistics(40, NaN, NaN, NaN, StatisticRange.empty()); PlanNodeStatsEstimate second = statistics(10, NaN, NaN, NaN, StatisticRange.empty()); assertEquals(calculator.subtractSubsetStats(unknownStats, unknownStats), PlanNodeStatsEstimate.unknown()); assertEquals(calculator.subtractSubsetStats(first, unknownStats), PlanNodeStatsEstimate.unknown()); assertEquals(calculator.subtractSubsetStats(unknownStats, second), PlanNodeStatsEstimate.unknown()); assertEquals(calculator.subtractSubsetStats(first, second).getOutputRowCount(), 30.0); }
@Override public int getClassId() { return PredicateDataSerializerHook.NOTEQUAL_PREDICATE; }
@Test public void getId_isConstant() { NotEqualPredicate predicate = new NotEqualPredicate("bar", "foo"); int id = predicate.getClassId(); /* make sure the ID has not been changed by accident */ assertEquals(9, id); }
@Override public void onProjectBranchesChanged(Set<Project> projects, Set<String> impactedBranches) { checkNotNull(projects, "projects can't be null"); if (projects.isEmpty()) { return; } Arrays.stream(listeners) .forEach(safelyCallListener(listener -> listener.onProjectBranchesChanged(projects, impactedBranches))); }
@Test @UseDataProvider("oneOrManyProjects") public void onProjectBranchesChanged_does_not_fail_if_there_is_no_listener(Set<Project> projects) { assertThatNoException().isThrownBy(() -> underTestNoListeners.onProjectBranchesChanged(projects, emptySet())); }
@Override public void createView(CreateViewStmt stmt) throws DdlException { String dbName = stmt.getDbName(); String viewName = stmt.getTable(); Database db = getDb(dbName); if (db == null) { ErrorReport.reportDdlException(ErrorCode.ERR_BAD_DB_ERROR, dbName); } if (getView(dbName, viewName) != null) { if (stmt.isSetIfNotExists()) { LOG.info("create view[{}] which already exists", viewName); return; } else if (stmt.isReplace()) { LOG.info("view {} already exists, need to replace it", viewName); } else { ErrorReport.reportDdlException(ErrorCode.ERR_TABLE_EXISTS_ERROR, viewName); } } ConnectorViewDefinition viewDefinition = ConnectorViewDefinition.fromCreateViewStmt(stmt); icebergCatalog.createView(viewDefinition, stmt.isReplace()); }
@Test public void testCreateView(@Mocked RESTCatalog restCatalog, @Mocked BaseView baseView, @Mocked ImmutableSQLViewRepresentation representation) throws Exception { UtFrameUtils.createMinStarRocksCluster(); AnalyzeTestUtil.init(); IcebergRESTCatalog icebergRESTCatalog = new IcebergRESTCatalog(restCatalog, new Configuration()); CachingIcebergCatalog cachingIcebergCatalog = new CachingIcebergCatalog( CATALOG_NAME, icebergRESTCatalog, DEFAULT_CATALOG_PROPERTIES, Executors.newSingleThreadExecutor()); IcebergMetadata metadata = new IcebergMetadata(CATALOG_NAME, HDFS_ENVIRONMENT, cachingIcebergCatalog, Executors.newSingleThreadExecutor(), Executors.newSingleThreadExecutor(), new IcebergCatalogProperties(DEFAULT_CONFIG)); new Expectations() { { restCatalog.loadNamespaceMetadata(Namespace.of("db")); result = ImmutableMap.of("location", "xxxxx"); minTimes = 1; restCatalog.name(); result = "rest_catalog"; minTimes = 1; } }; CreateViewStmt stmt = new CreateViewStmt(false, false, new TableName("catalog", "db", "table"), Lists.newArrayList(new ColWithComment("k1", "", NodePosition.ZERO)), "", null, NodePosition.ZERO); stmt.setColumns(Lists.newArrayList(new Column("k1", INT))); metadata.createView(stmt); new Expectations() { { representation.sql(); result = "select * from table"; minTimes = 1; baseView.sqlFor("starrocks"); result = representation; minTimes = 1; baseView.properties(); result = ImmutableMap.of("comment", "mocked"); minTimes = 1; baseView.schema(); result = new Schema(Types.NestedField.optional(1, "k1", Types.IntegerType.get())); minTimes = 1; baseView.name(); result = "view"; minTimes = 1; baseView.location(); result = "xxx"; minTimes = 1; restCatalog.loadView(TableIdentifier.of("db", "view")); result = baseView; minTimes = 1; } }; Table table = metadata.getView("db", "view"); Assert.assertEquals(ICEBERG_VIEW, table.getType()); Assert.assertEquals("xxx", table.getTableLocation()); }
@Override public BarSeries aggregate(BarSeries series, String aggregatedSeriesName) { final List<Bar> aggregatedBars = barAggregator.aggregate(series.getBarData()); return new BaseBarSeries(aggregatedSeriesName, aggregatedBars); }
@Test public void testAggregateWithNewName() { final List<Bar> bars = new LinkedList<>(); final ZonedDateTime time = ZonedDateTime.of(2019, 6, 12, 4, 1, 0, 0, ZoneId.systemDefault()); final Bar bar0 = new MockBar(time, 1d, 2d, 3d, 4d, 5d, 6d, 7, numFunction); final Bar bar1 = new MockBar(time.plusDays(1), 2d, 3d, 3d, 4d, 5d, 6d, 7, numFunction); final Bar bar2 = new MockBar(time.plusDays(2), 3d, 4d, 4d, 5d, 6d, 7d, 7, numFunction); bars.add(bar0); bars.add(bar1); bars.add(bar2); final BarSeries barSeries = new BaseBarSeries("name", bars); final BarSeries aggregated = baseBarSeriesAggregator.aggregate(barSeries, "newName"); assertEquals("newName", aggregated.getName()); assertEquals(2, aggregated.getBarCount()); assertSame(bar0, aggregated.getBar(0)); assertSame(bar2, aggregated.getBar(1)); }
protected Map<String, String[]> generateParameterMap(MultiValuedTreeMap<String, String> qs, ContainerConfig config) { Map<String, String[]> output; Map<String, List<String>> formEncodedParams = getFormUrlEncodedParametersMap(); if (qs == null) { /* Just transform the List<String> values to String[] */ output = formEncodedParams.entrySet().stream() .collect(Collectors.toMap(Map.Entry::getKey, (e) -> e.getValue().toArray(new String[0]))); } else { Map<String, List<String>> queryStringParams; if (config.isQueryStringCaseSensitive()) { queryStringParams = qs; } else { /* If it's case insensitive, we check the entire map on every parameter */ queryStringParams = qs.entrySet().stream().parallel().collect( Collectors.toMap( Map.Entry::getKey, e -> getQueryParamValuesAsList(qs, e.getKey(), false) )); } /* Merge the formEncodedParams and queryStringParams maps */ output = Stream.of(formEncodedParams, queryStringParams).flatMap(m -> m.entrySet().stream()) .collect( Collectors.toMap( Map.Entry::getKey, e -> e.getValue().toArray(new String[0]), /* If a parameter is in both maps, we merge the list of values (and ultimately transform to String[]) */ (formParam, queryParam) -> Stream.of(formParam, queryParam).flatMap(Stream::of).toArray(String[]::new) )); } return output; }
@Test void parameterMapWithMultipleValues_generateParameterMap_validQuery() { AwsProxyHttpServletRequest request = new AwsProxyHttpServletRequest(multipleParams, mockContext, null, config); Map<String, String[]> paramMap = null; try { paramMap = request.generateParameterMap(request.getAwsProxyRequest().getMultiValueQueryStringParameters(), config); } catch (Exception e) { e.printStackTrace(); fail("Could not generate parameter map"); } assertArrayEquals(new String[]{"two", "three"}, paramMap.get("one")); assertArrayEquals(new String[]{"{\"name\":\"faisal\"}"}, paramMap.get("json")); assertEquals(2, paramMap.size()); }
public static DynamicVoter parse(String input) { input = input.trim(); int atIndex = input.indexOf("@"); if (atIndex < 0) { throw new IllegalArgumentException("No @ found in dynamic voter string."); } if (atIndex == 0) { throw new IllegalArgumentException("Invalid @ at beginning of dynamic voter string."); } String idString = input.substring(0, atIndex); int nodeId; try { nodeId = Integer.parseInt(idString); } catch (NumberFormatException e) { throw new IllegalArgumentException("Failed to parse node id in dynamic voter string.", e); } if (nodeId < 0) { throw new IllegalArgumentException("Invalid negative node id " + nodeId + " in dynamic voter string."); } input = input.substring(atIndex + 1); if (input.isEmpty()) { throw new IllegalArgumentException("No hostname found after node id."); } String host; if (input.startsWith("[")) { int endBracketIndex = input.indexOf("]"); if (endBracketIndex < 0) { throw new IllegalArgumentException("Hostname began with left bracket, but no right " + "bracket was found."); } host = input.substring(1, endBracketIndex); input = input.substring(endBracketIndex + 1); } else { int endColonIndex = input.indexOf(":"); if (endColonIndex < 0) { throw new IllegalArgumentException("No colon following hostname could be found."); } host = input.substring(0, endColonIndex); input = input.substring(endColonIndex); } if (!input.startsWith(":")) { throw new IllegalArgumentException("Port section must start with a colon."); } input = input.substring(1); int endColonIndex = input.indexOf(":"); if (endColonIndex < 0) { throw new IllegalArgumentException("No colon following port could be found."); } String portString = input.substring(0, endColonIndex); int port; try { port = Integer.parseInt(portString); } catch (NumberFormatException e) { throw new IllegalArgumentException("Failed to parse port in dynamic voter string.", e); } if (port < 0 || port > 65535) { throw new IllegalArgumentException("Invalid port " + port + " in dynamic voter string."); } String directoryIdString = input.substring(endColonIndex + 1); Uuid directoryId; try { directoryId = Uuid.fromString(directoryIdString); } catch (IllegalArgumentException e) { throw new IllegalArgumentException("Failed to parse directory ID in dynamic voter string.", e); } return new DynamicVoter(directoryId, nodeId, host, port); }
@Test public void testParseDynamicVoterWithoutId2() { assertEquals("Invalid @ at beginning of dynamic voter string.", assertThrows(IllegalArgumentException.class, () -> DynamicVoter.parse("@localhost:8020:K90IZ-0DRNazJ49kCZ1EMQ")). getMessage()); }
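The accepted format is nodeId@host:port:directoryId, with IPv6 hosts in square brackets. A few illustrative calls (the directory ID below is the well-formed example used by the test):

DynamicVoter.parse("1@localhost:8020:K90IZ-0DRNazJ49kCZ1EMQ");   // ok
DynamicVoter.parse("2@[::1]:8020:K90IZ-0DRNazJ49kCZ1EMQ");       // ok: bracketed IPv6 host
DynamicVoter.parse("@localhost:8020:K90IZ-0DRNazJ49kCZ1EMQ");    // throws: invalid @ at beginning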
public <UpdateT> List<UpdateT> extractUpdates( boolean delta, CounterUpdateExtractor<UpdateT> extractors) { return extractUpdatesImpl(delta, false, extractors); }
@Test public void testExtractUpdates() { CounterSet deltaSet = new CounterSet(); CounterSet cumulativeSet = new CounterSet(); CounterUpdateExtractor<?> updateExtractor = Mockito.mock(CounterUpdateExtractor.class); Counter<Long, Long> delta1 = deltaSet.longSum(name1); Counter<Long, Long> cumulative1 = cumulativeSet.longSum(name1); /* delta counters */ delta1.addValue(10L); deltaSet.extractUpdates(true, updateExtractor); verify(updateExtractor).longSum(name1, true, 10L); delta1.addValue(5L); deltaSet.extractUpdates(true, updateExtractor); verify(updateExtractor).longSum(name1, true, 5L); /* no updates to delta counters */ deltaSet.extractUpdates(true, updateExtractor); verify(updateExtractor).longSum(name1, true, 0L); /* cumulative counters */ cumulative1.addValue(10L); cumulativeSet.extractUpdates(false, updateExtractor); verify(updateExtractor).longSum(name1, false, 10L); cumulative1.addValue(5L); cumulativeSet.extractUpdates(false, updateExtractor); verify(updateExtractor).longSum(name1, false, 15L); /* no updates to cumulative counters */ cumulativeSet.extractUpdates(false, updateExtractor); verify(updateExtractor, times(2)).longSum(name1, false, 15L); /* test extracting only modified deltas */ Counter<Integer, Integer> delta2 = deltaSet.intSum(name2); delta1.addValue(100L); delta2.addValue(200); deltaSet.extractModifiedDeltaUpdates(updateExtractor); verify(updateExtractor).longSum(name1, true, 100L); verify(updateExtractor).intSum(name2, true, 200); delta1.addValue(1L); deltaSet.extractModifiedDeltaUpdates(updateExtractor); verify(updateExtractor).longSum(name1, true, 1L); delta2.addValue(5); deltaSet.extractModifiedDeltaUpdates(updateExtractor); verify(updateExtractor).intSum(name2, true, 5); verifyNoMoreInteractions(updateExtractor); }
protected void validatePartitionedTopicName(String tenant, String namespace, String encodedTopic) { /* first, it has to be a valid topic name */ validateTopicName(tenant, namespace, encodedTopic); /* second, "-partition-" is not allowed */ if (encodedTopic.contains(TopicName.PARTITIONED_TOPIC_SUFFIX)) { throw new RestException(Status.PRECONDITION_FAILED, "Partitioned Topic Name should not contain '-partition-'"); } }
@Test public void testValidatePartitionedTopicNameInvalid() { String tenant = "test-tenant"; String namespace = "test-namespace"; String topic = Codec.encode("test-topic-partition-0"); AdminResource resource = mockResource(); try { resource.validatePartitionedTopicName(tenant, namespace, topic); fail("Should fail validation on invalid partitioned topic"); } catch (RestException re) { assertEquals(Status.PRECONDITION_FAILED.getStatusCode(), re.getResponse().getStatus()); } }
@Override public String readSource() throws Exception { CompletableFuture<GetResponse> responseFuture = client.getKVClient().get(ByteSequence.from(key, charset)); List<KeyValue> kvs = responseFuture.get().getKvs(); return kvs.size() == 0 ? null : kvs.get(0).getValue().toString(charset); }
@Test public void testReadSource() throws Exception { EtcdDataSource dataSource = new EtcdDataSource("foo", value -> value); KV kvClient = Client.builder() .endpoints(endPoints) .build().getKVClient(); kvClient.put(ByteSequence.from("foo".getBytes()), ByteSequence.from("test".getBytes())); Assert.assertEquals("test", dataSource.readSource()); kvClient.put(ByteSequence.from("foo".getBytes()), ByteSequence.from("test2".getBytes())); Assert.assertEquals("test2", dataSource.getProperty()); }
@Override public boolean isInputConsumable( SchedulingExecutionVertex executionVertex, Set<ExecutionVertexID> verticesToSchedule, Map<ConsumedPartitionGroup, Boolean> consumableStatusCache) { for (ConsumedPartitionGroup consumedPartitionGroup : executionVertex.getConsumedPartitionGroups()) { if (!consumableStatusCache.computeIfAbsent( consumedPartitionGroup, (group) -> isConsumedPartitionGroupConsumable(group, verticesToSchedule))) { return false; } } return true; }
@Test void testUpstreamNotScheduledHybridInput() { final TestingSchedulingTopology topology = new TestingSchedulingTopology(); final List<TestingSchedulingExecutionVertex> producers = topology.addExecutionVertices().withParallelism(2).finish(); final List<TestingSchedulingExecutionVertex> consumer = topology.addExecutionVertices().withParallelism(2).finish(); topology.connectAllToAll(producers, consumer) .withResultPartitionState(ResultPartitionState.CREATED) .withResultPartitionType(ResultPartitionType.HYBRID_FULL) .finish(); DefaultInputConsumableDecider inputConsumableDecider = createDefaultInputConsumableDecider(Collections.emptySet(), topology); assertThat( inputConsumableDecider.isInputConsumable( consumer.get(0), Collections.emptySet(), new HashMap<>())) .isFalse(); assertThat( inputConsumableDecider.isInputConsumable( consumer.get(1), Collections.emptySet(), new HashMap<>())) .isFalse(); }
@Deprecated @Restricted(DoNotUse.class) public static String resolve(ConfigurationContext context, String toInterpolate) { return context.getSecretSourceResolver().resolve(toInterpolate); }
@Test public void resolve_empty() { assertThat(resolve(""), equalTo("")); }
@Override public ManagedChannel shutdown() { ArrayList<ManagedChannel> channels = new ArrayList<>(); synchronized (this) { shutdownStarted = true; channels.addAll(usedChannels); channels.addAll(channelCache); } for (ManagedChannel channel : channels) { channel.shutdown(); } return this; }
@Test public void testShutdown() throws Exception { ManagedChannel mockChannel = mock(ManagedChannel.class); when(channelSupplier.get()).thenReturn(mockChannel); ClientCall<Object, Object> mockCall1 = mock(ClientCall.class); ClientCall<Object, Object> mockCall2 = mock(ClientCall.class); when(mockChannel.newCall(any(), any())).thenReturn(mockCall1, mockCall2); IsolationChannel isolationChannel = IsolationChannel.create(channelSupplier); ClientCall<Object, Object> call1 = isolationChannel.newCall(methodDescriptor, CallOptions.DEFAULT); call1.start(new NoopClientCall.NoopClientCallListener<>(), new Metadata()); ArgumentCaptor<Listener<Object>> captor1 = ArgumentCaptor.forClass(ClientCall.Listener.class); verify(mockCall1).start(captor1.capture(), any()); when(mockChannel.shutdown()).thenReturn(mockChannel); when(mockChannel.isShutdown()).thenReturn(false, true); isolationChannel.shutdown(); assertFalse(isolationChannel.isShutdown()); ClientCall<Object, Object> call2 = isolationChannel.newCall(methodDescriptor, CallOptions.DEFAULT); call2.start(new NoopClientCall.NoopClientCallListener<>(), new Metadata()); ArgumentCaptor<Listener<Object>> captor2 = ArgumentCaptor.forClass(ClientCall.Listener.class); verify(mockCall2).start(captor2.capture(), any()); captor1.getValue().onClose(Status.CANCELLED, new Metadata()); captor2.getValue().onClose(Status.CANCELLED, new Metadata()); assertTrue(isolationChannel.isShutdown()); verify(channelSupplier, times(1)).get(); verify(mockChannel, times(2)).newCall(any(), any()); verify(mockChannel, times(1)).shutdown(); verify(mockChannel, times(2)).isShutdown(); }
protected List<MavenArtifact> processResponse(Dependency dependency, HttpURLConnection conn) throws IOException { final List<MavenArtifact> result = new ArrayList<>(); try (InputStreamReader streamReader = new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8); JsonParser parser = objectReader.getFactory().createParser(streamReader)) { if (init(parser) && parser.nextToken() == com.fasterxml.jackson.core.JsonToken.START_OBJECT) { /* at least one result */ do { final FileImpl file = objectReader.readValue(parser); checkHashes(dependency, file.getChecksums()); final Matcher pathMatcher = PATH_PATTERN.matcher(file.getPath()); if (!pathMatcher.matches()) { throw new IllegalStateException("Cannot extract the Maven information from the path " + "retrieved in Artifactory " + file.getPath()); } final String groupId = pathMatcher.group("groupId").replace('/', '.'); final String artifactId = pathMatcher.group("artifactId"); final String version = pathMatcher.group("version"); result.add(new MavenArtifact(groupId, artifactId, version, file.getDownloadUri(), MavenArtifact.derivePomUrl(artifactId, version, file.getDownloadUri()))); } while (parser.nextToken() == com.fasterxml.jackson.core.JsonToken.START_OBJECT); } else { throw new FileNotFoundException("Artifact " + dependency + " not found in Artifactory"); } } return result; }
@Test public void shouldProcessCorrectlyArtifactoryAnswer() throws IOException { /* Given */ Dependency dependency = new Dependency(); dependency.setSha1sum("c5b4c491aecb72e7c32a78da0b5c6b9cda8dee0f"); dependency.setSha256sum("512b4bf6927f4864acc419b8c5109c23361c30ed1f5798170248d33040de068e"); dependency.setMd5sum("2d1dd0fc21ee96bccfab4353d5379649"); final HttpURLConnection urlConnection = mock(HttpURLConnection.class); final byte[] payload = payloadWithSha256().getBytes(StandardCharsets.UTF_8); when(urlConnection.getInputStream()).thenReturn(new ByteArrayInputStream(payload)); /* When */ final List<MavenArtifact> mavenArtifacts = searcher.processResponse(dependency, urlConnection); /* Then */ assertEquals(1, mavenArtifacts.size()); final MavenArtifact artifact = mavenArtifacts.get(0); assertEquals("com.google.code.gson", artifact.getGroupId()); assertEquals("gson", artifact.getArtifactId()); assertEquals("2.8.5", artifact.getVersion()); assertEquals("https://artifactory.techno.ingenico.com/artifactory/repo1-cache/com/google/code/gson/gson/2.8.5/gson-2.8.5-sources.jar", artifact.getArtifactUrl()); assertEquals("https://artifactory.techno.ingenico.com/artifactory/repo1-cache/com/google/code/gson/gson/2.8.5/gson-2.8.5.pom", artifact.getPomUrl()); }
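For intuition, PATH_PATTERN (its exact regex is not shown in the excerpt) maps a repository path onto Maven coordinates; its assumed effect on the payload above:

// "com/google/code/gson/gson/2.8.5/gson-2.8.5-sources.jar"
//   groupId    -> "com/google/code/gson", then slashes become dots: "com.google.code.gson"
//   artifactId -> "gson"
//   version    -> "2.8.5"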
@Override public void put(final Bytes key, final byte[] valueAndTimestamp) { wrapped().put(key, valueAndTimestamp); log(key, rawValue(valueAndTimestamp), valueAndTimestamp == null ? context.timestamp() : timestamp(valueAndTimestamp)); }
@Test public void shouldPropagateDelete() { store.put(hi, rawThere); store.delete(hi); assertThat(root.approximateNumEntries(), equalTo(0L)); assertThat(root.get(hi), nullValue()); }
@Override public void handleReply(Reply reply) { if (failure.get() != null) { return; } if (containsFatalErrors(reply.getErrors())) { failure.compareAndSet(null, new IOException(formatErrors(reply))); return; } long now = System.currentTimeMillis(); long latency = now - (long) reply.getContext(); numReplies.incrementAndGet(); accumulateReplies(now, latency); }
@Test public void requireThatXML2JsonFeederWorks() throws Throwable { ByteArrayOutputStream dump = new ByteArrayOutputStream(); assertFeed(new FeederParams().setDumpStream(dump), "<vespafeed>" + " <document documenttype='simple' documentid='id:simple:simple::0'>" + " <my_str>foo</my_str>" + " </document>" + " <update documenttype='simple' documentid='id:simple:simple::1'>" + " <assign field='my_str'>bar</assign>" + " </update>" + " <remove documenttype='simple' documentid='id:simple:simple::2'/>" + "</vespafeed>", new MessageHandler() { @Override public void handleMessage(Message msg) { Reply reply = ((DocumentMessage)msg).createReply(); reply.swapState(msg); reply.popHandler().handleReply(reply); } }, "", "(.+\n)+" + "\\s*\\d+,\\s*3,.+\n"); assertEquals(169, dump.size()); assertEquals(""" [ {"id":"id:simple:simple::0","fields":{"my_str":"foo"}}, {"update":"id:simple:simple::1","fields":{"my_str":{"assign":"bar"}}}, {"remove":"id:simple:simple::2"} ]""", dump.toString()); }
@Override public NacosGrpcProtocolNegotiator build() { Properties properties = EnvUtil.getProperties(); RpcServerTlsConfig config = RpcServerTlsConfigFactory.getInstance().createClusterConfig(properties); if (config.getEnableTls()) { SslContext sslContext = DefaultTlsContextBuilder.getSslContext(config); return new OptionalTlsProtocolNegotiator(sslContext, config); } return null; }
@Test void testBuildTlsEnabled() { Properties properties = new Properties(); properties.setProperty(RpcConstants.NACOS_PEER_RPC + ".enableTls", "true"); properties.setProperty(RpcConstants.NACOS_PEER_RPC + ".compatibility", "false"); properties.setProperty(RpcConstants.NACOS_PEER_RPC + ".ciphers", "ECDHE-RSA-AES128-GCM-SHA256,ECDHE-RSA-AES256-GCM-SHA384"); properties.setProperty(RpcConstants.NACOS_PEER_RPC + ".protocols", "TLSv1.2,TLSv1.3"); properties.setProperty(RpcConstants.NACOS_PEER_RPC + ".certPrivateKey", "test-server-key.pem"); properties.setProperty(RpcConstants.NACOS_PEER_RPC + ".certChainFile", "test-server-cert.pem"); properties.setProperty(RpcConstants.NACOS_PEER_RPC + ".trustCollectionCertFile", "test-ca-cert.pem"); PropertiesPropertySource propertySource = new PropertiesPropertySource("myPropertySource", properties); MutablePropertySources propertySources = environment.getPropertySources(); propertySources.addLast(propertySource); NacosGrpcProtocolNegotiator negotiator = builder.build(); assertNotNull(negotiator); }
public Tuple2<Long, Double> increase(String name, ImmutableMap<String, String> labels, Double value, long windowSize, long now) { ID id = new ID(name, labels); Queue<Tuple2<Long, Double>> window = windows.computeIfAbsent(id, unused -> new PriorityQueue<>()); synchronized (window) { window.offer(Tuple.of(now, value)); long waterLevel = now - windowSize; Tuple2<Long, Double> peek = window.peek(); if (peek._1 > waterLevel) { return peek; } Tuple2<Long, Double> result = peek; while (peek._1 < waterLevel) { result = window.poll(); peek = window.element(); } /* Choose the slot closest to the expected timestamp */ if (waterLevel - result._1 <= peek._1 - waterLevel) { return result; } return peek; } }
@Test public void testPT35S() { double[] actuals = parameters().stream().mapToDouble(e -> { Tuple2<Long, Double> increase = CounterWindow.INSTANCE.increase( "test", ImmutableMap.<String, String>builder().build(), e._2, Duration.parse("PT35S").getSeconds() * 1000, e._1 ); return e._2 - increase._2; }).toArray(); Assertions.assertArrayEquals(new double[] {0, 1d, 2d, 2d, 2d, 0d, 3d, 3d}, actuals, 0.d); }
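A worked trace of the window arithmetic under assumed inputs (windowSize = 35,000 ms; the test's actual samples come from parameters(), which is not shown):

// offer (t=0s, v=1), (t=15s, v=2), (t=30s, v=3); at now = 30s the water level is -5s,
// so the oldest sample (t=0s, v=1) is still inside the window and increase returns it;
// the test then computes the delta as latest - returned = 3 - 1 = 2.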
@Override public NativeEntity<Output> createNativeEntity(Entity entity, Map<String, ValueReference> parameters, Map<EntityDescriptor, Object> nativeEntities, String username) { if (entity instanceof EntityV1) { return decode((EntityV1) entity, parameters, username); } else { throw new IllegalArgumentException("Unsupported entity version: " + entity.getClass()); } }
@Test public void createNativeEntity() { final Entity entity = EntityV1.builder() .id(ModelId.of("1")) .type(ModelTypes.OUTPUT_V1) .data(objectMapper.convertValue(OutputEntity.create( ValueReference.of("STDOUT"), ValueReference.of("org.graylog2.outputs.LoggingOutput"), ReferenceMapUtils.toReferenceMap(ImmutableMap.of("prefix", "Writing message: ")) ), JsonNode.class)) .build(); final NativeEntity<Output> nativeEntity = facade.createNativeEntity(entity, Collections.emptyMap(), Collections.emptyMap(), "username"); assertThat(nativeEntity.descriptor().type()).isEqualTo(ModelTypes.OUTPUT_V1); assertThat(nativeEntity.entity().getTitle()).isEqualTo("STDOUT"); assertThat(nativeEntity.entity().getType()).isEqualTo("org.graylog2.outputs.LoggingOutput"); assertThat(nativeEntity.entity().getCreatorUserId()).isEqualTo("username"); assertThat(nativeEntity.entity().getConfiguration()).containsEntry("prefix", "Writing message: "); }
@Override public Serializable read(final MySQLBinlogColumnDef columnDef, final MySQLPacketPayload payload) { long datetime = readDatetimeV2FromPayload(payload); return 0L == datetime ? MySQLTimeValueUtils.DATETIME_OF_ZERO : readDatetime(columnDef, datetime, payload); }
@Test void assertReadWithoutFraction5() { columnDef.setColumnMeta(5); when(payload.readInt1()).thenReturn(0xfe, 0xf3, 0xff, 0x7e, 0xfb); when(payload.getByteBuf()).thenReturn(byteBuf); when(byteBuf.readUnsignedMedium()).thenReturn(999990); LocalDateTime expected = LocalDateTime.of(9999, 12, 31, 23, 59, 59, 999990000); assertThat(new MySQLDatetime2BinlogProtocolValue().read(columnDef, payload), is(Timestamp.valueOf(expected))); }
public PathPrefixAuth() { }
@Test public void testPathPrefixAuth() { String s = "[{\"clientId\":\"my-client\",\"clientSecret\":\"my-secret\",\"tokenUrl\":\"www.example.com/token\"}]"; List<PathPrefixAuth> pathPrefixAuths = null; try { pathPrefixAuths = Config.getInstance().getMapper().readValue(s, new TypeReference<>() {}); } catch (Exception e) { e.printStackTrace(); } Assert.assertNotNull(pathPrefixAuths); Assert.assertEquals(1, pathPrefixAuths.size()); }
public static boolean in(Polygon p, EdgeIteratorState edge) { BBox edgeBBox = GHUtility.createBBox(edge); BBox polyBBOX = p.getBounds(); if (!polyBBOX.intersects(edgeBBox)) return false; if (p.isRectangle() && polyBBOX.contains(edgeBBox)) return true; return p.intersects(edge.fetchWayGeometry(FetchMode.ALL).makeImmutable()); /* TODO PERF: cache the bbox and edge wayGeometry for multiple areas */ }
@Test public void testInRectangle() { Polygon square = new Polygon(new double[]{0, 0, 20, 20}, new double[]{0, 20, 20, 0}); assertTrue(square.isRectangle()); BaseGraph g = new BaseGraph.Builder(1).create(); /* (1,1) (2,2) (3,3): polygon fully contains the edge and its BBox */ g.getNodeAccess().setNode(0, 1, 1); g.getNodeAccess().setNode(1, 3, 3); EdgeIteratorState edge = g.edge(0, 1).setWayGeometry(Helper.createPointList(2, 2)); assertTrue(CustomWeightingHelper.in(square, edge)); /* (0,0) (20,0) (20,20): polygon contains the edge; BBoxes overlap */ g.getNodeAccess().setNode(2, 0, 0); g.getNodeAccess().setNode(3, 20, 20); edge = g.edge(2, 3).setWayGeometry(Helper.createPointList(20, 0)); assertTrue(CustomWeightingHelper.in(square, edge)); /* (0,30) (10,40) (20,50): edge is outside the polygon; BBoxes are not intersecting */ g.getNodeAccess().setNode(4, 0, 30); g.getNodeAccess().setNode(5, 20, 50); edge = g.edge(4, 5).setWayGeometry(Helper.createPointList(10, 40)); assertFalse(CustomWeightingHelper.in(square, edge)); /* (0,30) (30,30) (30,0): edge is outside the polygon; BBoxes are intersecting */ g.getNodeAccess().setNode(6, 0, 30); g.getNodeAccess().setNode(7, 30, 0); edge = g.edge(6, 7).setWayGeometry(Helper.createPointList(30, 30)); assertFalse(CustomWeightingHelper.in(square, edge)); }
public static String decode(byte[] data) { return data == null ? null : new String(data, RpcConstants.DEFAULT_CHARSET); }
@Test public void decode() { Assert.assertNull(StringSerializer.decode(null)); Assert.assertEquals("", StringSerializer.decode(new byte[0])); Assert.assertEquals("11", StringSerializer.decode(StringSerializer.encode("11"))); }
public static String wrap(String input, Formatter formatter) throws FormatterException { return StringWrapper.wrap(Formatter.MAX_LINE_LENGTH, input, formatter); }
@Test public void testAwkwardLineEndWrapping() throws Exception { String input = lines( "class T {", /* This is a wide line, but has to be split in code because of the 100-char limit. */ " String s = someMethodWithQuiteALongNameThatWillGetUsUpCloseToTheColumnLimit() " + "+ \"foo bar foo bar foo bar\";", "", " String someMethodWithQuiteALongNameThatWillGetUsUpCloseToTheColumnLimit() {", " return null;", " }", "}"); String output = lines( "class T {", " String s =", " someMethodWithQuiteALongNameThatWillGetUsUpCloseToTheColumnLimit()", " + \"foo bar foo bar foo bar\";", "", " String someMethodWithQuiteALongNameThatWillGetUsUpCloseToTheColumnLimit() {", " return null;", " }", "}"); assertThat(StringWrapper.wrap(100, input, new Formatter())).isEqualTo(output); }
@DeleteMapping(params = "delType=ids") @Secured(action = ActionTypes.WRITE, signType = SignType.CONFIG) public RestResult<Boolean> deleteConfigs(HttpServletRequest request, @RequestParam(value = "ids") List<Long> ids) { String clientIp = RequestUtil.getRemoteIp(request); String srcUser = RequestUtil.getSrcUserName(request); final Timestamp time = TimeUtils.getCurrentTime(); List<ConfigInfo> configInfoList = configInfoPersistService.removeConfigInfoByIds(ids, clientIp, srcUser); if (CollectionUtils.isEmpty(configInfoList)) { return RestResultUtils.success(true); } for (ConfigInfo configInfo : configInfoList) { ConfigChangePublisher.notifyConfigChange( new ConfigDataChangeEvent(false, configInfo.getDataId(), configInfo.getGroup(), configInfo.getTenant(), time.getTime())); ConfigTraceService.logPersistenceEvent(configInfo.getDataId(), configInfo.getGroup(), configInfo.getTenant(), null, time.getTime(), clientIp, ConfigTraceService.PERSISTENCE_EVENT, ConfigTraceService.PERSISTENCE_TYPE_REMOVE, null); } return RestResultUtils.success(true); }
@Test void testDeleteConfigs() throws Exception { List<ConfigInfo> resultInfos = new ArrayList<>(); String dataId = "dataId1123"; String group = "group34567"; String tenant = "tenant45678"; resultInfos.add(new ConfigInfo(dataId, group, tenant)); Mockito.when(configInfoPersistService.removeConfigInfoByIds(eq(Arrays.asList(1L, 2L)), anyString(), eq(null))) .thenReturn(resultInfos); AtomicReference<ConfigDataChangeEvent> reference = new AtomicReference<>(); NotifyCenter.registerSubscriber(new Subscriber() { @Override public void onEvent(Event event) { ConfigDataChangeEvent event1 = (ConfigDataChangeEvent) event; if (event1.dataId.equals(dataId)) { reference.set((ConfigDataChangeEvent) event); } } @Override public Class<? extends Event> subscribeType() { return ConfigDataChangeEvent.class; } }); MockHttpServletRequestBuilder builder = MockMvcRequestBuilders.delete(Constants.CONFIG_CONTROLLER_PATH).param("delType", "ids") .param("ids", "1,2"); String actualValue = mockmvc.perform(builder).andReturn().getResponse().getContentAsString(); String code = JacksonUtils.toObj(actualValue).get("code").toString(); String data = JacksonUtils.toObj(actualValue).get("data").toString(); assertEquals("200", code); assertEquals("true", data); Thread.sleep(200L); //expect assertTrue(reference.get() != null); }
@Override public List<OptExpression> transform(OptExpression input, OptimizerContext context) { LogicalOlapScanOperator logicalOlapScanOperator = (LogicalOlapScanOperator) input.getOp(); LogicalOlapScanOperator prunedOlapScanOperator = null; if (logicalOlapScanOperator.getSelectedPartitionId() == null) { prunedOlapScanOperator = OptOlapPartitionPruner.prunePartitions(logicalOlapScanOperator); } else { // do merge pruned partitions with new pruned partitions prunedOlapScanOperator = OptOlapPartitionPruner.mergePartitionPrune(logicalOlapScanOperator); } Utils.setOpAppliedRule(prunedOlapScanOperator, Operator.OP_PARTITION_PRUNE_BIT); return Lists.newArrayList(OptExpression.create(prunedOlapScanOperator, input.getInputs())); }
@Test public void transform2(@Mocked OlapTable olapTable, @Mocked RangePartitionInfo partitionInfo) { FeConstants.runningUnitTest = true; Partition part1 = new Partition(1, "p1", null, null); Partition part2 = new Partition(2, "p2", null, null); Partition part3 = new Partition(3, "p3", null, null); Partition part4 = new Partition(4, "p4", null, null); Partition part5 = new Partition(5, "p5", null, null); List<Column> columns = Lists.newArrayList( new Column("dealDate", Type.DATE, false), new Column("main_brand_id", Type.INT, false) ); Map<Long, Range<PartitionKey>> keyRange = Maps.newHashMap(); PartitionKey p1 = new PartitionKey(); p1.pushColumn(new DateLiteral(2019, 11, 1), PrimitiveType.DATE); p1.pushColumn(new IntLiteral(100), PrimitiveType.INT); PartitionKey p2 = new PartitionKey(); p2.pushColumn(new DateLiteral(2020, 2, 1), PrimitiveType.DATE); p2.pushColumn(new IntLiteral(200), PrimitiveType.INT); PartitionKey p3 = new PartitionKey(); p3.pushColumn(new DateLiteral(2020, 5, 1), PrimitiveType.DATE); p3.pushColumn(new IntLiteral(300), PrimitiveType.INT); PartitionKey p4 = new PartitionKey(); p4.pushColumn(new DateLiteral(2020, 8, 1), PrimitiveType.DATE); p4.pushColumn(new IntLiteral(400), PrimitiveType.INT); PartitionKey p5 = new PartitionKey(); p5.pushColumn(new DateLiteral(2020, 11, 1), PrimitiveType.DATE); p5.pushColumn(new IntLiteral(500), PrimitiveType.INT); PartitionKey p6 = new PartitionKey(); p6.pushColumn(new DateLiteral(2021, 2, 1), PrimitiveType.DATE); p6.pushColumn(new IntLiteral(600), PrimitiveType.INT); keyRange.put(1L, Range.closed(p1, p2)); keyRange.put(2L, Range.closed(p2, p3)); keyRange.put(3L, Range.closed(p3, p4)); keyRange.put(4L, Range.closed(p4, p5)); keyRange.put(5L, Range.closed(p5, p6)); ColumnRefFactory columnRefFactory = new ColumnRefFactory(); ColumnRefOperator column1 = columnRefFactory.create("dealDate", ScalarType.DATE, false); ColumnRefOperator column2 = columnRefFactory.create("main_brand_id", ScalarType.INT, false); Map<ColumnRefOperator, Column> scanColumnMap = Maps.newHashMap(); scanColumnMap.put(column1, new Column("dealDate", Type.DATE, false)); scanColumnMap.put(column2, new Column("main_brand_id", Type.INT, false)); Map<Column, ColumnRefOperator> scanMetaColMap = Maps.newHashMap(); scanMetaColMap.put(new Column("dealDate", Type.DATE, false), column1); scanMetaColMap.put(new Column("main_brand_id", Type.INT, false), column2); BinaryPredicateOperator binaryPredicateOperator1 = new BinaryPredicateOperator(BinaryType.GE, column1, ConstantOperator.createDate(LocalDateTime.of(2020, 8, 1, 0, 0, 0))); BinaryPredicateOperator binaryPredicateOperator2 = new BinaryPredicateOperator(BinaryType.LE, column1, ConstantOperator.createDate(LocalDateTime.of(2020, 12, 1, 0, 0, 0))); BinaryPredicateOperator binaryPredicateOperator3 = new BinaryPredicateOperator(BinaryType.GE, column2, ConstantOperator.createInt(150)); BinaryPredicateOperator binaryPredicateOperator4 = new BinaryPredicateOperator(BinaryType.LE, column2, ConstantOperator.createInt(150)); ScalarOperator predicate = Utils.compoundAnd(binaryPredicateOperator1, binaryPredicateOperator2, binaryPredicateOperator3, binaryPredicateOperator4); LogicalOlapScanOperator operator = new LogicalOlapScanOperator(olapTable, scanColumnMap, scanMetaColMap, null, -1, predicate); new Expectations() { { olapTable.getPartitionInfo(); result = partitionInfo; partitionInfo.isRangePartition(); result = true; partitionInfo.getIdToRange(false); result = keyRange; partitionInfo.getPartitionColumns((Map<ColumnId, Column>) any); result = columns; olapTable.getPartitions(); result = Lists.newArrayList(part1, part2, part3, part4, part5); minTimes = 0; olapTable.getPartition(1); result = part1; minTimes = 0; olapTable.getPartition(2); result = part2; minTimes = 0; olapTable.getPartition(3); result = part3; minTimes = 0; olapTable.getPartition(4); result = part4; minTimes = 0; olapTable.getPartition(5); result = part5; minTimes = 0; } }; PartitionPruneRule rule = new PartitionPruneRule(); assertNull(operator.getSelectedPartitionId()); OptExpression optExpression = rule.transform(new OptExpression(operator), new OptimizerContext(new Memo(), columnRefFactory)).get(0); assertEquals(3, ((LogicalOlapScanOperator) optExpression.getOp()).getSelectedPartitionId().size()); }
public Fetch<K, V> collectFetch(final FetchBuffer fetchBuffer) { final Fetch<K, V> fetch = Fetch.empty(); final Queue<CompletedFetch> pausedCompletedFetches = new ArrayDeque<>(); int recordsRemaining = fetchConfig.maxPollRecords; try { while (recordsRemaining > 0) { final CompletedFetch nextInLineFetch = fetchBuffer.nextInLineFetch(); if (nextInLineFetch == null || nextInLineFetch.isConsumed()) { final CompletedFetch completedFetch = fetchBuffer.peek(); if (completedFetch == null) break; if (!completedFetch.isInitialized()) { try { fetchBuffer.setNextInLineFetch(initialize(completedFetch)); } catch (Exception e) { // Remove a completedFetch upon a parse exception if (1) it contains no records, and // (2) there are no fetched records with actual content preceding this exception. // The first condition ensures that the completedFetches queue is not stuck with the same completedFetch // in cases such as the TopicAuthorizationException, and the second condition ensures that there is no // potential data loss due to an exception in a following record. if (fetch.isEmpty() && FetchResponse.recordsOrFail(completedFetch.partitionData).sizeInBytes() == 0) fetchBuffer.poll(); throw e; } } else { fetchBuffer.setNextInLineFetch(completedFetch); } fetchBuffer.poll(); } else if (subscriptions.isPaused(nextInLineFetch.partition)) { // when the partition is paused we add the records back to the completedFetches queue instead of draining // them so that they can be returned on a subsequent poll if the partition is resumed at that time log.debug("Skipping fetching records for assigned partition {} because it is paused", nextInLineFetch.partition); pausedCompletedFetches.add(nextInLineFetch); fetchBuffer.setNextInLineFetch(null); } else { final Fetch<K, V> nextFetch = fetchRecords(nextInLineFetch, recordsRemaining); recordsRemaining -= nextFetch.numRecords(); fetch.add(nextFetch); } } } catch (KafkaException e) { if (fetch.isEmpty()) throw e; } finally { // add any polled completed fetches for paused partitions back to the completed fetches queue to be // re-evaluated in the next poll fetchBuffer.addAll(pausedCompletedFetches); } return fetch; }
@Test public void testNoResultsIfInitializing() { buildDependencies(); // Intentionally call assign (vs. assignAndSeek) so that we don't set the position. The SubscriptionState // will consider the partition as in the SubscriptionState.FetchStates.INITIALIZED state. assign(topicAPartition0); // The position should thus be null and considered un-fetchable and invalid. assertNull(subscriptions.position(topicAPartition0)); assertFalse(subscriptions.isFetchable(topicAPartition0)); assertFalse(subscriptions.hasValidPosition(topicAPartition0)); // Add some valid CompletedFetch records to the FetchBuffer queue and collect them into the Fetch. CompletedFetch completedFetch = completedFetchBuilder.build(); fetchBuffer.add(completedFetch); Fetch<String, String> fetch = fetchCollector.collectFetch(fetchBuffer); // Verify that no records are fetched for the partition as it did not have a valid position set. assertEquals(0, fetch.numRecords()); }
@Override protected @UnknownKeyFor @NonNull @Initialized SchemaTransform from( JdbcWriteSchemaTransformConfiguration configuration) { configuration.validate(); return new JdbcWriteSchemaTransform(configuration); }
@Test public void testWriteToTableWithJdbcTypeSpecified() throws SQLException { JdbcWriteSchemaTransformProvider provider = null; for (SchemaTransformProvider p : ServiceLoader.load(SchemaTransformProvider.class)) { if (p instanceof JdbcWriteSchemaTransformProvider) { provider = (JdbcWriteSchemaTransformProvider) p; break; } } assertNotNull(provider); Schema schema = Schema.of( Schema.Field.of("id", Schema.FieldType.INT64), Schema.Field.of("name", Schema.FieldType.STRING)); List<Row> rows = ImmutableList.of( Row.withSchema(schema).attachValues(1L, "name1"), Row.withSchema(schema).attachValues(2L, "name2")); PCollectionRowTuple.of("input", pipeline.apply(Create.of(rows).withRowSchema(schema))) .apply( provider.from( JdbcWriteSchemaTransformProvider.JdbcWriteSchemaTransformConfiguration.builder() .setJdbcUrl(DATA_SOURCE_CONFIGURATION.getUrl().get()) .setJdbcType("derby") .setLocation(writeTableName) .build())); pipeline.run(); DatabaseTestHelper.assertRowCount(DATA_SOURCE, writeTableName, 2); }
@Udf(schema = "ARRAY<STRUCT<K STRING, V INT>>") public List<Struct> entriesInt( @UdfParameter(description = "The map to create entries from") final Map<String, Integer> map, @UdfParameter(description = "If true then the resulting entries are sorted by key") final boolean sorted ) { return entries(map, INT_STRUCT_SCHEMA, sorted); }
@Test public void shouldReturnNullListForNullMapInt() { assertNull(entriesUdf.entriesInt(null, false)); }
@Override public Optional<String> getReturnTo(HttpRequest request) { return getParameter(request, RETURN_TO_PARAMETER) .flatMap(OAuth2AuthenticationParametersImpl::sanitizeRedirectUrl); }
@Test public void get_return_to_is_empty_when_no_value() { when(request.getCookies()).thenReturn(new Cookie[]{wrapCookie(AUTHENTICATION_COOKIE_NAME, "{}")}); Optional<String> redirection = underTest.getReturnTo(request); assertThat(redirection).isEmpty(); }
@Override public List<Integer> applyTransforms(List<Integer> originalGlyphIds) { List<Integer> intermediateGlyphsFromGsub = originalGlyphIds; for (String feature : FEATURES_IN_ORDER) { if (!gsubData.isFeatureSupported(feature)) { LOG.debug("the feature {} was not found", feature); continue; } LOG.debug("applying the feature {}", feature); ScriptFeature scriptFeature = gsubData.getFeature(feature); intermediateGlyphsFromGsub = applyGsubFeature(scriptFeature, intermediateGlyphsFromGsub); } return Collections.unmodifiableList(repositionGlyphs(intermediateGlyphsFromGsub)); }
@Test void testApplyTransforms_simple_hosshoi_kar() { // given List<Integer> glyphsAfterGsub = Arrays.asList(56, 102, 91); // when List<Integer> result = gsubWorkerForBengali.applyTransforms(getGlyphIds("আমি")); // then assertEquals(glyphsAfterGsub, result); }
public static Set<Metric> mapFromDataProvider(TelemetryDataProvider<?> provider) { switch (provider.getDimension()) { case INSTALLATION -> { return mapInstallationMetric(provider); } case PROJECT -> { return mapProjectMetric(provider); } case USER -> { return mapUserMetric(provider); } case LANGUAGE -> { return mapLanguageMetric(provider); } default -> throw new IllegalArgumentException("Dimension: " + provider.getDimension() + " not yet implemented."); } }
@Test void mapFromDataProvider_whenAdhocInstallationProviderWithValue_shouldMapToMetric() { TestTelemetryAdhocBean provider = new TestTelemetryAdhocBean(Dimension.INSTALLATION, true); // Force the value to be returned Set<Metric> metrics = TelemetryMetricsMapper.mapFromDataProvider(provider); List<InstallationMetric> userMetrics = retrieveList(metrics); assertThat(userMetrics) .extracting(InstallationMetric::getKey, InstallationMetric::getType, InstallationMetric::getValue, InstallationMetric::getGranularity) .containsExactlyInAnyOrder( tuple("telemetry-adhoc-bean", TelemetryDataType.BOOLEAN, true, Granularity.ADHOC) ); }
public static String getValidFilePath(String inputPath) { return getValidFilePath(inputPath, false); }
@Test public void getValidFilePath_writeToBlockedPath_throwsIllegalArgumentException() { try { SecurityUtils.getValidFilePath("/usr/lib/test.txt"); } catch (IllegalArgumentException e) { return; } fail("Did not throw exception"); }
public static Builder builder() { return new Builder(); }
@Test public void testBuilder() { ByteArray hash = new ByteArray(1); Block block = Block.builder() .setResourceId("resource") .setBlockHash(hash) .setIndexInFile(1) .setLines(2, 3) .setUnit(4, 5) .build(); assertThat(block.getResourceId(), is("resource")); assertThat(block.getBlockHash(), sameInstance(hash)); assertThat(block.getIndexInFile(), is(1)); assertThat(block.getStartLine(), is(2)); assertThat(block.getEndLine(), is(3)); assertThat(block.getStartUnit(), is(4)); assertThat(block.getEndUnit(), is(5)); }
@Override public MapperResult getTenantIdList(MapperContext context) { String sql = "SELECT tenant_id FROM config_info WHERE tenant_id != '" + NamespaceUtil.getNamespaceDefaultId() + "' GROUP BY tenant_id LIMIT " + context.getStartRow() + "," + context.getPageSize(); return new MapperResult(sql, Collections.emptyList()); }
@Test void testGetTenantIdList() { MapperResult mapperResult = configInfoMapperByMySql.getTenantIdList(context); assertEquals(mapperResult.getSql(), "SELECT tenant_id FROM config_info WHERE tenant_id != '" + NamespaceUtil.getNamespaceDefaultId() + "' GROUP BY tenant_id LIMIT " + startRow + "," + pageSize); assertArrayEquals(mapperResult.getParamList().toArray(), emptyObjs); }
boolean sendRecords() { int processed = 0; recordBatch(toSend.size()); final SourceRecordWriteCounter counter = toSend.isEmpty() ? null : new SourceRecordWriteCounter(toSend.size(), sourceTaskMetricsGroup); for (final SourceRecord preTransformRecord : toSend) { ProcessingContext<SourceRecord> context = new ProcessingContext<>(preTransformRecord); final SourceRecord record = transformationChain.apply(context, preTransformRecord); final ProducerRecord<byte[], byte[]> producerRecord = convertTransformedRecord(context, record); if (producerRecord == null || context.failed()) { counter.skipRecord(); recordDropped(preTransformRecord); processed++; continue; } log.trace("{} Appending record to the topic {} with key {}, value {}", this, record.topic(), record.key(), record.value()); Optional<SubmittedRecords.SubmittedRecord> submittedRecord = prepareToSendRecord(preTransformRecord, producerRecord); try { final String topic = producerRecord.topic(); maybeCreateTopic(topic); producer.send( producerRecord, (recordMetadata, e) -> { if (e != null) { if (producerClosed) { log.trace("{} failed to send record to {}; this is expected as the producer has already been closed", AbstractWorkerSourceTask.this, topic, e); } else { log.error("{} failed to send record to {}: ", AbstractWorkerSourceTask.this, topic, e); } log.trace("{} Failed record: {}", AbstractWorkerSourceTask.this, preTransformRecord); producerSendFailed(context, false, producerRecord, preTransformRecord, e); if (retryWithToleranceOperator.getErrorToleranceType() == ToleranceType.ALL) { counter.skipRecord(); submittedRecord.ifPresent(SubmittedRecords.SubmittedRecord::ack); } } else { counter.completeRecord(); log.trace("{} Wrote record successfully: topic {} partition {} offset {}", AbstractWorkerSourceTask.this, recordMetadata.topic(), recordMetadata.partition(), recordMetadata.offset()); recordSent(preTransformRecord, producerRecord, recordMetadata); submittedRecord.ifPresent(SubmittedRecords.SubmittedRecord::ack); if (topicTrackingEnabled) { recordActiveTopic(producerRecord.topic()); } } }); // Note that this will cause retries to take place within a transaction } catch (RetriableException | org.apache.kafka.common.errors.RetriableException e) { log.warn("{} Failed to send record to topic '{}' and partition '{}'. Backing off before retrying: ", this, producerRecord.topic(), producerRecord.partition(), e); toSend = toSend.subList(processed, toSend.size()); submittedRecord.ifPresent(SubmittedRecords.SubmittedRecord::drop); counter.retryRemaining(); return false; } catch (ConnectException e) { log.warn("{} Failed to send record to topic '{}' and partition '{}' due to an unrecoverable exception: ", this, producerRecord.topic(), producerRecord.partition(), e); log.trace("{} Failed to send {} with unrecoverable exception: ", this, producerRecord, e); throw e; } catch (KafkaException e) { producerSendFailed(context, true, producerRecord, preTransformRecord, e); } processed++; recordDispatched(preTransformRecord); } toSend = null; batchDispatched(); return true; }
@Test public void testHeadersWithCustomConverter() throws Exception { StringConverter stringConverter = new StringConverter(); SampleConverterWithHeaders testConverter = new SampleConverterWithHeaders(); createWorkerTask(stringConverter, testConverter, stringConverter, RetryWithToleranceOperatorTest.noopOperator(), Collections::emptyList); expectSendRecord(null); expectApplyTransformationChain(); expectTopicCreation(TOPIC); String stringA = "Árvíztűrő tükörfúrógép"; String encodingA = "latin2"; String stringB = "Тестовое сообщение"; String encodingB = "koi8_r"; org.apache.kafka.connect.header.Headers headersA = new ConnectHeaders() .addString("encoding", encodingA); org.apache.kafka.connect.header.Headers headersB = new ConnectHeaders() .addString("encoding", encodingB); workerTask.toSend = Arrays.asList( new SourceRecord(PARTITION, OFFSET, "topic", null, Schema.STRING_SCHEMA, "a", Schema.STRING_SCHEMA, stringA, null, headersA), new SourceRecord(PARTITION, OFFSET, "topic", null, Schema.STRING_SCHEMA, "b", Schema.STRING_SCHEMA, stringB, null, headersB) ); workerTask.sendRecords(); ArgumentCaptor<ProducerRecord<byte[], byte[]>> sent = verifySendRecord(2); List<ProducerRecord<byte[], byte[]>> capturedValues = sent.getAllValues(); assertEquals(2, capturedValues.size()); ProducerRecord<byte[], byte[]> sentRecordA = capturedValues.get(0); ProducerRecord<byte[], byte[]> sentRecordB = capturedValues.get(1); assertEquals(ByteBuffer.wrap("a".getBytes()), ByteBuffer.wrap(sentRecordA.key())); assertEquals( ByteBuffer.wrap(stringA.getBytes(encodingA)), ByteBuffer.wrap(sentRecordA.value()) ); assertEquals(encodingA, new String(sentRecordA.headers().lastHeader("encoding").value())); assertEquals(ByteBuffer.wrap("b".getBytes()), ByteBuffer.wrap(sentRecordB.key())); assertEquals( ByteBuffer.wrap(stringB.getBytes(encodingB)), ByteBuffer.wrap(sentRecordB.value()) ); assertEquals(encodingB, new String(sentRecordB.headers().lastHeader("encoding").value())); verifyTaskGetTopic(2); verifyTopicCreation(); }
public static void checkTenant(String tenant) throws NacosException { if (StringUtils.isBlank(tenant) || !ParamUtils.isValid(tenant)) { throw new NacosException(NacosException.CLIENT_INVALID_PARAM, TENANT_INVALID_MSG); } }
@Test void testCheckTenantFail() throws NacosException { Throwable exception = assertThrows(NacosException.class, () -> { String tenant = ""; ParamUtils.checkTenant(tenant); }); assertTrue(exception.getMessage().contains("tenant invalid")); }
public static String getDefaultHost(@Nullable String strInterface, @Nullable String nameserver, boolean tryfallbackResolution) throws UnknownHostException { if (strInterface == null || "default".equals(strInterface)) { return cachedHostname; } if (nameserver != null && "default".equals(nameserver)) { nameserver = null; } String[] hosts = getHosts(strInterface, nameserver, tryfallbackResolution); return hosts[0]; }
@Test public void testDefaultDnsServer() throws Exception { String host = DNS.getDefaultHost(getLoopbackInterface(), DEFAULT); Assertions.assertThat(host) .isEqualTo(DNS.getDefaultHost(getLoopbackInterface())); }
public int getBlobServerPort() { final int blobServerPort = KubernetesUtils.parsePort(flinkConfig, BlobServerOptions.PORT); checkArgument(blobServerPort > 0, "%s should not be 0.", BlobServerOptions.PORT.key()); return blobServerPort; }
@Test void testGetBlobServerPortException1() { flinkConfig.set(BlobServerOptions.PORT, "1000-2000"); String errMsg = BlobServerOptions.PORT.key() + " should be specified to a fixed port. Do not support a range of ports."; assertThatThrownBy( () -> kubernetesJobManagerParameters.getBlobServerPort(), "Should fail with an exception.") .satisfies( cause -> assertThat(cause) .isInstanceOf(FlinkRuntimeException.class) .hasMessageMatching(errMsg)); }
public static String input() { return scanner().nextLine(); }
@Test @Disabled public void inputTest() { Console.log("Please input something: "); String input = Console.input(); Console.log(input); }
private WorkflowRun getRun() { return PipelineRunImpl.findRun(runExternalizableId); }
@Test public void getRun_FirstFound() throws Exception { try (MockedStatic<QueueUtil> queueUtilMockedStatic = Mockito.mockStatic(QueueUtil.class)) { Mockito.when(QueueUtil.getRun(job, 1)).thenReturn(run); WorkflowRun workflowRun = PipelineNodeImpl.getRun(job, 1); assertEquals(workflowRun, run); Mockito.verify(QueueUtil.class, VerificationModeFactory.times(1)); QueueUtil.getRun(job, 1); // the static call must be repeated immediately after verify() to complete static-mock verification } }
public static ImmutableSet<HttpUrl> allSubPaths(String url) { return allSubPaths(HttpUrl.parse(url)); }
@Test public void allSubPaths_whenMultipleSubPathsWithParamsAndFragments_returnsExpectedUrl() { assertThat(allSubPaths("http://localhost/a/b/c/?param=value&param2=value2#abc")) .containsExactly( HttpUrl.parse("http://localhost/"), HttpUrl.parse("http://localhost/a/"), HttpUrl.parse("http://localhost/a/b/"), HttpUrl.parse("http://localhost/a/b/c/")); }
public static String getApplication(Invocation invocation, String defaultValue) { if (invocation == null || invocation.getAttachments() == null) { throw new IllegalArgumentException("Bad invocation instance"); } return invocation.getAttachment(SENTINEL_DUBBO_APPLICATION_KEY, defaultValue); }
@Test(expected = IllegalArgumentException.class) public void testGetApplicationNoAttachments() { Invocation invocation = mock(Invocation.class); when(invocation.getAttachments()).thenReturn(null); when(invocation.getAttachment(DubboUtils.SENTINEL_DUBBO_APPLICATION_KEY, "")) .thenReturn("consumerA"); DubboUtils.getApplication(invocation, ""); fail("No attachments in invocation, IllegalArgumentException should be thrown!"); }
public static void sendHeartbeat(final ThreadId id, final RpcResponseClosure<AppendEntriesResponse> closure) { final Replicator r = (Replicator) id.lock(); if (r == null) { RpcUtils.runClosureInThread(closure, new Status(RaftError.EHOSTDOWN, "Peer %s is not connected", id)); return; } // id is unlocked inside sendEmptyEntries. r.sendEmptyEntries(true, closure); }
@Test public void testSendHeartbeat() { final Replicator r = getReplicator(); this.id.unlock(); assertNull(r.getHeartbeatInFly()); final RpcRequests.AppendEntriesRequest request = createEmptyEntriesRequest(true); Mockito.when( this.rpcService.appendEntries(eq(this.peerId.getEndpoint()), eq(request), eq(this.opts.getElectionTimeoutMs() / 2), Mockito.any())).thenReturn(new FutureImpl<>()); Replicator.sendHeartbeat(this.id, new RpcResponseClosureAdapter<RpcRequests.AppendEntriesResponse>() { @Override public void run(final Status status) { assertTrue(status.isOk()); } }); assertNotNull(r.getHeartbeatInFly()); assertSame(r, this.id.lock()); this.id.unlock(); }
public static List<String> computeNameParts(String loggerName) { List<String> partList = new ArrayList<String>(); int fromIndex = 0; while (true) { int index = getSeparatorIndexOf(loggerName, fromIndex); if (index == -1) { partList.add(loggerName.substring(fromIndex)); break; } partList.add(loggerName.substring(fromIndex, index)); fromIndex = index + 1; } return partList; }
@Test public void dotAtLastPositionShouldReturnAListWithAnEmptyStringAsLastElement() { List<String> witnessList = new ArrayList<String>(); witnessList.add("com"); witnessList.add("foo"); witnessList.add(""); List<String> partList = LoggerNameUtil.computeNameParts("com.foo."); assertEquals(witnessList, partList); }
@Override public void isEqualTo(@Nullable Object expected) { super.isEqualTo(expected); }
@Test public void isEqualTo_WithoutToleranceParameter_Fail_DifferentOrder() { expectFailureWhenTestingThat(array(2.2f, 3.3f)).isEqualTo(array(3.3f, 2.2f)); }
public Span nextSpan(TraceContextOrSamplingFlags extracted) { if (extracted == null) throw new NullPointerException("extracted == null"); TraceContext context = extracted.context(); if (context != null) return newChild(context); TraceIdContext traceIdContext = extracted.traceIdContext(); if (traceIdContext != null) { return _toSpan(null, decorateContext( InternalPropagation.instance.flags(extracted.traceIdContext()), traceIdContext.traceIdHigh(), traceIdContext.traceId(), 0L, 0L, 0L, extracted.extra() )); } SamplingFlags samplingFlags = extracted.samplingFlags(); List<Object> extra = extracted.extra(); TraceContext parent = currentTraceContext.get(); int flags; long traceIdHigh = 0L, traceId = 0L, localRootId = 0L, spanId = 0L; if (parent != null) { // At this point, we didn't extract trace IDs, but do have a trace in progress. Since typical // trace sampling is up front, we retain the decision from the parent. flags = InternalPropagation.instance.flags(parent); traceIdHigh = parent.traceIdHigh(); traceId = parent.traceId(); localRootId = parent.localRootId(); spanId = parent.spanId(); extra = concat(extra, parent.extra()); } else { flags = InternalPropagation.instance.flags(samplingFlags); } return _toSpan(parent, decorateContext(flags, traceIdHigh, traceId, localRootId, spanId, 0L, extra)); }
@Test void localRootId_nextSpan_flags_sampled() { TraceContextOrSamplingFlags flags = TraceContextOrSamplingFlags.SAMPLED; localRootId(flags, flags, ctx -> tracer.nextSpan(ctx)); }
@Override public Long getValue() { return longAdder.longValue(); }
@Test public void getValue() { assertThat(longCounter.getValue()).isEqualTo(INITIAL_VALUE); }
public String parse(Function<String, String> propertyMapping) { init(); boolean inPrepare = false; char[] expression = new char[128]; int expressionPos = 0; while (next()) { if (isPrepare()) { inPrepare = true; } else if (inPrepare && isPrepareEnd()) { inPrepare = false; setParsed(propertyMapping.apply(new String(expression, 0, expressionPos)).toCharArray()); expressionPos = 0; } else if (inPrepare) { if (expression.length <= expressionPos) { expression = Arrays.copyOf(expression, (int)(expression.length * 1.5)); } expression[expressionPos++] = symbol; } else if (!isPreparing()) { setParsed(symbol); } } if (isPrepareEnd() && expressionPos > 0) { setParsed(propertyMapping.apply(new String(expression, 0, expressionPos)).toCharArray()); } else { setParsed(symbol); } return new String(newArr, 0, len); }
@Test public void test() { String result = TemplateParser.parse("test-${name}-${name}", Collections.singletonMap("name", "test")); Assert.assertEquals(result, "test-test-test"); }
public static <KLeftT, KRightT> KTableHolder<KLeftT> build( final KTableHolder<KLeftT> left, final KTableHolder<KRightT> right, final ForeignKeyTableTableJoin<KLeftT, KRightT> join, final RuntimeBuildContext buildContext ) { final LogicalSchema leftSchema = left.getSchema(); final LogicalSchema rightSchema = right.getSchema(); final ProcessingLogger logger = buildContext.getProcessingLogger( join.getProperties().getQueryContext() ); final ExpressionEvaluator expressionEvaluator; final CodeGenRunner codeGenRunner = new CodeGenRunner( leftSchema, buildContext.getKsqlConfig(), buildContext.getFunctionRegistry() ); final Optional<ColumnName> leftColumnName = join.getLeftJoinColumnName(); final Optional<Expression> leftJoinExpression = join.getLeftJoinExpression(); if (leftColumnName.isPresent()) { expressionEvaluator = codeGenRunner.buildCodeGenFromParseTree( new UnqualifiedColumnReferenceExp(leftColumnName.get()), "Left Join Expression" ); } else if (leftJoinExpression.isPresent()) { expressionEvaluator = codeGenRunner.buildCodeGenFromParseTree( leftJoinExpression.get(), "Left Join Expression" ); } else { throw new IllegalStateException("Both leftColumnName and leftJoinExpression are empty."); } final ForeignKeyJoinParams<KRightT> joinParams = ForeignKeyJoinParamsFactory .create(expressionEvaluator, leftSchema, rightSchema, logger); final Formats formats = join.getFormats(); final PhysicalSchema physicalSchema = PhysicalSchema.from( joinParams.getSchema(), formats.getKeyFeatures(), formats.getValueFeatures() ); final Serde<KLeftT> keySerde = left.getExecutionKeyFactory().buildKeySerde( formats.getKeyFormat(), physicalSchema, join.getProperties().getQueryContext() ); final Serde<GenericRow> valSerde = buildContext.buildValueSerde( formats.getValueFormat(), physicalSchema, join.getProperties().getQueryContext() ); final KTable<KLeftT, GenericRow> result; switch (join.getJoinType()) { case INNER: result = left.getTable().join( right.getTable(), joinParams.getKeyExtractor(), joinParams.getJoiner(), buildContext.getMaterializedFactory().create(keySerde, valSerde) ); break; case LEFT: result = left.getTable().leftJoin( right.getTable(), joinParams.getKeyExtractor(), joinParams.getJoiner(), buildContext.getMaterializedFactory().create(keySerde, valSerde) ); break; default: throw new IllegalStateException("invalid join type: " + join.getJoinType()); } return KTableHolder.unmaterialized( result, joinParams.getSchema(), left.getExecutionKeyFactory() ); }
@Test @SuppressWarnings({"unchecked", "rawtypes"}) public void shouldDoInnerJoinOnSubKey() { // Given: givenInnerJoin(leftMultiKey, L_KEY_2); // When: final KTableHolder<Struct> result = join.build(planBuilder, planInfo); // Then: final ArgumentCaptor<KsqlKeyExtractor> ksqlKeyExtractor = ArgumentCaptor.forClass(KsqlKeyExtractor.class); verify(leftKTableMultiKey).join( same(rightKTable), ksqlKeyExtractor.capture(), eq(new KsqlValueJoiner(LEFT_SCHEMA_MULTI_KEY.value().size(), RIGHT_SCHEMA.value().size(), 0)), any(Materialized.class) ); verifyNoMoreInteractions(leftKTable, rightKTable, resultKTable); final GenericKey extractedKey = GenericKey.genericKey(LEFT_KEY_2); assertThat(ksqlKeyExtractor.getValue().apply(LEFT_ROW_MULTI), is(extractedKey)); assertThat(result.getTable(), is(resultKTable)); assertThat(result.getExecutionKeyFactory(), is(executionKeyFactory)); }
public void putUserProperty(final String name, final String value) { if (MessageConst.STRING_HASH_SET.contains(name)) { throw new RuntimeException(String.format( "The Property<%s> is used by system, input another please", name)); } if (value == null || value.trim().isEmpty() || name == null || name.trim().isEmpty()) { throw new IllegalArgumentException( "The name or value of property can not be null or blank string!" ); } this.putProperty(name, value); }
@Test(expected = IllegalArgumentException.class) public void putUserNullNamePropertyWithException() throws Exception { Message m = new Message(); m.putUserProperty(null, "val1"); }
public void terminate( WorkflowSummary workflowSummary, StepRuntimeSummary runtimeSummary, StepInstance.Status status) { try { StepRuntime.Result result = getStepRuntime(runtimeSummary.getType()) .terminate(workflowSummary, cloneSummary(runtimeSummary)); Checks.checkTrue( result.getState() == StepRuntime.State.STOPPED, "terminate call should return a STOPPED state in result: %s", result); runtimeSummary.mergeRuntimeUpdate(result.getTimeline(), result.getArtifacts()); runtimeSummary.markTerminated(status, tracingManager); } catch (RuntimeException e) { metrics.counter( MetricConstants.STEP_RUNTIME_MANAGER_TERMINATE_EXCEPTION, getClass(), MetricConstants.STATUS_TAG, status.name()); runtimeSummary.addTimeline( TimelineLogEvent.warn( "Failed to terminate the step %s of the workflow %s because [%s]", runtimeSummary.getIdentity(), workflowSummary.getIdentity(), e.getMessage())); throw e; } }
@Test public void testTerminate() { StepRuntimeSummary summary = StepRuntimeSummary.builder() .type(StepType.NOOP) .stepRetry(StepInstance.StepRetry.from(Defaults.DEFAULT_RETRY_POLICY)) .build(); runtimeManager.terminate(workflowSummary, summary, StepInstance.Status.STOPPED); assertEquals(StepInstance.Status.STOPPED, summary.getRuntimeState().getStatus()); assertNotNull(summary.getRuntimeState().getEndTime()); assertNotNull(summary.getRuntimeState().getModifyTime()); assertEquals(1, summary.getPendingRecords().size()); assertEquals( StepInstance.Status.NOT_CREATED, summary.getPendingRecords().get(0).getOldStatus()); assertEquals(StepInstance.Status.STOPPED, summary.getPendingRecords().get(0).getNewStatus()); assertEquals(artifact, summary.getArtifacts().get("test-artifact")); assertEquals(1, summary.getTimeline().getTimelineEvents().size()); assertEquals("test termination", summary.getTimeline().getTimelineEvents().get(0).getMessage()); }
public static TypeBuilder<Schema> builder() { return new TypeBuilder<>(new SchemaCompletion(), new NameContext()); }
@Test void nullObjectProp() { assertThrows(AvroRuntimeException.class, () -> { SchemaBuilder.builder().intBuilder().prop("nullProp", (Object) null).endInt(); }); }
public IterableSubject asList() { return checkNoNeedToDisplayBothValues("asList()").that(Chars.asList(checkNotNull(actual))); }
@Test public void asList() { assertThat(array('a', 'q', 'z')).asList().containsAtLeast('a', 'z'); }
protected static long extractTimestampAttribute( String timestampAttribute, @Nullable Map<String, String> attributes) { Preconditions.checkState(!timestampAttribute.isEmpty()); String value = attributes == null ? null : attributes.get(timestampAttribute); checkArgument( value != null, "PubSub message is missing a value for timestamp attribute %s", timestampAttribute); Long timestampMsSinceEpoch = parseTimestampAsMsSinceEpoch(value); checkArgument( timestampMsSinceEpoch != null, "Cannot interpret value of attribute %s as timestamp: %s", timestampAttribute, value); return timestampMsSinceEpoch; }
@Test public void timestampAttributeSetWithMissingAttributeThrowsError() { thrown.expect(RuntimeException.class); thrown.expectMessage("PubSub message is missing a value for timestamp attribute myAttribute"); Map<String, String> map = ImmutableMap.of("otherLabel", "whatever"); PubsubClient.extractTimestampAttribute("myAttribute", map); }
static void validateNewAcl(AclBinding binding) { switch (binding.pattern().resourceType()) { case UNKNOWN: case ANY: throw new InvalidRequestException("Invalid resourceType " + binding.pattern().resourceType()); default: break; } switch (binding.pattern().patternType()) { case LITERAL: case PREFIXED: break; default: throw new InvalidRequestException("Invalid patternType " + binding.pattern().patternType()); } switch (binding.entry().operation()) { case UNKNOWN: case ANY: throw new InvalidRequestException("Invalid operation " + binding.entry().operation()); default: break; } switch (binding.entry().permissionType()) { case DENY: case ALLOW: break; default: throw new InvalidRequestException("Invalid permissionType " + binding.entry().permissionType()); } if (binding.pattern().name() == null || binding.pattern().name().isEmpty()) { throw new InvalidRequestException("Resource name should not be empty"); } }
@Test public void testValidateNewAcl() { AclControlManager.validateNewAcl(new AclBinding( new ResourcePattern(TOPIC, "*", LITERAL), new AccessControlEntry("User:*", "*", ALTER, ALLOW))); assertEquals("Invalid patternType UNKNOWN", assertThrows(InvalidRequestException.class, () -> AclControlManager.validateNewAcl(new AclBinding( new ResourcePattern(TOPIC, "*", PatternType.UNKNOWN), new AccessControlEntry("User:*", "*", ALTER, ALLOW)))). getMessage()); assertEquals("Invalid resourceType UNKNOWN", assertThrows(InvalidRequestException.class, () -> AclControlManager.validateNewAcl(new AclBinding( new ResourcePattern(ResourceType.UNKNOWN, "*", LITERAL), new AccessControlEntry("User:*", "*", ALTER, ALLOW)))). getMessage()); assertEquals("Invalid operation UNKNOWN", assertThrows(InvalidRequestException.class, () -> AclControlManager.validateNewAcl(new AclBinding( new ResourcePattern(TOPIC, "*", LITERAL), new AccessControlEntry("User:*", "*", AclOperation.UNKNOWN, ALLOW)))). getMessage()); assertEquals("Invalid permissionType UNKNOWN", assertThrows(InvalidRequestException.class, () -> AclControlManager.validateNewAcl(new AclBinding( new ResourcePattern(TOPIC, "*", LITERAL), new AccessControlEntry("User:*", "*", ALTER, AclPermissionType.UNKNOWN)))). getMessage()); }
public static SchemaPath schemaPathFromId(String projectId, String schemaId) { return new SchemaPath(projectId, schemaId); }
@Test public void schemaPathFromIdPathWellFormed() { SchemaPath path = PubsubClient.schemaPathFromId("projectId", "schemaId"); assertEquals("projects/projectId/schemas/schemaId", path.getPath()); assertEquals("schemaId", path.getId()); }
static String computeDetailsAsString(SearchRequest searchRequest) { StringBuilder message = new StringBuilder(); message.append(String.format("ES search request '%s'", searchRequest)); if (searchRequest.indices().length > 0) { message.append(String.format(ON_INDICES_MESSAGE, Arrays.toString(searchRequest.indices()))); } return message.toString(); }
@Test public void should_format_IndexRequest() { IndexRequest indexRequest = new IndexRequest() .index("index-1") .id("id-1"); assertThat(EsRequestDetails.computeDetailsAsString(indexRequest)) .isEqualTo("ES index request for key 'id-1' on index 'index-1'"); }
public ExecBinding getBinding() { return binding; }
@Test @DirtiesContext public void testCreateEndpointCustomBinding() throws Exception { ExecEndpoint e = createExecEndpoint("exec:test?binding=#customBinding"); assertSame(customBinding, e.getBinding(), "Expected is the custom customBinding reference from the application context"); }
@VisibleForTesting String upload(Configuration config, String artifactUriStr) throws IOException, URISyntaxException { final URI artifactUri = PackagedProgramUtils.resolveURI(artifactUriStr); if (!"local".equals(artifactUri.getScheme())) { return artifactUriStr; } final String targetDir = config.get(KubernetesConfigOptions.LOCAL_UPLOAD_TARGET); checkArgument( !StringUtils.isNullOrWhitespaceOnly(targetDir), String.format( "Setting '%s' to a valid remote path is required.", KubernetesConfigOptions.LOCAL_UPLOAD_TARGET.key())); final FileSystem.WriteMode writeMode = config.get(KubernetesConfigOptions.LOCAL_UPLOAD_OVERWRITE) ? FileSystem.WriteMode.OVERWRITE : FileSystem.WriteMode.NO_OVERWRITE; final File src = new File(artifactUri.getPath()); final Path target = new Path(targetDir, src.getName()); if (target.getFileSystem().exists(target) && writeMode == FileSystem.WriteMode.NO_OVERWRITE) { LOG.info( "Skip uploading artifact '{}', as it already exists." + " To overwrite existing artifacts, please set the '{}' config option.", target, KubernetesConfigOptions.LOCAL_UPLOAD_OVERWRITE.key()); } else { final long start = System.currentTimeMillis(); final FileSystem fs = target.getFileSystem(); try (FSDataOutputStream os = fs.create(target, writeMode)) { FileUtils.copyFile(src, os); } LOG.debug( "Copied file from {} to {}, cost {} ms", src, target, System.currentTimeMillis() - start); } return target.toString(); }
@Test void testRemoteUri() throws Exception { config.removeConfig(KubernetesConfigOptions.LOCAL_UPLOAD_TARGET); String remoteUri = "s3://my-bucket/my-artifact.jar"; String finalUri = artifactUploader.upload(config, remoteUri); assertThat(finalUri).isEqualTo(remoteUri); }
@Override public void open() throws Exception { super.open(); collector = new TimestampedCollector<>(output); context = new ContextImpl(userFunction); internalTimerService = getInternalTimerService(CLEANUP_TIMER_NAME, StringSerializer.INSTANCE, this); }
@TestTemplate void testReturnsCorrectTimestamp() throws Exception { IntervalJoinOperator<String, TestElem, TestElem, Tuple2<TestElem, TestElem>> op = new IntervalJoinOperator<>( -1, 1, true, true, null, null, TestElem.serializer(), TestElem.serializer(), new ProcessJoinFunction<TestElem, TestElem, Tuple2<TestElem, TestElem>>() { private static final long serialVersionUID = 1L; @Override public void processElement( TestElem left, TestElem right, Context ctx, Collector<Tuple2<TestElem, TestElem>> out) throws Exception { assertThat(ctx.getTimestamp()) .isEqualTo(Math.max(left.ts, right.ts)); } }); try (TestHarness testHarness = new TestHarness( op, (elem) -> elem.key, (elem) -> elem.key, TypeInformation.of(String.class))) { testHarness.setup(); testHarness.open(); processElementsAndWatermarks(testHarness); } }
@Override public void parse(InputStream stream, ContentHandler handler, Metadata metadata, ParseContext context) throws IOException, SAXException, TikaException { // Automatically detect the character encoding try (AutoDetectReader reader = new AutoDetectReader(CloseShieldInputStream.wrap(stream), metadata, getEncodingDetector(context))) { //try to get detected content type; could be a subclass of text/plain //such as vcal, etc. String incomingMime = metadata.get(Metadata.CONTENT_TYPE); MediaType mediaType = MediaType.TEXT_PLAIN; if (incomingMime != null) { MediaType tmpMediaType = MediaType.parse(incomingMime); if (tmpMediaType != null) { mediaType = tmpMediaType; } } Charset charset = reader.getCharset(); MediaType type = new MediaType(mediaType, charset); metadata.set(Metadata.CONTENT_TYPE, type.toString()); // deprecated, see TIKA-431 metadata.set(Metadata.CONTENT_ENCODING, charset.name()); XHTMLContentHandler xhtml = new XHTMLContentHandler(handler, metadata); xhtml.startDocument(); xhtml.startElement("p"); char[] buffer = new char[4096]; int n = reader.read(buffer); while (n != -1) { xhtml.characters(buffer, 0, n); n = reader.read(buffer); } xhtml.endElement("p"); xhtml.endDocument(); } }
@Test public void testUsingCharsetInContentTypeHeader() throws Exception { // Could be ISO 8859-1 or ISO 8859-15 or ... // u00e1 is latin small letter a with acute final String test2 = "the name is \u00e1ndre"; Metadata metadata = new Metadata(); parser.parse(new ByteArrayInputStream(test2.getBytes(ISO_8859_1)), new BodyContentHandler(), metadata, new ParseContext()); assertEquals("text/plain; charset=ISO-8859-1", metadata.get(Metadata.CONTENT_TYPE)); assertEquals("ISO-8859-1", metadata.get(Metadata.CONTENT_ENCODING)); // deprecated metadata = new Metadata(); metadata.set(Metadata.CONTENT_TYPE, "text/html; charset=ISO-8859-15"); parser.parse(new ByteArrayInputStream(test2.getBytes(ISO_8859_1)), new BodyContentHandler(), metadata, new ParseContext()); assertEquals("text/html; charset=ISO-8859-15", metadata.get(Metadata.CONTENT_TYPE)); assertEquals("ISO-8859-15", metadata.get(Metadata.CONTENT_ENCODING)); // deprecated }
@Override public ImportResult importItem( UUID jobId, IdempotentImportExecutor idempotentImportExecutor, TokensAndUrlAuthData authData, PhotosContainerResource resource) throws Exception { // Ensure credential is populated getOrCreateCredential(authData); monitor.debug( () -> String .format("%s: Importing %s albums and %s photos before transmogrification", jobId, resource.getAlbums().size(), resource.getPhotos().size())); // Make the data onedrive compatible resource.transmogrify(transmogrificationConfig); monitor.debug( () -> String.format("%s: Importing %s albums and %s photos after transmogrification", jobId, resource.getAlbums().size(), resource.getPhotos().size())); for (PhotoAlbum album : resource.getAlbums()) { // Create a OneDrive folder and then save the id with the mapping data idempotentImportExecutor.executeAndSwallowIOExceptions( album.getId(), album.getName(), () -> createOneDriveFolder(album)); } for (PhotoModel photoModel : resource.getPhotos()) { idempotentImportExecutor.executeAndSwallowIOExceptions( photoModel.getIdempotentId(), photoModel.getTitle(), () -> importSinglePhoto(photoModel, jobId, idempotentImportExecutor)); } return ImportResult.OK; }
@Test public void testImportItemPermissionDenied() throws Exception { List<PhotoAlbum> albums = ImmutableList.of(new PhotoAlbum("id1", "album1.", "This is a fake album")); PhotosContainerResource data = new PhotosContainerResource(albums, null); Call call = mock(Call.class); doReturn(call).when(client).newCall(argThat((Request r) -> r.url().toString().equals("https://www.baseurl.com/v1.0/me/drive/special/photos/children") )); Response response = mock(Response.class); ResponseBody body = mock(ResponseBody.class); when(body.bytes()).thenReturn( ResponseBody.create(MediaType.parse("application/json"), "{\"id\": \"id1\"}").bytes()); when(body.string()).thenReturn( ResponseBody.create(MediaType.parse("application/json"), "{\"id\": \"id1\"}").string()); when(response.code()).thenReturn(403); when(response.message()).thenReturn("Access Denied"); when(response.body()).thenReturn(body); when(call.execute()).thenReturn(response); assertThrows(PermissionDeniedException.class, () -> { ImportResult result = importer.importItem(uuid, executor, authData, data); }); }
public boolean load() { String fileName = null; try { fileName = this.configFilePath(); String jsonString = MixAll.file2String(fileName); if (null == jsonString || jsonString.length() == 0) { return this.loadBak(); } else { this.decode(jsonString); log.info("load " + fileName + " OK"); return true; } } catch (Exception e) { log.error("load " + fileName + " failed, and try to load backup file", e); return this.loadBak(); } }
@Test public void testLoad() throws Exception { ConfigManager testConfigManager = buildTestConfigManager(); File file = createAndWriteFile(testConfigManager.configFilePath()); assertTrue(testConfigManager.load()); file.delete(); File fileBak = createAndWriteFile(testConfigManager.configFilePath() + ".bak"); assertTrue(testConfigManager.load()); fileBak.delete(); }
public ConfigurationProperty getProperty(final String key) { return stream().filter(item -> item.getConfigurationKey().getName().equals(key)).findFirst().orElse(null); }
@Test void shouldGetNullIfPropertyNotFoundForGivenKey() { Configuration config = new Configuration(); assertThat(config.getProperty("key2")).isNull(); }
@Override public String formatDouble(Locale locale, Double value) { NumberFormat format = DecimalFormat.getNumberInstance(locale); format.setMinimumFractionDigits(1); format.setMaximumFractionDigits(1); return format.format(value); }
@Test public void format_double() { assertThat(underTest.formatDouble(Locale.FRENCH, 10.56)).isEqualTo("10,6"); assertThat(underTest.formatDouble(Locale.FRENCH, 10d)).isEqualTo("10,0"); }
public static FormattingTuple format(String messagePattern, Object arg) { return arrayFormat(messagePattern, new Object[]{arg}); }
@Test public void verifyOneParameterIsHandledCorrectly() { String result = MessageFormatter.format("Value is {}.", 3).getMessage(); assertEquals("Value is 3.", result); result = MessageFormatter.format("Value is {", 3).getMessage(); assertEquals("Value is {", result); result = MessageFormatter.format("{} is larger than 2.", 3).getMessage(); assertEquals("3 is larger than 2.", result); result = MessageFormatter.format("No subst", 3).getMessage(); assertEquals("No subst", result); result = MessageFormatter.format("Incorrect {subst", 3).getMessage(); assertEquals("Incorrect {subst", result); result = MessageFormatter.format("Value is {bla} {}", 3).getMessage(); assertEquals("Value is {bla} 3", result); result = MessageFormatter.format("Escaped \\{} subst", 3).getMessage(); assertEquals("Escaped {} subst", result); result = MessageFormatter.format("{Escaped", 3).getMessage(); assertEquals("{Escaped", result); result = MessageFormatter.format("\\{}Escaped", 3).getMessage(); assertEquals("{}Escaped", result); result = MessageFormatter.format("File name is {{}}.", "App folder.zip").getMessage(); assertEquals("File name is {App folder.zip}.", result); // escaping the escape character result = MessageFormatter.format("File name is C:\\\\{}.", "App folder.zip").getMessage(); assertEquals("File name is C:\\App folder.zip.", result); }
public Collection<PlanCoordinator> coordinators() { return Collections.unmodifiableCollection(mCoordinators.values()); }
@Test public void testAddAndPurge() throws Exception { assertEquals("tracker should be empty", 0, mTracker.coordinators().size()); fillJobTracker(CAPACITY); try { addJob(100); fail("Should have failed to add a job over capacity"); } catch (ResourceExhaustedException e) { // Empty on purpose } finishAllJobs(); try { addJob(100); } catch (ResourceExhaustedException e) { fail("Should not have failed to add a job over capacity when all are finished"); } }
public static boolean tryFillGap( final UnsafeBuffer logMetaDataBuffer, final UnsafeBuffer termBuffer, final int termId, final int gapOffset, final int gapLength) { int offset = (gapOffset + gapLength) - FRAME_ALIGNMENT; while (offset >= gapOffset) { if (0 != termBuffer.getInt(offset)) { return false; } offset -= FRAME_ALIGNMENT; } applyDefaultHeader(logMetaDataBuffer, termBuffer, gapOffset); frameType(termBuffer, gapOffset, HDR_TYPE_PAD); frameTermOffset(termBuffer, gapOffset); frameTermId(termBuffer, gapOffset, termId); frameLengthOrdered(termBuffer, gapOffset, gapLength); return true; }
@Test void shouldFillGapAfterExistingFrame() { final int gapOffset = 128; final int gapLength = 64; dataFlyweight .sessionId(SESSION_ID) .termId(TERM_ID) .streamId(STREAM_ID) .flags(UNFRAGMENTED) .frameLength(gapOffset); dataFlyweight.setMemory(0, gapOffset - DataHeaderFlyweight.HEADER_LENGTH, (byte)'x'); assertTrue(TermGapFiller.tryFillGap(metaDataBuffer, termBuffer, TERM_ID, gapOffset, gapLength)); dataFlyweight.wrap(termBuffer, gapOffset, termBuffer.capacity() - gapOffset); assertEquals(gapLength, dataFlyweight.frameLength()); assertEquals(gapOffset, dataFlyweight.termOffset()); assertEquals(SESSION_ID, dataFlyweight.sessionId()); assertEquals(TERM_ID, dataFlyweight.termId()); assertEquals(PADDING_FRAME_TYPE, dataFlyweight.headerType()); assertEquals(UNFRAGMENTED, (byte)(dataFlyweight.flags())); }
public PaginationContext createPaginationContext(final LimitSegment limitSegment, final List<Object> params) { return new PaginationContext(limitSegment.getOffset().orElse(null), limitSegment.getRowCount().orElse(null), params); }
@Test void assertPaginationContextCreatedProperlyWhenOffsetAndRowCountAreBothNull() { PaginationContext paginationContext = new LimitPaginationContextEngine().createPaginationContext(new LimitSegment(0, 10, null, null), Collections.emptyList()); assertFalse(paginationContext.isHasPagination()); }
public static boolean isRetryOrDlqTopic(String topic) { if (StringUtils.isBlank(topic)) { return false; } return topic.startsWith(MixAll.RETRY_GROUP_TOPIC_PREFIX) || topic.startsWith(MixAll.DLQ_GROUP_TOPIC_PREFIX); }
@Test public void testIsRetryOrDlqTopicWithDlqTopic() { String topic = MixAll.DLQ_GROUP_TOPIC_PREFIX + "TestTopic"; boolean result = BrokerMetricsManager.isRetryOrDlqTopic(topic); assertThat(result).isTrue(); }
public static FixedWindows of(Duration size) { return new FixedWindows(size, Duration.ZERO); }
@Test public void testSimpleFixedWindow() throws Exception { Map<IntervalWindow, Set<String>> expected = new HashMap<>(); expected.put(new IntervalWindow(new Instant(0), new Instant(10)), set(1, 2, 5, 9)); expected.put(new IntervalWindow(new Instant(10), new Instant(20)), set(10, 11)); expected.put(new IntervalWindow(new Instant(100), new Instant(110)), set(100)); assertEquals( expected, runWindowFn( FixedWindows.of(Duration.millis(10)), Arrays.asList(1L, 2L, 5L, 9L, 10L, 11L, 100L))); }
@Override public UrlPattern doGetPattern() { return UrlPattern.builder() .includes(includeUrls) .excludes(excludeUrls) .build(); }
@Test public void match_undeclared_web_services_starting_with_api() { initWebServiceEngine(newWsUrl("api/issues", "search")); assertThat(underTest.doGetPattern().matches("/api/resources/index")).isTrue(); assertThat(underTest.doGetPattern().matches("/api/user_properties")).isTrue(); }
public void generateAcknowledgementPayload( MllpSocketBuffer mllpSocketBuffer, byte[] hl7MessageBytes, String acknowledgementCode) throws MllpAcknowledgementGenerationException { generateAcknowledgementPayload(mllpSocketBuffer, hl7MessageBytes, acknowledgementCode, null); }
@Test public void testGenerateAcknowledgementPayloadTimestamp() throws Exception { MllpSocketBuffer mllpSocketBuffer = new MllpSocketBuffer(new MllpEndpointStub()); hl7util.generateAcknowledgementPayload(mllpSocketBuffer, TEST_MESSAGE.getBytes(), "AA"); String actualMsh7Field = mllpSocketBuffer.toString().split("\\|")[6]; assertThat(actualMsh7Field, matchesPattern("\\d{14}\\.\\d{3}[+-]\\d{4}")); }
public PutObjectResult putObject(String key, String localFilePath) { PutObjectRequest putObjectRequest = new PutObjectRequest(cosClientConfig.getBucket(), key, new File(localFilePath)); return cosClient.putObject(putObjectRequest); }
@Test void putObject() { cosManager.putObject("test", "test.json"); }
@Override protected int command() { if (!validateConfigFilePresent()) { return 1; } final MigrationConfig config; try { config = MigrationConfig.load(getConfigFile()); } catch (KsqlException | MigrationException e) { LOGGER.error(e.getMessage()); return 1; } return command(config, MigrationsUtil::getKsqlClient); }
@Test public void shouldCreateMigrationsStream() { // When: final int status = command.command(config, cfg -> client); // Then: assertThat(status, is(0)); verify(client).executeStatement(EXPECTED_CS_STATEMENT); }
public static void smooth(PointList geometry, double maxWindowSize) { if (geometry.size() <= 2) { // geometry consists only of tower nodes, there are no pillar nodes to be smoothed in between return; } // calculate the distance between all points once here to avoid repeated calculation. // for n nodes there are always n-1 edges double[] distances = new double[geometry.size() - 1]; for (int i = 0; i <= geometry.size() - 2; i++) { distances[i] = DistancePlaneProjection.DIST_PLANE.calcDist( geometry.getLat(i), geometry.getLon(i), geometry.getLat(i + 1), geometry.getLon(i + 1) ); } // map that will collect all smoothed elevation values, size is less by 2 // because elevation of start and end point (tower nodes) won't be touched IntDoubleHashMap averagedElevations = new IntDoubleHashMap((geometry.size() - 1) * 4 / 3); // iterate over every pillar node to smooth its elevation // first and last points are left out as they are tower nodes for (int i = 1; i <= geometry.size() - 2; i++) { // first, determine the average window which could be smaller when close to pillar nodes double searchDistance = maxWindowSize / 2.0; double searchDistanceBack = 0.0; for (int j = i - 1; j >= 0; j--) { searchDistanceBack += distances[j]; if (searchDistanceBack > searchDistance) { break; } } // update search distance if pillar node is close to START tower node searchDistance = Math.min(searchDistance, searchDistanceBack); double searchDistanceForward = 0.0; for (int j = i; j < geometry.size() - 1; j++) { searchDistanceForward += distances[j]; if (searchDistanceForward > searchDistance) { break; } } // update search distance if pillar node is close to END tower node searchDistance = Math.min(searchDistance, searchDistanceForward); if (searchDistance <= 0.0) { // there is nothing to smooth. this is an edge case where pillar nodes share exactly the same location // as a tower node. // by doing so we avoid (at least theoretically) a division by zero later in the function call continue; } // area under elevation curve double elevationArea = 0.0; // first going again backwards double distanceBack = 0.0; for (int j = i - 1; j >= 0; j--) { double dist = distances[j]; double searchDistLeft = searchDistance - distanceBack; distanceBack += dist; if (searchDistLeft < dist) { // node lies outside averaging window double elevationDelta = geometry.getEle(j) - geometry.getEle(j + 1); double elevationAtSearchDistance = geometry.getEle(j + 1) + searchDistLeft / dist * elevationDelta; elevationArea += searchDistLeft * (geometry.getEle(j + 1) + elevationAtSearchDistance) / 2.0; break; } else { elevationArea += dist * (geometry.getEle(j + 1) + geometry.getEle(j)) / 2.0; } } // now going forward double distanceForward = 0.0; for (int j = i; j < geometry.size() - 1; j++) { double dist = distances[j]; double searchDistLeft = searchDistance - distanceForward; distanceForward += dist; if (searchDistLeft < dist) { double elevationDelta = geometry.getEle(j + 1) - geometry.getEle(j); double elevationAtSearchDistance = geometry.getEle(j) + searchDistLeft / dist * elevationDelta; elevationArea += searchDistLeft * (geometry.getEle(j) + elevationAtSearchDistance) / 2.0; break; } else { elevationArea += dist * (geometry.getEle(j + 1) + geometry.getEle(j)) / 2.0; } } double elevationAverage = elevationArea / (searchDistance * 2); averagedElevations.put(i, elevationAverage); } // after all pillar nodes got an averaged elevation, elevations are overwritten averagedElevations.forEach((Consumer<IntDoubleCursor>) c -> geometry.setElevation(c.key, c.value)); }
@Test
public void testTwoPoints() {
    // consists of only two tower nodes and no pillar nodes
    // elevation must stay unchanged
    PointList pl = new PointList(2, true);
    pl.add(0, 0, -1);
    pl.add(1, 1, 100);
    EdgeElevationSmoothingMovingAverage.smooth(pl, 150.0);
    assertEquals(2, pl.size());
    assertEquals(-1.0, pl.getEle(0), 0.000001);
    assertEquals(100.0, pl.getEle(1), 0.000001);
}
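A minimal usage sketch of the method above, assuming GraphHopper's PointList and the smooth method are on the classpath; the coordinates and elevations are made up for illustration. The pillar node's elevation spike is averaged over the (here very short) window, while the tower-node endpoints stay untouched.

PointList pl = new PointList(3, true);
pl.add(49.0000, 11.0000, 0);   // tower node, elevation is never touched
pl.add(49.0001, 11.0001, 50);  // pillar node, elevation gets averaged over the window
pl.add(49.0002, 11.0002, 0);   // tower node, elevation is never touched
EdgeElevationSmoothingMovingAverage.smooth(pl, 150.0);
// the spike is flattened toward its neighbours:
System.out.println(pl.getEle(1)); // some value strictly between 0 and 50 (here 25.0)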
@Override
public long read() {
    // delegate to the current gauge source, which may be backed by a static probe
    // or by a dynamically collected metric
    return gaugeSource.read();
}
@Test
public void whenCreatedForDynamicLongMetricWithProvidedValue() {
    SomeObject someObject = new SomeObject();
    someObject.longField = 42;
    metricsRegistry.registerDynamicMetricsProvider((descriptor, context) -> context
            .collect(descriptor.withPrefix("foo"), "longField", INFO, BYTES, 42));
    LongGaugeImpl longGauge = metricsRegistry.newLongGauge("foo.longField");

    // needed to collect dynamic metrics and update the gauge created from them
    metricsRegistry.collect(mock(MetricsCollector.class));

    assertEquals(42, longGauge.read());
}
@Override
@NotNull
public BTreeMutable getMutableCopy() {
    final BTreeMutable result = new BTreeMutable(this);
    // the current root loggable will be superseded once the mutable copy is saved,
    // so mark it as expired up front
    result.addExpiredLoggable(rootLoggable);
    return result;
}
@Test
public void testPutOverwriteTreeWithoutDuplicates() {
    // populate a tree that does NOT support duplicates
    tm = new BTreeEmpty(log, createTestSplittingPolicy(), false, 1).getMutableCopy();
    for (int i = 0; i < 100; i++) {
        getTreeMutable().put(kv(i, "v" + i));
    }
    checkTree(getTreeMutable(), 100).run();

    // putting the same 100 keys again must overwrite the existing values
    for (int i = 0; i < 100; i++) {
        final INode ln = kv(i, "vv" + i);
        getTreeMutable().put(ln);
    }
    checkTree(getTreeMutable(), "vv", 100).run();

    long rootAddress = saveTree();
    checkTree(getTreeMutable(), "vv", 100).run();

    reopen();
    t = new BTree(log, rootAddress, true, 1);
    checkTree(getTreeMutable(), "vv", 100).run();
}
@Override
public boolean ensureCapacity(int numAdditionalBuffers) {
    checkIsInitialized();

    final int numRequestedByGuaranteedReclaimableOwners =
            tieredMemorySpecs.values().stream()
                    .filter(TieredStorageMemorySpec::isGuaranteedReclaimable)
                    .mapToInt(spec -> numOwnerRequestedBuffer(spec.getOwner()))
                    .sum();

    // pre-fetch buffers until the queue, together with what guaranteed-reclaimable owners
    // already hold, covers the guaranteed budget plus the additionally requested headroom
    while (bufferQueue.size() + numRequestedByGuaranteedReclaimableOwners
            < numGuaranteedReclaimableBuffers + numAdditionalBuffers) {
        if (numRequestedBuffers.get() >= bufferPool.getNumBuffers()) {
            // the underlying pool is exhausted, the requested capacity cannot be guaranteed
            return false;
        }

        MemorySegment memorySegment = requestBufferBlockingFromPool();
        if (memorySegment == null) {
            return false;
        }
        bufferQueue.add(memorySegment);
    }
    return true;
}
@Test
void testEnsureCapacity() throws IOException {
    final int numBuffers = 5;
    final int guaranteedReclaimableBuffers = 3;

    BufferPool bufferPool = globalPool.createBufferPool(numBuffers, numBuffers);
    TieredStorageMemoryManagerImpl storageMemoryManager =
            createStorageMemoryManager(
                    bufferPool,
                    Arrays.asList(
                            new TieredStorageMemorySpec(
                                    new Object(), guaranteedReclaimableBuffers, true),
                            new TieredStorageMemorySpec(this, 0, false)));

    assertThat(storageMemoryManager.ensureCapacity(0)).isTrue();
    assertThat(bufferPool.bestEffortGetNumOfUsedBuffers())
            .isEqualTo(guaranteedReclaimableBuffers);

    assertThat(storageMemoryManager.ensureCapacity(numBuffers - guaranteedReclaimableBuffers))
            .isTrue();
    assertThat(bufferPool.bestEffortGetNumOfUsedBuffers()).isEqualTo(numBuffers);

    assertThat(
                    storageMemoryManager.ensureCapacity(
                            numBuffers - guaranteedReclaimableBuffers + 1))
            .isFalse();

    storageMemoryManager.release();
}
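The arithmetic of the pre-fetch loop is easy to replay with plain integers. The sketch below is a toy model (hypothetical method name, not part of Flink) that substitutes ints for the buffer pool and queue and reproduces the numbers exercised by the test above.

/** Toy model of the ensureCapacity loop; plain ints stand in for the Flink types. */
static boolean ensureCapacityModel(int queued, int requestedByGuaranteedOwners,
                                   int guaranteed, int additional,
                                   int requestedTotal, int poolSize) {
    while (queued + requestedByGuaranteedOwners < guaranteed + additional) {
        if (requestedTotal >= poolSize) {
            return false; // pool exhausted before the target capacity is reached
        }
        queued++;         // pre-fetch one more segment into the internal queue
        requestedTotal++;
    }
    return true;
}

// Replaying the test's numbers: pool of 5 buffers, 3 guaranteed-reclaimable buffers.
// ensureCapacity(0) -> pre-fetches 3 buffers, succeeds (3 of 5 used)
// ensureCapacity(2) -> pre-fetches 2 more, succeeds (5 of 5 used)
// ensureCapacity(3) -> would need a 6th buffer, fails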
public List<QueryMetadata> sql(final String sql) {
    // convenience overload: execute with no property overrides
    return sql(sql, Collections.emptyMap());
}
@Test
public void shouldThrowIfParseFails() {
    // Given:
    when(ksqlEngine.parse(any()))
            .thenThrow(new KsqlException("Bad tings happen"));

    // When:
    final Exception e = assertThrows(
            KsqlException.class,
            () -> ksqlContext.sql("Some SQL", SOME_PROPERTIES)
    );

    // Then:
    assertThat(e.getMessage(), containsString("Bad tings happen"));
}