focal_method: string (13 to 60.9k chars); test_case: string (25 to 109k chars)
@Override public Object convert(String value) { if (isNullOrEmpty(value)) { return value; } if (value.contains("=")) { final Map<String, String> fields = new HashMap<>(); Matcher m = PATTERN.matcher(value); while (m.find()) { if (m.groupCount() != 2) { continue; } fields.put(removeQuotes(m.group(1)), removeQuotes(m.group(2))); } return fields; } else { return Collections.emptyMap(); } }
@Test public void testFilterWithIDAdditionalField() { TokenizerConverter f = new TokenizerConverter(new HashMap<String, Object>()); @SuppressWarnings("unchecked") Map<String, String> result = (Map<String, String>) f.convert("otters _id=123 more otters"); assertEquals(1, result.size()); assertEquals("123", result.get("_id")); }
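The PATTERN constant and the removeQuotes helper are not shown in the excerpt; as a minimal sketch, a hypothetical key=value regex reproduces the behavior the test expects (quote stripping omitted):

```java
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class TokenizerSketch {
    // Hypothetical stand-in for the converter's PATTERN, which the excerpt does not show.
    private static final Pattern PATTERN = Pattern.compile("(\\S+)=(\\S+)");

    public static void main(String[] args) {
        Map<String, String> fields = new HashMap<>();
        Matcher m = PATTERN.matcher("otters _id=123 more otters");
        while (m.find()) {
            fields.put(m.group(1), m.group(2)); // group 1 = key, group 2 = value
        }
        System.out.println(fields); // {_id=123}
    }
}
```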
@Override public EncodedMessage transform(ActiveMQMessage message) throws Exception { if (message == null) { return null; } long messageFormat = 0; Header header = null; Properties properties = null; Map<Symbol, Object> daMap = null; Map<Symbol, Object> maMap = null; Map<String,Object> apMap = null; Map<Object, Object> footerMap = null; Section body = convertBody(message); if (message.isPersistent()) { if (header == null) { header = new Header(); } header.setDurable(true); } byte priority = message.getPriority(); if (priority != Message.DEFAULT_PRIORITY) { if (header == null) { header = new Header(); } header.setPriority(UnsignedByte.valueOf(priority)); } String type = message.getType(); if (type != null) { if (properties == null) { properties = new Properties(); } properties.setSubject(type); } MessageId messageId = message.getMessageId(); if (messageId != null) { if (properties == null) { properties = new Properties(); } properties.setMessageId(getOriginalMessageId(message)); } ActiveMQDestination destination = message.getDestination(); if (destination != null) { if (properties == null) { properties = new Properties(); } properties.setTo(destination.getQualifiedName()); if (maMap == null) { maMap = new HashMap<>(); } maMap.put(JMS_DEST_TYPE_MSG_ANNOTATION, destinationType(destination)); } ActiveMQDestination replyTo = message.getReplyTo(); if (replyTo != null) { if (properties == null) { properties = new Properties(); } properties.setReplyTo(replyTo.getQualifiedName()); if (maMap == null) { maMap = new HashMap<>(); } maMap.put(JMS_REPLY_TO_TYPE_MSG_ANNOTATION, destinationType(replyTo)); } String correlationId = message.getCorrelationId(); if (correlationId != null) { if (properties == null) { properties = new Properties(); } try { properties.setCorrelationId(AMQPMessageIdHelper.INSTANCE.toIdObject(correlationId)); } catch (AmqpProtocolException e) { properties.setCorrelationId(correlationId); } } long expiration = message.getExpiration(); if (expiration != 0) { long ttl = expiration - System.currentTimeMillis(); if (ttl < 0) { ttl = 1; } if (header == null) { header = new Header(); } header.setTtl(new UnsignedInteger((int) ttl)); if (properties == null) { properties = new Properties(); } properties.setAbsoluteExpiryTime(new Date(expiration)); } long timeStamp = message.getTimestamp(); if (timeStamp != 0) { if (properties == null) { properties = new Properties(); } properties.setCreationTime(new Date(timeStamp)); } // JMSX Message Properties int deliveryCount = message.getRedeliveryCounter(); if (deliveryCount > 0) { if (header == null) { header = new Header(); } header.setDeliveryCount(UnsignedInteger.valueOf(deliveryCount)); } String userId = message.getUserID(); if (userId != null) { if (properties == null) { properties = new Properties(); } properties.setUserId(new Binary(userId.getBytes(StandardCharsets.UTF_8))); } String groupId = message.getGroupID(); if (groupId != null) { if (properties == null) { properties = new Properties(); } properties.setGroupId(groupId); } int groupSequence = message.getGroupSequence(); if (groupSequence > 0) { if (properties == null) { properties = new Properties(); } properties.setGroupSequence(UnsignedInteger.valueOf(groupSequence)); } final Map<String, Object> entries; try { entries = message.getProperties(); } catch (IOException e) { throw JMSExceptionSupport.create(e); } for (Map.Entry<String, Object> entry : entries.entrySet()) { String key = entry.getKey(); Object value = entry.getValue(); if (key.startsWith(JMS_AMQP_PREFIX)) { if (key.startsWith(NATIVE, JMS_AMQP_PREFIX_LENGTH)) { // skip transformer appended properties continue; } else if (key.startsWith(ORIGINAL_ENCODING, JMS_AMQP_PREFIX_LENGTH)) { // skip transformer appended properties continue; } else if (key.startsWith(MESSAGE_FORMAT, JMS_AMQP_PREFIX_LENGTH)) { messageFormat = (long) TypeConversionSupport.convert(entry.getValue(), Long.class); continue; } else if (key.startsWith(HEADER, JMS_AMQP_PREFIX_LENGTH)) { if (header == null) { header = new Header(); } continue; } else if (key.startsWith(PROPERTIES, JMS_AMQP_PREFIX_LENGTH)) { if (properties == null) { properties = new Properties(); } continue; } else if (key.startsWith(MESSAGE_ANNOTATION_PREFIX, JMS_AMQP_PREFIX_LENGTH)) { if (maMap == null) { maMap = new HashMap<>(); } String name = key.substring(JMS_AMQP_MESSAGE_ANNOTATION_PREFIX.length()); maMap.put(Symbol.valueOf(name), value); continue; } else if (key.startsWith(FIRST_ACQUIRER, JMS_AMQP_PREFIX_LENGTH)) { if (header == null) { header = new Header(); } header.setFirstAcquirer((boolean) TypeConversionSupport.convert(value, Boolean.class)); continue; } else if (key.startsWith(CONTENT_TYPE, JMS_AMQP_PREFIX_LENGTH)) { if (properties == null) { properties = new Properties(); } properties.setContentType(Symbol.getSymbol((String) TypeConversionSupport.convert(value, String.class))); continue; } else if (key.startsWith(CONTENT_ENCODING, JMS_AMQP_PREFIX_LENGTH)) { if (properties == null) { properties = new Properties(); } properties.setContentEncoding(Symbol.getSymbol((String) TypeConversionSupport.convert(value, String.class))); continue; } else if (key.startsWith(REPLYTO_GROUP_ID, JMS_AMQP_PREFIX_LENGTH)) { if (properties == null) { properties = new Properties(); } properties.setReplyToGroupId((String) TypeConversionSupport.convert(value, String.class)); continue; } else if (key.startsWith(DELIVERY_ANNOTATION_PREFIX, JMS_AMQP_PREFIX_LENGTH)) { if (daMap == null) { daMap = new HashMap<>(); } String name = key.substring(JMS_AMQP_DELIVERY_ANNOTATION_PREFIX.length()); daMap.put(Symbol.valueOf(name), value); continue; } else if (key.startsWith(FOOTER_PREFIX, JMS_AMQP_PREFIX_LENGTH)) { if (footerMap == null) { footerMap = new HashMap<>(); } String name = key.substring(JMS_AMQP_FOOTER_PREFIX.length()); footerMap.put(Symbol.valueOf(name), value); continue; } } else if (key.startsWith(AMQ_SCHEDULED_MESSAGE_PREFIX)) { // strip off the scheduled message properties continue; } // The property didn't map into any other slot so we store it in the Application Properties section of the message. if (apMap == null) { apMap = new HashMap<>(); } apMap.put(key, value); int messageType = message.getDataStructureType(); if (messageType == CommandTypes.ACTIVEMQ_MESSAGE) { // Type of command to recognize advisory message Object data = message.getDataStructure(); if (data != null) { apMap.put("ActiveMqDataStructureType", data.getClass().getSimpleName()); } } } final AmqpWritableBuffer buffer = new AmqpWritableBuffer(); encoder.setByteBuffer(buffer); if (header != null) { encoder.writeObject(header); } if (daMap != null) { encoder.writeObject(new DeliveryAnnotations(daMap)); } if (maMap != null) { encoder.writeObject(new MessageAnnotations(maMap)); } if (properties != null) { encoder.writeObject(properties); } if (apMap != null) { encoder.writeObject(new ApplicationProperties(apMap)); } if (body != null) { encoder.writeObject(body); } if (footerMap != null) { encoder.writeObject(new Footer(footerMap)); } return new EncodedMessage(messageFormat, buffer.getArray(), 0, buffer.getArrayLength()); }
@Test public void testConvertCompressedObjectMessageToAmqpMessageWithDataBody() throws Exception { ActiveMQObjectMessage outbound = createObjectMessage(TEST_OBJECT_VALUE, true); outbound.onSend(); outbound.storeContent(); JMSMappingOutboundTransformer transformer = new JMSMappingOutboundTransformer(); EncodedMessage encoded = transformer.transform(outbound); assertNotNull(encoded); Message amqp = encoded.decode(); assertNotNull(amqp.getBody()); assertTrue(amqp.getBody() instanceof Data); assertFalse(0 == ((Data) amqp.getBody()).getValue().getLength()); Object value = deserialize(((Data) amqp.getBody()).getValue().getArray()); assertNotNull(value); assertTrue(value instanceof UUID); }
public Set<String> makeReady(final Map<String, InternalTopicConfig> topics) { // we will do the validation / topic-creation in a loop, until we have confirmed all topics // have existed with the expected number of partitions, or some create topic returns fatal errors. log.debug("Starting to validate internal topics {} in partition assignor.", topics); long currentWallClockMs = time.milliseconds(); final long deadlineMs = currentWallClockMs + retryTimeoutMs; Set<String> topicsNotReady = new HashSet<>(topics.keySet()); final Set<String> newlyCreatedTopics = new HashSet<>(); while (!topicsNotReady.isEmpty()) { final Set<String> tempUnknownTopics = new HashSet<>(); topicsNotReady = validateTopics(topicsNotReady, topics, tempUnknownTopics); newlyCreatedTopics.addAll(topicsNotReady); if (!topicsNotReady.isEmpty()) { final Set<NewTopic> newTopics = new HashSet<>(); for (final String topicName : topicsNotReady) { if (tempUnknownTopics.contains(topicName)) { // for the tempUnknownTopics, don't create topic for them // we'll check again later if remaining retries > 0 continue; } final InternalTopicConfig internalTopicConfig = Objects.requireNonNull(topics.get(topicName)); final Map<String, String> topicConfig = internalTopicConfig.properties(defaultTopicConfigs, windowChangeLogAdditionalRetention); log.debug("Going to create topic {} with {} partitions and config {}.", internalTopicConfig.name(), internalTopicConfig.numberOfPartitions(), topicConfig); newTopics.add( new NewTopic( internalTopicConfig.name(), internalTopicConfig.numberOfPartitions(), Optional.of(replicationFactor)) .configs(topicConfig)); } // it's possible that although some topics are not ready yet because they // are temporarily not available, not that they do not exist; in this case // the new topics to create may be empty and hence we can skip here if (!newTopics.isEmpty()) { final CreateTopicsResult createTopicsResult = adminClient.createTopics(newTopics); for (final Map.Entry<String, KafkaFuture<Void>> createTopicResult : createTopicsResult.values().entrySet()) { final String topicName = createTopicResult.getKey(); try { createTopicResult.getValue().get(); topicsNotReady.remove(topicName); } catch (final InterruptedException fatalException) { // this should not happen; if it ever happens it indicates a bug Thread.currentThread().interrupt(); log.error(INTERRUPTED_ERROR_MESSAGE, fatalException); throw new IllegalStateException(INTERRUPTED_ERROR_MESSAGE, fatalException); } catch (final ExecutionException executionException) { final Throwable cause = executionException.getCause(); if (cause instanceof TopicExistsException) { // This topic didn't exist earlier or its leader not known before; just retain it for next round of validation. log.info( "Could not create topic {}. Topic is probably marked for deletion (number of partitions is unknown).\n" + "Will retry to create this topic in {} ms (to let broker finish async delete operation first).\n" + "Error message was: {}", topicName, retryBackOffMs, cause.toString()); } else { log.error("Unexpected error during topic creation for {}.\n" + "Error message was: {}", topicName, cause.toString()); if (cause instanceof UnsupportedVersionException) { final String errorMessage = cause.getMessage(); if (errorMessage != null && errorMessage.startsWith("Creating topics with default partitions/replication factor are only supported in CreateTopicRequest version 4+")) { throw new StreamsException(String.format( "Could not create topic %s, because brokers don't support configuration replication.factor=-1." + " You can change the replication.factor config or upgrade your brokers to version 2.4 or newer to avoid this error.", topicName) ); } } else if (cause instanceof TimeoutException) { log.error("Creating topic {} timed out.\n" + "Error message was: {}", topicName, cause.toString()); } else { throw new StreamsException( String.format("Could not create topic %s.", topicName), cause ); } } } } } } if (!topicsNotReady.isEmpty()) { currentWallClockMs = time.milliseconds(); if (currentWallClockMs >= deadlineMs) { final String timeoutError = String.format("Could not create topics within %d milliseconds. " + "This can happen if the Kafka cluster is temporarily not available.", retryTimeoutMs); log.error(timeoutError); throw new TimeoutException(timeoutError); } log.info( "Topics {} could not be made ready. Will retry in {} milliseconds. Remaining time in milliseconds: {}", topicsNotReady, retryBackOffMs, deadlineMs - currentWallClockMs ); Utils.sleep(retryBackOffMs); } } log.debug("Completed validating internal topics and created {}", newlyCreatedTopics); return newlyCreatedTopics; }
@Test public void shouldThrowInformativeExceptionForOlderBrokers() { final AdminClient admin = new MockAdminClient() { @Override public CreateTopicsResult createTopics(final Collection<NewTopic> newTopics, final CreateTopicsOptions options) { final CreatableTopic topicToBeCreated = new CreatableTopic(); topicToBeCreated.setAssignments(new CreatableReplicaAssignmentCollection()); topicToBeCreated.setNumPartitions((short) 1); // set unsupported replication factor for older brokers topicToBeCreated.setReplicationFactor((short) -1); final CreatableTopicCollection topicsToBeCreated = new CreatableTopicCollection(); topicsToBeCreated.add(topicToBeCreated); try { new CreateTopicsRequest.Builder( new CreateTopicsRequestData() .setTopics(topicsToBeCreated) .setTimeoutMs(0) .setValidateOnly(options.shouldValidateOnly())) .build((short) 3); // pass in old unsupported request version for old brokers throw new IllegalStateException("Building CreateTopicRequest should have thrown."); } catch (final UnsupportedVersionException expected) { final KafkaFutureImpl<TopicMetadataAndConfig> future = new KafkaFutureImpl<>(); future.completeExceptionally(expected); return new CreateTopicsResult(Collections.singletonMap(topic1, future)) { }; } } }; final StreamsConfig streamsConfig = new StreamsConfig(config); final InternalTopicManager topicManager = new InternalTopicManager(time, admin, streamsConfig); final InternalTopicConfig topicConfig = new RepartitionTopicConfig(topic1, Collections.emptyMap()); topicConfig.setNumberOfPartitions(1); final StreamsException exception = assertThrows( StreamsException.class, () -> topicManager.makeReady(Collections.singletonMap(topic1, topicConfig)) ); assertThat( exception.getMessage(), equalTo("Could not create topic " + topic1 + ", because brokers don't support configuration replication.factor=-1." + " You can change the replication.factor config or upgrade your brokers to version 2.4 or newer to avoid this error.")); }
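makeReady combines per-round validation with an overall deadline and a fixed back-off between rounds; a minimal sketch of that loop structure, using a hypothetical work callback in place of the topic validation:

```java
import java.util.function.BooleanSupplier;

final class RetryUntilDeadline {
    // Hypothetical helper mirroring the deadline/back-off skeleton of makeReady.
    static void run(BooleanSupplier allReady, long retryTimeoutMs, long retryBackOffMs)
            throws InterruptedException {
        final long deadlineMs = System.currentTimeMillis() + retryTimeoutMs;
        while (!allReady.getAsBoolean()) {
            if (System.currentTimeMillis() >= deadlineMs) {
                throw new IllegalStateException(
                        "Could not finish within " + retryTimeoutMs + " milliseconds.");
            }
            Thread.sleep(retryBackOffMs); // back off before the next validation round
        }
    }

    public static void main(String[] args) throws InterruptedException {
        run(() -> true, 1000, 50); // trivially ready on the first check
    }
}
```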
public synchronized Map<String, Object> getSubtaskOnWorkerProgress(String subtaskState, Executor executor, HttpClientConnectionManager connMgr, Map<String, String> selectedMinionWorkerEndpoints, Map<String, String> requestHeaders, int timeoutMs) throws JsonProcessingException { return getSubtaskOnWorkerProgress(subtaskState, new CompletionServiceHelper(executor, connMgr, HashBiMap.create(0)), selectedMinionWorkerEndpoints, requestHeaders, timeoutMs); }
@Test public void testGetSubtaskWithGivenStateProgress() throws IOException { CompletionServiceHelper httpHelper = mock(CompletionServiceHelper.class); CompletionServiceHelper.CompletionServiceResponse httpResp = new CompletionServiceHelper.CompletionServiceResponse(); String taskIdPrefix = "Task_SegmentGenerationAndPushTask_someone"; String workerIdPrefix = "worker"; String[] subtaskIds = new String[6]; String[] workerIds = new String[3]; Map<String, String> selectedMinionWorkerEndpoints = new HashMap<>(); for (int i = 0; i < 3; i++) { workerIds[i] = workerIdPrefix + i; String workerEndpoint = "http://" + workerIds[i] + ":9000"; selectedMinionWorkerEndpoints.put(workerIds[i], workerEndpoint); subtaskIds[2 * i] = taskIdPrefix + "_" + (2 * i); subtaskIds[2 * i + 1] = taskIdPrefix + "_" + (2 * i + 1); // Notice that for testing purpose, we map subtask names to empty strings. In reality, subtask names will be // mapped to jsonized org.apache.pinot.minion.event.MinionEventObserver httpResp._httpResponses.put( String.format("%s/tasks/subtask/state/progress?subTaskState=IN_PROGRESS", workerEndpoint), JsonUtils.objectToString(ImmutableMap.of(subtaskIds[2 * i], "", subtaskIds[2 * i + 1], ""))); } httpResp._failedResponseCount = 1; ArgumentCaptor<List<String>> workerEndpointCaptor = ArgumentCaptor.forClass(List.class); when(httpHelper.doMultiGetRequest(workerEndpointCaptor.capture(), any(), anyBoolean(), any(), anyInt())).thenReturn( httpResp); PinotHelixTaskResourceManager mgr = new PinotHelixTaskResourceManager(mock(PinotHelixResourceManager.class), mock(TaskDriver.class)); Map<String, Object> progress = mgr.getSubtaskOnWorkerProgress("IN_PROGRESS", httpHelper, selectedMinionWorkerEndpoints, Collections.emptyMap(), 1000); List<String> value = workerEndpointCaptor.getValue(); Set<String> expectedWorkerUrls = selectedMinionWorkerEndpoints.values().stream().map( workerEndpoint -> String.format("%s/tasks/subtask/state/progress?subTaskState=IN_PROGRESS", workerEndpoint)) .collect(Collectors.toSet()); assertEquals(new HashSet<>(value), expectedWorkerUrls); assertEquals(progress.size(), 3); for (int i = 0; i < 3; i++) { Object responseFromMinionWorker = progress.get(workerIds[i]); Map<String, Object> subtaskProgressMap = (Map<String, Object>) responseFromMinionWorker; assertEquals(subtaskProgressMap.size(), 2); assertTrue(subtaskProgressMap.containsKey(subtaskIds[2 * i])); assertTrue(subtaskProgressMap.containsKey(subtaskIds[2 * i + 1])); } }
public static MonthsWindows months(int number) { return new MonthsWindows(number, 1, DEFAULT_START_DATE, DateTimeZone.UTC); }
@Test public void testDefaultWindowMappingFnGlobal() { MonthsWindows windowFn = CalendarWindows.months(2); WindowMappingFn<?> mapping = windowFn.getDefaultWindowMappingFn(); thrown.expect(IllegalArgumentException.class); mapping.getSideInputWindow(GlobalWindow.INSTANCE); }
@Nullable @Override public String getMainClassFromJarPlugin() { Plugin mavenJarPlugin = project.getPlugin("org.apache.maven.plugins:maven-jar-plugin"); if (mavenJarPlugin != null) { return getChildValue( (Xpp3Dom) mavenJarPlugin.getConfiguration(), "archive", "manifest", "mainClass") .orElse(null); } return null; }
@Test public void testGetMainClassFromJar_missingConfiguration() { when(mockMavenProject.getPlugin("org.apache.maven.plugins:maven-jar-plugin")) .thenReturn(mockPlugin); assertThat(mavenProjectProperties.getMainClassFromJarPlugin()).isNull(); }
public <T> T create(Class<T> clazz) { return create(clazz, new Class<?>[]{}, new Object[]{}); }
@Test void testProxyHandlesErrors() { assertThatIllegalStateException().isThrownBy(()-> new UnitOfWorkAwareProxyFactory("default", sessionFactory) .create(BrokenAuthenticator.class) .authenticate("b812ae4")) .withMessage("Session cluster is down"); }
public Path getAtomicWorkPath() { return atomicWorkPath; }
@Test public void testSetWorkPath() { final DistCpOptions.Builder builder = new DistCpOptions.Builder( Collections.singletonList(new Path("hdfs://localhost:8020/source")), new Path("hdfs://localhost:8020/target/")); Assert.assertNull(builder.build().getAtomicWorkPath()); builder.withAtomicCommit(true); Assert.assertNull(builder.build().getAtomicWorkPath()); final Path workPath = new Path("hdfs://localhost:8020/work"); builder.withAtomicWorkPath(workPath); Assert.assertEquals(workPath, builder.build().getAtomicWorkPath()); }
@Override public double variance() { return variance; }
@Test public void testVariance() { System.out.println("variance"); WeibullDistribution instance = new WeibullDistribution(1.5, 1.0); instance.rand(); assertEquals(0.37569028, instance.variance(), 1E-7); }
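For reference, the expected value in this test follows from the standard Weibull variance formula; with shape k = 1.5 and scale λ = 1:

$$\operatorname{Var}(X) = \lambda^{2}\left[\Gamma\!\left(1+\tfrac{2}{k}\right) - \Gamma\!\left(1+\tfrac{1}{k}\right)^{2}\right] = \Gamma\!\left(\tfrac{7}{3}\right) - \Gamma\!\left(\tfrac{5}{3}\right)^{2} \approx 1.190640 - 0.902745^{2} \approx 0.3756903$$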
void print(List<Line> lines, AnsiEscapes... border) { int maxLength = lines.stream().map(Line::length).max(comparingInt(a -> a)) .orElseThrow(NoSuchElementException::new); StringBuilder out = new StringBuilder(); Format borderFormat = monochrome ? monochrome() : color(border); out.append(borderFormat.text("โ”Œ" + times('โ”€', maxLength + 2) + "โ”")).append("\n"); for (Line line : lines) { int rightPad = maxLength - line.length(); out.append(borderFormat.text("โ”‚")) .append(' '); for (Span span : line.spans) { Format format = monochrome ? monochrome() : color(span.escapes); out.append(format.text(span.text)); } out.append(times(' ', rightPad)) .append(' ') .append(borderFormat.text("โ”‚")) .append("\n"); } out.append(borderFormat.text("โ””" + times('โ”€', maxLength + 2) + "โ”˜")).append("\n"); this.out.print(out); }
@Test void printsMonochromeBanner() throws Exception { ByteArrayOutputStream bytes = new ByteArrayOutputStream(); Banner banner = new Banner(new PrintStream(bytes, false, StandardCharsets.UTF_8.name()), true); banner.print(asList( new Banner.Line("Bla"), new Banner.Line( new Banner.Span("Bla "), new Banner.Span("Bla", AnsiEscapes.BLUE), new Banner.Span(" "), new Banner.Span("Bla", AnsiEscapes.RED)), new Banner.Line("Bla Bla")), AnsiEscapes.CYAN); assertThat(bytes, bytes(equalTo("" + "โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”\n" + "โ”‚ Bla โ”‚\n" + "โ”‚ Bla Bla Bla โ”‚\n" + "โ”‚ Bla Bla โ”‚\n" + "โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜\n"))); }
public void updateCheckboxes( EnumSet<RepositoryFilePermission> permissionEnumSet ) { updateCheckboxes( false, permissionEnumSet ); }
@Test public void testUpdateCheckboxesNoPermissionsAppropriateFalse() { permissionsCheckboxHandler.updateCheckboxes( false, EnumSet.noneOf( RepositoryFilePermission.class ) ); verify( readCheckbox, times( 1 ) ).setChecked( false ); verify( writeCheckbox, times( 1 ) ).setChecked( false ); verify( deleteCheckbox, times( 1 ) ).setChecked( false ); verify( manageCheckbox, times( 1 ) ).setChecked( false ); verify( readCheckbox, times( 1 ) ).setDisabled( true ); verify( writeCheckbox, times( 1 ) ).setDisabled( true ); verify( deleteCheckbox, times( 1 ) ).setDisabled( true ); verify( manageCheckbox, times( 1 ) ).setDisabled( true ); verify( readCheckbox, never() ).setDisabled( false ); }
@Override public RecordCursor cursor() { return new JdbcRecordCursor(jdbcClient, session, split, columnHandles); }
@Test public void testIdempotentClose() { RecordSet recordSet = new JdbcRecordSet(jdbcClient, session, split, ImmutableList.of( columnHandles.get("value"), columnHandles.get("value"), columnHandles.get("text"))); RecordCursor cursor = recordSet.cursor(); cursor.close(); cursor.close(); }
@VisibleForTesting static File doUnpackNar(final File nar, final File baseWorkingDirectory, Runnable extractCallback) throws IOException { File parentDirectory = new File(baseWorkingDirectory, nar.getName() + "-unpacked"); if (!parentDirectory.exists()) { if (parentDirectory.mkdirs()) { log.info("Created directory {}", parentDirectory); } else if (!parentDirectory.exists()) { throw new IOException("Cannot create " + parentDirectory); } } String md5Sum = Base64.getUrlEncoder().withoutPadding().encodeToString(calculateMd5sum(nar)); // ensure that one process can extract the files File lockFile = new File(parentDirectory, "." + md5Sum + ".lock"); // prevent OverlappingFileLockException by ensuring that one thread tries to create a lock in this JVM Object localLock = CURRENT_JVM_FILE_LOCKS.computeIfAbsent(lockFile.getAbsolutePath(), key -> new Object()); synchronized (localLock) { // create file lock that ensures that other processes // using the same lock file don't execute concurrently try (FileChannel channel = new RandomAccessFile(lockFile, "rw").getChannel(); FileLock lock = channel.lock()) { File narWorkingDirectory = new File(parentDirectory, md5Sum); if (narWorkingDirectory.mkdir()) { try { log.info("Extracting {} to {}", nar, narWorkingDirectory); if (extractCallback != null) { extractCallback.run(); } unpack(nar, narWorkingDirectory); } catch (IOException e) { log.error("There was a problem extracting the nar file. Deleting {} to clean up state.", narWorkingDirectory, e); FileUtils.deleteFile(narWorkingDirectory, true); throw e; } } return narWorkingDirectory; } } }
@Test void shouldExtractFilesOnceInSameProcess() throws InterruptedException { int threads = 20; CountDownLatch countDownLatch = new CountDownLatch(threads); AtomicInteger exceptionCounter = new AtomicInteger(); AtomicInteger extractCounter = new AtomicInteger(); for (int i = 0; i < threads; i++) { new Thread(() -> { try { NarUnpacker.doUnpackNar(sampleZipFile, extractDirectory, extractCounter::incrementAndGet); } catch (Exception e) { log.error("Unpacking failed", e); exceptionCounter.incrementAndGet(); } finally { countDownLatch.countDown(); } }).start(); } assertTrue(countDownLatch.await(30, TimeUnit.SECONDS)); assertEquals(exceptionCounter.get(), 0); assertEquals(extractCounter.get(), 1); }
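The core of doUnpackNar is a two-level lock: an in-JVM monitor (CURRENT_JVM_FILE_LOCKS) plus an OS-level file lock for cross-process exclusion. A minimal sketch of the file-lock idiom, assuming the caller already holds the in-JVM monitor (locking the same file twice in one JVM would throw OverlappingFileLockException):

```java
import java.io.File;
import java.io.RandomAccessFile;
import java.nio.channels.FileChannel;
import java.nio.channels.FileLock;

final class ProcessLockSketch {
    // Runs criticalSection while holding an exclusive cross-process lock on lockFile.
    static void withProcessLock(File lockFile, Runnable criticalSection) throws Exception {
        try (FileChannel channel = new RandomAccessFile(lockFile, "rw").getChannel();
             FileLock lock = channel.lock()) { // blocks until no other process holds it
            criticalSection.run();
        } // lock and channel released here, even on exceptions
    }

    public static void main(String[] args) throws Exception {
        withProcessLock(File.createTempFile("demo", ".lock"),
                () -> System.out.println("exclusive section"));
    }
}
```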
public ClientSession toClientSession() { return new ClientSession( parseServer(server), user, source, Optional.empty(), parseClientTags(clientTags), clientInfo, catalog, schema, TimeZone.getDefault().getID(), Locale.getDefault(), toResourceEstimates(resourceEstimates), toProperties(sessionProperties), emptyMap(), emptyMap(), toExtraCredentials(extraCredentials), null, clientRequestTimeout, disableCompression, emptyMap(), emptyMap(), validateNextUriSource); }
@Test public void testDefault() { ClientSession session = new ClientOptions().toClientSession(); assertEquals(session.getServer().toString(), "http://localhost:8080"); assertEquals(session.getSource(), "presto-cli"); }
public Span nextSpan(TraceContextOrSamplingFlags extracted) { if (extracted == null) throw new NullPointerException("extracted == null"); TraceContext context = extracted.context(); if (context != null) return newChild(context); TraceIdContext traceIdContext = extracted.traceIdContext(); if (traceIdContext != null) { return _toSpan(null, decorateContext( InternalPropagation.instance.flags(extracted.traceIdContext()), traceIdContext.traceIdHigh(), traceIdContext.traceId(), 0L, 0L, 0L, extracted.extra() )); } SamplingFlags samplingFlags = extracted.samplingFlags(); List<Object> extra = extracted.extra(); TraceContext parent = currentTraceContext.get(); int flags; long traceIdHigh = 0L, traceId = 0L, localRootId = 0L, spanId = 0L; if (parent != null) { // At this point, we didn't extract trace IDs, but do have a trace in progress. Since typical // trace sampling is up front, we retain the decision from the parent. flags = InternalPropagation.instance.flags(parent); traceIdHigh = parent.traceIdHigh(); traceId = parent.traceId(); localRootId = parent.localRootId(); spanId = parent.spanId(); extra = concat(extra, parent.extra()); } else { flags = InternalPropagation.instance.flags(samplingFlags); } return _toSpan(parent, decorateContext(flags, traceIdHigh, traceId, localRootId, spanId, 0L, extra)); }
@Test void localRootId_nextSpan_flags_empty() { TraceContextOrSamplingFlags flags = TraceContextOrSamplingFlags.EMPTY; localRootId(flags, flags, ctx -> tracer.nextSpan(ctx)); }
@VisibleForTesting void validateExperienceOutRange(List<MemberLevelDO> list, Long id, Integer level, Integer experience) { for (MemberLevelDO levelDO : list) { if (levelDO.getId().equals(id)) { continue; } if (levelDO.getLevel() < level) { // experience must be greater than that of the previous level if (experience <= levelDO.getExperience()) { throw exception(LEVEL_EXPERIENCE_MIN, levelDO.getName(), levelDO.getExperience()); } } else if (levelDO.getLevel() > level) { // experience must be less than that of the next level if (experience >= levelDO.getExperience()) { throw exception(LEVEL_EXPERIENCE_MAX, levelDO.getName(), levelDO.getExperience()); } } } }
@Test public void testUpdateLevel_experienceOutRange() { // prepare parameters int level = 10; int experience = 10; Long id = randomLongId(); String name = randomString(); // mock data memberlevelMapper.insert(randomLevelDO(o -> { o.setLevel(level); o.setExperience(experience); o.setName(name); })); List<MemberLevelDO> list = memberlevelMapper.selectList(); // invoke and verify the exception assertServiceException(() -> levelService.validateExperienceOutRange(list, id, level + 1, experience - 1), LEVEL_EXPERIENCE_MIN, name, level); // invoke and verify the exception assertServiceException(() -> levelService.validateExperienceOutRange(list, id, level - 1, experience + 1), LEVEL_EXPERIENCE_MAX, name, level); }
public void check(@NotNull Set<Long> partitionIds, long currentTimeMs) throws CommitRateExceededException, CommitFailedException { Preconditions.checkNotNull(partitionIds, "partitionIds is null"); // Does not limit the commit rate of compaction transactions if (transactionState.getSourceType() == TransactionState.LoadJobSourceType.LAKE_COMPACTION) { return; } updateWriteDuration(transactionState); setAllowCommitTimeOnce(partitionIds); long txnId = transactionState.getTransactionId(); long abortTime = transactionState.getPrepareTime() + transactionState.getTimeoutMs(); if (transactionState.getAllowCommitTimeMs() >= abortTime) { throw new CommitFailedException("Txn " + txnId + " timed out due to ingestion slowdown", txnId); } if (transactionState.getAllowCommitTimeMs() > currentTimeMs) { LOG.info("delay commit of txn {} for {}ms, write took {}ms", transactionState.getTransactionId(), transactionState.getAllowCommitTimeMs() - currentTimeMs, transactionState.getWriteDurationMs()); throw new CommitRateExceededException(txnId, transactionState.getAllowCommitTimeMs()); } long upperBound = compactionScoreUpperBound(); if (upperBound > 0 && anyCompactionScoreExceedsUpperBound(partitionIds, upperBound)) { throw new CommitRateExceededException(txnId, currentTimeMs + 1000/* delay 1s */); } }
@Test public void testPartitionHasNoStatistics() throws CommitRateExceededException { long partitionId = 54321; Set<Long> partitions = new HashSet<>(Collections.singletonList(partitionId)); long currentTimeMs = System.currentTimeMillis(); transactionState.setPrepareTime(currentTimeMs - 100); transactionState.setWriteEndTimeMs(currentTimeMs); Assert.assertTrue(ratio > 0.01); Assert.assertTrue(threshold > 0); limiter.check(partitions, currentTimeMs); Assert.assertEquals(transactionState.getWriteEndTimeMs(), transactionState.getAllowCommitTimeMs()); }
@Override public void startIt() { // do nothing }
@Test public void startIt_does_nothing() { new NoopDatabaseMigrationImpl().startIt(); }
void handleFinish(Resp response, Span span) { if (response == null) throw new NullPointerException("response == null"); if (span.isNoop()) return; if (response.error() != null) { span.error(response.error()); // Ensures MutableSpan.error() for SpanHandler } try { parseResponse(response, span); } catch (Throwable t) { propagateIfFatal(t); Platform.get().log("error parsing response {0}", response, t); } finally { long finishTimestamp = response.finishTimestamp(); if (finishTimestamp == 0L) { span.finish(); } else { span.finish(finishTimestamp); } } }
@Test void handleFinish_nothingOnNoop() { when(span.isNoop()).thenReturn(true); handler.handleFinish(response, span); verify(span, never()).finish(); }
@Command(description = "Starts a new Hazelcast member", mixinStandardHelpOptions = true, sortOptions = false) void start( @Option(names = {"-c", "--config"}, paramLabel = "<file>", description = "Use <file> for Hazelcast " + "configuration. " + "Accepted formats are XML and YAML. ") String configFilePath, @Option(names = {"-p", "--port"}, paramLabel = "<port>", description = "Bind to the specified <port>. Please note that if the specified port is in use, " + "it will auto-increment to the first free port. (default: 5701)") String port, @Option(names = {"-i", "--interface"}, paramLabel = "<interface>", description = "Bind to the specified <interface>.") String hzInterface) { if (!isNullOrEmpty(configFilePath)) { System.setProperty("hazelcast.config", configFilePath); } if (!isNullOrEmpty(port)) { System.setProperty("hz.network.port.port", port); } if (!isNullOrEmpty(hzInterface)) { System.setProperty("hz.network.interfaces.enabled", "true"); System.setProperty("hz.socket.bind.any", "false"); System.setProperty("hz.network.interfaces.interfaces.interface1", hzInterface); } start.run(); }
@Test void test_start() { // when hazelcastServerCommandLine.start(null, null, null); // then verify(start, times(1)).run(); }
public static Applications mergeApplications(Applications first, Applications second) { Set<String> firstNames = selectApplicationNames(first); Set<String> secondNames = selectApplicationNames(second); Set<String> allNames = new HashSet<>(firstNames); allNames.addAll(secondNames); Applications merged = new Applications(); for (String appName : allNames) { if (firstNames.contains(appName)) { if (secondNames.contains(appName)) { merged.addApplication(mergeApplication(first.getRegisteredApplications(appName), second.getRegisteredApplications(appName))); } else { merged.addApplication(copyApplication(first.getRegisteredApplications(appName))); } } else { merged.addApplication(copyApplication(second.getRegisteredApplications(appName))); } } return updateMeta(merged); }
@Test public void testMergeApplicationsIfNotNullAndHasAppNameReturnApplications() { Application application = createSingleInstanceApp("foo", "foo", InstanceInfo.ActionType.ADDED); Applications applications = createApplications(application); Assert.assertEquals(1, EurekaEntityFunctions.mergeApplications( applications, applications).size()); }
protected static String anonymizeLog(String log) { return log.replaceAll( "(/((home)|(Users))/[^/\n]*)|(\\\\Users\\\\[^\\\\\n]*)", "/ANONYMIZED_HOME_DIR"); // NOI18N }
@Test public void testAnonymizeWindowsLog() { String log = "" + " Java Home = C:\\Program Files\\Java\\jre1.8.0_311\n" + " System Locale; Encoding = en_US (gephi); Cp1254\n" + " Home Directory = C:\\Users\\RickAstley\n" + " Current Directory = C:\\Program Files\\Gephi-0.9.2\n" + " User Directory = C:\\Users\\RickAstley\\AppData\\Roaming\\.gephi\\0.9.2\\dev\n" + " Cache Directory = C:\\Users\\RickAstley\\AppData\\Roaming\\.gephi\\0.9.2\\dev\\var\\cache\n" + " Installation = C:\\Program Files\\Gephi-0.9.2\\platform"; String expected = "" + " Java Home = C:\\Program Files\\Java\\jre1.8.0_311\n" + " System Locale; Encoding = en_US (gephi); Cp1254\n" + " Home Directory = C:/ANONYMIZED_HOME_DIR\n" + " Current Directory = C:\\Program Files\\Gephi-0.9.2\n" + " User Directory = C:/ANONYMIZED_HOME_DIR\\AppData\\Roaming\\.gephi\\0.9.2\\dev\n" + " Cache Directory = C:/ANONYMIZED_HOME_DIR\\AppData\\Roaming\\.gephi\\0.9.2\\dev\\var\\cache\n" + " Installation = C:\\Program Files\\Gephi-0.9.2\\platform"; String anonymized = ReportController.anonymizeLog(log); Assert.assertEquals(expected, anonymized); }
@Override public ICardinality merge(ICardinality... estimators) throws LinearCountingMergeException { if (estimators == null) { return new LinearCounting(map); } LinearCounting[] lcs = Arrays.copyOf(estimators, estimators.length + 1, LinearCounting[].class); lcs[lcs.length - 1] = this; return LinearCounting.mergeEstimators(lcs); }
@Test public void testMerge() throws LinearCountingMergeException { int numToMerge = 5; int size = 65536; int cardinality = 1000; LinearCounting[] lcs = new LinearCounting[numToMerge]; LinearCounting baseline = new LinearCounting(size); for (int i = 0; i < numToMerge; i++) { lcs[i] = new LinearCounting(size); for (int j = 0; j < cardinality; j++) { double val = Math.random(); lcs[i].offer(val); baseline.offer(val); } } int expectedCardinality = numToMerge * cardinality; long mergedEstimate = LinearCounting.mergeEstimators(lcs).cardinality(); double error = Math.abs(mergedEstimate - expectedCardinality) / (double) expectedCardinality; assertEquals(0.01, error, 0.01); LinearCounting lc = lcs[0]; lcs = Arrays.asList(lcs).subList(1, lcs.length).toArray(new LinearCounting[0]); mergedEstimate = lc.merge(lcs).cardinality(); error = Math.abs(mergedEstimate - expectedCardinality) / (double) expectedCardinality; assertEquals(0.01, error, 0.01); long baselineEstimate = baseline.cardinality(); assertEquals(baselineEstimate, mergedEstimate); }
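For context on why the merged estimate matches the baseline exactly: linear counting derives cardinality from the fraction of still-empty bits in its bitmap, via the standard estimator (not shown in this excerpt):

$$\hat{n} = -m \ln V_n, \qquad V_n = \frac{\text{number of empty bits}}{m}$$

Merging equal-sized estimators amounts to OR-ing their bitmaps, and the OR of the per-stream bitmaps is exactly the bitmap the baseline built from the union of all offered values, which is what the final assertEquals verifies.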
public final BarcodeParameters getParams() { return params; }
@Test final void testConstructorWithImageType() throws IOException { try (BarcodeDataFormat barcodeDataFormat = new BarcodeDataFormat(BarcodeImageType.JPG)) { this.checkParams(BarcodeImageType.JPG, BarcodeParameters.WIDTH, BarcodeParameters.HEIGHT, BarcodeParameters.FORMAT, barcodeDataFormat.getParams()); } }
@Override public V get() { return result; }
@Test public void testGet_WithTimeout() throws Exception { try { promise.get(100); failBecauseExceptionWasNotThrown(OMSRuntimeException.class); } catch (OMSRuntimeException e) { assertThat(e).hasMessageContaining("Get request result is timeout or interrupted"); } }
@Override public Optional<Entity> exportEntity(EntityDescriptor entityDescriptor, EntityDescriptorIds entityDescriptorIds) { final ModelId modelId = entityDescriptor.id(); return cacheService.get(modelId.id()).map(cacheDto -> exportNativeEntity(cacheDto, entityDescriptorIds)); }
@Test @MongoDBFixtures("LookupCacheFacadeTest.json") public void exportEntity() { final EntityDescriptor descriptor = EntityDescriptor.create("5adf24b24b900a0fdb4e52dd", ModelTypes.LOOKUP_CACHE_V1); final EntityDescriptorIds entityDescriptorIds = EntityDescriptorIds.of(descriptor); final Entity entity = facade.exportEntity(descriptor, entityDescriptorIds).orElseThrow(AssertionError::new); assertThat(entity).isInstanceOf(EntityV1.class); assertThat(entity.id()).isEqualTo(ModelId.of(entityDescriptorIds.get(descriptor).orElse(null))); assertThat(entity.type()).isEqualTo(ModelTypes.LOOKUP_CACHE_V1); final EntityV1 entityV1 = (EntityV1) entity; final LookupCacheEntity lookupCacheEntity = objectMapper.convertValue(entityV1.data(), LookupCacheEntity.class); assertThat(lookupCacheEntity.name()).isEqualTo(ValueReference.of("no-op-cache")); assertThat(lookupCacheEntity.title()).isEqualTo(ValueReference.of("No-op cache")); assertThat(lookupCacheEntity.description()).isEqualTo(ValueReference.of("No-op cache")); assertThat(lookupCacheEntity.configuration()).containsEntry("type", ValueReference.of("none")); }
@SuppressWarnings("unchecked") @Override public void configure(final Map<String, ?> configs, final boolean isKey) { //check to see if the window size config is set and the window size is already set from the constructor final Long configWindowSize; if (configs.get(StreamsConfig.WINDOW_SIZE_MS_CONFIG) instanceof String) { configWindowSize = Long.parseLong((String) configs.get(StreamsConfig.WINDOW_SIZE_MS_CONFIG)); } else { configWindowSize = (Long) configs.get(StreamsConfig.WINDOW_SIZE_MS_CONFIG); } if (windowSize != null && configWindowSize != null) { throw new IllegalArgumentException("Window size should not be set in both the time windowed deserializer constructor and the window.size.ms config"); } else if (windowSize == null && configWindowSize == null) { throw new IllegalArgumentException("Window size needs to be set either through the time windowed deserializer " + "constructor or the window.size.ms config but not both"); } else { windowSize = windowSize == null ? configWindowSize : windowSize; } final String windowedInnerClassSerdeConfig = (String) configs.get(StreamsConfig.WINDOWED_INNER_CLASS_SERDE); Serde<T> windowInnerClassSerde = null; if (windowedInnerClassSerdeConfig != null) { try { windowInnerClassSerde = Utils.newInstance(windowedInnerClassSerdeConfig, Serde.class); } catch (final ClassNotFoundException e) { throw new ConfigException(StreamsConfig.WINDOWED_INNER_CLASS_SERDE, windowedInnerClassSerdeConfig, "Serde class " + windowedInnerClassSerdeConfig + " could not be found."); } } if (inner != null && windowedInnerClassSerdeConfig != null) { if (!inner.getClass().getName().equals(windowInnerClassSerde.deserializer().getClass().getName())) { throw new IllegalArgumentException("Inner class deserializer set using constructor " + "(" + inner.getClass().getName() + ")" + " is different from the one set in windowed.inner.class.serde config " + "(" + windowInnerClassSerde.deserializer().getClass().getName() + ")."); } } else if (inner == null && windowedInnerClassSerdeConfig == null) { throw new IllegalArgumentException("Inner class deserializer should be set either via constructor " + "or via the windowed.inner.class.serde config"); } else if (inner == null) inner = windowInnerClassSerde.deserializer(); }
@Test public void shouldThrowErrorIfWindowSizeIsNotSet() { props.put(StreamsConfig.WINDOWED_INNER_CLASS_SERDE, Serdes.ByteArraySerde.class.getName()); final TimeWindowedDeserializer<?> deserializer = new TimeWindowedDeserializer<>(); assertThrows(IllegalArgumentException.class, () -> deserializer.configure(props, false)); }
@Override public long clear() { return get(clearAsync()); }
@Test public void testClear() { RJsonBucket<TestType> al = redisson.getJsonBucket("test", new JacksonCodec<>(TestType.class)); TestType t = new TestType(); t.setName("name1"); al.set(t); assertThat(al.clear()).isEqualTo(1); TestType n = al.get(new JacksonCodec<>(new TypeReference<TestType>() {})); assertThat(n.getName()).isNull(); TestType t1 = new TestType(); t1.setName("name1"); NestedType nt = new NestedType(); nt.setValues(Arrays.asList("t1", "t2", "t4", "t5", "t6")); t1.setType(nt); al.set(t1); assertThat(al.clear("type.values")).isEqualTo(1); TestType n2 = al.get(new JacksonCodec<>(new TypeReference<TestType>() {})); n2.setName("name2"); assertThat(n2.getName()).isEqualTo("name2"); assertThat(n2.getType().getValues()).isEmpty(); }
@Override public void sync() throws IOException { lock(); try { fileStream.flush(); openNewPartIfNecessary(userDefinedMinPartSize); Committer committer = upload.snapshotAndGetCommitter(); committer.commitAfterRecovery(); closeForCommit(); } finally { unlock(); } }
@Test(expected = Exception.class) public void testSync() throws IOException { streamUnderTest.write(bytesOf("hello")); streamUnderTest.write(bytesOf(" world")); streamUnderTest.sync(); assertThat(multipartUploadUnderTest, hasContent(bytesOf("hello world"))); streamUnderTest.write(randomBuffer(RefCountedBufferingFileStream.BUFFER_SIZE + 1)); assertThat(multipartUploadUnderTest, hasContent(bytesOf("hello world"))); }
@Override public EntityExcerpt createExcerpt(RuleDao ruleDao) { return EntityExcerpt.builder() .id(ModelId.of(ruleDao.id())) .type(ModelTypes.PIPELINE_RULE_V1) .title(ruleDao.title()) .build(); }
@Test public void createExcerpt() { final RuleDao pipelineRule = RuleDao.builder() .id("id") .title("title") .description("description") .source("rule \"debug\"\nwhen\n true\nthen\n debug($message.message);\nend") .build(); final EntityExcerpt excerpt = facade.createExcerpt(pipelineRule); assertThat(excerpt.id()).isEqualTo(ModelId.of("id")); assertThat(excerpt.type()).isEqualTo(ModelTypes.PIPELINE_RULE_V1); assertThat(excerpt.title()).isEqualTo("title"); }
public static Status unblock( final UnsafeBuffer logMetaDataBuffer, final UnsafeBuffer termBuffer, final int blockedOffset, final int tailOffset, final int termId) { Status status = NO_ACTION; int frameLength = frameLengthVolatile(termBuffer, blockedOffset); if (frameLength < 0) { resetHeader(logMetaDataBuffer, termBuffer, blockedOffset, termId, -frameLength); status = UNBLOCKED; } else if (0 == frameLength) { int currentOffset = blockedOffset + FRAME_ALIGNMENT; while (currentOffset < tailOffset) { frameLength = frameLengthVolatile(termBuffer, currentOffset); if (frameLength != 0) { if (scanBackToConfirmZeroed(termBuffer, currentOffset, blockedOffset)) { final int length = currentOffset - blockedOffset; resetHeader(logMetaDataBuffer, termBuffer, blockedOffset, termId, length); status = UNBLOCKED; } break; } currentOffset += FRAME_ALIGNMENT; } if (currentOffset == termBuffer.capacity()) { if (0 == frameLengthVolatile(termBuffer, blockedOffset)) { final int length = currentOffset - blockedOffset; resetHeader(logMetaDataBuffer, termBuffer, blockedOffset, termId, length); status = UNBLOCKED_TO_END; } } } return status; }
@Test void shouldTakeNoActionToEndOfPartitionIfMessageCompleteAfterScan() { final int messageLength = HEADER_LENGTH * 4; final int termOffset = TERM_BUFFER_CAPACITY - messageLength; final int tailOffset = TERM_BUFFER_CAPACITY; when(mockTermBuffer.getIntVolatile(termOffset)) .thenReturn(0) .thenReturn(messageLength); assertEquals( NO_ACTION, TermUnblocker.unblock(mockLogMetaDataBuffer, mockTermBuffer, termOffset, tailOffset, TERM_ID)); }
@Override public CurrentStateInformation trigger(MigrationStep step, Map<String, Object> args) { context.setCurrentStep(step); if (Objects.nonNull(args) && !args.isEmpty()) { context.addActionArguments(step, args); } String errorMessage = null; try { stateMachine.fire(step); } catch (Exception e) { errorMessage = Objects.nonNull(e.getMessage()) ? e.getMessage() : e.toString(); } persistenceService.saveStateMachineContext(context); return new CurrentStateInformation(getState(), nextSteps(), errorMessage, context.getResponse()); }
@Test public void smSetsErrorOnExceptionInAction() { String errorMessage = "Error 40: Insufficient Coffee."; StateMachine<MigrationState, MigrationStep> stateMachine = testStateMachineWithAction((context) -> { throw new RuntimeException(errorMessage); }); migrationStateMachine = new MigrationStateMachineImpl(stateMachine, persistenceService, context); CurrentStateInformation context = migrationStateMachine.trigger(MIGRATION_STEP, Map.of()); assertThat(context.hasErrors()).isTrue(); assertThat(context.errorMessage()).isEqualTo(errorMessage); }
public static String parseNormalTopic(String topic, String cid) { if (topic.startsWith(MixAll.RETRY_GROUP_TOPIC_PREFIX)) { if (topic.startsWith(MixAll.RETRY_GROUP_TOPIC_PREFIX + cid + POP_RETRY_SEPARATOR_V2)) { return topic.substring((MixAll.RETRY_GROUP_TOPIC_PREFIX + cid + POP_RETRY_SEPARATOR_V2).length()); } return topic.substring((MixAll.RETRY_GROUP_TOPIC_PREFIX + cid + POP_RETRY_SEPARATOR_V1).length()); } else { return topic; } }
@Test public void testParseNormalTopic() { String popRetryTopic = KeyBuilder.buildPopRetryTopicV2(topic, group); assertThat(KeyBuilder.parseNormalTopic(popRetryTopic, group)).isEqualTo(topic); String popRetryTopicV1 = KeyBuilder.buildPopRetryTopicV1(topic, group); assertThat(KeyBuilder.parseNormalTopic(popRetryTopicV1, group)).isEqualTo(topic); popRetryTopic = KeyBuilder.buildPopRetryTopicV2(topic, group); assertThat(KeyBuilder.parseNormalTopic(popRetryTopic)).isEqualTo(topic); }
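A sketch of the naming convention being parsed, assuming RETRY_GROUP_TOPIC_PREFIX is "%RETRY%" and the V1/V2 pop-retry separators are "_" and "+" respectively (none of these constants appear in the excerpt):

```java
public class PopRetryTopicSketch {
    public static void main(String[] args) {
        // Assumed constant values; see the hedge above.
        String prefix = "%RETRY%", group = "myGroup", topic = "myTopic";
        String v1 = prefix + group + "_" + topic; // "%RETRY%myGroup_myTopic"
        String v2 = prefix + group + "+" + topic; // "%RETRY%myGroup+myTopic"
        // parseNormalTopic strips prefix + group + separator to recover the topic:
        System.out.println(v2.substring((prefix + group + "+").length())); // myTopic
        System.out.println(v1.substring((prefix + group + "_").length())); // myTopic
    }
}
```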
@Override public int size() { return 0; }
@Test public void testIntSpliteratorForEachRemaining() { Set<Integer> results = new HashSet<>(); es.intSpliterator().forEachRemaining((IntConsumer) results::add); assertEquals(0, results.size()); }
static Object actualCoerceValue(DMNType requiredType, Object valueToCoerce) { Object toReturn = valueToCoerce; if (!requiredType.isCollection() && valueToCoerce instanceof Collection && ((Collection) valueToCoerce).size() == 1) { // spec defines that "a=[a]", i.e., singleton collections should be treated as the single element // and vice-versa return ((Collection) valueToCoerce).toArray()[0]; } if (valueToCoerce instanceof LocalDate localDate && requiredType instanceof SimpleTypeImpl simpleType && simpleType.getFeelType() == BuiltInType.DATE_TIME) { return DateTimeEvalHelper.coerceDateTime(localDate); } return toReturn; }
@Test void actualCoerceValueCollectionToArray() { Object item = "TESTED_OBJECT"; Object value = Collections.singleton(item); DMNType requiredType = new SimpleTypeImpl("http://www.omg.org/spec/DMN/20180521/FEEL/", "string", null, false, null, null, null, BuiltInType.STRING); Object retrieved = CoerceUtil.actualCoerceValue(requiredType, value); assertNotNull(retrieved); assertEquals(item, retrieved); }
public final void setStrictness(Strictness strictness) { this.strictness = Objects.requireNonNull(strictness); }
@Test public void testNonFiniteFloatsWhenStrict() throws IOException { StringWriter stringWriter = new StringWriter(); JsonWriter jsonWriter = new JsonWriter(stringWriter); jsonWriter.setStrictness(Strictness.STRICT); assertNonFiniteFloatsExceptions(jsonWriter); }
public static <T> String render(ClassPluginDocumentation<T> classPluginDocumentation) throws IOException { return render("task", JacksonMapper.toMap(classPluginDocumentation)); }
@SuppressWarnings({"rawtypes", "unchecked"}) @Test void defaultBool() throws IOException { PluginScanner pluginScanner = new PluginScanner(ClassPluginDocumentationTest.class.getClassLoader()); RegisteredPlugin scan = pluginScanner.scan(); Class bash = scan.findClass(Subflow.class.getName()).orElseThrow(); ClassPluginDocumentation<? extends Task> doc = ClassPluginDocumentation.of(jsonSchemaGenerator, scan, bash, Task.class); String render = DocumentationGenerator.render(doc); assertThat(render, containsString("* **Default:** `false`")); }
@Override public void unsubscribeService(Service service, Subscriber subscriber, String clientId) { Service singleton = ServiceManager.getInstance().getSingletonIfExist(service).orElse(service); Client client = clientManager.getClient(clientId); checkClientIsLegal(client, clientId); client.removeServiceSubscriber(singleton); client.setLastUpdatedTime(); NotifyCenter.publishEvent(new ClientOperationEvent.ClientUnsubscribeServiceEvent(singleton, clientId)); }
@Test void testUnSubscribeWhenClientPersistent() { assertThrows(NacosRuntimeException.class, () -> { Client persistentClient = new IpPortBasedClient(ipPortBasedClientId, false); when(clientManager.getClient(anyString())).thenReturn(persistentClient); // Expected exception ephemeralClientOperationServiceImpl.unsubscribeService(service, subscriber, ipPortBasedClientId); }); }
private PortDescription toPortDescription(HierarchicalConfiguration component) { try { return toPortDescriptionInternal(component); } catch (Exception e) { log.error("Unexpected exception parsing component {} on {}", component.getString("name"), data().deviceId(), e); return null; } }
@Test public void testToPortDescription() throws ConfigurationException, IOException { // CHECKSTYLE:OFF String input = "<data>\n" + " <interfaces xmlns=\"http://openconfig.net/yang/interfaces\">\n" + " <interface>\n" + " <name>CARRIERCTP.1-L1-1</name>\n" + " <config>\n" + " <type xmlns:ianaift=\"urn:ietf:params:xml:ns:yang:iana-if-type\">ianaift:opticalTransport</type>\n" + " <name>CARRIERCTP.1-L1-1</name>\n" + " <description/>\n" + " <enabled>true</enabled>\n" + " </config>\n" + " </interface>\n" + " <interface>\n" + " <name>CARRIERCTP.1-L1-2</name>\n" + " <config>\n" + " <type xmlns:ianaift=\"urn:ietf:params:xml:ns:yang:iana-if-type\">ianaift:opticalTransport</type>\n" + " <name>CARRIERCTP.1-L1-2</name>\n" + " <description/>\n" + " <enabled>true</enabled>\n" + " </config>\n" + " </interface>\n" + " <interface>\n" + " <name>CARRIERCTP.1-L1-3</name>\n" + " <config>\n" + " <type xmlns:ianaift=\"urn:ietf:params:xml:ns:yang:iana-if-type\">ianaift:opticalTransport</type>\n" + " <name>CARRIERCTP.1-L1-3</name>\n" + " <description/>\n" + " <enabled>true</enabled>\n" + " </config>\n" + " </interface>\n" + " <interface>\n" + " <name>CARRIERCTP.1-L1-4</name>\n" + " <config>\n" + " <type xmlns:ianaift=\"urn:ietf:params:xml:ns:yang:iana-if-type\">ianaift:opticalTransport</type>\n" + " <name>CARRIERCTP.1-L1-4</name>\n" + " <description/>\n" + " <enabled>true</enabled>\n" + " </config>\n" + " </interface>\n" + " <interface>\n" + " <name>CARRIERCTP.1-L1-5</name>\n" + " <config>\n" + " <type xmlns:ianaift=\"urn:ietf:params:xml:ns:yang:iana-if-type\">ianaift:opticalTransport</type>\n" + " <name>CARRIERCTP.1-L1-5</name>\n" + " <description/>\n" + " <enabled>true</enabled>\n" + " </config>\n" + " </interface>\n" + " <interface>\n" + " <name>CARRIERCTP.1-L1-6</name>\n" + " <config>\n" + " <type xmlns:ianaift=\"urn:ietf:params:xml:ns:yang:iana-if-type\">ianaift:opticalTransport</type>\n" + " <name>CARRIERCTP.1-L1-6</name>\n" + " <description/>\n" + " <enabled>true</enabled>\n" + " </config>\n" + " </interface>\n" + " <interface>\n" + " <name>GIGECLIENTCTP.1-A-2-T1</name>\n" + " <config>\n" + " <type xmlns:ianaift=\"urn:ietf:params:xml:ns:yang:iana-if-type\">ianaift:opticalTransport</type>\n" + " <name>GIGECLIENTCTP.1-A-2-T1</name>\n" + " <description/>\n" + " <enabled>true</enabled>\n" + " </config>\n" + " </interface>\n" + " <interface>\n" + " <name>GIGECLIENTCTP.1-A-2-T2</name>\n" + " <config>\n" + " <type xmlns:ianaift=\"urn:ietf:params:xml:ns:yang:iana-if-type\">ianaift:opticalTransport</type>\n" + " <name>GIGECLIENTCTP.1-A-2-T2</name>\n" + " <description/>\n" + " <enabled>true</enabled>\n" + " </config>\n" + " </interface>\n" + " <interface>\n" + " <name>GIGECLIENTCTP.1-L1-1-1</name>\n" + " <config>\n" + " <type xmlns:ianaift=\"urn:ietf:params:xml:ns:yang:iana-if-type\">ianaift:opticalTransport</type>\n" + " <name>GIGECLIENTCTP.1-L1-1-1</name>\n" + " <description/>\n" + " <enabled>true</enabled>\n" + " </config>\n" + " </interface>\n" + " <interface>\n" + " <name>GIGECLIENTCTP.1-L2-1-1</name>\n" + " <config>\n" + " <type xmlns:ianaift=\"urn:ietf:params:xml:ns:yang:iana-if-type\">ianaift:opticalTransport</type>\n" + " <name>GIGECLIENTCTP.1-L2-1-1</name>\n" + " <description/>\n" + " <enabled>true</enabled>\n" + " </config>\n" + " </interface>\n" + " <interface>\n" + " <name>NCTGIGE.1-NCT-1</name>\n" + " <config>\n" + " <type xmlns:ianaift=\"urn:ietf:params:xml:ns:yang:iana-if-type\">ianaift:ethernetCsmacd</type>\n" + " <name>NCTGIGE.1-NCT-1</name>\n" + " <description/>\n" + " <enabled>true</enabled>\n" + " </config>\n" + " </interface>\n" + " <interface>\n" + " <name>NCTGIGE.1-NCT-2</name>\n" + " <config>\n" + " <type xmlns:ianaift=\"urn:ietf:params:xml:ns:yang:iana-if-type\">ianaift:ethernetCsmacd</type>\n" + " <name>NCTGIGE.1-NCT-2</name>\n" + " <description/>\n" + " <enabled>true</enabled>\n" + " </config>\n" + " </interface>\n" + " <interface>\n" + " <name>OCHCTP.1-L1-1</name>\n" + " <config>\n" + " <type xmlns:ianaift=\"urn:ietf:params:xml:ns:yang:iana-if-type\">ianaift:opticalTransport</type>\n" + " <name>OCHCTP.1-L1-1</name>\n" + " <description/>\n" + " <enabled>true</enabled>\n" + " </config>\n" + " </interface>\n" + " <interface>\n" + " <name>SCHCTP.1-L1-1</name>\n" + " <config>\n" + " <type xmlns:ianaift=\"urn:ietf:params:xml:ns:yang:iana-if-type\">ianaift:opticalTransport</type>\n" + " <name>SCHCTP.1-L1-1</name>\n" + " <description/>\n" + " <enabled>true</enabled>\n" + " </config>\n" + " </interface>\n" + " <interface>\n" + " <name>TRIBPTP.1-A-2-T1</name>\n" + " <config>\n" + " <type xmlns:ianaift=\"urn:ietf:params:xml:ns:yang:iana-if-type\">ianaift:opticalTransport</type>\n" + " <name>TRIBPTP.1-A-2-T1</name>\n" + " <description/>\n" + " <enabled>true</enabled>\n" + " </config>\n" + " </interface>\n" + " <interface>\n" + " <name>TRIBPTP.1-A-2-T2</name>\n" + " <config>\n" + " <type xmlns:ianaift=\"urn:ietf:params:xml:ns:yang:iana-if-type\">ianaift:opticalTransport</type>\n" + " <name>TRIBPTP.1-A-2-T2</name>\n" + " <description/>\n" + " <enabled>true</enabled>\n" + " </config>\n" + " </interface>\n" + " <interface>\n" + " <name>XTSCGPTP.1-L1</name>\n" + " <config>\n" + " <type xmlns:ianaift=\"urn:ietf:params:xml:ns:yang:iana-if-type\">ianaift:opticalTransport</type>\n" + " <name>XTSCGPTP.1-L1</name>\n" + " <description/>\n" + " <enabled>true</enabled>\n" + " </config>\n" + " </interface>\n" + " </interfaces>\n" + "</data>\n"; // CHECKSTYLE:ON InfineraOpenConfigDeviceDiscovery sut = new InfineraOpenConfigDeviceDiscovery(); XMLConfiguration cfg = new XMLConfiguration(); cfg.load(CharSource.wrap(input).openStream()); List<PortDescription> ports = sut.discoverPorts(cfg); assertThat(ports, hasSize(4)); PortDescription portDescription; portDescription = ports.get(0); assertThat(portDescription.portNumber().toLong(), is(1L)); assertThat(portDescription.portNumber().name(), is("GIGECLIENTCTP.1-A-2-T1")); assertThat(portDescription.annotations().value(OC_NAME), is("GIGECLIENTCTP.1-A-2-T1")); assertThat(portDescription.annotations().value(OC_TYPE), is("GIGECLIENTCTP.1-A-2-T1")); assertThat(portDescription.annotations().value(ODTN_PORT_TYPE), is(OdtnDeviceDescriptionDiscovery.OdtnPortType.CLIENT.value())); portDescription = ports.get(3); assertThat(portDescription.portNumber().toLong(), is(102L)); assertThat(portDescription.portNumber().name(), is("GIGECLIENTCTP.1-L2-1-1")); assertThat(portDescription.annotations().value(OC_NAME), is("GIGECLIENTCTP.1-L2-1-1")); assertThat(portDescription.annotations().value(OC_TYPE), is("GIGECLIENTCTP.1-L2-1-1")); assertThat(portDescription.annotations().value(ODTN_PORT_TYPE), is(OdtnDeviceDescriptionDiscovery.OdtnPortType.LINE.value())); }
public boolean isChild(final Path directory) { if(directory.isFile()) { // If a file we don't have any children at all return false; } return new SimplePathPredicate(this).isChild(new SimplePathPredicate(directory)); }
@Test public void testIsChild() { Path p = new Path("/a/t", EnumSet.of(Path.Type.file)); assertTrue(p.isChild(new Path("/a", EnumSet.of(Path.Type.directory)))); assertTrue(p.isChild(new Path("/", EnumSet.of(Path.Type.directory)))); assertFalse(p.isChild(new Path("/a", EnumSet.of(Path.Type.file)))); final Path d = new Path("/a", EnumSet.of(Path.Type.directory)); d.attributes().setVersionId("1"); assertTrue(p.isChild(d)); }
public static Builder<String, String> builder(String bootstrapServers, String... topics) { return new Builder<String, String>(bootstrapServers, topics).withStringDeserializers(); }
@Test public void testThrowsIfEnableAutoCommitIsSet() { Assertions.assertThrows(IllegalStateException.class, () -> KafkaSpoutConfig.builder("localhost:1234", "topic") .setProp(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true) .build()); }
@Override public void onMsg(TbContext ctx, TbMsg msg) throws TbNodeException { ctx.tellNext(msg, checkMatches(msg) ? TbNodeConnectionType.TRUE : TbNodeConnectionType.FALSE); }
@Test void givenTypeCircleAndConfigWithCircleDefined_whenOnMsg_thenFalse() throws TbNodeException { // GIVEN var config = new TbGpsGeofencingFilterNodeConfiguration().defaultConfiguration(); config.setFetchPerimeterInfoFromMessageMetadata(false); config.setPerimeterType(PerimeterType.CIRCLE); config.setCenterLatitude(CIRCLE_CENTER.getLatitude()); config.setCenterLongitude(CIRCLE_CENTER.getLongitude()); config.setRange(CIRCLE_RANGE); config.setRangeUnit(RangeUnit.KILOMETER); node.init(ctx, new TbNodeConfiguration(JacksonUtil.valueToTree(config))); DeviceId deviceId = new DeviceId(UUID.randomUUID()); TbMsg msg = getTbMsg(deviceId, TbMsgMetaData.EMPTY, POINT_OUTSIDE_CIRCLE.getLatitude(), POINT_OUTSIDE_CIRCLE.getLongitude()); // WHEN node.onMsg(ctx, msg); // THEN ArgumentCaptor<TbMsg> newMsgCaptor = ArgumentCaptor.forClass(TbMsg.class); verify(ctx, times(1)).tellNext(newMsgCaptor.capture(), eq(TbNodeConnectionType.FALSE)); verify(ctx, never()).tellFailure(any(), any()); TbMsg newMsg = newMsgCaptor.getValue(); assertThat(newMsg).isNotNull(); assertThat(newMsg).isSameAs(msg); }
@Override public State state() { return task.state(); }
@Test public void shouldDelegateState() { final ReadOnlyTask readOnlyTask = new ReadOnlyTask(task); readOnlyTask.state(); verify(task).state(); }
public static String between(final String text, String after, String before) { String ret = after(text, after); if (ret == null) { return null; } return before(ret, before); }
@Test public void testBetween() { assertEquals("foo bar", StringHelper.between("Hello 'foo bar' how are you", "'", "'")); assertEquals("foo bar", StringHelper.between("Hello ${foo bar} how are you", "${", "}")); assertNull(StringHelper.between("Hello ${foo bar} how are you", "'", "'")); assertTrue(StringHelper.between("begin:mykey:end", "begin:", ":end", "mykey"::equals).orElse(false)); assertFalse(StringHelper.between("begin:ignore:end", "begin:", ":end", "mykey"::equals).orElse(false)); }
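StringHelper.between composes two helpers that are not shown in this record. A minimal sketch of what they would have to do, assuming after returns the substring following the first occurrence of the token and before the substring preceding it (null when the token is absent); the real StringHelper may differ:

// Hypothetical minimal versions of the helpers composed by between(..):
public static String after(String text, String token) {
    int pos = text.indexOf(token);
    return pos < 0 ? null : text.substring(pos + token.length());
}
public static String before(String text, String token) {
    int pos = text.indexOf(token);
    return pos < 0 ? null : text.substring(0, pos);
}
// between("Hello ${foo bar} how are you", "${", "}"):
// after(..) yields "foo bar} how are you", then before(..) trims it to "foo bar".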
public double calculateDensity(Graph graph, boolean isGraphDirected) { double result; double edgesCount = graph.getEdgeCount(); double nodesCount = graph.getNodeCount(); double multiplier = 1; if (!isGraphDirected) { multiplier = 2; } result = (multiplier * edgesCount) / (nodesCount * nodesCount - nodesCount); return result; }
@Test public void testNullGraphDensity() { GraphModel graphModel = GraphGenerator.generateNullUndirectedGraph(5); Graph graph = graphModel.getGraph(); GraphDensity d = new GraphDensity(); double density = d.calculateDensity(graph, false); assertEquals(density, 0.0); }
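For reference, calculateDensity computes D = m * E / (N^2 - N), with m = 2 for undirected graphs. The null graph above has E = 0, so D = 0; a complete undirected graph on the same 5 nodes (E = 10) would give 2 * 10 / (25 - 5) = 1.0. For N <= 1 the denominator is zero, so the double division yields NaN or Infinity rather than throwing, which callers may want to guard against.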
public RepositoryList getRepos(String serverUrl, String token, @Nullable String project, @Nullable String repo) { String projectOrEmpty = Optional.ofNullable(project).orElse(""); String repoOrEmpty = Optional.ofNullable(repo).orElse(""); HttpUrl url = buildUrl(serverUrl, format("/rest/api/1.0/repos?projectname=%s&name=%s", projectOrEmpty, repoOrEmpty)); return doGet(token, url, body -> buildGson().fromJson(body, RepositoryList.class)); }
@Test public void get_repos() { server.enqueue(new MockResponse() .setHeader("Content-Type", "application/json;charset=UTF-8") .setBody("{\n" + " \"isLastPage\": true,\n" + " \"values\": [\n" + " {\n" + " \"slug\": \"banana\",\n" + " \"id\": 2,\n" + " \"name\": \"banana\",\n" + " \"project\": {\n" + " \"key\": \"HOY\",\n" + " \"id\": 2,\n" + " \"name\": \"hoy\"\n" + " }\n" + " },\n" + " {\n" + " \"slug\": \"potato\",\n" + " \"id\": 1,\n" + " \"name\": \"potato\",\n" + " \"project\": {\n" + " \"key\": \"HEY\",\n" + " \"id\": 1,\n" + " \"name\": \"hey\"\n" + " }\n" + " }\n" + " ]\n" + "}")); RepositoryList gsonBBSRepoList = underTest.getRepos(server.url("/").toString(), "token", "", ""); assertThat(gsonBBSRepoList.isLastPage()).isTrue(); assertThat(gsonBBSRepoList.getValues()).hasSize(2); assertThat(gsonBBSRepoList.getValues()).extracting(Repository::getId, Repository::getName, Repository::getSlug, g -> g.getProject().getId(), g -> g.getProject().getKey(), g -> g.getProject().getName()) .containsExactlyInAnyOrder( tuple(2L, "banana", "banana", 2L, "HOY", "hoy"), tuple(1L, "potato", "potato", 1L, "HEY", "hey")); }
public String getFormattedMessage() { if (formattedMessage != null) { return formattedMessage; } if (argumentArray != null) { formattedMessage = MessageFormatter.arrayFormat(message, argumentArray).getMessage(); } else { formattedMessage = message; } return formattedMessage; }
@Test public void testFormattingOneArg() { String message = "x={}"; Throwable throwable = null; Object[] argArray = new Object[] { 12 }; LoggingEvent event = new LoggingEvent("", logger, Level.INFO, message, throwable, argArray); assertNull(event.formattedMessage); assertEquals("x=12", event.getFormattedMessage()); }
@SuppressWarnings({"SimplifyBooleanReturn"}) public static Map<String, ParamDefinition> cleanupParams(Map<String, ParamDefinition> params) { if (params == null || params.isEmpty()) { return params; } Map<String, ParamDefinition> mapped = params.entrySet().stream() .collect( MapHelper.toListMap( Map.Entry::getKey, p -> { ParamDefinition param = p.getValue(); if (param.getType() == ParamType.MAP) { MapParamDefinition mapParamDef = param.asMapParamDef(); if (mapParamDef.getValue() == null && (mapParamDef.getInternalMode() == InternalParamMode.OPTIONAL)) { return mapParamDef; } return MapParamDefinition.builder() .name(mapParamDef.getName()) .value(cleanupParams(mapParamDef.getValue())) .expression(mapParamDef.getExpression()) .name(mapParamDef.getName()) .validator(mapParamDef.getValidator()) .tags(mapParamDef.getTags()) .mode(mapParamDef.getMode()) .meta(mapParamDef.getMeta()) .build(); } else { return param; } })); Map<String, ParamDefinition> filtered = mapped.entrySet().stream() .filter( p -> { ParamDefinition param = p.getValue(); if (param.getInternalMode() == InternalParamMode.OPTIONAL) { if (param.getValue() == null && param.getExpression() == null) { return false; } else if (param.getType() == ParamType.MAP && param.asMapParamDef().getValue() != null && param.asMapParamDef().getValue().isEmpty()) { return false; } else { return true; } } else { Checks.checkTrue( param.getValue() != null || param.getExpression() != null, String.format( "[%s] is a required parameter (type=[%s])", p.getKey(), param.getType())); return true; } }) .collect(MapHelper.toListMap(Map.Entry::getKey, Map.Entry::getValue)); return cleanIntermediateMetadata(filtered); }
@Test public void testCleanupOptionalEmptyParams() throws JsonProcessingException { Map<String, ParamDefinition> allParams = parseParamDefMap("{'optional': {'type': 'STRING', 'internal_mode': 'OPTIONAL'}}"); Map<String, ParamDefinition> cleanedParams = ParamsMergeHelper.cleanupParams(allParams); assertEquals(0, cleanedParams.size()); }
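The cleanup semantics this test pins down: an OPTIONAL param with neither a value nor an expression is pruned (hence the empty result above), and an OPTIONAL MAP whose cleaned value comes back empty is pruned as well. A param without the OPTIONAL internal mode in the same position would instead trip Checks.checkTrue and be reported as a required parameter.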
public static String toUriAuthority(NetworkEndpoint networkEndpoint) { return toHostAndPort(networkEndpoint).toString(); }
@Test public void toUriString_withHostnameAndPortEndpoint_returnsHostnameAndPort() { NetworkEndpoint hostnameAndPortEndpoint = NetworkEndpoint.newBuilder() .setType(NetworkEndpoint.Type.HOSTNAME_PORT) .setPort(Port.newBuilder().setPortNumber(8888)) .setHostname(Hostname.newBuilder().setName("localhost")) .build(); assertThat(NetworkEndpointUtils.toUriAuthority(hostnameAndPortEndpoint)) .isEqualTo("localhost:8888"); }
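toHostAndPort is not shown in this record; a plausible sketch assuming Guava's HostAndPort (whose toString() renders host:port and brackets IPv6 literals), not the verified implementation:

// Hypothetical sketch of the helper behind toUriAuthority(..); field names for the
// IP case are assumptions, only HOSTNAME_PORT is exercised by the test above.
static HostAndPort toHostAndPort(NetworkEndpoint endpoint) {
    switch (endpoint.getType()) {
        case HOSTNAME_PORT:
            return HostAndPort.fromParts(
                endpoint.getHostname().getName(), endpoint.getPort().getPortNumber());
        case IP_PORT: // assumed sibling case for IP-based endpoints
            return HostAndPort.fromParts(
                endpoint.getIpAddress().getAddress(), endpoint.getPort().getPortNumber());
        default:
            throw new AssertionError("unhandled endpoint type: " + endpoint.getType());
    }
}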
@SuppressWarnings("checkstyle:magicnumber") @Nonnull public static String idToString(long id) { char[] buf = Arrays.copyOf(ID_TEMPLATE, ID_TEMPLATE.length); String hexStr = Long.toHexString(id); for (int i = hexStr.length() - 1, j = 18; i >= 0; i--, j--) { buf[j] = hexStr.charAt(i); if (j == 15 || j == 10 || j == 5) { j--; } } return new String(buf); }
@Test public void when_idToString() { assertEquals("0000-0000-0000-0000", idToString(0)); assertEquals("0000-0000-0000-0001", idToString(1)); assertEquals("7fff-ffff-ffff-ffff", idToString(Long.MAX_VALUE)); assertEquals("8000-0000-0000-0000", idToString(Long.MIN_VALUE)); assertEquals("ffff-ffff-ffff-ffff", idToString(-1)); assertEquals("1122-10f4-7de9-8115", idToString(1234567890123456789L)); assertEquals("eedd-ef0b-8216-7eeb", idToString(-1234567890123456789L)); }
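ID_TEMPLATE is not shown, but the index arithmetic implies the 19-character template below: hex digits are written right-to-left from index 18, and the j == 15 || j == 10 || j == 5 checks make the cursor hop over the dashes at indices 14, 9 and 4. A plausible reconstruction, not the verified constant:

// Template implied by idToString's loop bounds:
private static final char[] ID_TEMPLATE = "0000-0000-0000-0000".toCharArray();
// Example: 1234567890123456789L == 0x112210f47de98115; Long.toHexString's lowercase
// digits fill the template as "1122-10f4-7de9-8115", matching the assertion above.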
@Override public void onHeartbeatSuccess(ConsumerGroupHeartbeatResponseData response) { if (response.errorCode() != Errors.NONE.code()) { String errorMessage = String.format( "Unexpected error in Heartbeat response. Expected no error, but received: %s", Errors.forCode(response.errorCode()) ); throw new IllegalArgumentException(errorMessage); } MemberState state = state(); if (state == MemberState.LEAVING) { log.debug("Ignoring heartbeat response received from broker. Member {} with epoch {} is " + "already leaving the group.", memberId, memberEpoch); return; } if (state == MemberState.UNSUBSCRIBED && maybeCompleteLeaveInProgress()) { log.debug("Member {} with epoch {} received a successful response to the heartbeat " + "to leave the group and completed the leave operation. ", memberId, memberEpoch); return; } if (isNotInGroup()) { log.debug("Ignoring heartbeat response received from broker. Member {} is in {} state" + " so it's not a member of the group. ", memberId, state); return; } // Update the group member id label in the client telemetry reporter if the member id has // changed. Initially the member id is empty, and it is updated when the member joins the // group. This is done here to avoid updating the label on every heartbeat response. Also // check if the member id is null, as the schema defines it as nullable. if (response.memberId() != null && !response.memberId().equals(memberId)) { clientTelemetryReporter.ifPresent(reporter -> reporter.updateMetricsLabels( Collections.singletonMap(ClientTelemetryProvider.GROUP_MEMBER_ID, response.memberId()))); } this.memberId = response.memberId(); updateMemberEpoch(response.memberEpoch()); ConsumerGroupHeartbeatResponseData.Assignment assignment = response.assignment(); if (assignment != null) { if (!state.canHandleNewAssignment()) { // New assignment received but member is in a state where it cannot take new // assignments (ex. preparing to leave the group) log.debug("Ignoring new assignment {} received from server because member is in {} state.", assignment, state); return; } Map<Uuid, SortedSet<Integer>> newAssignment = new HashMap<>(); assignment.topicPartitions().forEach(topicPartition -> newAssignment.put(topicPartition.topicId(), new TreeSet<>(topicPartition.partitions()))); processAssignmentReceived(newAssignment); } }
@Test public void testSameAssignmentReconciledAgainWhenFenced() { ConsumerMembershipManager membershipManager = createMemberInStableState(); Uuid topic1 = Uuid.randomUuid(); final Assignment assignment1 = new ConsumerGroupHeartbeatResponseData.Assignment(); final Assignment assignment2 = new ConsumerGroupHeartbeatResponseData.Assignment() .setTopicPartitions(Collections.singletonList( new TopicPartitions() .setTopicId(topic1) .setPartitions(Arrays.asList(0, 1, 2)) )); when(metadata.topicNames()).thenReturn(Collections.singletonMap(topic1, "topic1")); assertEquals(toTopicIdPartitionMap(assignment1), membershipManager.currentAssignment().partitions); // Receive assignment, wait on commit membershipManager.onHeartbeatSuccess(createConsumerGroupHeartbeatResponse(assignment2).data()); assertEquals(MemberState.RECONCILING, membershipManager.state()); CompletableFuture<Void> commitResult = new CompletableFuture<>(); when(commitRequestManager.maybeAutoCommitSyncBeforeRevocation(anyLong())).thenReturn(commitResult); membershipManager.poll(time.milliseconds()); // Get fenced, commit completes membershipManager.transitionToFenced(); assertEquals(MemberState.JOINING, membershipManager.state()); assertTrue(membershipManager.currentAssignment().isNone()); assertTrue(subscriptionState.assignedPartitions().isEmpty()); commitResult.complete(null); assertEquals(MemberState.JOINING, membershipManager.state()); assertTrue(membershipManager.currentAssignment().isNone()); assertTrue(subscriptionState.assignedPartitions().isEmpty()); // We have to reconcile & ack the assignment again membershipManager.onHeartbeatSuccess(createConsumerGroupHeartbeatResponse(assignment1).data()); assertEquals(MemberState.RECONCILING, membershipManager.state()); membershipManager.poll(time.milliseconds()); assertEquals(MemberState.ACKNOWLEDGING, membershipManager.state()); membershipManager.onHeartbeatRequestGenerated(); assertEquals(MemberState.STABLE, membershipManager.state()); assertEquals(toTopicIdPartitionMap(assignment1), membershipManager.currentAssignment().partitions); }
@Override public void register(@NonNull Scheme scheme) { if (!schemes.contains(scheme)) { indexSpecRegistry.indexFor(scheme); schemes.add(scheme); getWatchers().forEach(watcher -> watcher.onChange(new SchemeRegistered(scheme))); } }
@Test void shouldThrowExceptionWhenNoGvkAnnotation() { class WithoutGvkExtension extends AbstractExtension { } assertThrows(IllegalArgumentException.class, () -> schemeManager.register(WithoutGvkExtension.class)); }
public ConfigResponse resolveConfig(GetConfigRequest req, ConfigResponseFactory responseFactory) { long start = System.currentTimeMillis(); metricUpdater.incrementRequests(); ConfigKey<?> configKey = req.getConfigKey(); String defMd5 = req.getRequestDefMd5(); if (defMd5 == null || defMd5.isEmpty()) { defMd5 = ConfigUtils.getDefMd5(req.getDefContent().asList()); } ConfigCacheKey cacheKey = new ConfigCacheKey(configKey, defMd5); log.log(Level.FINE, () -> TenantRepository.logPre(getId()) + ("Resolving config " + cacheKey)); ConfigResponse config; if (useCache(req)) { config = cache.computeIfAbsent(cacheKey, (ConfigCacheKey key) -> { var response = createConfigResponse(configKey, req, responseFactory); metricUpdater.setCacheConfigElems(cache.configElems()); metricUpdater.setCacheChecksumElems(cache.checkSumElems()); return response; }); } else { config = createConfigResponse(configKey, req, responseFactory); } metricUpdater.incrementProcTime(System.currentTimeMillis() - start); return config; }
@Test public void require_that_known_config_defs_are_found() { handler.resolveConfig(createSimpleConfigRequest()); }
@CanIgnoreReturnValue public GsonBuilder setDateFormat(String pattern) { if (pattern != null) { try { new SimpleDateFormat(pattern); } catch (IllegalArgumentException e) { // Throw exception if it is an invalid date format throw new IllegalArgumentException("The date pattern '" + pattern + "' is not valid", e); } } this.datePattern = pattern; return this; }
@Test public void testSetDateFormatWithValidPattern() { GsonBuilder builder = new GsonBuilder(); String validPattern = "yyyy-MM-dd"; // Should not throw an exception builder.setDateFormat(validPattern); }
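The validation simply delegates to the JDK: constructing a throwaway SimpleDateFormat is the cheapest way to reject a malformed pattern. For example, setDateFormat("qwerty") throws IllegalArgumentException ("The date pattern 'qwerty' is not valid") because 'q' is not a SimpleDateFormat pattern letter, while setDateFormat(null) skips the check entirely and clears the stored pattern.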
@Override public boolean match(Message msg, StreamRule rule) { if (msg.getField(rule.getField()) == null) return rule.getInverted(); try { final Pattern pattern = patternCache.get(rule.getValue()); final CharSequence charSequence = new InterruptibleCharSequence(msg.getField(rule.getField()).toString()); return rule.getInverted() ^ pattern.matcher(charSequence).find(); } catch (ExecutionException e) { LOG.error("Unable to get pattern from regex cache: ", e); } return false; }
@Test public void testSuccessfulMatchInArray() { StreamRule rule = getSampleRule(); rule.setValue("foobar"); Message msg = getSampleMessage(); msg.addField("something", Collections.singleton("foobar")); StreamRuleMatcher matcher = getMatcher(rule); assertTrue(matcher.match(msg, rule)); }
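Two details make this test pass. First, the field value is a collection, and the matcher regex-scans its toString() form: "[foobar]" still contains the substring, and find() is unanchored. Second, the InterruptibleCharSequence wrapper exists so a pathological pattern on hostile input can be cancelled via thread interruption instead of pinning a CPU on catastrophic backtracking. A minimal illustration of the first point:

// toString() of a singleton collection keeps the value findable by an unanchored regex:
Pattern p = Pattern.compile("foobar");
assert p.matcher(Collections.singleton("foobar").toString()).find(); // matches inside "[foobar]"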
@Transactional public boolean block(ResourceId resourceId, TimeSlot timeSlot, Owner requester) { ResourceGroupedAvailability toBlock = findGrouped(resourceId, timeSlot); return block(requester, toBlock); }
@Test void cantBlockWhenNoSlotsCreated() { //given ResourceId resourceId = ResourceId.newOne(); TimeSlot oneDay = TimeSlot.createDailyTimeSlotAtUTC(2021, 1, 1); Owner owner = Owner.newOne(); //when boolean result = availabilityFacade.block(resourceId, oneDay, owner); //then assertFalse(result); }
@Subscribe public void onChatMessage(ChatMessage event) { if (event.getType() == ChatMessageType.GAMEMESSAGE || event.getType() == ChatMessageType.SPAM) { String message = Text.removeTags(event.getMessage()); Matcher dodgyCheckMatcher = DODGY_CHECK_PATTERN.matcher(message); Matcher dodgyProtectMatcher = DODGY_PROTECT_PATTERN.matcher(message); Matcher dodgyBreakMatcher = DODGY_BREAK_PATTERN.matcher(message); Matcher bindingNecklaceCheckMatcher = BINDING_CHECK_PATTERN.matcher(message); Matcher bindingNecklaceUsedMatcher = BINDING_USED_PATTERN.matcher(message); Matcher ringOfForgingCheckMatcher = RING_OF_FORGING_CHECK_PATTERN.matcher(message); Matcher amuletOfChemistryCheckMatcher = AMULET_OF_CHEMISTRY_CHECK_PATTERN.matcher(message); Matcher amuletOfChemistryUsedMatcher = AMULET_OF_CHEMISTRY_USED_PATTERN.matcher(message); Matcher amuletOfChemistryBreakMatcher = AMULET_OF_CHEMISTRY_BREAK_PATTERN.matcher(message); Matcher amuletOfBountyCheckMatcher = AMULET_OF_BOUNTY_CHECK_PATTERN.matcher(message); Matcher amuletOfBountyUsedMatcher = AMULET_OF_BOUNTY_USED_PATTERN.matcher(message); Matcher chronicleAddMatcher = CHRONICLE_ADD_PATTERN.matcher(message); Matcher chronicleUseAndCheckMatcher = CHRONICLE_USE_AND_CHECK_PATTERN.matcher(message); Matcher slaughterActivateMatcher = BRACELET_OF_SLAUGHTER_ACTIVATE_PATTERN.matcher(message); Matcher slaughterCheckMatcher = BRACELET_OF_SLAUGHTER_CHECK_PATTERN.matcher(message); Matcher expeditiousActivateMatcher = EXPEDITIOUS_BRACELET_ACTIVATE_PATTERN.matcher(message); Matcher expeditiousCheckMatcher = EXPEDITIOUS_BRACELET_CHECK_PATTERN.matcher(message); Matcher bloodEssenceCheckMatcher = BLOOD_ESSENCE_CHECK_PATTERN.matcher(message); Matcher bloodEssenceExtractMatcher = BLOOD_ESSENCE_EXTRACT_PATTERN.matcher(message); Matcher braceletOfClayCheckMatcher = BRACELET_OF_CLAY_CHECK_PATTERN.matcher(message); if (message.contains(RING_OF_RECOIL_BREAK_MESSAGE)) { notifier.notify(config.recoilNotification(), "Your Ring of Recoil has shattered"); } else if (dodgyBreakMatcher.find()) { notifier.notify(config.dodgyNotification(), "Your dodgy necklace has crumbled to dust."); updateDodgyNecklaceCharges(MAX_DODGY_CHARGES); } else if (dodgyCheckMatcher.find()) { updateDodgyNecklaceCharges(Integer.parseInt(dodgyCheckMatcher.group(1))); } else if (dodgyProtectMatcher.find()) { updateDodgyNecklaceCharges(Integer.parseInt(dodgyProtectMatcher.group(1))); } else if (amuletOfChemistryCheckMatcher.find()) { updateAmuletOfChemistryCharges(Integer.parseInt(amuletOfChemistryCheckMatcher.group(1))); } else if (amuletOfChemistryUsedMatcher.find()) { final String match = amuletOfChemistryUsedMatcher.group(1); int charges = 1; if (!match.equals("one")) { charges = Integer.parseInt(match); } updateAmuletOfChemistryCharges(charges); } else if (amuletOfChemistryBreakMatcher.find()) { notifier.notify(config.amuletOfChemistryNotification(), "Your amulet of chemistry has crumbled to dust."); updateAmuletOfChemistryCharges(MAX_AMULET_OF_CHEMISTRY_CHARGES); } else if (amuletOfBountyCheckMatcher.find()) { updateAmuletOfBountyCharges(Integer.parseInt(amuletOfBountyCheckMatcher.group(1))); } else if (amuletOfBountyUsedMatcher.find()) { updateAmuletOfBountyCharges(Integer.parseInt(amuletOfBountyUsedMatcher.group(1))); } else if (message.equals(AMULET_OF_BOUNTY_BREAK_TEXT)) { updateAmuletOfBountyCharges(MAX_AMULET_OF_BOUNTY_CHARGES); } else if (message.contains(BINDING_BREAK_TEXT)) { notifier.notify(config.bindingNotification(), BINDING_BREAK_TEXT); // This chat message triggers before the used message so add 1 to the max charges to ensure proper sync updateBindingNecklaceCharges(MAX_BINDING_CHARGES + 1); } else if (bindingNecklaceUsedMatcher.find()) { final ItemContainer equipment = client.getItemContainer(InventoryID.EQUIPMENT); if (equipment.contains(ItemID.BINDING_NECKLACE)) { updateBindingNecklaceCharges(getItemCharges(ItemChargeConfig.KEY_BINDING_NECKLACE) - 1); } } else if (bindingNecklaceCheckMatcher.find()) { final String match = bindingNecklaceCheckMatcher.group(1); int charges = 1; if (!match.equals("one")) { charges = Integer.parseInt(match); } updateBindingNecklaceCharges(charges); } else if (ringOfForgingCheckMatcher.find()) { final String match = ringOfForgingCheckMatcher.group(1); int charges = 1; if (!match.equals("one")) { charges = Integer.parseInt(match); } updateRingOfForgingCharges(charges); } else if (message.equals(RING_OF_FORGING_USED_TEXT) || message.equals(RING_OF_FORGING_VARROCK_PLATEBODY)) { final ItemContainer inventory = client.getItemContainer(InventoryID.INVENTORY); final ItemContainer equipment = client.getItemContainer(InventoryID.EQUIPMENT); // Determine if the player smelted with a Ring of Forging equipped. if (equipment == null) { return; } if (equipment.contains(ItemID.RING_OF_FORGING) && (message.equals(RING_OF_FORGING_USED_TEXT) || inventory.count(ItemID.IRON_ORE) > 1)) { int charges = Ints.constrainToRange(getItemCharges(ItemChargeConfig.KEY_RING_OF_FORGING) - 1, 0, MAX_RING_OF_FORGING_CHARGES); updateRingOfForgingCharges(charges); } } else if (message.equals(RING_OF_FORGING_BREAK_TEXT)) { notifier.notify(config.ringOfForgingNotification(), "Your ring of forging has melted."); // This chat message triggers before the used message so add 1 to the max charges to ensure proper sync updateRingOfForgingCharges(MAX_RING_OF_FORGING_CHARGES + 1); } else if (chronicleAddMatcher.find()) { final String match = chronicleAddMatcher.group(1); if (match.equals("one")) { setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 1); } else { setItemCharges(ItemChargeConfig.KEY_CHRONICLE, Integer.parseInt(match)); } } else if (chronicleUseAndCheckMatcher.find()) { setItemCharges(ItemChargeConfig.KEY_CHRONICLE, Integer.parseInt(chronicleUseAndCheckMatcher.group(1))); } else if (message.equals(CHRONICLE_ONE_CHARGE_TEXT)) { setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 1); } else if (message.equals(CHRONICLE_EMPTY_TEXT) || message.equals(CHRONICLE_NO_CHARGES_TEXT)) { setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 0); } else if (message.equals(CHRONICLE_FULL_TEXT)) { setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 1000); } else if (slaughterActivateMatcher.find()) { final String found = slaughterActivateMatcher.group(1); if (found == null) { updateBraceletOfSlaughterCharges(MAX_SLAYER_BRACELET_CHARGES); notifier.notify(config.slaughterNotification(), BRACELET_OF_SLAUGHTER_BREAK_TEXT); } else { updateBraceletOfSlaughterCharges(Integer.parseInt(found)); } } else if (slaughterCheckMatcher.find()) { updateBraceletOfSlaughterCharges(Integer.parseInt(slaughterCheckMatcher.group(1))); } else if (expeditiousActivateMatcher.find()) { final String found = expeditiousActivateMatcher.group(1); if (found == null) { updateExpeditiousBraceletCharges(MAX_SLAYER_BRACELET_CHARGES); notifier.notify(config.expeditiousNotification(), EXPEDITIOUS_BRACELET_BREAK_TEXT); } else { updateExpeditiousBraceletCharges(Integer.parseInt(found)); } } else if (expeditiousCheckMatcher.find()) { updateExpeditiousBraceletCharges(Integer.parseInt(expeditiousCheckMatcher.group(1))); } else if (bloodEssenceCheckMatcher.find()) { updateBloodEssenceCharges(Integer.parseInt(bloodEssenceCheckMatcher.group(1))); } else if (bloodEssenceExtractMatcher.find()) { updateBloodEssenceCharges(getItemCharges(ItemChargeConfig.KEY_BLOOD_ESSENCE) - Integer.parseInt(bloodEssenceExtractMatcher.group(1))); } else if (message.contains(BLOOD_ESSENCE_ACTIVATE_TEXT)) { updateBloodEssenceCharges(MAX_BLOOD_ESSENCE_CHARGES); } else if (braceletOfClayCheckMatcher.find()) { updateBraceletOfClayCharges(Integer.parseInt(braceletOfClayCheckMatcher.group(1))); } else if (message.equals(BRACELET_OF_CLAY_USE_TEXT) || message.equals(BRACELET_OF_CLAY_USE_TEXT_TRAHAEARN)) { final ItemContainer equipment = client.getItemContainer(InventoryID.EQUIPMENT); // Determine if the player mined with a Bracelet of Clay equipped. if (equipment != null && equipment.contains(ItemID.BRACELET_OF_CLAY)) { final ItemContainer inventory = client.getItemContainer(InventoryID.INVENTORY); // Charge is not used if only 1 inventory slot is available when mining in Prifddinas boolean ignore = inventory != null && inventory.count() == 27 && message.equals(BRACELET_OF_CLAY_USE_TEXT_TRAHAEARN); if (!ignore) { int charges = Ints.constrainToRange(getItemCharges(ItemChargeConfig.KEY_BRACELET_OF_CLAY) - 1, 0, MAX_BRACELET_OF_CLAY_CHARGES); updateBraceletOfClayCharges(charges); } } } else if (message.equals(BRACELET_OF_CLAY_BREAK_TEXT)) { notifier.notify(config.braceletOfClayNotification(), "Your bracelet of clay has crumbled to dust"); updateBraceletOfClayCharges(MAX_BRACELET_OF_CLAY_CHARGES); } } }
@Test public void testChronicleAddSingleCharge() { ChatMessage chatMessage = new ChatMessage(null, ChatMessageType.GAMEMESSAGE, "", CHRONICLE_ADD_SINGLE_CHARGE, "", 0); itemChargePlugin.onChatMessage(chatMessage); verify(configManager).setRSProfileConfiguration(ItemChargeConfig.GROUP, ItemChargeConfig.KEY_CHRONICLE, 1); }
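The test drives the whole else-if chain with a single game message. Because the chain is first-match-wins, pattern order matters: each break message is handled before the corresponding check/used message, and the break branches pre-set MAX + 1 precisely because the used message that follows will decrement the counter back into sync.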
public static CoderProvider fromStaticMethods(Class<?> rawType, Class<?> coderClazz) { checkArgument( Coder.class.isAssignableFrom(coderClazz), "%s is not a subtype of %s", coderClazz.getName(), Coder.class.getSimpleName()); return new CoderProviderFromStaticMethods(rawType, coderClazz); }
@Test public void testIterableCoderProvider() throws Exception { TypeDescriptor<Iterable<Double>> type = TypeDescriptors.iterables(TypeDescriptors.doubles()); CoderProvider iterableCoderProvider = CoderProviders.fromStaticMethods(Iterable.class, IterableCoder.class); assertEquals( IterableCoder.of(DoubleCoder.of()), iterableCoderProvider.coderFor(type, Arrays.asList(DoubleCoder.of()))); }
@Override public boolean match(Message msg, StreamRule rule) { Double msgVal = getDouble(msg.getField(rule.getField())); if (msgVal == null) { return false; } Double ruleVal = getDouble(rule.getValue()); if (ruleVal == null) { return false; } return rule.getInverted() ^ (msgVal < ruleVal); }
@Test public void testSuccessfulDoubleMatchWithNegativeValue() { StreamRule rule = getSampleRule(); rule.setValue("-54354.42"); Message msg = getSampleMessage(); msg.addField("something", "-90000.12"); StreamRuleMatcher matcher = getMatcher(rule); assertTrue(matcher.match(msg, rule)); }
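The inversion is a single XOR: here msgVal (-90000.12) < ruleVal (-54354.42) is true, so a non-inverted rule matches (false ^ true), while the same rule with inverted set would not (true ^ true). A non-numeric field or rule value short-circuits to false before the comparison is ever made.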
public static Map<String, Object> compare(byte[] baselineImg, byte[] latestImg, Map<String, Object> options, Map<String, Object> defaultOptions) throws MismatchException { boolean allowScaling = toBool(defaultOptions.get("allowScaling")); ImageComparison imageComparison = new ImageComparison(baselineImg, latestImg, options, allowScaling); imageComparison.configure(defaultOptions); if (imageComparison.baselineMissing) { imageComparison.result.put("isBaselineMissing", true); throw new MismatchException("baseline image was empty or not found", imageComparison.result); } if (imageComparison.scaleMismatch) { imageComparison.result.put("isScaleMismatch", true); throw new MismatchException("latest image dimensions != baseline image dimensions", imageComparison.result); } double mismatchPercentage = 100.0; for (String engine : imageComparison.engines) { double currentMismatchPercentage; switch (engine) { case RESEMBLE: currentMismatchPercentage = imageComparison.execResemble(); break; case SSIM: currentMismatchPercentage = imageComparison.execSSIM(); break; default: logger.error("skipping unsupported image comparison engine: {}", engine); continue; } if (currentMismatchPercentage <= mismatchPercentage) { mismatchPercentage = currentMismatchPercentage; } if (mismatchPercentage < imageComparison.stopWhenMismatchIsLessThan) { break; } } return imageComparison.checkMismatch(mismatchPercentage); }
@Test void testScaleMismatch() { ImageComparison.MismatchException exception = assertThrows(ImageComparison.MismatchException.class, () -> ImageComparison.compare(R_1x1_IMG, R_2x2_IMG, opts(), opts())); assertTrue(exception.getMessage().contains("latest image dimensions != baseline image dimensions")); assertEquals(Boolean.TRUE, exception.data.get("isScaleMismatch")); }
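Engines run in their configured order; the loop keeps the lowest mismatch seen so far and breaks as soon as it drops below stopWhenMismatchIsLessThan, so a cheap engine listed first can short-circuit an expensive one. The two throwing paths (missing baseline, dimension mismatch) attach their flags to the result map before throwing, which is what the test reads back through exception.data.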
@Override public List<SimpleColumn> toColumns( final ParsedSchema schema, final SerdeFeatures serdeFeatures, final boolean isKey) { SerdeUtils.throwOnUnsupportedFeatures(serdeFeatures, format.supportedFeatures()); Schema connectSchema = connectSrTranslator.toConnectSchema(schema); if (serdeFeatures.enabled(SerdeFeature.UNWRAP_SINGLES)) { connectSchema = SerdeUtils.wrapSingle(connectSchema, isKey); } if (connectSchema.type() != Type.STRUCT) { if (isKey) { throw new IllegalStateException("Key schemas are always unwrapped."); } throw new KsqlException("Schema returned from schema registry is anonymous type. " + "To use this schema with ksqlDB, set '" + CommonCreateConfigs.WRAP_SINGLE_VALUE + "=false' in the WITH clause properties."); } final Schema rowSchema = connectKsqlTranslator.toKsqlSchema(connectSchema); return rowSchema.fields().stream() .map(ConnectFormatSchemaTranslator::toColumn) .collect(Collectors.toList()); }
@Test public void shouldPassConnectSchemaReturnedBySubclassToTranslator() { // When: translator.toColumns(parsedSchema, SerdeFeatures.of(), false); // Then: verify(connectKsqlTranslator).toKsqlSchema(connectSchema); }
@Override public List<Service> getServiceDefinitions() throws MockRepositoryImportException { List<Service> result = new ArrayList<>(); List<Element> interfaceNodes = getConfigDirectChildren(projectElement, "interface"); for (Element interfaceNode : interfaceNodes) { // Filter complete interface definition with name as attribute. if (interfaceNode.getAttribute(NAME_ATTRIBUTE) != null) { log.info("Found a service interface named: {}", interfaceNode.getAttribute(NAME_ATTRIBUTE)); interfaces.put(interfaceNode.getAttribute(NAME_ATTRIBUTE), interfaceNode); serviceInterface = interfaceNode; } } // Try loading definitions from Soap mock services. List<Element> mockServices = getConfigDirectChildren(projectElement, MOCK_SERVICE_TAG); if (!mockServices.isEmpty()) { result.addAll(getSoapServicesDefinitions(mockServices)); } // Then try loading from Rest mock services. List<Element> restMockServices = getConfigDirectChildren(projectElement, REST_MOCK_SERVICE_TAG); if (!restMockServices.isEmpty()) { result.addAll(getRestServicesDefinitions(restMockServices)); } return result; }
@Test void testHelloAPIProjectImport() { SoapUIProjectImporter importer = null; try { importer = new SoapUIProjectImporter( "target/test-classes/io/github/microcks/util/soapui/HelloAPI-soapui-project.xml"); } catch (Exception e) { fail("Exception should not be thrown"); } // Check that basic service properties are there. List<Service> services = null; try { services = importer.getServiceDefinitions(); } catch (MockRepositoryImportException e) { fail("Exception should not be thrown"); } assertEquals(1, services.size()); }
public static GoPluginBundleDescriptor parseXML(InputStream pluginXml, BundleOrPluginFileDetails bundleOrPluginJarFile) throws IOException, JAXBException, XMLStreamException, SAXException { return parseXML(pluginXml, bundleOrPluginJarFile.file().getAbsolutePath(), bundleOrPluginJarFile.extractionLocation(), bundleOrPluginJarFile.isBundledPlugin()); }
@Test void shouldPerformPluginXsdValidationAndFailWhenVersionIsNotPresent() throws IOException { try (InputStream pluginXml = IOUtils.toInputStream("<go-plugin id=\"some\"></go-plugin>", StandardCharsets.UTF_8)) { JAXBException e = assertThrows(JAXBException.class, () -> GoPluginDescriptorParser.parseXML(pluginXml, "/tmp/", new File("/tmp/"), true)); assertTrue(e.getCause().getMessage().contains("Attribute 'version' must appear on element 'go-plugin'"), format("Message not correct: [%s]", e.getCause().getMessage())); } }
@Deprecated public TransMeta getTransMeta( Repository rep, VariableSpace space ) throws KettleException { return getTransMeta( rep, null, space ); }
@Test public void testGetTransMeta() throws KettleException { String param1 = "param1"; String param2 = "param2"; String param3 = "param3"; String parentValue1 = "parentValue1"; String parentValue2 = "parentValue2"; String childValue3 = "childValue3"; JobEntryTrans jobEntryTrans = spy( getJobEntryTrans() ); JobMeta parentJobMeta = spy( new JobMeta() ); when( parentJobMeta.getNamedClusterEmbedManager() ).thenReturn( mock( NamedClusterEmbedManager.class ) ); jobEntryTrans.setParentJobMeta( parentJobMeta); Repository rep = Mockito.mock( Repository.class ); RepositoryDirectory repositoryDirectory = Mockito.mock( RepositoryDirectory.class ); RepositoryDirectoryInterface repositoryDirectoryInterface = Mockito.mock( RepositoryDirectoryInterface.class ); Mockito.doReturn( repositoryDirectoryInterface ).when( rep ).loadRepositoryDirectoryTree(); Mockito.doReturn( repositoryDirectory ).when( repositoryDirectoryInterface ).findDirectory( "/home/admin" ); TransMeta meta = new TransMeta(); meta.setVariable( param2, "childValue2 should be override" ); meta.setVariable( param3, childValue3 ); Mockito.doReturn( meta ).when( rep ) .loadTransformation( Mockito.eq( "test.ktr" ), Mockito.any(), Mockito.any(), Mockito.anyBoolean(), Mockito.any() ); VariableSpace parentSpace = new Variables(); parentSpace.setVariable( param1, parentValue1 ); parentSpace.setVariable( param2, parentValue2 ); jobEntryTrans.setFileName( "/home/admin/test.ktr" ); Mockito.doNothing().when( jobEntryTrans ).logBasic( Mockito.anyString() ); jobEntryTrans.setSpecificationMethod( ObjectLocationSpecificationMethod.FILENAME ); TransMeta transMeta; jobEntryTrans.setPassingAllParameters( false ); transMeta = jobEntryTrans.getTransMeta( rep, null, parentSpace ); Assert.assertEquals( null, transMeta.getVariable( param1 ) ); Assert.assertEquals( parentValue2, transMeta.getVariable( param2 ) ); Assert.assertEquals( childValue3, transMeta.getVariable( param3 ) ); jobEntryTrans.setPassingAllParameters( true ); transMeta = jobEntryTrans.getTransMeta( rep, null, parentSpace ); Assert.assertEquals( parentValue1, transMeta.getVariable( param1 ) ); Assert.assertEquals( parentValue2, transMeta.getVariable( param2 ) ); Assert.assertEquals( childValue3, transMeta.getVariable( param3 ) ); }
public static UserAgent parse(String userAgentString) { return UserAgentParser.parse(userAgentString); }
@Test public void parseWindows10WithIeMobileLumia520Test() { final String uaStr = "Mozilla/5.0 (Mobile; Windows Phone 8.1; Android 4.0; ARM; Trident/7.0; Touch; rv:11.0; IEMobile/11.0; NOKIA; Lumia 520) like iPhone OS 7_0_3 Mac OS X AppleWebKit/537 (KHTML, like Gecko) Mobile Safari/537 "; final UserAgent ua = UserAgentUtil.parse(uaStr); assertEquals("IEMobile", ua.getBrowser().toString()); assertEquals("11.0", ua.getVersion()); assertEquals("Trident", ua.getEngine().toString()); assertEquals("7.0", ua.getEngineVersion()); assertEquals("Windows Phone", ua.getOs().toString()); assertEquals("8.1", ua.getOsVersion()); assertEquals("Windows Phone", ua.getPlatform().toString()); assertTrue(ua.isMobile()); }
@ApiOperation(value = "Create Device (saveDevice) with credentials ", notes = "Create or update the Device. When creating device, platform generates Device Id as " + UUID_WIKI_LINK + "Requires to provide the Device Credentials object as well as an existing device profile ID or use \"default\".\n" + "You may find the example of device with different type of credentials below: \n\n" + "- Credentials type: <b>\"Access token\"</b> with <b>device profile ID</b> below: \n\n" + DEVICE_WITH_DEVICE_CREDENTIALS_PARAM_ACCESS_TOKEN_DESCRIPTION_MARKDOWN + "\n\n" + "- Credentials type: <b>\"Access token\"</b> with <b>device profile default</b> below: \n\n" + DEVICE_WITH_DEVICE_CREDENTIALS_PARAM_ACCESS_TOKEN_DEFAULT_DESCRIPTION_MARKDOWN + "\n\n" + "- Credentials type: <b>\"X509\"</b> with <b>device profile ID</b> below: \n\n" + "Note: <b>credentialsId</b> - format <b>Sha3Hash</b>, <b>certificateValue</b> - format <b>PEM</b> (with \"--BEGIN CERTIFICATE----\" and -\"----END CERTIFICATE-\").\n\n" + DEVICE_WITH_DEVICE_CREDENTIALS_PARAM_X509_CERTIFICATE_DESCRIPTION_MARKDOWN + "\n\n" + "- Credentials type: <b>\"MQTT_BASIC\"</b> with <b>device profile ID</b> below: \n\n" + DEVICE_WITH_DEVICE_CREDENTIALS_PARAM_MQTT_BASIC_DESCRIPTION_MARKDOWN + "\n\n" + "- You may find the example of <b>LwM2M</b> device and <b>RPK</b> credentials below: \n\n" + "Note: LwM2M device - only existing device profile ID (Transport configuration -> Transport type: \"LWM2M\".\n\n" + DEVICE_WITH_DEVICE_CREDENTIALS_PARAM_LVM2M_RPK_DESCRIPTION_MARKDOWN + "\n\n" + "Remove 'id', 'tenantId' and optionally 'customerId' from the request body example (below) to create new Device entity. " + TENANT_OR_CUSTOMER_AUTHORITY_PARAGRAPH) @PreAuthorize("hasAnyAuthority('TENANT_ADMIN', 'CUSTOMER_USER')") @RequestMapping(value = "/device-with-credentials", method = RequestMethod.POST) @ResponseBody public Device saveDeviceWithCredentials(@Parameter(description = "The JSON object with device and credentials. See method description above for example.") @Valid @RequestBody SaveDeviceWithCredentialsRequest deviceAndCredentials) throws ThingsboardException { Device device = deviceAndCredentials.getDevice(); DeviceCredentials credentials = deviceAndCredentials.getCredentials(); device.setTenantId(getCurrentUser().getTenantId()); checkEntity(device.getId(), device, Resource.DEVICE); return tbDeviceService.saveDeviceWithCredentials(device, credentials, getCurrentUser()); }
@Test public void testSaveDeviceWithCredentials() throws Exception { String testToken = "TEST_TOKEN"; Device device = new Device(); device.setName("My device"); device.setType("default"); DeviceCredentials deviceCredentials = new DeviceCredentials(); deviceCredentials.setCredentialsType(DeviceCredentialsType.ACCESS_TOKEN); deviceCredentials.setCredentialsId(testToken); SaveDeviceWithCredentialsRequest saveRequest = new SaveDeviceWithCredentialsRequest(device, deviceCredentials); Mockito.reset(tbClusterService, auditLogService, gatewayNotificationsService); Device savedDevice = readResponse(doPost("/api/device-with-credentials", saveRequest).andExpect(status().isOk()), Device.class); Device oldDevice = new Device(savedDevice); testNotifyEntityAllOneTime(savedDevice, savedDevice.getId(), savedDevice.getId(), savedTenant.getId(), tenantAdmin.getCustomerId(), tenantAdmin.getId(), tenantAdmin.getEmail(), ActionType.ADDED); testNotificationUpdateGatewayNever(); Assert.assertNotNull(savedDevice); Assert.assertNotNull(savedDevice.getId()); Assert.assertTrue(savedDevice.getCreatedTime() > 0); Assert.assertEquals(savedTenant.getId(), savedDevice.getTenantId()); Assert.assertNotNull(savedDevice.getCustomerId()); Assert.assertEquals(NULL_UUID, savedDevice.getCustomerId().getId()); Assert.assertEquals(device.getName(), savedDevice.getName()); DeviceCredentials foundDeviceCredentials = doGet("/api/device/" + savedDevice.getId().getId() + "/credentials", DeviceCredentials.class); Assert.assertNotNull(foundDeviceCredentials); Assert.assertNotNull(foundDeviceCredentials.getId()); Assert.assertEquals(savedDevice.getId(), foundDeviceCredentials.getDeviceId()); Assert.assertEquals(DeviceCredentialsType.ACCESS_TOKEN, foundDeviceCredentials.getCredentialsType()); Assert.assertEquals(testToken, foundDeviceCredentials.getCredentialsId()); Mockito.reset(tbClusterService, auditLogService, gatewayNotificationsService); savedDevice.setName("My new device"); savedDevice = doPost("/api/device", savedDevice, Device.class); testNotifyEntityAllOneTime(savedDevice, savedDevice.getId(), savedDevice.getId(), savedTenant.getId(), tenantAdmin.getCustomerId(), tenantAdmin.getId(), tenantAdmin.getEmail(), ActionType.UPDATED); testNotificationUpdateGatewayOneTime(savedDevice, oldDevice); }
public static DeletionTask convertProtoToDeletionTask( DeletionServiceDeleteTaskProto proto, DeletionService deletionService) { int taskId = proto.getId(); if (proto.hasTaskType() && proto.getTaskType() != null) { if (proto.getTaskType().equals(DeletionTaskType.FILE.name())) { LOG.debug("Converting recovered FileDeletionTask"); return convertProtoToFileDeletionTask(proto, deletionService, taskId); } else if (proto.getTaskType().equals( DeletionTaskType.DOCKER_CONTAINER.name())) { LOG.debug("Converting recovered DockerContainerDeletionTask"); return convertProtoToDockerContainerDeletionTask(proto, deletionService, taskId); } } LOG.debug("Unable to get task type, trying FileDeletionTask"); return convertProtoToFileDeletionTask(proto, deletionService, taskId); }
@Test public void testConvertProtoToDeletionTask() throws Exception { DeletionService deletionService = mock(DeletionService.class); DeletionServiceDeleteTaskProto.Builder protoBuilder = DeletionServiceDeleteTaskProto.newBuilder(); int id = 0; protoBuilder.setId(id); DeletionServiceDeleteTaskProto proto = protoBuilder.build(); DeletionTask deletionTask = NMProtoUtils.convertProtoToDeletionTask(proto, deletionService); assertEquals(DeletionTaskType.FILE, deletionTask.getDeletionTaskType()); assertEquals(id, deletionTask.getTaskId()); }
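An absent or unrecognized task type falls back to FileDeletionTask, which is exactly what the test pins down: a proto carrying only an id still converts, coming back as a FILE task with the same task id.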
public synchronized boolean insertDocument(String tableName, Map<String, Object> document) { return insertDocuments(tableName, ImmutableList.of(document)); }
@Test public void testInsertDocumentsShouldThrowErrorWhenCassandraThrowsException() { doThrow(RejectedExecutionException.class) .when(cassandraClient) .execute(any(SimpleStatement.class)); assertThrows( CassandraResourceManagerException.class, () -> testManager.insertDocument(COLLECTION_NAME, new HashMap<>())); }
public final void fail() { metadata().fail(ImmutableList.<Fact>of()); }
@Test public void failNoMessage() { expectFailure.whenTesting().fail(); assertThatFailure().hasMessageThat().isEmpty(); }
public static Config merge(Config config, Config fallback) { var root1 = config.root(); var root2 = fallback.root(); var origin = new ContainerConfigOrigin(config.origin(), fallback.origin()); var path = ConfigValuePath.root(); var newRoot = mergeObjects(origin, path, root1, root2); return new SimpleConfig(origin, newRoot); }
@Test void testSubobjectsMerge() { var config1 = MapConfigFactory.fromMap(Map.of( "field1", Map.of( "f1", "v1", "f2", "v2" ) )); var config2 = MapConfigFactory.fromMap(Map.of( "field1", Map.of( "f2", "v3", "f3", "v4" ) )); var config = MergeConfigFactory.merge(config1, config2); assertThat(config.get(ConfigValuePath.root().child("field1").child("f1"))) .isInstanceOf(ConfigValue.StringValue.class) .hasFieldOrPropertyWithValue("value", "v1"); assertThat(config.get(ConfigValuePath.root().child("field1").child("f2"))) .isInstanceOf(ConfigValue.StringValue.class) .hasFieldOrPropertyWithValue("value", "v2"); assertThat(config.get(ConfigValuePath.root().child("field1").child("f3"))) .isInstanceOf(ConfigValue.StringValue.class) .hasFieldOrPropertyWithValue("value", "v4"); }
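The merge is left-biased and recursive: nested objects are merged key-by-key, a key present on both sides resolves to the first config's value (f2 stays "v2"), and keys only in the fallback fall through (f3 becomes "v4"). Scalars therefore behave like overrides, the usual application-config-over-defaults layering.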
public static NamenodeRole convert(NamenodeRoleProto role) { switch (role) { case NAMENODE: return NamenodeRole.NAMENODE; case BACKUP: return NamenodeRole.BACKUP; case CHECKPOINT: return NamenodeRole.CHECKPOINT; } return null; }
@Test public void TestConvertDatanodeStorage() { DatanodeStorage dns1 = new DatanodeStorage( "id1", DatanodeStorage.State.NORMAL, StorageType.SSD); DatanodeStorageProto proto = PBHelperClient.convert(dns1); DatanodeStorage dns2 = PBHelperClient.convert(proto); compare(dns1, dns2); }
@Override public Mono<CategoryVo> getByName(String name) { return client.fetch(Category.class, name) .map(CategoryVo::from); }
@Test void getByName() throws JSONException { when(client.fetch(eq(Category.class), eq("hello"))) .thenReturn(Mono.just(category())); CategoryVo categoryVo = categoryFinder.getByName("hello").block(); categoryVo.getMetadata().setCreationTimestamp(null); JSONAssert.assertEquals(""" { "metadata": { "name": "hello", "annotations": { "K1": "V1" } }, "spec": { "displayName": "displayName-1", "slug": "slug-1", "description": "description-1", "cover": "cover-1", "template": "template-1", "priority": 0, "children": [ "C1", "C2" ], "preventParentPostCascadeQuery": false, "hideFromList": false } } """, JsonUtils.objectToJson(categoryVo), true); }
@Override public ParseResult parsePath(String path) { String original = path; path = path.replace('/', '\\'); if (WORKING_DIR_WITH_DRIVE.matcher(path).matches()) { throw new InvalidPathException( original, "Jimfs does not currently support the Windows syntax for a relative path " + "on a specific drive (e.g. \"C:foo\\bar\")"); } String root; if (path.startsWith("\\\\")) { root = parseUncRoot(path, original); } else if (path.startsWith("\\")) { throw new InvalidPathException( original, "Jimfs does not currently support the Windows syntax for an absolute path " + "on the current drive (e.g. \"\\foo\\bar\")"); } else { root = parseDriveRoot(path); } // check for root.length() > 3 because only "C:\" type roots are allowed to have : int startIndex = root == null || root.length() > 3 ? 0 : root.length(); for (int i = startIndex; i < path.length(); i++) { char c = path.charAt(i); if (isReserved(c)) { throw new InvalidPathException(original, "Illegal char <" + c + ">", i); } } Matcher trailingSpaceMatcher = TRAILING_SPACES.matcher(path); if (trailingSpaceMatcher.find()) { throw new InvalidPathException(original, "Trailing char < >", trailingSpaceMatcher.start()); } if (root != null) { path = path.substring(root.length()); if (!root.endsWith("\\")) { root = root + "\\"; } } return new ParseResult(root, splitter().split(path)); }
@Test public void testWindows_uncPaths() { PathType windows = PathType.windows(); PathType.ParseResult path = windows.parsePath("\\\\host\\share"); assertParseResult(path, "\\\\host\\share\\"); path = windows.parsePath("\\\\HOST\\share\\foo\\bar"); assertParseResult(path, "\\\\HOST\\share\\", "foo", "bar"); try { windows.parsePath("\\\\"); fail(); } catch (InvalidPathException expected) { assertThat(expected.getInput()).isEqualTo("\\\\"); assertThat(expected.getReason()).isEqualTo("UNC path is missing hostname"); } try { windows.parsePath("\\\\host"); fail(); } catch (InvalidPathException expected) { assertThat(expected.getInput()).isEqualTo("\\\\host"); assertThat(expected.getReason()).isEqualTo("UNC path is missing sharename"); } try { windows.parsePath("\\\\host\\"); fail(); } catch (InvalidPathException expected) { assertThat(expected.getInput()).isEqualTo("\\\\host\\"); assertThat(expected.getReason()).isEqualTo("UNC path is missing sharename"); } try { windows.parsePath("//host"); fail(); } catch (InvalidPathException expected) { assertThat(expected.getInput()).isEqualTo("//host"); assertThat(expected.getReason()).isEqualTo("UNC path is missing sharename"); } }
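parseUncRoot is not shown; a sketch consistent with the four failure assertions above, assuming it only has to extract a hostname and sharename after the leading backslashes (the real Jimfs code differs in detail):

// Hypothetical sketch of parseUncRoot implied by the tests; not the verified Jimfs code.
private static String parseUncRoot(String path, String original) {
    int hostEnd = path.indexOf('\\', 2);
    String host = hostEnd < 0 ? path.substring(2) : path.substring(2, hostEnd);
    if (host.isEmpty()) {
        throw new InvalidPathException(original, "UNC path is missing hostname");
    }
    String share = hostEnd < 0 ? "" : path.substring(hostEnd + 1);
    int shareEnd = share.indexOf('\\');
    if (shareEnd >= 0) {
        share = share.substring(0, shareEnd);
    }
    if (share.isEmpty()) {
        throw new InvalidPathException(original, "UNC path is missing sharename");
    }
    return "\\\\" + host + "\\" + share; // caller appends the trailing backslash
}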
public QueryParseResult parse(String sql, @Nonnull SqlSecurityContext ssc) { try { return parse0(sql, ssc); } catch (QueryException e) { throw e; } catch (Exception e) { String message; // Check particular type of exception which causes typical long multiline error messages. if (e instanceof SqlParseException && e.getCause() instanceof ParseException) { message = trimMessage(e.getMessage()); } else { message = e.getMessage(); } throw QueryException.error(SqlErrorCode.PARSING, message, e); } }
@Test public void when_multipleStatements_then_fails() { assertThatThrownBy(() -> parser.parse("SELECT * FROM t; SELECT * FROM t")) .isInstanceOf(QueryException.class) .hasMessage("The command must contain a single statement"); }
@Override public List<User> loadByIds(Collection<String> ids) { final HashSet<String> userIds = new HashSet<>(ids); final List<User> users = new ArrayList<>(); // special case for the locally defined user, we don't store that in MongoDB. if (!configuration.isRootUserDisabled() && userIds.stream().anyMatch(UserImpl.LocalAdminUser.LOCAL_ADMIN_ID::equals)) { // The local admin ID is not a valid ObjectId so we have to remove it from the query userIds.remove(UserImpl.LocalAdminUser.LOCAL_ADMIN_ID); users.add(userFactory.createLocalAdminUser(roleService.getAdminRoleObjectId())); } final DBObject query = new BasicDBObject(); query.put("_id", new BasicDBObject("$in", userIds.stream().map(ObjectId::new).collect(Collectors.toSet()))); final List<DBObject> result = query(UserImpl.class, query); if (result == null || result.isEmpty()) { return users; } for (final DBObject dbObject : result) { //noinspection unchecked users.add(userFactory.create((ObjectId) dbObject.get("_id"), dbObject.toMap())); } return users; }
@Test @MongoDBFixtures("UserServiceImplTest.json") public void testLoadByUserIds() throws Exception { final List<User> users = userService.loadByIds(ImmutableSet.of( "54e3deadbeefdeadbeef0001", "54e3deadbeefdeadbeef0002", UserImpl.LocalAdminUser.LOCAL_ADMIN_ID )); assertThat(users).hasSize(3); assertThat(users.get(0).getId()).isEqualTo("local:admin"); assertThat(users.get(0).getName()).isEqualTo("admin"); assertThat(users.get(0).getEmail()).isEmpty(); assertThat(users.get(1).getId()).isEqualTo("54e3deadbeefdeadbeef0001"); assertThat(users.get(1).getName()).isEqualTo("user1"); assertThat(users.get(1).getEmail()).isEqualTo("user1@example.com"); assertThat(users.get(2).getId()).isEqualTo("54e3deadbeefdeadbeef0002"); assertThat(users.get(2).getName()).isEqualTo("user2"); assertThat(users.get(2).getEmail()).isEqualTo("user2@example.com"); }
public double distanceToAsDouble(final IGeoPoint other) { final double lat1 = DEG2RAD * getLatitude(); final double lat2 = DEG2RAD * other.getLatitude(); final double lon1 = DEG2RAD * getLongitude(); final double lon2 = DEG2RAD * other.getLongitude(); return RADIUS_EARTH_METERS * 2 * Math.asin(Math.min(1, Math.sqrt( Math.pow(Math.sin((lat2 - lat1) / 2), 2) + Math.cos(lat1) * Math.cos(lat2) * Math.pow(Math.sin((lon2 - lon1) / 2), 2) ))); }
@Test public void test_distanceTo_Parallels() { final double ratioDelta = 1E-5; final int iterations = 100; for (int i = 0; i < iterations; i++) { final double latitude = getRandomLatitude(); final double longitude1 = getRandomLongitude(); final double longitude2 = getRandomLongitude(); final GeoPoint target = new GeoPoint(latitude, longitude1); final GeoPoint other = new GeoPoint(latitude, longitude2); final double diff = getCleanLongitudeDiff(longitude1, longitude2); final double expected = GeoConstants.RADIUS_EARTH_METERS * 2 * Math.asin( Math.cos(latitude * MathConstants.DEG2RAD) * Math.sin(diff * MathConstants.DEG2RAD / 2)); if (expected < minimumDistance) { continue; } final double delta = expected * ratioDelta; assertEquals("distance between " + target + " and " + other, expected, target.distanceToAsDouble(other), delta); } }
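distanceToAsDouble is the haversine formula, d = 2R * asin(sqrt(sin^2(dLat/2) + cos(lat1) * cos(lat2) * sin^2(dLon/2))). With lat1 = lat2 = lat, as in this parallels test, the first term vanishes and it reduces to exactly the expected value computed above, d = 2R * asin(cos(lat) * sin(dLon/2)). The Math.min(1, ...) clamp guards asin against floating-point drift for near-antipodal points.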
public static String describe(List<org.apache.iceberg.expressions.Expression> exprs) { return exprs.stream().map(Spark3Util::describe).collect(Collectors.joining(", ")); }
@Test public void testDescribeSortOrder() { Schema schema = new Schema( required(1, "data", Types.StringType.get()), required(2, "time", Types.TimestampType.withoutZone())); Assert.assertEquals( "Sort order isn't correct.", "data DESC NULLS FIRST", Spark3Util.describe(buildSortOrder("Identity", schema, 1))); Assert.assertEquals( "Sort order isn't correct.", "bucket(1, data) DESC NULLS FIRST", Spark3Util.describe(buildSortOrder("bucket[1]", schema, 1))); Assert.assertEquals( "Sort order isn't correct.", "truncate(data, 3) DESC NULLS FIRST", Spark3Util.describe(buildSortOrder("truncate[3]", schema, 1))); Assert.assertEquals( "Sort order isn't correct.", "years(time) DESC NULLS FIRST", Spark3Util.describe(buildSortOrder("year", schema, 2))); Assert.assertEquals( "Sort order isn't correct.", "months(time) DESC NULLS FIRST", Spark3Util.describe(buildSortOrder("month", schema, 2))); Assert.assertEquals( "Sort order isn't correct.", "days(time) DESC NULLS FIRST", Spark3Util.describe(buildSortOrder("day", schema, 2))); Assert.assertEquals( "Sort order isn't correct.", "hours(time) DESC NULLS FIRST", Spark3Util.describe(buildSortOrder("hour", schema, 2))); Assert.assertEquals( "Sort order isn't correct.", "unknown(data) DESC NULLS FIRST", Spark3Util.describe(buildSortOrder("unknown", schema, 1))); // multiple sort orders SortOrder multiOrder = SortOrder.builderFor(schema).asc("time", NULLS_FIRST).asc("data", NULLS_LAST).build(); Assert.assertEquals( "Sort order isn't correct.", "time ASC NULLS FIRST, data ASC NULLS LAST", Spark3Util.describe(multiOrder)); }
public List<Ce.Task> formatQueue(DbSession dbSession, List<CeQueueDto> dtos) { DtoCache cache = DtoCache.forQueueDtos(dbClient, dbSession, dtos); return dtos.stream().map(input -> formatQueue(input, cache)).toList(); }
@Test public void formatQueues() { CeQueueDto dto1 = new CeQueueDto(); dto1.setUuid("UUID1"); dto1.setTaskType("TYPE1"); dto1.setStatus(CeQueueDto.Status.IN_PROGRESS); dto1.setCreatedAt(1_450_000_000_000L); CeQueueDto dto2 = new CeQueueDto(); dto2.setUuid("UUID2"); dto2.setTaskType("TYPE2"); dto2.setStatus(CeQueueDto.Status.PENDING); dto2.setCreatedAt(1_451_000_000_000L); Iterable<Ce.Task> wsTasks = underTest.formatQueue(db.getSession(), asList(dto1, dto2)); assertThat(wsTasks).extracting("id").containsExactly("UUID1", "UUID2"); }
static Function<SeaTunnelRow, SeaTunnelRow> createKeyExtractor(int[] pkFields) { return row -> { Object[] fields = new Object[pkFields.length]; for (int i = 0; i < pkFields.length; i++) { fields[i] = row.getField(pkFields[i]); } SeaTunnelRow newRow = new SeaTunnelRow(fields); newRow.setTableId(row.getTableId()); return newRow; }; }
@Test public void testKeyExtractor() { SeaTunnelRowType rowType = new SeaTunnelRowType( new String[] {"id", "name", "age"}, new SeaTunnelDataType[] { BasicType.INT_TYPE, BasicType.STRING_TYPE, BasicType.INT_TYPE }); SeaTunnelRowType pkType = new SeaTunnelRowType( new String[] {"id"}, new SeaTunnelDataType[] {BasicType.INT_TYPE}); int[] pkFields = Arrays.stream(pkType.getFieldNames()).mapToInt(rowType::indexOf).toArray(); SeaTunnelRow insertRow = new SeaTunnelRow(new Object[] {1, "a", 60}); insertRow.setTableId("test"); insertRow.setRowKind(RowKind.INSERT); SeaTunnelRow updateBefore = new SeaTunnelRow(new Object[] {1, "a"}); updateBefore.setTableId("test"); updateBefore.setRowKind(RowKind.UPDATE_BEFORE); SeaTunnelRow updateAfter = new SeaTunnelRow(new Object[] {1, "b"}); updateAfter.setTableId("test"); updateAfter.setRowKind(RowKind.UPDATE_AFTER); SeaTunnelRow deleteRow = new SeaTunnelRow(new Object[] {1}); deleteRow.setTableId("test"); deleteRow.setRowKind(RowKind.DELETE); Function<SeaTunnelRow, SeaTunnelRow> keyExtractor = JdbcOutputFormatBuilder.createKeyExtractor(pkFields); keyExtractor.apply(insertRow); Assertions.assertEquals(keyExtractor.apply(insertRow), keyExtractor.apply(insertRow)); Assertions.assertEquals(keyExtractor.apply(insertRow), keyExtractor.apply(updateBefore)); Assertions.assertEquals(keyExtractor.apply(insertRow), keyExtractor.apply(updateAfter)); Assertions.assertEquals(keyExtractor.apply(insertRow), keyExtractor.apply(deleteRow)); updateBefore.setTableId("test1"); Assertions.assertNotEquals(keyExtractor.apply(insertRow), keyExtractor.apply(updateBefore)); updateAfter.setField(0, "2"); Assertions.assertNotEquals(keyExtractor.apply(insertRow), keyExtractor.apply(updateAfter)); }
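The key row produced by createKeyExtractor carries only the PK columns plus the tableId and does not copy the RowKind, so the extracted keys of the INSERT/UPDATE_BEFORE/UPDATE_AFTER/DELETE images of one record all compare equal; that is what lets a JDBC sink group changelog events per key. The last two assertions confirm that changing the tableId or the key value does break equality.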
public Optional<EventProcessorStateDto> setState(EventProcessorStateDto dto) { return setState(dto.eventDefinitionId(), dto.minProcessedTimestamp(), dto.maxProcessedTimestamp()); }
@Test public void persistence() { final DateTime now = DateTime.now(DateTimeZone.UTC); final DateTime min = now.minusHours(1); final DateTime max = now; final EventProcessorStateDto stateDto = EventProcessorStateDto.builder() .eventDefinitionId("abc123") .minProcessedTimestamp(min) .maxProcessedTimestamp(max) .build(); assertThat(stateService.setState(stateDto)).isPresent().get().satisfies(dto -> { assertThat(dto.id()).isNotBlank(); assertThat(dto.eventDefinitionId()).isEqualTo("abc123"); assertThat(dto.minProcessedTimestamp()).isEqualTo(min); assertThat(dto.maxProcessedTimestamp()).isEqualTo(max); }); assertThatThrownBy(() -> stateService.setState("", min, max)) .hasMessageContaining("eventDefinitionId") .isInstanceOf(IllegalArgumentException.class); assertThatThrownBy(() -> stateService.setState(null, min, max)) .hasMessageContaining("eventDefinitionId") .isInstanceOf(IllegalArgumentException.class); assertThatThrownBy(() -> stateService.setState("a", null, max)) .hasMessageContaining("minProcessedTimestamp") .isInstanceOf(IllegalArgumentException.class); assertThatThrownBy(() -> stateService.setState("a", min, null)) .hasMessageContaining("maxProcessedTimestamp") .isInstanceOf(IllegalArgumentException.class); // A max timestamp that is older than the min timestamp is an error! (e.g. mixing up arguments) assertThatThrownBy(() -> stateService.setState("a", max, min)) .hasMessageContaining("minProcessedTimestamp") .hasMessageContaining("maxProcessedTimestamp") .isInstanceOf(IllegalArgumentException.class); }
protected Object getValidJMSHeaderValue(String headerName, Object headerValue) { if (headerValue instanceof String) { return headerValue; } else if (headerValue instanceof BigInteger) { return headerValue.toString(); } else if (headerValue instanceof BigDecimal) { return headerValue.toString(); } else if (headerValue instanceof Number) { return headerValue; } else if (headerValue instanceof Character) { return headerValue; } else if (headerValue instanceof CharSequence) { return headerValue.toString(); } else if (headerValue instanceof Boolean) { return headerValue; } else if (headerValue instanceof Date) { if (this.endpoint.getConfiguration().isFormatDateHeadersToIso8601()) { return ZonedDateTime.ofInstant(((Date) headerValue).toInstant(), ZoneOffset.UTC).toString(); } else { return headerValue.toString(); } } return null; }
@Test public void testGetValidJmsHeaderValueWithDateShouldSucceed() { Object value = jmsBindingUnderTest.getValidJMSHeaderValue("foo", Date.from(instant)); assertNotNull(value); // We can't assert further as the returned value is bound to the machine time zone and locale }
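The comment above is the point of the Date branch in getValidJMSHeaderValue: the ISO-8601 path yields a machine-independent string, while Date.toString() depends on the default time zone and locale, so the test cannot assert on it. A small standalone illustration of the two outputs:

import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.Date;

// Contrast of the two Date renderings used by the focal method.
public class DateHeaderDemo {
    public static void main(String[] args) {
        Date date = new Date(0L);
        String iso8601 = ZonedDateTime.ofInstant(date.toInstant(), ZoneOffset.UTC).toString();
        System.out.println(iso8601);         // 1970-01-01T00:00Z, same on every machine
        System.out.println(date.toString()); // e.g. "Thu Jan 01 01:00:00 CET 1970", machine-dependent
    }
}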
@Override public ItemChangeSets resolve(long namespaceId, String configText, List<ItemDTO> baseItems) { Map<Integer, ItemDTO> oldLineNumMapItem = BeanUtils.mapByKey("lineNum", baseItems); Map<String, ItemDTO> oldKeyMapItem = BeanUtils.mapByKey("key", baseItems); // comment and blank items map to an empty key, so drop that entry. oldKeyMapItem.remove(""); String[] newItems = configText.split(ITEM_SEPARATOR); Set<String> repeatKeys = new HashSet<>(); if (isHasRepeatKey(newItems, repeatKeys)) { throw new BadRequestException("Config text has repeated keys: %s, please check your input.", repeatKeys); } ItemChangeSets changeSets = new ItemChangeSets(); Map<Integer, String> newLineNumMapItem = new HashMap<>(); // used later to delete stale blank and comment items int lineCounter = 1; for (String newItem : newItems) { newItem = newItem.trim(); newLineNumMapItem.put(lineCounter, newItem); ItemDTO oldItemByLine = oldLineNumMapItem.get(lineCounter); //comment item if (isCommentItem(newItem)) { handleCommentLine(namespaceId, oldItemByLine, newItem, lineCounter, changeSets); //blank item } else if (isBlankItem(newItem)) { handleBlankLine(namespaceId, oldItemByLine, lineCounter, changeSets); //normal item } else { handleNormalLine(namespaceId, oldKeyMapItem, newItem, lineCounter, changeSets); } lineCounter++; } deleteCommentAndBlankItem(oldLineNumMapItem, newLineNumMapItem, changeSets); deleteNormalKVItem(oldKeyMapItem, changeSets); return changeSets; }
@Test public void testUpdateItem() { ItemChangeSets changeSets = resolver.resolve(1, "a=d", mockBaseItemHas3Key()); List<ItemDTO> updateItems = changeSets.getUpdateItems(); Assert.assertEquals(1, updateItems.size()); Assert.assertEquals("d", updateItems.get(0).getValue()); }
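resolve() classifies each trimmed line as a comment, a blank, or a normal key=value entry before dispatching to the matching handler. A toy sketch of that classification; the '#'/'!' comment prefixes are an assumption mirroring java.util.Properties conventions, not necessarily the production rule:

// Toy line classifier illustrating the dispatch in resolve().
public class LineClassifierDemo {
    static boolean isCommentLine(String line) {
        return line.startsWith("#") || line.startsWith("!");
    }

    static boolean isBlankLine(String line) {
        return line.isEmpty();
    }

    public static void main(String[] args) {
        String configText = "# db settings\n\na=d";
        int lineNum = 1;
        for (String line : configText.split("\n")) {
            String trimmed = line.trim();
            String kind = isCommentLine(trimmed) ? "comment"
                    : isBlankLine(trimmed) ? "blank" : "normal";
            System.out.println(lineNum++ + ": " + kind); // 1: comment, 2: blank, 3: normal
        }
    }
}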
@Override public boolean authorize(P principal, String role, @Nullable ContainerRequestContext requestContext) { try (Timer.Context context = getsTimer.time()) { final AuthorizationContext<P> cacheKey = getAuthorizationContext(principal, role, requestContext); return Boolean.TRUE.equals(cache.get(cacheKey)); } catch (CompletionException e) { Throwable cause = e.getCause(); if (cause instanceof RuntimeException) { throw (RuntimeException) cause; } if (cause instanceof Error) { throw (Error) cause; } throw e; } }
@Test void cachesTheFirstReturnedPrincipal() throws Exception { assertThat(cached.authorize(principal, role, requestContext)).isTrue(); assertThat(cached.authorize(principal, role, requestContext)).isTrue(); verify(underlying, times(1)).authorize(principal, role, requestContext); }
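The CompletionException handling in authorize() exists because caches that compute entries asynchronously wrap loader failures; unwrapping lets callers see the original exception type, which keeps the caching layer transparent. A standalone sketch of the pattern:

import java.util.concurrent.CompletionException;

// Demonstrates the wrap/unwrap round trip that authorize() performs.
public class UnwrapDemo {
    static boolean loadOrFail() {
        throw new IllegalStateException("authorizer failed");
    }

    public static void main(String[] args) {
        try {
            try {
                loadOrFail();
            } catch (RuntimeException e) {
                // What an async cache does with a failing loader: wrap it.
                throw new CompletionException(e);
            }
        } catch (CompletionException e) {
            // Unwrap, as authorize() does, so the caller sees the real type.
            Throwable cause = e.getCause();
            System.out.println("unwrapped: " + cause.getClass().getSimpleName());
        }
    }
}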
public MetricSampleAggregationResult<String, PartitionEntity> aggregate(Cluster cluster, long now, OperationProgress operationProgress) throws NotEnoughValidWindowsException { ModelCompletenessRequirements requirements = new ModelCompletenessRequirements(1, 0.0, false); return aggregate(cluster, -1L, now, requirements, operationProgress); }
@Test public void testTooManyFlaws() throws NotEnoughValidWindowsException { KafkaCruiseControlConfig config = new KafkaCruiseControlConfig(getLoadMonitorProperties()); Metadata metadata = getMetadata(Collections.singleton(TP)); KafkaPartitionMetricSampleAggregator metricSampleAggregator = new KafkaPartitionMetricSampleAggregator(config, metadata); // Only give two samples to the aggregator. CruiseControlUnitTestUtils.populateSampleAggregator(NUM_WINDOWS - 2, MIN_SAMPLES_PER_WINDOW, metricSampleAggregator, PE, 3, WINDOW_MS, KafkaMetricDef.commonMetricDef()); MetricSampleAggregationResult<String, PartitionEntity> result = metricSampleAggregator.aggregate(metadata.fetch(), NUM_WINDOWS * WINDOW_MS, new OperationProgress()); // Partition "topic-0" is expected to be a valid partition in the result, with valid sample values collected for windows [1, NUM_WINDOWS - 3]. assertEquals(NUM_WINDOWS - 3, result.valuesAndExtrapolations().get(PE).windows().size()); }
@Override public AuthorityRuleConfiguration swapToObject(final YamlAuthorityRuleConfiguration yamlConfig) { Collection<ShardingSphereUser> users = yamlConfig.getUsers().stream().map(userSwapper::swapToObject).collect(Collectors.toList()); AlgorithmConfiguration provider = algorithmSwapper.swapToObject(yamlConfig.getPrivilege()); if (null == provider) { provider = new DefaultAuthorityRuleConfigurationBuilder().build().getPrivilegeProvider(); } Map<String, AlgorithmConfiguration> authenticators = yamlConfig.getAuthenticators().entrySet().stream() .collect(Collectors.toMap(Entry::getKey, entry -> algorithmSwapper.swapToObject(entry.getValue()))); return new AuthorityRuleConfiguration(users, provider, authenticators, yamlConfig.getDefaultAuthenticator()); }
@Test void assertSwapToObject() { YamlAuthorityRuleConfiguration authorityRuleConfig = new YamlAuthorityRuleConfiguration(); authorityRuleConfig.setUsers(Collections.singletonList(getYamlUser())); authorityRuleConfig.setPrivilege(createYamlAlgorithmConfiguration()); authorityRuleConfig.setDefaultAuthenticator("scram_sha256"); authorityRuleConfig.setAuthenticators(Collections.singletonMap("md5", createYamlAlgorithmConfiguration())); AuthorityRuleConfiguration actual = swapper.swapToObject(authorityRuleConfig); assertThat(actual.getUsers().size(), is(1)); assertNotNull(actual.getPrivilegeProvider()); assertThat(actual.getDefaultAuthenticator(), is("scram_sha256")); assertThat(actual.getAuthenticators().size(), is(1)); }
public static void getSemanticPropsSingleFromString( SingleInputSemanticProperties result, String[] forwarded, String[] nonForwarded, String[] readSet, TypeInformation<?> inType, TypeInformation<?> outType) { getSemanticPropsSingleFromString( result, forwarded, nonForwarded, readSet, inType, outType, false); }
@Test void testReadFieldsPojo() { String[] readFields = {"int2; string1"}; SingleInputSemanticProperties sp = new SingleInputSemanticProperties(); SemanticPropUtil.getSemanticPropsSingleFromString( sp, null, null, readFields, pojoType, threeIntTupleType); FieldSet fs = sp.getReadFields(0); assertThat(fs).containsExactly(1, 3); readFields[0] = "*"; sp = new SingleInputSemanticProperties(); SemanticPropUtil.getSemanticPropsSingleFromString( sp, null, null, readFields, pojoType, intType); fs = sp.getReadFields(0); assertThat(fs).containsExactly(0, 1, 2, 3); }
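The expected read sets fall out of the POJO's field order: Flink keeps POJO fields sorted by name, so with fields int1, int2, int3, string1 (a field list inferred here from the expected indices; it is not shown in this excerpt), "int2" and "string1" land at logical positions 1 and 3, and "*" expands to all four. A toy illustration of that index lookup:

import java.util.Arrays;
import java.util.List;

// Toy mapping of named POJO fields to logical indices via sorted field order.
public class FieldIndexDemo {
    public static void main(String[] args) {
        List<String> sortedPojoFields = Arrays.asList("int1", "int2", "int3", "string1");
        System.out.println(sortedPojoFields.indexOf("int2"));    // 1
        System.out.println(sortedPojoFields.indexOf("string1")); // 3
    }
}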
@Override public boolean checkCredentials(String username, String password) { if (username == null || password == null) { return false; } Credentials credentials = new Credentials(username, password); if (validCredentialsCache.contains(credentials)) { return true; } else if (invalidCredentialsCache.contains(credentials)) { return false; } boolean isValid = this.username.equals(username) && this.passwordHash.equals( generatePasswordHash(algorithm, salt, password)); if (isValid) { validCredentialsCache.add(credentials); } else { invalidCredentialsCache.add(credentials); } return isValid; }
@Test public void test_upperCase() throws Exception { String[] algorithms = new String[] {"SHA-1", "SHA-256", "SHA-512"}; for (String algorithm : algorithms) { String hash = hash(algorithm, VALID_PASSWORD, SALT).toUpperCase(); MessageDigestAuthenticator messageDigestAuthenticator = new MessageDigestAuthenticator("/", VALID_USERNAME, hash, algorithm, SALT); for (String username : TEST_USERNAMES) { for (String password : TEST_PASSWORDS) { boolean expectedIsAuthenticated = VALID_USERNAME.equals(username) && VALID_PASSWORD.equals(password); boolean actualIsAuthenticated = messageDigestAuthenticator.checkCredentials(username, password); assertEquals(expectedIsAuthenticated, actualIsAuthenticated); } } } }
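A sketch of the kind of salted digest generatePasswordHash presumably computes; the salt/password concatenation order and the hex casing here are assumptions, not the production algorithm. That the upper-cased hash still authenticates suggests generatePasswordHash emits or normalizes upper-case hex before the equals comparison:

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

// Illustrative salted digest; concatenation order and casing are assumed.
public class SaltedHashDemo {
    static String hash(String algorithm, String salt, String password)
            throws NoSuchAlgorithmException {
        MessageDigest digest = MessageDigest.getInstance(algorithm);
        byte[] bytes = digest.digest((salt + password).getBytes(StandardCharsets.UTF_8));
        StringBuilder hex = new StringBuilder(bytes.length * 2);
        for (byte b : bytes) {
            hex.append(String.format("%02x", b)); // lower-case hex; casing is an assumption
        }
        return hex.toString();
    }

    public static void main(String[] args) throws Exception {
        System.out.println(hash("SHA-256", "salt", "password"));
    }
}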
@VisibleForTesting static List<Reporter> getReporters() { return self.reporters; }
@Test public void allReportersHiveConfig() throws Exception { String jsonFile = System.getProperty("java.io.tmpdir") + System.getProperty("file.separator") + "TestMetricsOutput.json"; Configuration conf = MetastoreConf.newMetastoreConf(); conf.set(MetastoreConf.ConfVars.HIVE_CODAHALE_METRICS_REPORTER_CLASSES.getHiveName(), "org.apache.hadoop.hive.common.metrics.metrics2.JsonFileMetricsReporter," + "org.apache.hadoop.hive.common.metrics.metrics2.JmxMetricsReporter," + "org.apache.hadoop.hive.common.metrics.metrics2.ConsoleMetricsReporter," + "org.apache.hadoop.hive.common.metrics.metrics2.Metrics2Reporter"); MetastoreConf.setVar(conf, MetastoreConf.ConfVars.METRICS_JSON_FILE_LOCATION, jsonFile); initializeMetrics(conf); Assert.assertEquals(4, Metrics.getReporters().size()); }
@Override public void processWatermark(Instant watermark, OpEmitter<OutT> emitter) { // propagate watermark immediately if no bundle is in progress and all the previous bundles have // completed. if (shouldProcessWatermark()) { LOG.debug("Propagating watermark: {} directly since no bundle in progress.", watermark); bundleProgressListener.onWatermark(watermark, emitter); return; } // hold back the watermark since there is either a bundle in progress or previously closed // bundles are unfinished. this.bundleWatermarkHold = watermark; }
@Test public void testProcessWatermarkWhenBundleNotStarted() { Instant watermark = new Instant(); portableBundleManager = new PortableBundleManager<>(bundleProgressListener, 4, 1, bundleTimerScheduler, TIMER_ID); portableBundleManager.processWatermark(watermark, emitter); verify(bundleProgressListener, times(1)).onWatermark(eq(watermark), eq(emitter)); }
@Override public void childEntriesWillBecomeVisible(final Entry submenu) { UserRole userRole = currentUserRole(); childEntriesWillBecomeVisible(submenu, userRole); }
@Test public void activatesSelectOnPopup_forCheckSelectionOnPopup() { Entry menuEntry = new Entry(); Entry actionEntry = new Entry(); menuEntry.addChild(actionEntry); final AFreeplaneAction someAction = Mockito.mock(AFreeplaneAction.class); when(someAction.checkSelectionOnPopup()).thenReturn(true); when(someAction.isEnabled()).thenReturn(true); new EntryAccessor().setAction(actionEntry, someAction); final ActionStatusUpdater actionSelectListener = new ActionStatusUpdater(); actionSelectListener.childEntriesWillBecomeVisible(menuEntry, UserRole.EDITOR); verify(someAction).setSelected(); }
public Properties apply(final Properties properties) { if (properties == null) { throw new IllegalArgumentException("properties must not be null"); } else { if (properties.isEmpty()) { return new Properties(); } else { final Properties filtered = new Properties(); for (Map.Entry<Object, Object> entry : properties.entrySet()) { final Object key = entry.getKey(); final Object value = entry.getValue(); if (!keysToRemove.contains(key)) { filtered.put(key, value); } } return filtered; } } }
@Test public void filtersMatchingKey() { // Given Properties properties = new Properties(); properties.put("one", 1); properties.put("two", 2); Set<String> removeOneKey = new HashSet<>(); removeOneKey.add("one"); Filter f = new Filter(removeOneKey); // When Properties filtered = f.apply(properties); // Then assertEquals(properties.size() - 1, filtered.size()); assertFalse(filtered.containsKey("one")); assertTrue(filtered.containsKey("two")); assertEquals(properties.get("two"), filtered.get("two")); }
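Since Properties implements Map, apply() can be written more compactly with forEach while keeping identical behavior (a fresh Properties with the matching keys dropped). Shown only as a design comparison, not the project's code:

import java.util.Properties;
import java.util.Set;

// Behaviorally equivalent, more compact formulation of Filter.apply().
public class PropertiesFilter {
    static Properties filter(Properties properties, Set<String> keysToRemove) {
        if (properties == null) {
            throw new IllegalArgumentException("properties must not be null");
        }
        Properties filtered = new Properties();
        properties.forEach((key, value) -> {
            if (!keysToRemove.contains(key)) {
                filtered.put(key, value);
            }
        });
        return filtered;
    }
}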
public Rule<ProjectNode> projectNodeRule() { return new PullUpExpressionInLambdaProjectNodeRule(); }
@Test public void testRegexpLikeExpression() { tester().assertThat(new PullUpExpressionInLambdaRules(getFunctionManager()).projectNodeRule()) .setSystemProperty(PULL_EXPRESSION_FROM_LAMBDA_ENABLED, "true") .on(p -> { p.variable("expr", new ArrayType(BOOLEAN)); p.variable("col", VARCHAR); p.variable("arr1", new ArrayType(VARCHAR)); return p.project( Assignments.builder().put(p.variable("expr", new ArrayType(BOOLEAN)), p.rowExpression("transform(arr1, x-> regexp_like(x, concat(col, 'a')))")).build(), p.values(p.variable("arr1", new ArrayType(VARCHAR)), p.variable("col", VARCHAR))); }) .matches( project( ImmutableMap.of("expr", expression("transform(arr1, x -> regexp_like(x, concat_1))")), project( ImmutableMap.of("concat_1", expression("concat(col, 'a')")), values("arr1", "col")))); }
public Pet getPetById(Long petId) throws RestClientException { return getPetByIdWithHttpInfo(petId).getBody(); }
@Test public void getPetByIdTest() { Long petId = null; Pet response = api.getPetById(petId); // TODO: test validations }
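The generated stub above calls the API with a null id and asserts nothing. A plausible fleshed-out round-trip version that would slot into the same generated test class; the id/name values and the preceding addPet call are assumptions about the test fixture:

// Hypothetical completed version of the generated stub (values assumed).
@Test
public void getPetByIdRoundTrip() {
    Pet pet = new Pet();
    pet.setId(42L);
    pet.setName("rex");
    api.addPet(pet); // assumes the standard petstore addPet operation is available

    Pet response = api.getPetById(42L);
    assertEquals(Long.valueOf(42L), response.getId());
    assertEquals("rex", response.getName());
}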
@Override public void run() { if (backgroundJobServer.isNotReadyToProcessJobs()) return; try (PeriodicTaskRunInfo runInfo = taskStatistics.startRun(backgroundJobServerConfiguration())) { tasks.forEach(task -> task.run(runInfo)); runInfo.markRunAsSucceeded(); } catch (Exception e) { taskStatistics.handleException(e); if (taskStatistics.hasTooManyExceptions()) { if (e instanceof StorageException) { LOGGER.error("FATAL - JobRunr encountered too many storage exceptions. Shutting down. Did you know JobRunr Pro has built-in database fault tolerance? Check out https://www.jobrunr.io/en/documentation/pro/database-fault-tolerance/", e); } else { LOGGER.error("FATAL - JobRunr encountered too many processing exceptions. Shutting down.", shouldNotHappenException(e)); } backgroundJobServer.stop(); } else { LOGGER.warn(JobRunrException.SHOULD_NOT_HAPPEN_MESSAGE + " - Processing will continue.", e); } } }
@Test void jobHandlerDoesNothingIfItIsNotInitialized() { Task mockedTask = mock(Task.class); JobHandler jobHandler = createJobHandlerWithTask(mockedTask); when(backgroundJobServer.isNotReadyToProcessJobs()).thenReturn(true); jobHandler.run(); verifyNoInteractions(mockedTask); }
public static boolean sortAndMerge(short[] array, short[] mergeArray, int pivotIndex, int toIndex, ShortComparator comparator) { if (array.length == 1) return false; sort(array, 0, pivotIndex, comparator); if (pivotIndex == toIndex || comparator.compare(array[pivotIndex - 1], array[pivotIndex]) <= 0) { return false; } merge(array, mergeArray, pivotIndex, toIndex, comparator); return true; }
@Test void test_sortandmerge_returns_false_when_sort_is_in_place() { short[] array = {3, 2, 1, 0, 4, 5, 6}; short[] mergeArray = new short[array.length]; assertFalse(PrimitiveArraySorter.sortAndMerge(array, mergeArray, 4, 7, Short::compare)); assertIsSorted(array); array = new short[]{3, 2, 1, 0, 4, 5, 6}; assertTrue(PrimitiveArraySorter.sortAndMerge(array, mergeArray, 3, 7, Short::compare)); assertIsSorted(mergeArray); }
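The false/true split in the test follows directly from sortAndMerge's early-exit check: after sorting the prefix, a merge is only needed when the last prefix element still exceeds the first element of the already-sorted suffix. A standalone trace for the two arrays in the test:

import java.util.Arrays;

// Trace of the early-exit comparison for both pivot choices in the test.
public class SortAndMergeTrace {
    public static void main(String[] args) {
        short[] a = {3, 2, 1, 0, 4, 5, 6};
        int pivot = 4;
        Arrays.sort(a, 0, pivot);                     // {0,1,2,3,4,5,6}
        System.out.println(a[pivot - 1] <= a[pivot]); // true -> already sorted, no merge

        short[] b = {3, 2, 1, 0, 4, 5, 6};
        pivot = 3;
        Arrays.sort(b, 0, pivot);                     // {1,2,3,0,4,5,6}
        System.out.println(b[pivot - 1] <= b[pivot]); // false -> merge required
    }
}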
@SuppressWarnings("MethodLength") static void dissectControlRequest( final ArchiveEventCode eventCode, final MutableDirectBuffer buffer, final int offset, final StringBuilder builder) { int encodedLength = dissectLogHeader(CONTEXT, eventCode, buffer, offset, builder); HEADER_DECODER.wrap(buffer, offset + encodedLength); encodedLength += MessageHeaderDecoder.ENCODED_LENGTH; switch (eventCode) { case CMD_IN_CONNECT: CONNECT_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendConnect(builder); break; case CMD_IN_CLOSE_SESSION: CLOSE_SESSION_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendCloseSession(builder); break; case CMD_IN_START_RECORDING: START_RECORDING_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStartRecording(builder); break; case CMD_IN_STOP_RECORDING: STOP_RECORDING_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStopRecording(builder); break; case CMD_IN_REPLAY: REPLAY_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendReplay(builder); break; case CMD_IN_STOP_REPLAY: STOP_REPLAY_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStopReplay(builder); break; case CMD_IN_LIST_RECORDINGS: LIST_RECORDINGS_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendListRecordings(builder); break; case CMD_IN_LIST_RECORDINGS_FOR_URI: LIST_RECORDINGS_FOR_URI_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendListRecordingsForUri(builder); break; case CMD_IN_LIST_RECORDING: LIST_RECORDING_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendListRecording(builder); break; case CMD_IN_EXTEND_RECORDING: EXTEND_RECORDING_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendExtendRecording(builder); break; case CMD_IN_RECORDING_POSITION: RECORDING_POSITION_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendRecordingPosition(builder); break; case CMD_IN_TRUNCATE_RECORDING: TRUNCATE_RECORDING_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendTruncateRecording(builder); break; case CMD_IN_STOP_RECORDING_SUBSCRIPTION: STOP_RECORDING_SUBSCRIPTION_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStopRecordingSubscription(builder); break; case CMD_IN_STOP_POSITION: STOP_POSITION_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStopPosition(builder); break; case CMD_IN_FIND_LAST_MATCHING_RECORD: FIND_LAST_MATCHING_RECORDING_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendFindLastMatchingRecord(builder); break; case CMD_IN_LIST_RECORDING_SUBSCRIPTIONS: LIST_RECORDING_SUBSCRIPTIONS_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); 
appendListRecordingSubscriptions(builder); break; case CMD_IN_START_BOUNDED_REPLAY: BOUNDED_REPLAY_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStartBoundedReplay(builder); break; case CMD_IN_STOP_ALL_REPLAYS: STOP_ALL_REPLAYS_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStopAllReplays(builder); break; case CMD_IN_REPLICATE: REPLICATE_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendReplicate(builder); break; case CMD_IN_STOP_REPLICATION: STOP_REPLICATION_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStopReplication(builder); break; case CMD_IN_START_POSITION: START_POSITION_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStartPosition(builder); break; case CMD_IN_DETACH_SEGMENTS: DETACH_SEGMENTS_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendDetachSegments(builder); break; case CMD_IN_DELETE_DETACHED_SEGMENTS: DELETE_DETACHED_SEGMENTS_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendDeleteDetachedSegments(builder); break; case CMD_IN_PURGE_SEGMENTS: PURGE_SEGMENTS_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendPurgeSegments(builder); break; case CMD_IN_ATTACH_SEGMENTS: ATTACH_SEGMENTS_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendAttachSegments(builder); break; case CMD_IN_MIGRATE_SEGMENTS: MIGRATE_SEGMENTS_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendMigrateSegments(builder); break; case CMD_IN_AUTH_CONNECT: AUTH_CONNECT_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendAuthConnect(builder); break; case CMD_IN_KEEP_ALIVE: KEEP_ALIVE_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendKeepAlive(builder); break; case CMD_IN_TAGGED_REPLICATE: TAGGED_REPLICATE_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendTaggedReplicate(builder); break; case CMD_IN_START_RECORDING2: START_RECORDING_REQUEST2_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStartRecording2(builder); break; case CMD_IN_EXTEND_RECORDING2: EXTEND_RECORDING_REQUEST2_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendExtendRecording2(builder); break; case CMD_IN_STOP_RECORDING_BY_IDENTITY: STOP_RECORDING_BY_IDENTITY_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStopRecordingByIdentity(builder); break; case CMD_IN_PURGE_RECORDING: PURGE_RECORDING_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendPurgeRecording(builder); break; case CMD_IN_REPLICATE2: REPLICATE_REQUEST2_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); 
appendReplicate2(builder); break; case CMD_IN_REQUEST_REPLAY_TOKEN: REPLAY_TOKEN_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendReplayToken(builder); break; default: builder.append(": unknown command"); } }
@Test void controlRequestListRecording() { internalEncodeLogHeader(buffer, 0, 32, 32, () -> 100_000_000L); final ListRecordingRequestEncoder requestEncoder = new ListRecordingRequestEncoder(); requestEncoder.wrapAndApplyHeader(buffer, LOG_HEADER_LENGTH, headerEncoder) .controlSessionId(19) .correlationId(178) .recordingId(1010101); dissectControlRequest(CMD_IN_LIST_RECORDING, buffer, 0, builder); assertEquals("[0.100000000] " + CONTEXT + ": " + CMD_IN_LIST_RECORDING.name() + " [32/32]:" + " controlSessionId=19" + " correlationId=178" + " recordingId=1010101", builder.toString()); }
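The "[0.100000000]" prefix in the expected string is the 100,000,000 ns timestamp supplied to the log header, rendered as seconds and zero-padded nanoseconds. A tiny sketch of that formatting, as an assumption about dissectLogHeader, which is not shown here:

// Renders a nanosecond timestamp the way the expected string shows it.
public class LogHeaderPrefixDemo {
    public static void main(String[] args) {
        long timestampNs = 100_000_000L;
        System.out.printf("[%d.%09d]%n",
                timestampNs / 1_000_000_000L, timestampNs % 1_000_000_000L);
        // prints: [0.100000000]
    }
}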
@Override public KTable<Windowed<K>, V> aggregate(final Initializer<V> initializer) { return aggregate(initializer, Materialized.with(null, null)); }
@Test public void shouldNotHaveNullInitializerTwoOptionMaterializedOnAggregate() { assertThrows(NullPointerException.class, () -> windowedCogroupedStream.aggregate(null, Materialized.as("test"))); }
@Override public Collection<RedisServer> slaves(NamedNode master) { List<Map<String, String>> slaves = connection.sync(StringCodec.INSTANCE, RedisCommands.SENTINEL_SLAVES, master.getName()); return toRedisServersList(slaves); }
@Test public void testSlaves() { Collection<RedisServer> masters = connection.masters(); Collection<RedisServer> slaves = connection.slaves(masters.iterator().next()); assertThat(slaves).hasSize(2); }
@SuppressWarnings("NullAway") protected final @PolyNull V copyValue(@PolyNull Expirable<V> expirable) { if (expirable == null) { return null; } V copy = copier.copy(expirable.get(), cacheManager.getClassLoader()); return requireNonNull(copy); }
@Test public void copyValue_null() { assertThat(jcache.copyValue(null)).isNull(); }
public SearchQuery parse(String encodedQueryString) { if (Strings.isNullOrEmpty(encodedQueryString) || "*".equals(encodedQueryString)) { return new SearchQuery(encodedQueryString); } final var queryString = URLDecoder.decode(encodedQueryString, StandardCharsets.UTF_8); final Matcher matcher = querySplitterMatcher(requireNonNull(queryString).trim()); final ImmutableMultimap.Builder<String, FieldValue> builder = ImmutableMultimap.builder(); final ImmutableSet.Builder<String> disallowedKeys = ImmutableSet.builder(); while (matcher.find()) { final String entry = matcher.group(); if (!entry.contains(":")) { builder.put(withPrefixIfNeeded(defaultField), createFieldValue(defaultFieldKey.getFieldType(), entry, false)); continue; } final Iterator<String> entryFields = FIELD_VALUE_SPLITTER.splitToList(entry).iterator(); checkArgument(entryFields.hasNext(), INVALID_ENTRY_MESSAGE, entry); final String key = entryFields.next(); // Skip if there are no valid k/v pairs. (i.e. "action:") if (!entryFields.hasNext()) { continue; } final boolean negate = key.startsWith("-"); final String cleanKey = key.replaceFirst("^-", ""); final String value = entryFields.next(); VALUE_SPLITTER.splitToList(value).forEach(v -> { if (!dbFieldMapping.containsKey(cleanKey)) { disallowedKeys.add(cleanKey); } final SearchQueryField translatedKey = dbFieldMapping.get(cleanKey); if (translatedKey != null) { builder.put(withPrefixIfNeeded(translatedKey.getDbField()), createFieldValue(translatedKey.getFieldType(), v, negate)); } else { builder.put(withPrefixIfNeeded(defaultField), createFieldValue(defaultFieldKey.getFieldType(), v, negate)); } }); checkArgument(!entryFields.hasNext(), INVALID_ENTRY_MESSAGE, entry); } return new SearchQuery(queryString, builder.build(), disallowedKeys.build()); }
@Test void emptyFieldPrefixDoesNotChangeDefaultBehavior() { final SearchQueryParser parser = new SearchQueryParser("name", ImmutableSet.of("name", "breed"), ""); final SearchQuery searchQuery = parser.parse("Bobby breed:terrier"); final Multimap<String, SearchQueryParser.FieldValue> queryMap = searchQuery.getQueryMap(); assertThat(queryMap.keySet().size()).isEqualTo(2); assertThat(queryMap.keySet()).containsOnly("name", "breed"); assertThat(queryMap.get("name")).containsOnly(new SearchQueryParser.FieldValue("Bobby", false)); assertThat(queryMap.get("breed")).containsOnly(new SearchQueryParser.FieldValue("terrier", false)); assertThat(searchQuery.hasDisallowedKeys()).isFalse(); }
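Beyond the default-field case covered above, parse() also handles negated keys: a leading '-' is stripped from the key and flips the negate flag on the resulting FieldValue. A short sketch exercising that path, reusing the constructor arguments and assertion style of the test above (JUnit 5 with AssertJ assumed):

import static org.assertj.core.api.Assertions.assertThat;

import com.google.common.collect.ImmutableSet;
import org.junit.jupiter.api.Test;

// Sketch of the negation path in parse(); the expected flag follows directly
// from the "-" prefix handling in the focal method.
class NegatedKeyDemo {
    @Test
    void negatedKeySetsNegateFlag() {
        final SearchQueryParser parser =
                new SearchQueryParser("name", ImmutableSet.of("name", "breed"), "");
        final SearchQuery query = parser.parse("-breed:terrier");
        assertThat(query.getQueryMap().get("breed"))
                .containsOnly(new SearchQueryParser.FieldValue("terrier", true));
    }
}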