Columns: focal_method (string, lengths 13 to 60.9k) · test_case (string, lengths 25 to 109k)
public static String getInputParameters(Properties properties) { boolean logAllParameters = ConvertUtils.toBoolean(properties.getProperty(PropertyKeyConst.LOG_ALL_PROPERTIES), false); StringBuilder result = new StringBuilder(); if (logAllParameters) { result.append( "Log nacos client init properties with Full mode. This mode is only used for debugging and troubleshooting. "); result.append( "Please disable this mode by removing the property `logAllProperties` after you finish debugging or troubleshooting.\n"); result.append("Nacos client all init properties: \n"); properties.forEach( (key, value) -> result.append("\t").append(key.toString()).append("=").append(value.toString()) .append("\n")); } else { result.append("Nacos client key init properties: \n"); appendKeyParameters(result, properties, PropertyKeyConst.SERVER_ADDR); appendKeyParameters(result, properties, PropertyKeyConst.NAMESPACE); appendKeyParameters(result, properties, PropertyKeyConst.ENDPOINT); appendKeyParameters(result, properties, PropertyKeyConst.ENDPOINT_PORT); appendKeyParameters(result, properties, PropertyKeyConst.USERNAME); appendKeyParameters(result, properties, PropertyKeyConst.PASSWORD); appendKeyParameters(result, properties, PropertyKeyConst.ACCESS_KEY); appendKeyParameters(result, properties, PropertyKeyConst.SECRET_KEY); appendKeyParameters(result, properties, PropertyKeyConst.RAM_ROLE_NAME); appendKeyParameters(result, properties, PropertyKeyConst.SIGNATURE_REGION_ID); } return result.toString(); }
@Test void testGetInputParameters() { Properties properties = new Properties(); properties.setProperty("testKey", "testValue"); properties.setProperty(PropertyKeyConst.SERVER_ADDR, "localhost:8848"); NacosClientProperties clientProperties = NacosClientProperties.PROTOTYPE.derive(properties); String actual = ParamUtil.getInputParameters(clientProperties.asProperties()); assertEquals("Nacos client key init properties: \n\tserverAddr=localhost:8848\n", actual); }
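A hedged companion sketch for the full-logging branch, reusing ParamUtil and PropertyKeyConst from the pair above and assuming a JUnit 5 assertTrue is in scope; the asserted banner prefix is taken verbatim from the focal method:
@Test
void testGetInputParametersWithLogAllProperties() {
    Properties properties = new Properties();
    properties.setProperty(PropertyKeyConst.LOG_ALL_PROPERTIES, "true");
    properties.setProperty("testKey", "testValue");
    String actual = ParamUtil.getInputParameters(properties);
    // Full mode prints a warning banner, then dumps every property line by line.
    assertTrue(actual.startsWith("Log nacos client init properties with Full mode"));
    assertTrue(actual.contains("Nacos client all init properties: \n"));
    assertTrue(actual.contains("\ttestKey=testValue\n"));
}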
@VisibleForTesting public ConfigDO validateConfigExists(Long id) { if (id == null) { return null; } ConfigDO config = configMapper.selectById(id); if (config == null) { throw exception(CONFIG_NOT_EXISTS); } return config; }
@Test public void testValidateConfigExist_notExists() { assertServiceException(() -> configService.validateConfigExists(randomLongId()), CONFIG_NOT_EXISTS); }
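The null-id branch above goes untested; a minimal sketch, grounded directly in the focal method and assuming the same configService fixture plus a JUnit assertNull import:
@Test
public void testValidateConfigExists_nullId() {
    // A null id short-circuits to null without querying configMapper.
    assertNull(configService.validateConfigExists(null));
}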
@ExecuteOn(TaskExecutors.IO) @Post(uri = "/labels/by-query") @Operation(tags = {"Executions"}, summary = "Set labels on executions filtered by query parameters") public HttpResponse<?> setLabelsByQuery( @Parameter(description = "A string filter") @Nullable @QueryValue(value = "q") String query, @Parameter(description = "A namespace filter prefix") @Nullable @QueryValue String namespace, @Parameter(description = "A flow id filter") @Nullable @QueryValue String flowId, @Parameter(description = "The start datetime") @Nullable @Format("yyyy-MM-dd'T'HH:mm[:ss][.SSS][XXX]") @QueryValue ZonedDateTime startDate, @Parameter(description = "The end datetime") @Nullable @Format("yyyy-MM-dd'T'HH:mm[:ss][.SSS][XXX]") @QueryValue ZonedDateTime endDate, @Parameter(description = "A time range filter relative to the current time", examples = { @ExampleObject(name = "Filter last 5 minutes", value = "PT5M"), @ExampleObject(name = "Filter last 24 hours", value = "P1D") }) @Nullable @QueryValue Duration timeRange, @Parameter(description = "A state filter") @Nullable @QueryValue List<State.Type> state, @Parameter(description = "A labels filter as a list of 'key:value'") @Nullable @QueryValue @Format("MULTI") List<String> labels, @Parameter(description = "The trigger execution id") @Nullable @QueryValue String triggerExecutionId, @Parameter(description = "An execution child filter") @Nullable @QueryValue ExecutionRepositoryInterface.ChildFilter childFilter, @Parameter(description = "The labels to add to the execution") @Body @NotNull @Valid List<Label> setLabels ) { validateTimeline(startDate, endDate); var ids = executionRepository .find( query, tenantService.resolveTenant(), namespace, flowId, resolveAbsoluteDateTime(startDate, timeRange, ZonedDateTime.now()), endDate, state, RequestUtils.toMap(labels), triggerExecutionId, childFilter ) .map(Execution::getId) .collectList() .block(); return setLabelsByIds(new SetLabelsByIdsRequest(ids, setLabels)); }
@Test void setLabelsByQuery() { Execution result1 = triggerInputsFlowExecution(true); Execution result2 = triggerInputsFlowExecution(true); Execution result3 = triggerInputsFlowExecution(true); BulkResponse response = client.toBlocking().retrieve( HttpRequest.POST("/api/v1/executions/labels/by-query?namespace=" + result1.getNamespace(), List.of(new Label("key", "value")) ), BulkResponse.class ); assertThat(response.getCount(), is(3)); var exception = assertThrows( HttpClientResponseException.class, () -> client.toBlocking().exchange(HttpRequest.POST( "/api/v1/executions/labels/by-query?namespace=" + result1.getNamespace(), List.of(new Label(null, null))) ) ); assertThat(exception.getStatus(), is(HttpStatus.UNPROCESSABLE_ENTITY)); }
protected final void safeRegister(final Class type, final Serializer serializer) { safeRegister(type, createSerializerAdapter(serializer)); }
@Test(expected = IllegalArgumentException.class) public void testSafeRegister_ConstantType() { abstractSerializationService.safeRegister(Integer.class, new StringBufferSerializer(true)); }
public static void initSSL(Properties consumerProps) { // Check if one-way SSL is enabled. In this scenario, the client validates the server certificate. String trustStoreLocation = consumerProps.getProperty(SSL_TRUSTSTORE_LOCATION); String trustStorePassword = consumerProps.getProperty(SSL_TRUSTSTORE_PASSWORD); String serverCertificate = consumerProps.getProperty(STREAM_KAFKA_SSL_SERVER_CERTIFICATE); if (StringUtils.isAnyEmpty(trustStoreLocation, trustStorePassword, serverCertificate)) { LOGGER.info("Skipping auto SSL server validation since it's not configured."); return; } if (shouldRenewTrustStore(consumerProps)) { initTrustStore(consumerProps); } // Set the security protocol String securityProtocol = consumerProps.getProperty(SECURITY_PROTOCOL, DEFAULT_SECURITY_PROTOCOL); consumerProps.setProperty(SECURITY_PROTOCOL, securityProtocol); // Check if two-way SSL is enabled. In this scenario, the client validates the server's certificate and the server // validates the client's certificate. String keyStoreLocation = consumerProps.getProperty(SSL_KEYSTORE_LOCATION); String keyStorePassword = consumerProps.getProperty(SSL_KEYSTORE_PASSWORD); String keyPassword = consumerProps.getProperty(SSL_KEY_PASSWORD); String clientCertificate = consumerProps.getProperty(STREAM_KAFKA_SSL_CLIENT_CERTIFICATE); if (StringUtils.isAnyEmpty(keyStoreLocation, keyStorePassword, keyPassword, clientCertificate)) { LOGGER.info("Skipping auto SSL client validation since it's not configured."); return; } if (shouldRenewKeyStore(consumerProps)) { initKeyStore(consumerProps); } }
@Test public void testInitSSLTrustStoreAndKeyStore() throws CertificateException, NoSuchAlgorithmException, OperatorCreationException, NoSuchProviderException, KeyStoreException, IOException { Properties consumerProps = new Properties(); setTrustStoreProps(consumerProps); setKeyStoreProps(consumerProps); // should not throw any exceptions KafkaSSLUtils.initSSL(consumerProps); // validate validateTrustStoreCertificateCount(1); validateKeyStoreCertificateCount(1); }
@Override public WebhookPayload create(ProjectAnalysis analysis) { Writer string = new StringWriter(); try (JsonWriter writer = JsonWriter.of(string)) { writer.beginObject(); writeServer(writer); writeTask(writer, analysis.getCeTask()); writeAnalysis(writer, analysis, system2); writeProject(analysis, writer, analysis.getProject()); analysis.getBranch().ifPresent(b -> writeBranch(writer, analysis.getProject(), b)); analysis.getQualityGate().ifPresent(qualityGate -> writeQualityGate(writer, qualityGate)); writeAnalysisProperties(writer, analysis.getProperties()); writer.endObject().close(); return new WebhookPayload(analysis.getProject().getKey(), string.toString()); } }
@Test public void create_payload_for_successful_analysis() { CeTask task = new CeTask("#1", CeTask.Status.SUCCESS); Condition condition = new Condition("coverage", Condition.Operator.GREATER_THAN, "70.0"); EvaluatedQualityGate gate = EvaluatedQualityGate.newBuilder() .setQualityGate(new QualityGate("G1", "Gate One", singleton(condition))) .setStatus(Metric.Level.ERROR) .addEvaluatedCondition(condition, EvaluatedCondition.EvaluationStatus.ERROR, "74.0") .build(); ProjectAnalysis analysis = newAnalysis(task, gate, null, 1_500_000_000_000L, emptyMap()); WebhookPayload payload = underTest.create(analysis); assertThat(payload.getProjectKey()).isEqualTo(PROJECT_KEY); assertJson(payload.getJson()) .isSimilarTo("{" + " \"serverUrl\": \"http://foo\"," + " \"taskId\": \"#1\"," + " \"status\": \"SUCCESS\"," + " \"analysedAt\": \"2017-07-14T04:40:00+0200\"," + " \"revision\": \"sha1\"," + " \"changedAt\": \"2017-07-14T04:40:00+0200\"," + " \"project\": {" + " \"key\": \"P1\"," + " \"name\": \"Project One\"," + " \"url\": \"http://foo/dashboard?id=P1\"" + " }," + " \"qualityGate\": {" + " \"name\": \"Gate One\"," + " \"status\": \"ERROR\"," + " \"conditions\": [" + " {" + " \"metric\": \"coverage\"," + " \"operator\": \"GREATER_THAN\"," + " \"value\": \"74.0\"," + " \"status\": \"ERROR\"," + " \"errorThreshold\": \"70.0\"" + " }" + " ]" + " }," + " \"properties\": {" + " }" + "}"); }
public Set<PropertyKey> keySet() { Set<PropertyKey> keySet = new HashSet<>(PropertyKey.defaultKeys()); keySet.addAll(mUserProps.keySet()); return Collections.unmodifiableSet(keySet); }
@Test public void keySet() { Set<PropertyKey> expected = new HashSet<>(PropertyKey.defaultKeys()); assertThat(mProperties.keySet(), is(expected)); PropertyKey newKey = stringBuilder("keySetNew").build(); mProperties.put(newKey, "value", Source.RUNTIME); expected.add(newKey); assertThat(mProperties.keySet(), is(expected)); }
@Override public void pluginUnLoaded(GoPluginDescriptor pluginDescriptor) { repositoryMetadataStore.removeMetadata(pluginDescriptor.id()); packageMetadataStore.removeMetadata(pluginDescriptor.id()); }
@Test public void shouldRemoveMetadataOnPluginUnLoadedCallback() throws Exception { RepositoryMetadataStore.getInstance().addMetadataFor(pluginDescriptor.id(), new PackageConfigurations()); PackageMetadataStore.getInstance().addMetadataFor(pluginDescriptor.id(), new PackageConfigurations()); when(packageRepositoryExtension.canHandlePlugin(pluginDescriptor.id())).thenReturn(true); metadataLoader.pluginUnLoaded(pluginDescriptor); assertThat(RepositoryMetadataStore.getInstance().getMetadata(pluginDescriptor.id()), is(nullValue())); assertThat(PackageMetadataStore.getInstance().getMetadata(pluginDescriptor.id()), is(nullValue())); }
public static ColumnSegment bind(final ColumnSegment segment, final SegmentType parentSegmentType, final SQLStatementBinderContext binderContext, final Map<String, TableSegmentBinderContext> tableBinderContexts, final Map<String, TableSegmentBinderContext> outerTableBinderContexts) { if (EXCLUDE_BIND_COLUMNS.contains(segment.getIdentifier().getValue().toUpperCase())) { return segment; } ColumnSegment result = copy(segment); Collection<TableSegmentBinderContext> tableSegmentBinderContexts = getTableSegmentBinderContexts(segment, parentSegmentType, binderContext, tableBinderContexts, outerTableBinderContexts); Optional<ColumnSegment> inputColumnSegment = findInputColumnSegment(segment, parentSegmentType, tableSegmentBinderContexts, outerTableBinderContexts, binderContext); inputColumnSegment.ifPresent(optional -> result.setVariable(optional.isVariable())); result.setColumnBoundInfo(createColumnSegmentBoundInfo(segment, inputColumnSegment.orElse(null))); return result; }
@Test void assertBindWithMultiTablesJoinAndNoOwner() { Map<String, TableSegmentBinderContext> tableBinderContexts = new LinkedHashMap<>(2, 1F); ColumnSegment boundOrderIdColumn = new ColumnSegment(0, 0, new IdentifierValue("order_id")); boundOrderIdColumn.setColumnBoundInfo(new ColumnSegmentBoundInfo(new IdentifierValue(DefaultDatabase.LOGIC_NAME), new IdentifierValue(DefaultDatabase.LOGIC_NAME), new IdentifierValue("t_order"), new IdentifierValue("order_id"))); tableBinderContexts.put("t_order", new SimpleTableSegmentBinderContext(Collections.singleton(new ColumnProjectionSegment(boundOrderIdColumn)))); ColumnSegment boundItemIdColumn = new ColumnSegment(0, 0, new IdentifierValue("item_id")); boundItemIdColumn.setColumnBoundInfo(new ColumnSegmentBoundInfo(new IdentifierValue(DefaultDatabase.LOGIC_NAME), new IdentifierValue(DefaultDatabase.LOGIC_NAME), new IdentifierValue("t_order_item"), new IdentifierValue("item_id"))); tableBinderContexts.put("t_order_item", new SimpleTableSegmentBinderContext(Collections.singleton(new ColumnProjectionSegment(boundItemIdColumn)))); ColumnSegment columnSegment = new ColumnSegment(0, 0, new IdentifierValue("order_id")); SQLStatementBinderContext binderContext = new SQLStatementBinderContext(mock(ShardingSphereMetaData.class), DefaultDatabase.LOGIC_NAME, TypedSPILoader.getService(DatabaseType.class, "FIXTURE"), Collections.emptySet()); ColumnSegment actual = ColumnSegmentBinder.bind(columnSegment, SegmentType.JOIN_ON, binderContext, tableBinderContexts, Collections.emptyMap()); assertNotNull(actual.getColumnBoundInfo()); assertNull(actual.getOtherUsingColumnBoundInfo()); assertThat(actual.getColumnBoundInfo().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); assertThat(actual.getColumnBoundInfo().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); assertThat(actual.getColumnBoundInfo().getOriginalTable().getValue(), is("t_order")); assertThat(actual.getColumnBoundInfo().getOriginalColumn().getValue(), is("order_id")); }
public static boolean isAllNotEmpty(CharSequence... args) { return !hasEmpty(args); }
@Test public void isAllNotEmpty() { String str = "str"; Assert.assertTrue(StringUtil.isAllNotEmpty(str)); }
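A hedged sketch of the vararg and empty-element paths, assuming the underlying hasEmpty treats both null and "" as empty (hasEmpty itself is not shown in the focal method):
@Test
public void isAllNotEmptyWithEmptyElements() {
    Assert.assertTrue(StringUtil.isAllNotEmpty("a", "b"));
    // One empty or null element should make the whole check fail.
    Assert.assertFalse(StringUtil.isAllNotEmpty("a", ""));
    Assert.assertFalse(StringUtil.isAllNotEmpty("a", (CharSequence) null));
}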
public byte[] data() { return sha1.digest(); }
@Test public void testData() { byte[] buf = new byte[1024]; Arrays.fill(buf, (byte) 0xAA); ZDigest digest = new ZDigest(); digest.update(buf); byte[] data = digest.data(); assertThat(byt(data[0]), is(0xDE)); assertThat(byt(data[1]), is(0xB2)); assertThat(byt(data[2]), is(0x38)); assertThat(byt(data[3]), is(0x07)); }
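Since data() drains an incremental digest, a sketch checking that chunked updates agree with a single update; it assumes update() may be called repeatedly (as java.security.MessageDigest allows) and that assertArrayEquals is in scope:
@Test
public void testDataWithChunkedUpdates() {
    byte[] buf = new byte[1024];
    Arrays.fill(buf, (byte) 0xAA);
    ZDigest whole = new ZDigest();
    whole.update(buf);
    ZDigest parts = new ZDigest();
    parts.update(Arrays.copyOfRange(buf, 0, 512));
    parts.update(Arrays.copyOfRange(buf, 512, 1024));
    // Incremental hashing must not depend on how the input is chunked.
    assertArrayEquals(whole.data(), parts.data());
}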
public static <T> Read<T> read() { return new AutoValue_CassandraIO_Read.Builder<T>().build(); }
@Test public void testReadWithMapper() throws Exception { counter.set(0); SerializableFunction<Session, Mapper> factory = new NOOPMapperFactory(); pipeline.apply( CassandraIO.<String>read() .withHosts(Collections.singletonList(CASSANDRA_HOST)) .withPort(cassandraPort) .withKeyspace(CASSANDRA_KEYSPACE) .withTable(CASSANDRA_TABLE) .withCoder(SerializableCoder.of(String.class)) .withEntity(String.class) .withMapperFactoryFn(factory)); pipeline.run(); assertEquals(NUM_ROWS, counter.intValue()); }
static ProcessorSupplier readMapIndexSupplier(MapIndexScanMetadata indexScanMetadata) { return new MapIndexScanProcessorSupplier(indexScanMetadata); }
@Test public void test_pointLookup_hashed() { List<JetSqlRow> expected = new ArrayList<>(); for (int i = count; i > 0; i--) { map.put(i, new Person("value-" + i, i)); } expected.add(jetRow((5), "value-5", 5)); IndexConfig indexConfig = new IndexConfig(IndexType.HASH, "age").setName(randomName()); map.addIndex(indexConfig); IndexFilter filter = new IndexEqualsFilter(intValue(5)); MapIndexScanMetadata metadata = metadata(indexConfig.getName(), filter, -1, false); TestSupport .verifyProcessor(adaptSupplier(MapIndexScanP.readMapIndexSupplier(metadata))) .hazelcastInstance(instance()) .jobConfig(new JobConfig().setArgument(SQL_ARGUMENTS_KEY_NAME, emptyList())) .outputChecker(LENIENT_SAME_ITEMS_IN_ORDER) .disableSnapshots() .disableProgressAssertion() .expectOutput(expected); }
@Override public String toString() { StringBuilder stringVector = new StringBuilder(); stringVector.append(START_PARENTHESES); int resourceCount = 0; for (Map.Entry<String, Double> resourceEntry : resource) { resourceCount++; stringVector.append(resourceEntry.getKey()) .append(VALUE_DELIMITER) .append(resourceEntry.getValue()) .append(capacityTypes.get(resourceEntry.getKey()).postfix); if (resourceCount < capacityTypes.size()) { stringVector.append(RESOURCE_DELIMITER); } } stringVector.append(END_PARENTHESES); return stringVector.toString(); }
@Test public void testToString() { QueueCapacityVector capacityVector = QueueCapacityVector.newInstance(); capacityVector.setResource(MEMORY_URI, 10, ResourceUnitCapacityType.WEIGHT); capacityVector.setResource(VCORES_URI, 6, ResourceUnitCapacityType.PERCENTAGE); capacityVector.setResource(CUSTOM_RESOURCE, 3, ResourceUnitCapacityType.ABSOLUTE); Assert.assertEquals(MIXED_CAPACITY_VECTOR_STRING, capacityVector.toString()); QueueCapacityVector emptyCapacityVector = new QueueCapacityVector(); Assert.assertEquals("[]", emptyCapacityVector.toString()); }
public String encode() { StringBuilder sb = new StringBuilder(); //[0] sb.append(this.topicName); sb.append(SEPARATOR); //[1] sb.append(this.readQueueNums); sb.append(SEPARATOR); //[2] sb.append(this.writeQueueNums); sb.append(SEPARATOR); //[3] sb.append(this.perm); sb.append(SEPARATOR); //[4] sb.append(this.topicFilterType); sb.append(SEPARATOR); //[5] if (attributes != null) { sb.append(JSON.toJSONString(attributes)); } return sb.toString(); }
@Test public void testEncode() { TopicConfig topicConfig = new TopicConfig(); topicConfig.setTopicName(topicName); topicConfig.setReadQueueNums(queueNums); topicConfig.setWriteQueueNums(queueNums); topicConfig.setPerm(perm); topicConfig.setTopicFilterType(topicFilterType); topicConfig.setTopicMessageType(TopicMessageType.FIFO); String encode = topicConfig.encode(); assertThat(encode).isEqualTo("topic 8 8 6 SINGLE_TAG {\"message.type\":\"FIFO\"}"); }
@Override public void post(Event event) { if (!getDispatcher(event).add(event)) { log.error("Unable to post event {}", event); } }
@Test public void postEventWithNoSink() throws Exception { dispatcher.post(new Thing("boom")); validate(gooSink); validate(prickleSink); }
@Override @SuppressWarnings("rawtypes") public void report(SortedMap<String, Gauge> gauges, SortedMap<String, Counter> counters, SortedMap<String, Histogram> histograms, SortedMap<String, Meter> meters, SortedMap<String, Timer> timers) { final String dateTime = dateFormat.format(new Date(clock.getTime())); printWithBanner(dateTime, '='); output.println(); if (!gauges.isEmpty()) { printWithBanner("-- Gauges", '-'); for (Map.Entry<String, Gauge> entry : gauges.entrySet()) { output.println(entry.getKey()); printGauge(entry.getValue()); } output.println(); } if (!counters.isEmpty()) { printWithBanner("-- Counters", '-'); for (Map.Entry<String, Counter> entry : counters.entrySet()) { output.println(entry.getKey()); printCounter(entry); } output.println(); } if (!histograms.isEmpty()) { printWithBanner("-- Histograms", '-'); for (Map.Entry<String, Histogram> entry : histograms.entrySet()) { output.println(entry.getKey()); printHistogram(entry.getValue()); } output.println(); } if (!meters.isEmpty()) { printWithBanner("-- Meters", '-'); for (Map.Entry<String, Meter> entry : meters.entrySet()) { output.println(entry.getKey()); printMeter(entry.getValue()); } output.println(); } if (!timers.isEmpty()) { printWithBanner("-- Timers", '-'); for (Map.Entry<String, Timer> entry : timers.entrySet()) { output.println(entry.getKey()); printTimer(entry.getValue()); } output.println(); } output.println(); output.flush(); }
@Test public void reportsMeterValues() throws Exception { final Meter meter = mock(Meter.class); when(meter.getCount()).thenReturn(1L); when(meter.getMeanRate()).thenReturn(2.0); when(meter.getOneMinuteRate()).thenReturn(3.0); when(meter.getFiveMinuteRate()).thenReturn(4.0); when(meter.getFifteenMinuteRate()).thenReturn(5.0); reporter.report(map(), map(), map(), map("test.meter", meter), map()); assertThat(consoleOutput()) .isEqualTo(lines( dateHeader, "", "-- Meters ----------------------------------------------------------------------", "test.meter", " count = 1", " mean rate = 2.00 events/second", " 1-minute rate = 3.00 events/second", " 5-minute rate = 4.00 events/second", " 15-minute rate = 5.00 events/second", "", "" )); }
public AmazonInfo build() { return new AmazonInfo(Name.Amazon.name(), metadata); }
@Test public void payloadWithClassAndMetadata() throws IOException { String json = "{" + " \"@class\": \"com.netflix.appinfo.AmazonInfo\"," + " \"metadata\": {" + " \"instance-id\": \"i-12345\"" + " }" + "}"; AmazonInfo info = newMapper().readValue(json, AmazonInfo.class); AmazonInfo expected = AmazonInfo.Builder.newBuilder() .addMetadata(AmazonInfo.MetaDataKey.instanceId, "i-12345") .build(); Assert.assertEquals(expected, nonCompact(info)); }
public static boolean equals(String a, String b) { if (a == null) { return b == null; } return a.equals(b); }
@Test void testEquals() { Assertions.assertTrue(StringUtils.equals("1", "1")); Assertions.assertFalse(StringUtils.equals("1", "2")); Assertions.assertFalse(StringUtils.equals(null, "1")); Assertions.assertFalse(StringUtils.equals("1", null)); Assertions.assertFalse(StringUtils.equals("", null)); Assertions.assertFalse(StringUtils.equals(null, "")); }
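Two cases the test above omits, both grounded directly in the focal method's null handling:
@Test
void testEqualsBothNullOrBothEmpty() {
    // When 'a' is null the method returns (b == null).
    Assertions.assertTrue(StringUtils.equals(null, null));
    Assertions.assertTrue(StringUtils.equals("", ""));
}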
public void completeExceptionally(Throwable value) { checkNotNull(value); if (this.value != EMPTY) { throw new IllegalStateException("Promise is already completed"); } this.value = value; this.exceptional = true; for (BiConsumer<E, Throwable> consumer : consumers) { try { consumer.accept(null, value); } catch (Exception e) { eventloop.logger.warning(e); } } if (releaseOnComplete) { release(); } }
@Test(expected = IllegalStateException.class) public void test_completeExceptionally_whenAlreadyCompleted() { Promise<String> promise = new Promise<>(reactor.eventloop); promise.completeExceptionally(new Throwable()); promise.completeExceptionally(new Throwable()); }
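checkNotNull runs before the completed-state check, so a null argument should fail fast even on a fresh promise; a sketch under the same reactor fixture, assuming checkNotNull throws NullPointerException:
@Test(expected = NullPointerException.class)
public void test_completeExceptionally_whenNull() {
    Promise<String> promise = new Promise<>(reactor.eventloop);
    promise.completeExceptionally(null);
}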
@Override public String call() throws RemoteServiceException { var currentTime = System.nanoTime(); //Since currentTime and serverStartTime are both in nanoseconds, we convert the difference to //seconds by dividing by 1e9 and ensure floating-point division by multiplying it //with 1.0 first. We then check whether it is less than the specified delay and then //send the reply if ((currentTime - serverStartTime) * 1.0 / (1000 * 1000 * 1000) < delay) { //Can use Thread.sleep() here to block and simulate a hung server throw new RemoteServiceException("Delayed service is down"); } return "Delayed service is working"; }
@Test void testDefaultConstructor() throws RemoteServiceException { Assertions.assertThrows(RemoteServiceException.class, () -> { var obj = new DelayedRemoteService(); obj.call(); }); }
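The healthy path is untested above; a sketch assuming a (serverStartTime, delay) constructor matching the fields the focal method reads — hypothetical if the class only exposes the default constructor:
@Test
void testCallAfterDelayElapsed() throws RemoteServiceException {
    // Started far enough in the past that the configured delay has elapsed.
    var service = new DelayedRemoteService(System.nanoTime() - TimeUnit.SECONDS.toNanos(10), 5);
    Assertions.assertEquals("Delayed service is working", service.call());
}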
public Optional<DbEntityCatalogEntry> getByCollectionName(final String collection) { return Optional.ofNullable(entitiesByCollectionName.get(collection)); }
@Test void returnsProperDataFromCatalog() { DbEntitiesCatalog catalog = new DbEntitiesCatalog(List.of(new DbEntityCatalogEntry("streams", "title", StreamImpl.class, "streams:read"))); assertThat(catalog.getByCollectionName("streams")) .isEqualTo(Optional.of( new DbEntityCatalogEntry("streams", "title", StreamImpl.class, "streams:read") ) ); }
public Flowable<V> takeFirstElements() { return ElementsStream.takeElements(queue::takeFirstAsync); }
@Test public void testTakeFirstElements() { RBlockingDequeRx<Integer> queue = redisson.getBlockingDeque("test"); List<Integer> elements = new ArrayList<>(); queue.takeFirstElements().subscribe(new Subscriber<Integer>() { @Override public void onSubscribe(Subscription s) { s.request(4); } @Override public void onNext(Integer t) { elements.add(t); } @Override public void onError(Throwable t) { } @Override public void onComplete() { } }); for (int i = 0; i < 10; i++) { sync(queue.add(i)); } assertThat(elements).containsExactly(0, 1, 2, 3); }
public void writeReference(Reference reference) throws IOException { if (reference instanceof StringReference) { writeQuotedString((StringReference) reference); } else if (reference instanceof TypeReference) { writeType((TypeReference) reference); } else if (reference instanceof FieldReference) { writeFieldDescriptor((FieldReference) reference); } else if (reference instanceof MethodReference) { writeMethodDescriptor((MethodReference) reference); } else if (reference instanceof MethodProtoReference) { writeMethodProtoDescriptor((MethodProtoReference) reference); } else if (reference instanceof MethodHandleReference) { writeMethodHandle((MethodHandleReference) reference); } else if (reference instanceof CallSiteReference) { writeCallSite((CallSiteReference) reference); } else { throw new IllegalArgumentException(String.format("Not a known reference type: %s", reference.getClass())); } }
@Test public void testWriteReference_string() throws IOException { DexFormattedWriter writer = new DexFormattedWriter(output); writer.writeReference(new ImmutableStringReference("string value")); Assert.assertEquals( "\"string value\"", output.toString()); }
@Restricted(NoExternalUse.class) public static Set<PosixFilePermission> modeToPermissions(int mode) throws IOException { // Anything larger is a file type, not a permission. int PERMISSIONS_MASK = 07777; // setgid/setuid/sticky are not supported. int MAX_SUPPORTED_MODE = 0777; mode = mode & PERMISSIONS_MASK; if ((mode & MAX_SUPPORTED_MODE) != mode) { throw new IOException("Invalid mode: " + mode); } PosixFilePermission[] allPermissions = PosixFilePermission.values(); Set<PosixFilePermission> result = EnumSet.noneOf(PosixFilePermission.class); for (int i = 0; i < allPermissions.length; i++) { if ((mode & 1) == 1) { result.add(allPermissions[allPermissions.length - i - 1]); } mode >>= 1; } return result; }
@Test public void testModeToPermissions() throws Exception { assertEquals(PosixFilePermissions.fromString("rwxrwxrwx"), Util.modeToPermissions(0777)); assertEquals(PosixFilePermissions.fromString("rwxr-xrwx"), Util.modeToPermissions(0757)); assertEquals(PosixFilePermissions.fromString("rwxr-x---"), Util.modeToPermissions(0750)); assertEquals(PosixFilePermissions.fromString("r-xr-x---"), Util.modeToPermissions(0550)); assertEquals(PosixFilePermissions.fromString("r-xr-----"), Util.modeToPermissions(0540)); assertEquals(PosixFilePermissions.fromString("--xr-----"), Util.modeToPermissions(0140)); assertEquals(PosixFilePermissions.fromString("--xr---w-"), Util.modeToPermissions(0142)); assertEquals(PosixFilePermissions.fromString("--xr--rw-"), Util.modeToPermissions(0146)); assertEquals(PosixFilePermissions.fromString("-wxr--rw-"), Util.modeToPermissions(0346)); assertEquals(PosixFilePermissions.fromString("---------"), Util.modeToPermissions(0000)); assertEquals("Non-permission bits should be ignored", PosixFilePermissions.fromString("r-xr-----"), Util.modeToPermissions(0100540)); Exception e = Assert.assertThrows(Exception.class, () -> Util.modeToPermissions(01777)); assertThat(e.getMessage(), startsWith("Invalid mode")); }
public static JavaRuntimeInfo getJavaRuntimeInfo() { return Singleton.get(JavaRuntimeInfo.class); }
@Test public void getJavaRuntimeInfoTest() { final JavaRuntimeInfo info = SystemUtil.getJavaRuntimeInfo(); assertNotNull(info); }
public void start() { executorService.scheduleWithFixedDelay(this::refresh, INITIAL_DELAY, DELAY, TimeUnit.SECONDS); }
@Test void start_adds_runnable_with_10_second_delay_and_initial_delay_putting_NodeHealth_from_provider_into_SharedHealthState() { ArgumentCaptor<Runnable> runnableCaptor = ArgumentCaptor.forClass(Runnable.class); NodeHealth[] nodeHealths = { testSupport.randomNodeHealth(), testSupport.randomNodeHealth(), testSupport.randomNodeHealth() }; Error expected = new Error("Simulating exception raised by NodeHealthProvider"); when(nodeHealthProvider.get()) .thenReturn(nodeHealths[0]) .thenReturn(nodeHealths[1]) .thenReturn(nodeHealths[2]) .thenThrow(expected); underTest.start(); verify(executorService).scheduleWithFixedDelay(runnableCaptor.capture(), eq(1L), eq(10L), eq(TimeUnit.SECONDS)); Runnable runnable = runnableCaptor.getValue(); runnable.run(); runnable.run(); runnable.run(); verify(sharedHealthState).writeMine(nodeHealths[0]); verify(sharedHealthState).writeMine(nodeHealths[1]); verify(sharedHealthState).writeMine(nodeHealths[2]); assertThatCode(runnable::run) .doesNotThrowAnyException(); }
@Override public DirectoryTimestamp getDirectoryTimestamp() { return DirectoryTimestamp.explicit; }
@Test public void testFeatures() { assertEquals(Protocol.Case.sensitive, new HubicProtocol().getCaseSensitivity()); assertEquals(Protocol.DirectoryTimestamp.explicit, new HubicProtocol().getDirectoryTimestamp()); }
@VisibleForTesting int persistNextQueues(final Instant currentTime) { final int slot = messagesCache.getNextSlotToPersist(); List<String> queuesToPersist; int queuesPersisted = 0; do { queuesToPersist = getQueuesTimer.record( () -> messagesCache.getQueuesToPersist(slot, currentTime.minus(persistDelay), QUEUE_BATCH_LIMIT)); for (final String queue : queuesToPersist) { final UUID accountUuid = MessagesCache.getAccountUuidFromQueueName(queue); final byte deviceId = MessagesCache.getDeviceIdFromQueueName(queue); final Optional<Account> maybeAccount = accountsManager.getByAccountIdentifier(accountUuid); if (maybeAccount.isEmpty()) { logger.error("No account record found for account {}", accountUuid); continue; } final Optional<Device> maybeDevice = maybeAccount.flatMap(account -> account.getDevice(deviceId)); if (maybeDevice.isEmpty()) { logger.error("Account {} does not have a device with id {}", accountUuid, deviceId); continue; } try { persistQueue(maybeAccount.get(), maybeDevice.get()); } catch (final Exception e) { persistQueueExceptionMeter.increment(); logger.warn("Failed to persist queue {}::{}; will schedule for retry", accountUuid, deviceId, e); messagesCache.addQueueToPersist(accountUuid, deviceId); Util.sleep(EXCEPTION_PAUSE_MILLIS); } } queuesPersisted += queuesToPersist.size(); } while (queuesToPersist.size() >= QUEUE_BATCH_LIMIT); return queuesPersisted; }
@Test void testPersistNextQueuesSingleQueueTooSoon() { final String queueName = new String( MessagesCache.getMessageQueueKey(DESTINATION_ACCOUNT_UUID, DESTINATION_DEVICE_ID), StandardCharsets.UTF_8); final int messageCount = (MessagePersister.MESSAGE_BATCH_LIMIT * 3) + 7; final Instant now = Instant.now(); insertMessages(DESTINATION_ACCOUNT_UUID, DESTINATION_DEVICE_ID, messageCount, now); setNextSlotToPersist(SlotHash.getSlot(queueName)); messagePersister.persistNextQueues(now); verify(messagesDynamoDb, never()).store(any(), any(), any()); }
@Override public void reset() { Iterator<T> iter = snapshottableIterator(SnapshottableHashTable.LATEST_EPOCH); while (iter.hasNext()) { iter.next(); iter.remove(); } }
@Test public void testReset() { SnapshotRegistry registry = new SnapshotRegistry(new LogContext()); SnapshottableHashTable<TestElement> table = new SnapshottableHashTable<>(registry, 1); assertNull(table.snapshottableAddOrReplace(E_1A)); assertNull(table.snapshottableAddOrReplace(E_2A)); assertNull(table.snapshottableAddOrReplace(E_3A)); registry.getOrCreateSnapshot(0); assertEquals(E_1A, table.snapshottableAddOrReplace(E_1B)); assertEquals(E_3A, table.snapshottableAddOrReplace(E_3B)); registry.getOrCreateSnapshot(1); registry.reset(); assertEquals(Collections.emptyList(), registry.epochsList()); // Check that the table is empty assertIteratorYields(table.snapshottableIterator(Long.MAX_VALUE)); }
public static void trimRecordTemplate(RecordTemplate recordTemplate, MaskTree override, final boolean failOnMismatch) { trimRecordTemplate(recordTemplate.data(), recordTemplate.schema(), override, failOnMismatch); }
@Test public void testRecord() throws CloneNotSupportedException { RecordBar bar = new RecordBar(); bar.setLocation("mountain view"); RecordBar expected = bar.copy(); // Introduce bad elements bar.data().put("SF", "CA"); Assert.assertEquals(bar.data().size(), 2); RestUtils.trimRecordTemplate(bar, false); Assert.assertEquals(bar, expected); }
@Override public void isEqualTo(@Nullable Object expected) { super.isEqualTo(expected); }
@Test public void isEqualTo_WithoutToleranceParameter_Fail_Longer() { expectFailureWhenTestingThat(array(2.2d, 3.3d)).isEqualTo(array(2.2d, 3.3d, 4.4d)); assertFailureKeys("expected", "but was", "wrong length", "expected", "but was"); assertFailureValueIndexed("expected", 1, "3"); assertFailureValueIndexed("but was", 1, "2"); }
@Override public void onClick(View v) { switch (v.getId()) { case R.id.quick_keys_popup_close: mKeyboardActionListener.onKey(KeyCodes.CANCEL, null, 0, null, true); break; case R.id.quick_keys_popup_backspace: mKeyboardActionListener.onKey(KeyCodes.DELETE, null, 0, null, true); break; case R.id.quick_keys_popup_quick_keys_insert_media: mKeyboardActionListener.onKey(KeyCodes.IMAGE_MEDIA_POPUP, null, 0, null, true); break; case R.id.quick_keys_popup_delete_recently_used_smileys: mKeyboardActionListener.onKey(KeyCodes.CLEAR_QUICK_TEXT_HISTORY, null, 0, null, true); // re-show mKeyboardActionListener.onKey(KeyCodes.QUICK_TEXT_POPUP, null, 0, null, true); break; case R.id.quick_keys_popup_quick_keys_settings: Intent startSettings = new Intent( Intent.ACTION_VIEW, Uri.parse(v.getContext().getString(R.string.deeplink_url_quick_text)), v.getContext(), MainSettingsActivity.class); startSettings.setFlags( Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_NO_HISTORY | Intent.FLAG_ACTIVITY_EXCLUDE_FROM_RECENTS); v.getContext().startActivity(startSettings); // and closing keyboard mKeyboardActionListener.onKey(KeyCodes.CANCEL, null, 0, null, true); break; default: throw new IllegalArgumentException( "Failed to handle view id " + v.getId() + " in FrameKeyboardViewClickListener"); } }
@Test public void testOnClickSetting() throws Exception { OnKeyboardActionListener keyboardActionListener = Mockito.mock(OnKeyboardActionListener.class); FrameKeyboardViewClickListener listener = new FrameKeyboardViewClickListener(keyboardActionListener); Mockito.verifyZeroInteractions(keyboardActionListener); View view = new View(getApplicationContext()); view.setId(R.id.quick_keys_popup_quick_keys_settings); listener.onClick(view); Intent expectedIntent = new Intent( Intent.ACTION_VIEW, Uri.parse(getApplicationContext().getString(R.string.deeplink_url_quick_text)), getApplicationContext(), MainSettingsActivity.class); expectedIntent.setFlags( Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_NO_HISTORY | Intent.FLAG_ACTIVITY_EXCLUDE_FROM_RECENTS); Intent settingIntent = Shadows.shadowOf((Application) ApplicationProvider.getApplicationContext()) .getNextStartedActivity(); Assert.assertEquals( expectedIntent.getComponent().flattenToString(), settingIntent.getComponent().flattenToString()); Assert.assertEquals(expectedIntent.getFlags(), settingIntent.getFlags()); // closes the keyboard Mockito.verify(keyboardActionListener).onKey(KeyCodes.CANCEL, null, 0, null, true); Mockito.verifyNoMoreInteractions(keyboardActionListener); }
@SafeVarargs public static Optional<Predicate<Throwable>> createExceptionsPredicate( Predicate<Throwable> exceptionPredicate, Class<? extends Throwable>... exceptions) { return PredicateCreator.createExceptionsPredicate(exceptions) .map(predicate -> exceptionPredicate == null ? predicate : predicate.or(exceptionPredicate)) .or(() -> Optional.ofNullable(exceptionPredicate)); }
@Test public void buildComplexRecordExceptionsPredicateWithoutClasses() { Predicate<Throwable> exceptionPredicate = t -> t instanceof IOException || t instanceof RuntimeException; Predicate<Throwable> predicate = PredicateCreator .createExceptionsPredicate(exceptionPredicate) .orElseThrow(); then(predicate.test(new RuntimeException())).isTrue(); then(predicate.test(new IllegalArgumentException())).isTrue(); then(predicate.test(new Throwable())).isFalse(); then(predicate.test(new Exception())).isFalse(); then(predicate.test(new IOException())).isTrue(); }
public String getEndpointsInfo() { return loggingListener.getEndpointsInfo(); }
@Test void logsNoEndpointsWhenNoResourcesAreRegistered() { runJersey(); assertThat(rc.getEndpointsInfo()).contains(" NONE"); }
public RebalanceProtocol rebalanceProtocol() { final String upgradeFrom = streamsConfig.getString(StreamsConfig.UPGRADE_FROM_CONFIG); if (upgradeFrom != null) { switch (UpgradeFromValues.fromString(upgradeFrom)) { case UPGRADE_FROM_0100: case UPGRADE_FROM_0101: case UPGRADE_FROM_0102: case UPGRADE_FROM_0110: case UPGRADE_FROM_10: case UPGRADE_FROM_11: case UPGRADE_FROM_20: case UPGRADE_FROM_21: case UPGRADE_FROM_22: case UPGRADE_FROM_23: // ATTENTION: The following log message is used for verification in the system test // streams/streams_cooperative_rebalance_upgrade_test.py::StreamsCooperativeRebalanceUpgradeTest.test_upgrade_to_cooperative_rebalance // If you change it, please also change the system test accordingly and // verify whether the test passes. log.info("Eager rebalancing protocol is enabled now for upgrade from {}.x", upgradeFrom); log.warn("The eager rebalancing protocol is deprecated and will stop being supported in a future release." + " Please be prepared to remove the 'upgrade.from' config soon."); return RebalanceProtocol.EAGER; case UPGRADE_FROM_24: case UPGRADE_FROM_25: case UPGRADE_FROM_26: case UPGRADE_FROM_27: case UPGRADE_FROM_28: case UPGRADE_FROM_30: case UPGRADE_FROM_31: case UPGRADE_FROM_32: case UPGRADE_FROM_33: case UPGRADE_FROM_34: case UPGRADE_FROM_35: case UPGRADE_FROM_36: case UPGRADE_FROM_37: case UPGRADE_FROM_38: // we need to add a new version here when new "upgrade.from" values become available // This config is for explicitly sending FK response to a requested partition // and should not affect the rebalance protocol break; default: throw new IllegalArgumentException("Unknown configuration value for parameter 'upgrade.from': " + upgradeFrom); } } // ATTENTION: The following log message is used for verification in the system test // streams/streams_cooperative_rebalance_upgrade_test.py::StreamsCooperativeRebalanceUpgradeTest.test_upgrade_to_cooperative_rebalance // If you change it, please also change the system test accordingly and // verify whether the test passes. log.info("Cooperative rebalancing protocol is enabled now"); return RebalanceProtocol.COOPERATIVE; }
@Test public void rebalanceProtocolShouldSupportAllUpgradeFromVersions() { for (final UpgradeFromValues upgradeFrom : UpgradeFromValues.values()) { config.put(StreamsConfig.UPGRADE_FROM_CONFIG, upgradeFrom.toString()); final AssignorConfiguration assignorConfiguration = new AssignorConfiguration(config); try { assignorConfiguration.rebalanceProtocol(); } catch (final Exception error) { throw new AssertionError("Upgrade from " + upgradeFrom + " failed with " + error.getMessage() + "!"); } } }
static int decodeULE128(ByteBuf in, int result) throws Http2Exception { final int readerIndex = in.readerIndex(); final long v = decodeULE128(in, (long) result); if (v > Integer.MAX_VALUE) { // the maximum value that can be represented by a signed 32 bit number is: // [0x1,0x7f] + 0x7f + (0x7f << 7) + (0x7f << 14) + (0x7f << 21) + (0x6 << 28) // OR // 0x0 + 0x7f + (0x7f << 7) + (0x7f << 14) + (0x7f << 21) + (0x7 << 28) // we should reset the readerIndex if we overflowed the int type. in.readerIndex(readerIndex); throw DECODE_ULE_128_TO_INT_DECOMPRESSION_EXCEPTION; } return (int) v; }
@Test public void testDecodeULE128IntMax() throws Http2Exception { byte[] input = {(byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0x07}; ByteBuf in = Unpooled.wrappedBuffer(input); try { assertEquals(MAX_VALUE, decodeULE128(in, 0)); } finally { in.release(); } }
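A companion sketch for the overflow path, using the same helpers as the test above and assuming assertThrows is in scope; per the focal method, the reader index must be restored when the decoded value exceeds Integer.MAX_VALUE:
@Test
public void testDecodeULE128IntOverflow() {
    // One past the largest encodable int: the 0x08 final byte contributes 0x8 << 28.
    byte[] input = {(byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0x08};
    ByteBuf in = Unpooled.wrappedBuffer(input);
    final int readerIndex = in.readerIndex();
    try {
        assertThrows(Http2Exception.class, () -> decodeULE128(in, 0));
        assertEquals(readerIndex, in.readerIndex());
    } finally {
        in.release();
    }
}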
boolean openNextFile() { try { if ( meta.getFileInFields() ) { data.readrow = getRow(); // Grab another row ... if ( data.readrow == null ) { // finished processing! if ( isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "LoadFileInput.Log.FinishedProcessing" ) ); } return false; } if ( first ) { first = false; data.inputRowMeta = getInputRowMeta(); data.outputRowMeta = data.inputRowMeta.clone(); meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore ); // Create convert meta-data objects that will contain Date & Number formatters // All non binary content is handled as a String. It would be converted to the target type after the processing. data.convertRowMeta = data.outputRowMeta.cloneToType( ValueMetaInterface.TYPE_STRING ); if ( meta.getFileInFields() ) { // Check is filename field is provided if ( Utils.isEmpty( meta.getDynamicFilenameField() ) ) { logError( BaseMessages.getString( PKG, "LoadFileInput.Log.NoField" ) ); throw new KettleException( BaseMessages.getString( PKG, "LoadFileInput.Log.NoField" ) ); } // cache the position of the field if ( data.indexOfFilenameField < 0 ) { data.indexOfFilenameField = data.inputRowMeta.indexOfValue( meta.getDynamicFilenameField() ); if ( data.indexOfFilenameField < 0 ) { // The field is unreachable ! logError( BaseMessages.getString( PKG, "LoadFileInput.Log.ErrorFindingField" ) + "[" + meta.getDynamicFilenameField() + "]" ); throw new KettleException( BaseMessages.getString( PKG, "LoadFileInput.Exception.CouldnotFindField", meta.getDynamicFilenameField() ) ); } } // Get the number of previous fields data.totalpreviousfields = data.inputRowMeta.size(); } } // end if first // get field value String Fieldvalue = data.inputRowMeta.getString( data.readrow, data.indexOfFilenameField ); if ( isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "LoadFileInput.Log.Stream", meta.getDynamicFilenameField(), Fieldvalue ) ); } try { // Source is a file. data.file = KettleVFS.getFileObject( Fieldvalue ); } catch ( Exception e ) { throw new KettleException( e ); } } else { if ( data.filenr >= data.files.nrOfFiles() ) { // finished processing! if ( isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "LoadFileInput.Log.FinishedProcessing" ) ); } return false; } // Is this the last file? data.last_file = ( data.filenr == data.files.nrOfFiles() - 1 ); data.file = data.files.getFile( data.filenr ); } // Check if file exists if ( meta.isIgnoreMissingPath() && !data.file.exists() ) { logBasic( BaseMessages.getString( PKG, "LoadFileInput.Error.FileNotExists", "" + data.file.getName() ) ); return openNextFile(); } // Check if file is empty data.fileSize = data.file.getContent().getSize(); // Move file pointer ahead! data.filenr++; if ( meta.isIgnoreEmptyFile() && data.fileSize == 0 ) { logError( BaseMessages.getString( PKG, "LoadFileInput.Error.FileSizeZero", "" + data.file.getName() ) ); return openNextFile(); } else { if ( isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "LoadFileInput.Log.OpeningFile", data.file.toString() ) ); } data.filename = KettleVFS.getFilename( data.file ); // Add additional fields? 
if ( meta.getShortFileNameField() != null && meta.getShortFileNameField().length() > 0 ) { data.shortFilename = data.file.getName().getBaseName(); } if ( meta.getPathField() != null && meta.getPathField().length() > 0 ) { data.path = KettleVFS.getFilename( data.file.getParent() ); } if ( meta.isHiddenField() != null && meta.isHiddenField().length() > 0 ) { data.hidden = data.file.isHidden(); } if ( meta.getExtensionField() != null && meta.getExtensionField().length() > 0 ) { data.extension = data.file.getName().getExtension(); } if ( meta.getLastModificationDateField() != null && meta.getLastModificationDateField().length() > 0 ) { data.lastModificationDateTime = new Date( data.file.getContent().getLastModifiedTime() ); } if ( meta.getUriField() != null && meta.getUriField().length() > 0 ) { data.uriName = Const.optionallyDecodeUriString( data.file.getName().getURI() ); } if ( meta.getRootUriField() != null && meta.getRootUriField().length() > 0 ) { data.rootUriName = data.file.getName().getRootURI(); } // get File content getFileContent(); addFileToResultFilesName( data.file ); if ( isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "LoadFileInput.Log.FileOpened", data.file.toString() ) ); } } } catch ( Exception e ) { logError( BaseMessages.getString( PKG, "LoadFileInput.Log.UnableToOpenFile", "" + data.filenr, data.file .toString(), e.toString() ) ); stopAll(); setErrors( 1 ); return false; } return true; }
@Test public void testOpenNextFile_0() { assertFalse( stepMetaInterface.isIgnoreEmptyFile() ); // ensure default value stepInputFiles.addFile( getFile( "input0.txt" ) ); assertTrue( stepLoadFileInput.openNextFile() ); assertFalse( stepLoadFileInput.openNextFile() ); }
@Description("current timestamp without time zone") @ScalarFunction("localtimestamp") @SqlType(StandardTypes.TIMESTAMP) public static long localTimestamp(SqlFunctionProperties properties) { if (properties.isLegacyTimestamp()) { return properties.getSessionStartTime(); } ISOChronology localChronology = getChronology(properties.getTimeZoneKey()); return localChronology.getZone().convertUTCToLocal(properties.getSessionStartTime()); }
@Test public void testLocalTimestamp() { Session localSession = Session.builder(session) .setStartTime(new DateTime(2017, 3, 1, 14, 30, 0, 0, DATE_TIME_ZONE).getMillis()) .build(); try (FunctionAssertions localAssertion = new FunctionAssertions(localSession)) { localAssertion.assertFunctionString("LOCALTIMESTAMP", TimestampType.TIMESTAMP, "2017-03-01 14:30:00.000"); } }
public int run(final String[] args) throws Exception { if (!localTarget.isAutoFailoverEnabled()) { LOG.error("Automatic failover is not enabled for " + localTarget + "." + " Please ensure that automatic failover is enabled in the " + "configuration before running the ZK failover controller."); return ERR_CODE_AUTO_FAILOVER_NOT_ENABLED; } loginAsFCUser(); try { return SecurityUtil.doAsLoginUserOrFatal(new PrivilegedAction<Integer>() { @Override public Integer run() { try { return doRun(args); } catch (Exception t) { throw new RuntimeException(t); } finally { if (elector != null) { elector.terminateConnection(); } } } }); } catch (RuntimeException rte) { throw (Exception)rte.getCause(); } }
@Test public void testFormatOneClusterLeavesOtherClustersAlone() throws Exception { DummyHAService svc = cluster.getService(1); DummyZKFC zkfcInOtherCluster = new DummyZKFC(conf, cluster.getService(1)) { @Override protected String getScopeInsideParentNode() { return "other-scope"; } }; // Run without formatting the base dir, // should barf assertEquals(ZKFailoverController.ERR_CODE_NO_PARENT_ZNODE, runFC(svc)); // Format the base dir, should succeed assertEquals(0, runFC(svc, "-formatZK")); // Run the other cluster without formatting, should barf because // it uses a different parent znode assertEquals(ZKFailoverController.ERR_CODE_NO_PARENT_ZNODE, zkfcInOtherCluster.run(new String[]{})); // Should succeed in formatting the second cluster assertEquals(0, zkfcInOtherCluster.run(new String[]{"-formatZK"})); // But should not have deleted the original base node from the first // cluster assertEquals(ZKFailoverController.ERR_CODE_FORMAT_DENIED, runFC(svc, "-formatZK", "-nonInteractive")); }
public String getMethod() { return this.method; }
@Test public void testGetMethod() { Assert.assertEquals("Signature", authorizationHeader.getMethod()); }
@Override public Column convert(BasicTypeDefine typeDefine) { PhysicalColumn.PhysicalColumnBuilder builder = PhysicalColumn.builder() .name(typeDefine.getName()) .sourceType(typeDefine.getColumnType()) .nullable(typeDefine.isNullable()) .defaultValue(typeDefine.getDefaultValue()) .comment(typeDefine.getComment()); String mysqlDataType = typeDefine.getDataType().toUpperCase(); if (mysqlDataType.endsWith("ZEROFILL")) { mysqlDataType = mysqlDataType.substring(0, mysqlDataType.length() - "ZEROFILL".length()).trim(); } if (typeDefine.isUnsigned() && !(mysqlDataType.endsWith(" UNSIGNED"))) { mysqlDataType = mysqlDataType + " UNSIGNED"; } switch (mysqlDataType) { case MYSQL_NULL: builder.dataType(BasicType.VOID_TYPE); break; case MYSQL_BIT: if (typeDefine.getLength() == null || typeDefine.getLength() <= 0) { builder.dataType(BasicType.BOOLEAN_TYPE); } else if (typeDefine.getLength() == 1) { builder.dataType(BasicType.BOOLEAN_TYPE); } else { builder.dataType(PrimitiveByteArrayType.INSTANCE); // BIT(M) -> BYTE(M/8) long byteLength = typeDefine.getLength() / 8; byteLength += typeDefine.getLength() % 8 > 0 ? 1 : 0; builder.columnLength(byteLength); } break; case MYSQL_TINYINT: if (typeDefine.getColumnType().equalsIgnoreCase("tinyint(1)")) { builder.dataType(BasicType.BOOLEAN_TYPE); } else { builder.dataType(BasicType.BYTE_TYPE); } break; case MYSQL_TINYINT_UNSIGNED: case MYSQL_SMALLINT: builder.dataType(BasicType.SHORT_TYPE); break; case MYSQL_SMALLINT_UNSIGNED: case MYSQL_MEDIUMINT: case MYSQL_MEDIUMINT_UNSIGNED: case MYSQL_INT: case MYSQL_INTEGER: case MYSQL_YEAR: builder.dataType(BasicType.INT_TYPE); break; case MYSQL_INT_UNSIGNED: case MYSQL_INTEGER_UNSIGNED: case MYSQL_BIGINT: builder.dataType(BasicType.LONG_TYPE); break; case MYSQL_BIGINT_UNSIGNED: DecimalType intDecimalType = new DecimalType(20, 0); builder.dataType(intDecimalType); builder.columnLength(Long.valueOf(intDecimalType.getPrecision())); builder.scale(intDecimalType.getScale()); break; case MYSQL_FLOAT: builder.dataType(BasicType.FLOAT_TYPE); break; case MYSQL_FLOAT_UNSIGNED: log.warn("{} will probably cause value overflow.", MYSQL_FLOAT_UNSIGNED); builder.dataType(BasicType.FLOAT_TYPE); break; case MYSQL_DOUBLE: builder.dataType(BasicType.DOUBLE_TYPE); break; case MYSQL_DOUBLE_UNSIGNED: log.warn("{} will probably cause value overflow.", MYSQL_DOUBLE_UNSIGNED); builder.dataType(BasicType.DOUBLE_TYPE); break; case MYSQL_DECIMAL: Preconditions.checkArgument(typeDefine.getPrecision() > 0); DecimalType decimalType; if (typeDefine.getPrecision() > DEFAULT_PRECISION) { log.warn("{} will probably cause value overflow.", MYSQL_DECIMAL); decimalType = new DecimalType(DEFAULT_PRECISION, DEFAULT_SCALE); } else { decimalType = new DecimalType( typeDefine.getPrecision().intValue(), typeDefine.getScale() == null ? 0 : typeDefine.getScale().intValue()); } builder.dataType(decimalType); builder.columnLength(Long.valueOf(decimalType.getPrecision())); builder.scale(decimalType.getScale()); break; case MYSQL_DECIMAL_UNSIGNED: Preconditions.checkArgument(typeDefine.getPrecision() > 0); log.warn("{} will probably cause value overflow.", MYSQL_DECIMAL_UNSIGNED); DecimalType decimalUnsignedType = new DecimalType( typeDefine.getPrecision().intValue() + 1, typeDefine.getScale() == null ? 
0 : typeDefine.getScale().intValue()); builder.dataType(decimalUnsignedType); builder.columnLength(Long.valueOf(decimalUnsignedType.getPrecision())); builder.scale(decimalUnsignedType.getScale()); break; case MYSQL_ENUM: builder.dataType(BasicType.STRING_TYPE); if (typeDefine.getLength() == null || typeDefine.getLength() <= 0) { builder.columnLength(100L); } else { builder.columnLength(typeDefine.getLength()); } break; case MYSQL_CHAR: case MYSQL_VARCHAR: if (typeDefine.getLength() == null || typeDefine.getLength() <= 0) { builder.columnLength(TypeDefineUtils.charTo4ByteLength(1L)); } else { builder.columnLength(typeDefine.getLength()); } builder.dataType(BasicType.STRING_TYPE); break; case MYSQL_TINYTEXT: builder.dataType(BasicType.STRING_TYPE); builder.columnLength(POWER_2_8 - 1); break; case MYSQL_TEXT: builder.dataType(BasicType.STRING_TYPE); builder.columnLength(POWER_2_16 - 1); break; case MYSQL_MEDIUMTEXT: builder.dataType(BasicType.STRING_TYPE); builder.columnLength(POWER_2_24 - 1); break; case MYSQL_LONGTEXT: builder.dataType(BasicType.STRING_TYPE); builder.columnLength(POWER_2_32 - 1); break; case MYSQL_JSON: builder.dataType(BasicType.STRING_TYPE); break; case MYSQL_BINARY: case MYSQL_VARBINARY: if (typeDefine.getLength() == null || typeDefine.getLength() <= 0) { builder.columnLength(1L); } else { builder.columnLength(typeDefine.getLength()); } builder.dataType(PrimitiveByteArrayType.INSTANCE); break; case MYSQL_TINYBLOB: builder.dataType(PrimitiveByteArrayType.INSTANCE); builder.columnLength(POWER_2_8 - 1); break; case MYSQL_BLOB: builder.dataType(PrimitiveByteArrayType.INSTANCE); builder.columnLength(POWER_2_16 - 1); break; case MYSQL_MEDIUMBLOB: builder.dataType(PrimitiveByteArrayType.INSTANCE); builder.columnLength(POWER_2_24 - 1); break; case MYSQL_LONGBLOB: builder.dataType(PrimitiveByteArrayType.INSTANCE); builder.columnLength(POWER_2_32 - 1); break; case MYSQL_GEOMETRY: builder.dataType(PrimitiveByteArrayType.INSTANCE); break; case MYSQL_DATE: builder.dataType(LocalTimeType.LOCAL_DATE_TYPE); break; case MYSQL_TIME: builder.dataType(LocalTimeType.LOCAL_TIME_TYPE); builder.scale(typeDefine.getScale()); break; case MYSQL_DATETIME: case MYSQL_TIMESTAMP: builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE); builder.scale(typeDefine.getScale()); break; default: throw CommonError.convertToSeaTunnelTypeError( DatabaseIdentifier.MYSQL, mysqlDataType, typeDefine.getName()); } return builder.build(); }
@Test public void testConvertBit() { BasicTypeDefine<Object> typeDefine = BasicTypeDefine.builder() .name("test") .columnType("bit(1)") .dataType("bit") .length(1L) .build(); Column column = MySqlTypeConverter.DEFAULT_INSTANCE.convert(typeDefine); Assertions.assertEquals(typeDefine.getName(), column.getName()); Assertions.assertEquals(BasicType.BOOLEAN_TYPE, column.getDataType()); Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType()); typeDefine = BasicTypeDefine.builder() .name("test") .columnType("bit(9)") .dataType("bit") .length(9L) .build(); column = MySqlTypeConverter.DEFAULT_INSTANCE.convert(typeDefine); Assertions.assertEquals(typeDefine.getName(), column.getName()); Assertions.assertEquals(PrimitiveByteArrayType.INSTANCE, column.getDataType()); Assertions.assertEquals(2, column.getColumnLength()); Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType()); }
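Grounded in the MYSQL_TINYINT branch of the converter, a sketch in the same style as the bit cases above; tinyint(1) is special-cased as a boolean flag:
@Test
public void testConvertTinyint1ToBoolean() {
    BasicTypeDefine<Object> typeDefine = BasicTypeDefine.builder()
        .name("test")
        .columnType("tinyint(1)")
        .dataType("tinyint")
        .build();
    Column column = MySqlTypeConverter.DEFAULT_INSTANCE.convert(typeDefine);
    Assertions.assertEquals(BasicType.BOOLEAN_TYPE, column.getDataType());
    Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType());
}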
@Override public boolean match(Message msg, StreamRule rule) { Object rawField = msg.getField(rule.getField()); if (rawField == null) { return rule.getInverted(); } if (rawField instanceof String) { String field = (String) rawField; return rule.getInverted() ^ !field.trim().isEmpty(); } return !rule.getInverted(); }
@Test public void testInvertedBasicMatch() throws Exception { StreamRule rule = getSampleRule(); rule.setField("message"); rule.setType(StreamRuleType.PRESENCE); rule.setInverted(true); Message message = getSampleMessage(); StreamRuleMatcher matcher = getMatcher(rule); Boolean result = matcher.match(message, rule); assertFalse(result); }
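The inverted assertion above implies the sample message's field is present and non-blank, so the non-inverted counterpart should match; a sketch assuming the same getSampleRule/getSampleMessage/getMatcher helpers:
@Test
public void testBasicMatch() throws Exception {
    StreamRule rule = getSampleRule();
    rule.setField("message");
    rule.setType(StreamRuleType.PRESENCE);
    rule.setInverted(false);
    Message message = getSampleMessage();
    assertTrue(getMatcher(rule).match(message, rule));
}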
@VisibleForTesting public List<ProjectionContext> planRemoteAssignments(Assignments assignments, VariableAllocator variableAllocator) { ImmutableList.Builder<List<ProjectionContext>> assignmentProjections = ImmutableList.builder(); for (Map.Entry<VariableReferenceExpression, RowExpression> entry : assignments.getMap().entrySet()) { List<ProjectionContext> rewritten = entry.getValue().accept(new Visitor(functionAndTypeManager, variableAllocator), null); if (rewritten.isEmpty()) { assignmentProjections.add(ImmutableList.of(new ProjectionContext(ImmutableMap.of(entry.getKey(), entry.getValue()), false))); } else { checkState(rewritten.get(rewritten.size() - 1).getProjections().size() == 1, "Expect at most 1 assignment from last projection in rewrite"); ProjectionContext last = rewritten.get(rewritten.size() - 1); ImmutableList.Builder<ProjectionContext> projectionContextBuilder = ImmutableList.builder(); projectionContextBuilder.addAll(rewritten.subList(0, rewritten.size() - 1)); projectionContextBuilder.add(new ProjectionContext(ImmutableMap.of(entry.getKey(), getOnlyElement(last.getProjections().values())), last.isRemote())); assignmentProjections.add(projectionContextBuilder.build()); } } List<ProjectionContext> mergedProjectionContexts = mergeProjectionContexts(assignmentProjections.build()); return dedupVariables(mergedProjectionContexts); }
@Test void testRemoteOnly() { PlanBuilder planBuilder = new PlanBuilder(TEST_SESSION, new PlanNodeIdAllocator(), getMetadata()); PlanRemoteProjections rule = new PlanRemoteProjections(getFunctionAndTypeManager()); List<ProjectionContext> rewritten = rule.planRemoteAssignments(Assignments.builder() .put(planBuilder.variable("a"), planBuilder.rowExpression("unittest.memory.remote_foo()")) .put(planBuilder.variable("b"), planBuilder.rowExpression("unittest.memory.remote_foo(unittest.memory.remote_foo())")) .build(), new VariableAllocator(planBuilder.getTypes().allVariables())); assertEquals(rewritten.size(), 2); assertEquals(rewritten.get(1).getProjections().size(), 2); }
@Override public final ChannelPipeline remove(ChannelHandler handler) { remove(getContextOrDie(handler)); return this; }
@Test @Timeout(value = 10000, unit = TimeUnit.MILLISECONDS) public void testRemoveAndForwardOutbound() throws Exception { final BufferedTestHandler handler1 = new BufferedTestHandler(); final BufferedTestHandler handler2 = new BufferedTestHandler(); setUp(handler1, handler2); self.eventLoop().submit(new Runnable() { @Override public void run() { ChannelPipeline p = self.pipeline(); handler2.outboundBuffer.add(8); assertEquals(8, handler2.outboundBuffer.peek()); assertTrue(handler1.outboundBuffer.isEmpty()); p.remove(handler2); assertEquals(1, handler1.outboundBuffer.size()); assertEquals(8, handler1.outboundBuffer.peek()); } }).sync(); }
public void clearPendingTasks() { final long stamp = this.stampedLock.writeLock(); try { this.pendingMetaQueue.clear(); this.pendingIndex = 0; this.closureQueue.clear(); } finally { this.stampedLock.unlockWrite(stamp); } }
@Test public void testClearPendingTasks() { testAppendPendingTask(); this.box.clearPendingTasks(); assertTrue(this.box.getPendingMetaQueue().isEmpty()); assertTrue(this.closureQueue.getQueue().isEmpty()); assertEquals(0, closureQueue.getFirstIndex()); }
public static List<String> listMatchedFilesWithRecursiveOption(PinotFS pinotFs, URI fileUri, @Nullable String includePattern, @Nullable String excludePattern, boolean searchRecursively) throws Exception { String[] files; // listFiles throws IOException files = pinotFs.listFiles(fileUri, searchRecursively); //TODO: sort input files based on creation time PathMatcher includeFilePathMatcher = null; if (includePattern != null) { includeFilePathMatcher = FileSystems.getDefault().getPathMatcher(includePattern); } PathMatcher excludeFilePathMatcher = null; if (excludePattern != null) { excludeFilePathMatcher = FileSystems.getDefault().getPathMatcher(excludePattern); } List<String> filteredFiles = new ArrayList<>(); for (String file : files) { if (includeFilePathMatcher != null) { if (!includeFilePathMatcher.matches(Paths.get(file))) { continue; } } if (excludeFilePathMatcher != null) { if (excludeFilePathMatcher.matches(Paths.get(file))) { continue; } } if (!pinotFs.isDirectory(new URI(sanitizeURIString(file)))) { // In case PinotFS implementations list files without a scheme (e.g. hdfs://), then we may lose it in the // input file path. Call SegmentGenerationUtils.getFileURI() to fix this up. // getFileURI throws URISyntaxException filteredFiles.add(SegmentGenerationUtils.getFileURI(file, fileUri).toString()); } } if (filteredFiles.isEmpty()) { throw new RuntimeException(String.format( "No file found in the input directory: %s matching includeFileNamePattern: %s," + " excludeFileNamePattern: %s", fileUri, includePattern, excludePattern)); } return filteredFiles; }
@Test public void testMatchFilesRecursiveSearchOnRecursiveInputFilePattern() throws Exception { File testDir = makeTestDir(); File inputDir = new File(testDir, "input"); File inputSubDir1 = new File(inputDir, "2009"); inputSubDir1.mkdirs(); File inputFile1 = new File(inputDir, "input.csv"); FileUtils.writeLines(inputFile1, Lists.newArrayList("col1,col2", "value1,1", "value2,2")); File inputFile2 = new File(inputSubDir1, "input.csv"); FileUtils.writeLines(inputFile2, Lists.newArrayList("col1,col2", "value3,3", "value4,4")); URI inputDirURI = new URI(inputDir.getAbsolutePath()); if (inputDirURI.getScheme() == null) { inputDirURI = new File(inputDir.getAbsolutePath()).toURI(); } PinotFS inputDirFS = PinotFSFactory.create(inputDirURI.getScheme()); String includePattern = "glob:" + inputDir.getAbsolutePath() + "/**.csv"; List<String> files = SegmentGenerationUtils.listMatchedFilesWithRecursiveOption(inputDirFS, inputDirURI, includePattern, null, true); Assert.assertEquals(files.size(), 2); }
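The "glob:" prefix built in the test is standard java.nio syntax understood by FileSystems.getDefault().getPathMatcher, which the focal method uses for include/exclude filtering. A small self-contained sketch with made-up sample paths:

import java.nio.file.FileSystems;
import java.nio.file.PathMatcher;
import java.nio.file.Paths;

public class GlobDemo {
    public static void main(String[] args) {
        // "**" crosses directory boundaries, so both top-level and nested CSVs match.
        PathMatcher matcher = FileSystems.getDefault().getPathMatcher("glob:/data/input/**.csv");
        System.out.println(matcher.matches(Paths.get("/data/input/input.csv")));      // true
        System.out.println(matcher.matches(Paths.get("/data/input/2009/input.csv"))); // true
        System.out.println(matcher.matches(Paths.get("/data/input/input.json")));     // false
    }
}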
public static ByteArrayCoder of() { return INSTANCE; }
@Test public void testRegisterByteSizeObserver() throws Exception { CoderProperties.testByteCount( ByteArrayCoder.of(), Coder.Context.OUTER, new byte[][] {{0xa, 0xb, 0xc}}); CoderProperties.testByteCount( ByteArrayCoder.of(), Coder.Context.NESTED, new byte[][] {{0xa, 0xb, 0xc}, {}, {}, {0xd, 0xe}, {}}); }
@Override public void add(Event event) { events.add(requireNonNull(event)); }
@Test public void add_throws_NPE_if_even_arg_is_null() { assertThatThrownBy(() -> underTest.add(null)) .isInstanceOf(NullPointerException.class); }
@Override @TpsControl(pointName = "RemoteNamingInstanceBatchRegister", name = "RemoteNamingInstanceBatchRegister") @Secured(action = ActionTypes.WRITE) @ExtractorManager.Extractor(rpcExtractor = BatchInstanceRequestParamExtractor.class) public BatchInstanceResponse handle(BatchInstanceRequest request, RequestMeta meta) throws NacosException { Service service = Service.newService(request.getNamespace(), request.getGroupName(), request.getServiceName(), true); InstanceUtil.batchSetInstanceIdIfEmpty(request.getInstances(), service.getGroupedServiceName()); switch (request.getType()) { case NamingRemoteConstants.BATCH_REGISTER_INSTANCE: return batchRegisterInstance(service, request, meta); default: throw new NacosException(NacosException.INVALID_PARAM, String.format("Unsupported request type %s", request.getType())); } }
@Test void testHandle() throws NacosException { BatchInstanceRequest batchInstanceRequest = new BatchInstanceRequest(); batchInstanceRequest.setType(NamingRemoteConstants.BATCH_REGISTER_INSTANCE); batchInstanceRequest.setServiceName("service1"); batchInstanceRequest.setGroupName("group1"); List<Instance> instanceList = new ArrayList<>(); Instance instance = new Instance(); instanceList.add(instance); batchInstanceRequest.setInstances(instanceList); RequestMeta requestMeta = new RequestMeta(); batchInstanceRequestHandler.handle(batchInstanceRequest, requestMeta); Mockito.verify(clientOperationService).batchRegisterInstance(Mockito.any(), Mockito.any(), Mockito.anyString()); batchInstanceRequest.setType("google"); try { batchInstanceRequestHandler.handle(batchInstanceRequest, requestMeta); fail("Expected NacosException for unsupported request type"); } catch (NacosException e) { assertEquals(NacosException.INVALID_PARAM, e.getErrCode()); } }
@Override public String format(final Schema schema) { final String converted = SchemaWalker.visit(schema, new Converter()) + typePostFix(schema); return options.contains(Option.AS_COLUMN_LIST) ? stripTopLevelStruct(converted) : converted; }
@Test public void shouldFormatArray() { // Given: final Schema schema = SchemaBuilder .array(Schema.FLOAT64_SCHEMA) .build(); // Then: assertThat(DEFAULT.format(schema), is("ARRAY<DOUBLE>")); assertThat(STRICT.format(schema), is("ARRAY<DOUBLE NOT NULL> NOT NULL")); }
@Override public V computeIfAbsent(K key, Function<? super K, ? extends V> mappingFunction) { final V result = super.computeIfAbsent(key, mappingFunction); resetInverseMap(); return result; }
@Test public void computeIfAbsentTest() { final BiMap<String, Integer> biMap = new BiMap<>(new HashMap<>()); biMap.put("aaa", 111); biMap.put("bbb", 222); biMap.computeIfAbsent("ccc", s -> 333); assertEquals(Integer.valueOf(333), biMap.get("ccc")); assertEquals("ccc", biMap.getKey(333)); }
@Override public String getUUID() throws LocalAccessDeniedException { try { final NetworkInterface in = NetworkInterface.getByInetAddress(InetAddress.getLocalHost()); if(null == in) { return this.enumerate(); } final byte[] address = in.getHardwareAddress(); if(null == address) { return this.enumerate(); } return this.toHex(address); } catch(UnknownHostException | SocketException e) { throw new LocalAccessDeniedException(e.getMessage(), e); } }
@Test public void getUUID() throws Exception { assertNotNull(new MacUniqueIdService().getUUID()); }
static Schema getSchema(Class<? extends Message> clazz) { return getSchema(ProtobufUtil.getDescriptorForClass(clazz)); }
@Test public void testReversedOneOfSchema() { assertEquals( TestProtoSchemas.REVERSED_ONEOF_SCHEMA, ProtoSchemaTranslator.getSchema(Proto3SchemaMessages.ReversedOneOf.class)); }
public static void addSortedParams(UriBuilder uriBuilder, DataMap params, ProtocolVersion version) { if(version.compareTo(AllProtocolVersions.RESTLI_PROTOCOL_2_0_0.getProtocolVersion()) >= 0) { addSortedParams(uriBuilder, params); } else { QueryParamsDataMap.addSortedParams(uriBuilder, params); } }
@Test public void addSortedParams() { DataMap queryParams = new DataMap(); DataMap aParamMap = new DataMap(); aParamMap.put("someField", "someValue"); aParamMap.put("foo", "bar"); aParamMap.put("empty", new DataMap()); DataList bParamList = new DataList(); bParamList.add("x"); bParamList.add("y"); bParamList.add("z"); queryParams.put("aParam", aParamMap); queryParams.put("bParam", bParamList); UriBuilder uriBuilder = new UriBuilder(); URIParamUtils.addSortedParams(uriBuilder, queryParams); String query = uriBuilder.build().getQuery(); Assert.assertEquals(query, "aParam=(empty:(),foo:bar,someField:someValue)&bParam=List(x,y,z)"); }
@Override public ObjectNode encode(MaintenanceAssociation ma, CodecContext context) { checkNotNull(ma, "Maintenance Association cannot be null"); ObjectNode result = context.mapper().createObjectNode() .put(MA_NAME, ma.maId().toString()) .put(MA_NAME_TYPE, ma.maId().nameType().name()); if (ma.maNumericId() > 0) { result = result.put(MA_NUMERIC_ID, ma.maNumericId()); } if (ma.ccmInterval() != null) { result = result.put(CCM_INTERVAL, ma.ccmInterval().name()); } result.set(COMPONENT_LIST, new ComponentCodec().encode(ma.componentList(), context)); result.set(RMEP_LIST, new RMepCodec().encode(ma.remoteMepIdList(), context)); return result; }
@Test public void testEncodeMa5() throws CfmConfigException { MaintenanceAssociation ma1 = DefaultMaintenanceAssociation.builder(MAID5_Y1731, 10) .maNumericId((short) 5) .build(); ObjectNode node = mapper.createObjectNode(); node.set("ma", context.codec(MaintenanceAssociation.class).encode(ma1, context)); assertEquals("{\"ma\":{" + "\"maName\":\"abc:defghij\"," + "\"maNameType\":\"ICCY1731\"," + "\"maNumericId\":5," + "\"component-list\":[]," + "\"rmep-list\":[]}}", node.toString()); }
@Override public boolean test(Pickle pickle) { String name = pickle.getName(); return patterns.stream().anyMatch(pattern -> pattern.matcher(name).find()); }
@Test void non_anchored_name_pattern_matches_part_of_name() { Pickle pickle = createPickleWithName("a pickle name with suffix"); NamePredicate predicate = new NamePredicate(singletonList(Pattern.compile("a pickle name"))); assertTrue(predicate.test(pickle)); }
@Override public boolean isAllowedTaskMovement(final ClientState source, final ClientState destination) { final Map<String, String> sourceClientTags = clientTagFunction.apply(source.processId(), source); final Map<String, String> destinationClientTags = clientTagFunction.apply(destination.processId(), destination); for (final Entry<String, String> sourceClientTagEntry : sourceClientTags.entrySet()) { if (!sourceClientTagEntry.getValue().equals(destinationClientTags.get(sourceClientTagEntry.getKey()))) { return false; } } return true; }
@Test public void shouldDeclineTaskMovementWhenClientTagsDoNotMatch() { final ClientState source = createClientStateWithCapacity(PID_1, 1, mkMap(mkEntry(ZONE_TAG, ZONE_1), mkEntry(CLUSTER_TAG, CLUSTER_1))); final ClientState destination = createClientStateWithCapacity(PID_2, 1, mkMap(mkEntry(ZONE_TAG, ZONE_2), mkEntry(CLUSTER_TAG, CLUSTER_1))); assertFalse(standbyTaskAssignor.isAllowedTaskMovement(source, destination)); }
public static <T> Point<T> interpolate(Point<T> p1, Point<T> p2, Instant targetTime) { checkNotNull(p1, "Cannot perform interpolation when the first input point is null"); checkNotNull(p2, "Cannot perform interpolation when the second input point is null"); checkNotNull(targetTime, "Cannot perform interpolation when the targetTime is null"); checkArgument( p1.time().isBefore(p2.time()) || p1.time().equals(p2.time()), "The input points must be in chronological order" ); TimeWindow window = TimeWindow.of(p1.time(), p2.time()); checkArgument( window.contains(targetTime), "The targetTime is outside the required time window" ); if (p1.time().equals(targetTime)) { return (new PointBuilder<T>(p1)).build(); } else if (p2.time().equals(targetTime)) { return (new PointBuilder<T>(p2)).build(); } else { double fraction = window.toFractionOfRange(targetTime); //build an interpolated point LatLong interpolatedLatLong = interpolateLatLong(p1.latLong(), p2.latLong(), fraction); Double interpolatedCourseInDegrees = interpolateCourse( isNull(p1.course()) ? null : p1.course().inDegrees(), isNull(p2.course()) ? null : p2.course().inDegrees(), fraction ); //correct the interpolated course when one of the input values was null if (interpolatedCourseInDegrees == null) { interpolatedCourseInDegrees = Spherical.courseInDegrees(p1.latLong(), p2.latLong()); } double interpolatedSpeed = interpolateSpeed(p1, p2, fraction); Distance interpolatedAltitude = interpolate( p1.altitude(), p2.altitude(), fraction ); //return a copy of the 1st input point but with corrected trajectory data return (new PointBuilder<T>(p1)) .latLong(interpolatedLatLong) .course(Course.ofDegrees(interpolatedCourseInDegrees)) .speed(Speed.ofKnots(interpolatedSpeed)) .altitude(interpolatedAltitude) .time(targetTime) .build(); } }
@Test public void testInterpolatePoint2() { /* * Test the interpolation works properly at the "start" of the timewindow */ Point<String> p1 = (new PointBuilder<String>()) .time(Instant.EPOCH) .altitude(Distance.ofFeet(1000.0)) .courseInDegrees(120.0) .latLong(new LatLong(0.0, 10.0)) .speedInKnots(200.0) .build(); Point<String> p2 = (new PointBuilder<String>()) .time(Instant.EPOCH.plusSeconds(8)) .altitude(Distance.ofFeet(500.0)) .courseInDegrees(130.0) .latLong(new LatLong(5.0, 15.0)) .speedInKnots(300.0) .build(); Point<String> testPoint = interpolate(p1, p2, Instant.EPOCH); double TOLERANCE = 0.0001; assertEquals( Instant.EPOCH, testPoint.time() ); assertEquals( 1000.0, testPoint.altitude().inFeet(), TOLERANCE ); assertEquals( 120.0, testPoint.course().inDegrees(), TOLERANCE ); assertEquals(LatLong.of(0.0, 10.0), testPoint.latLong()); assertEquals( 200.0, testPoint.speed().inKnots(), TOLERANCE ); }
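The speed and altitude interpolation above boils down to ordinary linear interpolation over the fraction of the time window that has elapsed. A minimal sketch of that arithmetic under that assumption; LerpDemo and lerp are illustrative names, not part of the library:

import java.time.Duration;
import java.time.Instant;

public class LerpDemo {
    // Linear interpolation of a scalar between two timed samples.
    static double lerp(Instant t1, double v1, Instant t2, double v2, Instant target) {
        double fraction = (double) Duration.between(t1, target).toMillis()
                / Duration.between(t1, t2).toMillis();
        return v1 + fraction * (v2 - v1);
    }

    public static void main(String[] args) {
        Instant start = Instant.EPOCH;
        Instant end = start.plusSeconds(8);
        // Halfway through the window the speed is halfway between 200 and 300 knots.
        System.out.println(lerp(start, 200.0, end, 300.0, start.plusSeconds(4))); // 250.0
    }
}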
@Override public Object clone() { StepMeta stepMeta = new StepMeta(); stepMeta.replaceMeta( this ); stepMeta.setObjectId( null ); return stepMeta; }
@Test public void cloning() throws Exception { StepMeta meta = createTestMeta(); StepMeta clone = (StepMeta) meta.clone(); assertEquals( meta, clone ); }
public static Map<String, Class<?>> compile(Map<String, String> classNameSourceMap, ClassLoader classLoader) { return compile(classNameSourceMap, classLoader, null); }
@Test public void doNotFailOnWarning() throws Exception { Map<String, String> source = singletonMap("org.kie.memorycompiler.WarningClass", WARNING_CLASS); Map<String, Class<?>> compiled = KieMemoryCompiler.compile(source, this.getClass().getClassLoader()); Class<?> exampleClazz = compiled.get("org.kie.memorycompiler.WarningClass"); assertThat(exampleClazz).isNotNull(); Object instance = exampleClazz.getDeclaredConstructors()[0].newInstance(); Method minusMethod = exampleClazz.getMethod("minus", Integer.class, Integer.class); Object result = minusMethod.invoke(instance, 8, 4); assertThat(result).isEqualTo(4); }
public ConfigTransformerResult transform(Map<String, String> configs) { Map<String, Map<String, Set<String>>> keysByProvider = new HashMap<>(); Map<String, Map<String, Map<String, String>>> lookupsByProvider = new HashMap<>(); // Collect the variables from the given configs that need transformation for (Map.Entry<String, String> config : configs.entrySet()) { if (config.getValue() != null) { List<ConfigVariable> configVars = getVars(config.getValue(), DEFAULT_PATTERN); for (ConfigVariable configVar : configVars) { Map<String, Set<String>> keysByPath = keysByProvider.computeIfAbsent(configVar.providerName, k -> new HashMap<>()); Set<String> keys = keysByPath.computeIfAbsent(configVar.path, k -> new HashSet<>()); keys.add(configVar.variable); } } } // Retrieve requested variables from the ConfigProviders Map<String, Long> ttls = new HashMap<>(); for (Map.Entry<String, Map<String, Set<String>>> entry : keysByProvider.entrySet()) { String providerName = entry.getKey(); ConfigProvider provider = configProviders.get(providerName); Map<String, Set<String>> keysByPath = entry.getValue(); if (provider != null && keysByPath != null) { for (Map.Entry<String, Set<String>> pathWithKeys : keysByPath.entrySet()) { String path = pathWithKeys.getKey(); Set<String> keys = new HashSet<>(pathWithKeys.getValue()); ConfigData configData = provider.get(path, keys); Map<String, String> data = configData.data(); Long ttl = configData.ttl(); if (ttl != null && ttl >= 0) { ttls.put(path, ttl); } Map<String, Map<String, String>> keyValuesByPath = lookupsByProvider.computeIfAbsent(providerName, k -> new HashMap<>()); keyValuesByPath.put(path, data); } } } // Perform the transformations by performing variable replacements Map<String, String> data = new HashMap<>(configs); for (Map.Entry<String, String> config : configs.entrySet()) { data.put(config.getKey(), replace(lookupsByProvider, config.getValue(), DEFAULT_PATTERN)); } return new ConfigTransformerResult(data, ttls); }
@Test public void testReplaceVariable() { ConfigTransformerResult result = configTransformer.transform(Collections.singletonMap(MY_KEY, "${test:testPath:testKey}")); Map<String, String> data = result.data(); Map<String, Long> ttls = result.ttls(); assertEquals(TEST_RESULT, data.get(MY_KEY)); assertTrue(ttls.isEmpty()); }
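transform collects ${provider:path:key} placeholders, batches the lookups per provider and path, then substitutes the resolved values. A simplified single-map sketch of the substitution step; the pattern and flat lookup-key format here are assumptions for illustration, not the real DEFAULT_PATTERN:

import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class VariableReplaceDemo {
    private static final Pattern VAR = Pattern.compile("\\$\\{(\\w+):(\\w+):(\\w+)}");

    static String replace(String value, Map<String, String> lookup) {
        Matcher m = VAR.matcher(value);
        StringBuffer sb = new StringBuffer();
        while (m.find()) {
            String key = m.group(1) + ":" + m.group(2) + ":" + m.group(3);
            // Unresolved placeholders are left verbatim.
            m.appendReplacement(sb, Matcher.quoteReplacement(lookup.getOrDefault(key, m.group())));
        }
        m.appendTail(sb);
        return sb.toString();
    }

    public static void main(String[] args) {
        Map<String, String> lookup = Map.of("test:testPath:testKey", "testResult");
        System.out.println(replace("url=${test:testPath:testKey}", lookup)); // url=testResult
    }
}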
public ImmutableList<Process> runServerProcesses() { logger.atInfo().log("Starting language server processes (if any)..."); return commands.stream() // Filter out commands that don't need server start up .filter(command -> !Strings.isNullOrEmpty(command.serverCommand())) .map( command -> runProcess( CommandExecutorFactory.create( command.serverCommand(), getCommand("--port=", command.port()), getCommand("--log_id=", command.logId()), getCommand("--log_output=", command.outputDir()), "--trust_all_ssl_cert=" + command.trustAllSslCert(), getCommand("--timeout_seconds=", command.timeoutSeconds().getSeconds()), getCommand("--callback_address=", command.callbackAddress()), getCommand("--callback_port=", command.callbackPort()), getCommand("--polling_uri=", command.pollingUri())))) .filter(Optional::isPresent) .map(Optional::get) .collect(toImmutableList()); }
@Test public void runServerProcess_whenServerAddressExistsAndNormalPort_returnsEmptyProcessList() { ImmutableList<LanguageServerCommand> commands = ImmutableList.of( LanguageServerCommand.create( "", "127.0.0.1", "34567", "34", "/output-here", false, Duration.ofSeconds(10), "157.34.0.2", 8080, "157.34.0.2:8881", 0)); RemoteServerLoader loader = Guice.createInjector(new RemoteServerLoaderModule(commands)) .getInstance(RemoteServerLoader.class); var processList = loader.runServerProcesses(); assertThat(processList).isEmpty(); }
@VisibleForTesting public static JobGraph createJobGraph(StreamGraph streamGraph) { return new StreamingJobGraphGenerator( Thread.currentThread().getContextClassLoader(), streamGraph, null, Runnable::run) .createJobGraph(); }
@Deprecated @Test void testSinkSupportConcurrentExecutionAttemptsWithDeprecatedSink() { final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(new Configuration()); env.setRuntimeMode(RuntimeExecutionMode.BATCH); final DataStream<Integer> source = env.fromData(1, 2, 3).name("source"); source.rebalance() .sinkTo(new TestSinkWithSupportsConcurrentExecutionAttemptsDeprecated()) .name("sink"); final StreamGraph streamGraph = env.getStreamGraph(); final JobGraph jobGraph = StreamingJobGraphGenerator.createJobGraph(streamGraph); assertThat(jobGraph.getNumberOfVertices()).isEqualTo(6); for (JobVertex jobVertex : jobGraph.getVertices()) { if (jobVertex.getName().contains("source")) { assertThat(jobVertex.isSupportsConcurrentExecutionAttempts()).isTrue(); } else if (jobVertex.getName().contains("pre-writer")) { assertThat(jobVertex.isSupportsConcurrentExecutionAttempts()).isTrue(); } else if (jobVertex.getName().contains("Writer")) { assertThat(jobVertex.isSupportsConcurrentExecutionAttempts()).isTrue(); } else if (jobVertex.getName().contains("pre-committer")) { assertThat(jobVertex.isSupportsConcurrentExecutionAttempts()).isFalse(); } else if (jobVertex.getName().contains("post-committer")) { assertThat(jobVertex.isSupportsConcurrentExecutionAttempts()).isFalse(); } else if (jobVertex.getName().contains("Committer")) { assertThat(jobVertex.isSupportsConcurrentExecutionAttempts()).isFalse(); } else { Assertions.fail("Unexpected job vertex " + jobVertex.getName()); } } }
public static DataNode parseWithSchema(final String text) { List<String> segments = Splitter.on(".").splitToList(text); boolean hasSchema = 3 == segments.size(); if (!(2 == segments.size() || hasSchema)) { throw new InvalidDataNodeFormatException(text); } DataNode result = new DataNode(segments.get(0), segments.get(segments.size() - 1)); if (hasSchema) { result.setSchemaName(segments.get(1)); } return result; }
@Test void assertParseWithSchema() { DataNode actual = DataNodeUtils.parseWithSchema("ds_0.public.tbl_0"); assertThat(actual.getDataSourceName(), is("ds_0")); assertThat(actual.getSchemaName(), is("public")); assertThat(actual.getTableName(), is("tbl_0")); }
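parseWithSchema treats two segments as datasource.table and three as datasource.schema.table. A quick sketch of the same segment logic, using Guava's Splitter as the method itself does:

import com.google.common.base.Splitter;
import java.util.List;

public class DataNodeParseDemo {
    public static void main(String[] args) {
        List<String> segments = Splitter.on(".").splitToList("ds_0.public.tbl_0");
        String dataSource = segments.get(0);
        String table = segments.get(segments.size() - 1);
        // Only a three-segment name carries a schema in the middle position.
        String schema = segments.size() == 3 ? segments.get(1) : null;
        System.out.println(dataSource + " / " + schema + " / " + table); // ds_0 / public / tbl_0
    }
}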
public static <T> List<LocalProperty<T>> grouped(Collection<T> columns) { return ImmutableList.of(new GroupingProperty<>(columns)); }
@Test public void testPartialConstantGroup() { List<LocalProperty<String>> actual = builder() .constant("a") .grouped("a", "b") .build(); assertMatch( actual, builder().grouped("a", "b", "c").build(), Optional.of(grouped("c"))); assertMatch( actual, builder().grouped("a", "b").build(), Optional.empty()); assertMatch( actual, builder().grouped("a").build(), Optional.empty()); assertMatch( actual, builder().grouped("b").build(), Optional.empty()); }
@ShellMethod(key = {"temp_delete", "temp delete"}, value = "Delete view name") public String delete( @ShellOption(value = {"--view"}, help = "view name") final String tableName) { try { HoodieCLI.getTempViewProvider().deleteTable(tableName); return String.format("Delete view %s successfully!", tableName); } catch (HoodieException ex) { return String.format("Delete view %s failed!", tableName); } }
@Test public void testDelete() { Object result = shell.evaluate(() -> String.format("temp delete --view %s", tableName)); assertTrue(result.toString().endsWith("successfully!")); // after deletion, the table can no longer be queried. assertThrows(HoodieException.class, () -> HoodieCLI.getTempViewProvider().runQuery("select * from " + tableName)); }
public static ByteBuffer sliceByteBuffer(ByteBuffer buffer, int position, int length) { ByteBuffer slicedBuffer = ((ByteBuffer) buffer.duplicate().position(position)).slice(); slicedBuffer.limit(length); return slicedBuffer; }
@Test public void sliceByteBuffer() { final int size = 100; final ByteBuffer buf = BufferUtils.getIncreasingByteBuffer(size); for (int slicePosition : new int[] {0, 1, size / 2, size - 1}) { // Slice a ByteBuffer of length 1 ByteBuffer slicedBuffer = BufferUtils.sliceByteBuffer(buf, slicePosition, 1); assertEquals(0, slicedBuffer.position()); assertEquals(1, slicedBuffer.limit()); assertTrue(BufferUtils.equalIncreasingByteBuffer(slicePosition, 1, slicedBuffer)); // Slice a ByteBuffer from the target position to the end int slicedBufferLength = size - slicePosition; ByteBuffer slicedBuffer1 = BufferUtils.sliceByteBuffer(buf, slicePosition, slicedBufferLength); ByteBuffer slicedBuffer2 = BufferUtils.sliceByteBuffer(buf, slicePosition); assertEquals(0, slicedBuffer1.position()); assertEquals(0, slicedBuffer2.position()); assertEquals(slicedBufferLength, slicedBuffer1.limit()); assertEquals(slicedBufferLength, slicedBuffer2.limit()); assertTrue(BufferUtils.equalIncreasingByteBuffer(slicePosition, slicedBufferLength, slicedBuffer1)); assertTrue(BufferUtils.equalIncreasingByteBuffer(slicePosition, slicedBufferLength, slicedBuffer2)); } }
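The duplicate()/position()/slice() chain yields a zero-based view over the same backing storage without disturbing the source buffer's position; the cast through ByteBuffer keeps the code compilable before Java 9, where Buffer.position returned the base type. A compact demonstration:

import java.nio.ByteBuffer;

public class SliceDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.wrap(new byte[] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9});
        ByteBuffer slice = ((ByteBuffer) buf.duplicate().position(3)).slice();
        slice.limit(4);
        System.out.println(slice.position()); // 0
        System.out.println(slice.limit());    // 4
        System.out.println(slice.get(0));     // 3 -- the slice shares the backing array
        System.out.println(buf.position());   // 0 -- the original buffer is untouched
    }
}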
@Override public Iterator<T> iterator() { return new LinkedSetIterator(); }
@Test public void testMultiBasic() { LOG.info("Test multi element basic"); // add once for (Integer i : list) { assertTrue(set.add(i)); } assertEquals(list.size(), set.size()); // check if the elements are in the set for (Integer i : list) { assertTrue(set.contains(i)); } // add again - should return false each time for (Integer i : list) { assertFalse(set.add(i)); } // check again if the elements are there for (Integer i : list) { assertTrue(set.contains(i)); } Iterator<Integer> iter = set.iterator(); int num = 0; while (iter.hasNext()) { assertEquals(list.get(num++), iter.next()); } // check the number of element from the iterator assertEquals(list.size(), num); LOG.info("Test multi element basic - DONE"); }
public static String toString(final Host bookmark) { return toString(bookmark, false); }
@Test public void testToStringNoDefaultHostname() { final TestProtocol protocol = new TestProtocol(Scheme.file) { @Override public String getName() { return "Disk"; } @Override public String getDefaultHostname() { return ""; } }; assertEquals("Disk", BookmarkNameProvider.toString(new Host(protocol, StringUtils.EMPTY), true)); assertEquals("Disk", BookmarkNameProvider.toString(new Host(protocol, StringUtils.EMPTY), false)); }
@GetMapping( path = "/api/{namespace}/{extension}", produces = MediaType.APPLICATION_JSON_VALUE ) @CrossOrigin @Operation(summary = "Provides metadata of the latest version of an extension") @ApiResponses({ @ApiResponse( responseCode = "200", description = "The extension metadata are returned in JSON format" ), @ApiResponse( responseCode = "404", description = "The specified extension could not be found", content = @Content() ), @ApiResponse( responseCode = "429", description = "A client has sent too many requests in a given amount of time", content = @Content(), headers = { @Header( name = "X-Rate-Limit-Retry-After-Seconds", description = "Number of seconds to wait after receiving a 429 response", schema = @Schema(type = "integer", format = "int32") ), @Header( name = "X-Rate-Limit-Remaining", description = "Remaining number of requests left", schema = @Schema(type = "integer", format = "int32") ) } ) }) public ResponseEntity<ExtensionJson> getExtension( @PathVariable @Parameter(description = "Extension namespace", example = "redhat") String namespace, @PathVariable @Parameter(description = "Extension name", example = "java") String extension ) { for (var registry : getRegistries()) { try { return ResponseEntity.ok() .cacheControl(CacheControl.noCache().cachePublic()) .body(registry.getExtension(namespace, extension, null)); } catch (NotFoundException exc) { // Try the next registry } } var json = ExtensionJson.error("Extension not found: " + NamingUtil.toExtensionId(namespace, extension)); return new ResponseEntity<>(json, HttpStatus.NOT_FOUND); }
@Test public void testInactiveExtension() throws Exception { var extVersion = mockExtension(); extVersion.setActive(false); extVersion.getExtension().setActive(false); mockMvc.perform(get("/api/{namespace}/{extension}", "foo", "bar")) .andExpect(status().isNotFound()) .andExpect(content().json(errorJson("Extension not found: foo.bar"))); }
@EventListener void updateTask(TaskChangeEvent event) { removeTaskFromScheduler(event.getTask().getId()); if (!event.isRemoved() && event.getTask().getActive()) { addTaskToScheduler(event.getTask().getId(), new SimpleTaskRunnable(event.getTask(), clientFactory.getClientForApplication(event.getTask().getApplication())), event.getTask().getCron()); } }
@Test public void updatedTaskIsRemovedAndAddedToScheduleWhenActive() { Task taskA = new Task(); taskA.setId(1L); taskA.setName("old"); taskA.setCron("0 0 * * * *"); Map<Long, ScheduledFuture<?>> jobsMap = new HashMap<>(); ScheduledFuture<?> mockA = mock(ScheduledFuture.class); jobsMap.put(taskA.getId(), mockA); ReflectionTestUtils.setField(service, "jobsMap", jobsMap); when(scheduler.schedule(any(Runnable.class), any(CronTrigger.class))).thenReturn(mock(ScheduledFuture.class)); assertNotNull(((Map<Long, ScheduledFuture<?>>) ReflectionTestUtils.getField(service, "jobsMap")).get(1L)); Task taskAUpdated = new Task(); taskAUpdated.setId(1L); taskAUpdated.setName("new"); taskAUpdated.setCron("0 0 * * * *"); taskAUpdated.setActive(true); service.updateTask(new TaskChangeEvent(this, taskAUpdated, false)); assertEquals(1, ((Map<Long, ScheduledFuture<?>>) ReflectionTestUtils.getField(service, "jobsMap")).size()); assertNotEquals(((Map<Long, ScheduledFuture<?>>) ReflectionTestUtils.getField(service, "jobsMap")).get(1L), mockA); assertNotNull(((Map<Long, ScheduledFuture<?>>) ReflectionTestUtils.getField(service, "jobsMap")).get(1L)); verify(scheduler, times(1)).schedule(any(Runnable.class), any(CronTrigger.class)); }
BrokerInterceptorWithClassLoader load(BrokerInterceptorMetadata metadata, String narExtractionDirectory) throws IOException { final File narFile = metadata.getArchivePath().toAbsolutePath().toFile(); NarClassLoader ncl = NarClassLoaderBuilder.builder() .narFile(narFile) .parentClassLoader(BrokerInterceptorUtils.class.getClassLoader()) .extractionDirectory(narExtractionDirectory) .build(); BrokerInterceptorDefinition def = getBrokerInterceptorDefinition(ncl); if (StringUtils.isBlank(def.getInterceptorClass())) { throw new IOException("Broker interceptors `" + def.getName() + "` does NOT provide a broker" + " interceptors implementation"); } try { Class interceptorClass = ncl.loadClass(def.getInterceptorClass()); Object interceptor = interceptorClass.getDeclaredConstructor().newInstance(); if (!(interceptor instanceof BrokerInterceptor)) { throw new IOException("Class " + def.getInterceptorClass() + " does not implement broker interceptor interface"); } BrokerInterceptor pi = (BrokerInterceptor) interceptor; return new BrokerInterceptorWithClassLoader(pi, ncl); } catch (Throwable t) { rethrowIOException(t); return null; } }
@Test(expectedExceptions = IOException.class) public void testLoadBrokerEventListenerWithBlankListenerClass() throws Exception { BrokerInterceptorDefinition def = new BrokerInterceptorDefinition(); def.setDescription("test-broker-listener"); String archivePath = "/path/to/broker/listener/nar"; BrokerInterceptorMetadata metadata = new BrokerInterceptorMetadata(); metadata.setDefinition(def); metadata.setArchivePath(Paths.get(archivePath)); NarClassLoader mockLoader = mock(NarClassLoader.class); when(mockLoader.getServiceDefinition(eq(BrokerInterceptorUtils.BROKER_INTERCEPTOR_DEFINITION_FILE))) .thenReturn(ObjectMapperFactory.getYamlMapper().writer().writeValueAsString(def)); Class listenerClass = MockBrokerInterceptor.class; when(mockLoader.loadClass(eq(MockBrokerInterceptor.class.getName()))) .thenReturn(listenerClass); final NarClassLoaderBuilder mockedBuilder = mock(NarClassLoaderBuilder.class, RETURNS_SELF); when(mockedBuilder.build()).thenReturn(mockLoader); try (MockedStatic<NarClassLoaderBuilder> builder = Mockito.mockStatic(NarClassLoaderBuilder.class)) { builder.when(() -> NarClassLoaderBuilder.builder()).thenReturn(mockedBuilder); BrokerInterceptorUtils.load(metadata, ""); } }
@Override public void doFilter(HttpRequest request, HttpResponse response, FilterChain chain) { IdentityProvider provider = resolveProviderOrHandleResponse(request, response, INIT_CONTEXT); if (provider != null) { handleProvider(request, response, provider); } }
@Test public void do_filter_on_auth2_identity_provider() { when(request.getRequestURI()).thenReturn("/sessions/init/" + OAUTH2_PROVIDER_KEY); identityProviderRepository.addIdentityProvider(oAuth2IdentityProvider); underTest.doFilter(request, response, chain); assertOAuth2InitCalled(); verifyNoInteractions(authenticationEvent); }
public String generatePrimitiveTypeColumnTask(long tableId, long dbId, String tableName, String dbName, List<ColumnStats> primitiveTypeStats, TabletSampleManager manager) { String prefix = "INSERT INTO " + STATISTICS_DB_NAME + "." + SAMPLE_STATISTICS_TABLE_NAME; StringBuilder builder = new StringBuilder(); builder.append(prefix).append(" "); builder.append("WITH base_cte_table as ("); String queryDataSql = generateQueryDataSql(tableName, dbName, primitiveTypeStats, manager); builder.append(queryDataSql).append(") "); int idx = 0; int size = primitiveTypeStats.size(); for (ColumnStats columnStats : primitiveTypeStats) { idx++; builder.append(generateQueryColumnSql(tableId, dbId, tableName, dbName, columnStats, "col_" + idx)); if (idx != size) { builder.append(" UNION ALL "); } } return builder.toString(); }
@Test public void generatePrimitiveTypeColumnTask() { SampleInfo sampleInfo = tabletSampleManager.generateSampleInfo("test", "t_struct"); List<String> columnNames = table.getColumns().stream().map(Column::getName).collect(Collectors.toList()); List<Type> columnTypes = table.getColumns().stream().map(Column::getType).collect(Collectors.toList()); ColumnSampleManager columnSampleManager = ColumnSampleManager.init(columnNames, columnTypes, table, sampleInfo); List<List<ColumnStats>> columnStatsBatch = columnSampleManager.splitPrimitiveTypeStats(); String primitiveSql = sampleInfo.generatePrimitiveTypeColumnTask(table.getId(), db.getId(), table.getName(), db.getFullName(), columnStatsBatch.get(0), tabletSampleManager); List<StatementBase> stmt = SqlParser.parse(primitiveSql, connectContext.getSessionVariable()); Assert.assertTrue(stmt.get(0) instanceof InsertStmt); InsertStmt insertStmt = (InsertStmt) stmt.get(0); Assert.assertTrue(insertStmt.getQueryStatement().getQueryRelation() instanceof UnionRelation); UnionRelation unionRelation = (UnionRelation) insertStmt.getQueryStatement().getQueryRelation(); Assert.assertTrue(unionRelation.getRelations().size() == 4); Assert.assertTrue(unionRelation.getRelations().get(0) instanceof SelectRelation); SelectRelation selectRelation = (SelectRelation) unionRelation.getRelations().get(0); Assert.assertTrue(selectRelation.getSelectList().getItems().size() == 12); }
public static long calculateIntervalEnd(long startTs, IntervalType intervalType, ZoneId tzId) { var startTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(startTs), tzId); switch (intervalType) { case WEEK: return startTime.truncatedTo(ChronoUnit.DAYS).with(WeekFields.SUNDAY_START.dayOfWeek(), 1).plusDays(7).toInstant().toEpochMilli(); case WEEK_ISO: return startTime.truncatedTo(ChronoUnit.DAYS).with(WeekFields.ISO.dayOfWeek(), 1).plusDays(7).toInstant().toEpochMilli(); case MONTH: return startTime.truncatedTo(ChronoUnit.DAYS).withDayOfMonth(1).plusMonths(1).toInstant().toEpochMilli(); case QUARTER: return startTime.truncatedTo(ChronoUnit.DAYS).with(IsoFields.DAY_OF_QUARTER, 1).plusMonths(3).toInstant().toEpochMilli(); default: throw new RuntimeException("Not supported!"); } }
@Test void testQuarterEnd() { long ts = 1704899727000L; // Wednesday, January 10 15:15:27 GMT assertThat(TimeUtils.calculateIntervalEnd(ts, IntervalType.QUARTER, ZoneId.of("Europe/Kyiv"))).isEqualTo(1711918800000L); // Monday, April 1, 2024 0:00:00 GMT+03:00 DST assertThat(TimeUtils.calculateIntervalEnd(ts, IntervalType.QUARTER, ZoneId.of("Europe/Amsterdam"))).isEqualTo(1711922400000L); // Monday, April 1, 2024 1:00:00 GMT+03:00 DST ts = 1711929600000L; // Monday, April 1, 2024 3:00:00 GMT+03:00 assertThat(TimeUtils.calculateIntervalEnd(ts, IntervalType.QUARTER, ZoneId.of("Europe/Kyiv"))).isEqualTo(1719781200000L); // Monday, July 1, 2024 0:00:00 GMT+03:00 DST assertThat(TimeUtils.calculateIntervalEnd(ts, IntervalType.QUARTER, ZoneId.of("America/New_York"))).isEqualTo(1711944000000L); // Monday, April 1, 2024 7:00:00 GMT+03:00 DST }
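The QUARTER branch truncates to the day, snaps to the first day of the quarter via IsoFields.DAY_OF_QUARTER, and adds three months, so DST shifts fall out of the zone arithmetic naturally. A self-contained java.time sketch reproducing the first expectation of the test:

import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;
import java.time.temporal.IsoFields;

public class QuarterEndDemo {
    public static void main(String[] args) {
        ZonedDateTime t = ZonedDateTime.ofInstant(
                Instant.ofEpochMilli(1704899727000L), ZoneId.of("Europe/Kyiv")); // Jan 10, 2024
        ZonedDateTime quarterEnd = t.truncatedTo(ChronoUnit.DAYS)
                .with(IsoFields.DAY_OF_QUARTER, 1)
                .plusMonths(3);
        System.out.println(quarterEnd);                            // 2024-04-01T00:00+03:00[Europe/Kyiv]
        System.out.println(quarterEnd.toInstant().toEpochMilli()); // 1711918800000
    }
}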
static VersionInfo parseVersionInfo(String str) throws ParseException { Map<String, String> map = Util.parseMap(str); VersionInfo.Builder vib = new VersionInfo.Builder(); for (Map.Entry<String, String> entry: map.entrySet()) { switch (entry.getKey()) { case "major" -> vib.withMajor(map.get(entry.getKey())); case "minor" -> vib.withMinor(map.get(entry.getKey())); case "gitVersion" -> vib.withGitVersion(map.get(entry.getKey())); case "gitCommit" -> vib.withGitCommit(map.get(entry.getKey())); case "gitTreeState" -> vib.withGitTreeState(map.get(entry.getKey())); case "buildDate" -> vib.withBuildDate(map.get(entry.getKey())); case "goVersion" -> vib.withGoVersion(map.get(entry.getKey())); case "compiler" -> vib.withCompiler(map.get(entry.getKey())); case "platform" -> vib.withPlatform(map.get(entry.getKey())); default -> LOGGER.warn("Unknown key {} found", entry.getKey()); } } return vib.build(); }
@Test public void versionInfoFromMapUnknownFieldIsIgnored(VertxTestContext context) throws ParseException { String version = """ major=1 minor=16 gitVersion=v1.16.2 gitCommit=c97fe5036ef3df2967d086711e6c0c405941e14b gitTreeState=clean buildDate=2019-10-15T19:09:08Z goVersion=go1.12.10 compiler=gc platform=linux/amd64 unknownKey=someValue"""; VersionInfo vi = PlatformFeaturesAvailability.parseVersionInfo(version); context.verify(() -> { assertThat(vi.getMajor(), is("1")); assertThat(vi.getMinor(), is("16")); }); context.completeNow(); }
CodeEmitter<T> emit(final Parameter parameter) { emitter.emit("param"); emit("name", parameter.getName()); final String parameterType = parameter.getIn(); if (ObjectHelper.isNotEmpty(parameterType)) { emit("type", RestParamType.valueOf(parameterType)); } if (!"body".equals(parameterType)) { final Schema schema = parameter.getSchema(); if (schema != null) { final String dataType = schema.getType(); if (ObjectHelper.isNotEmpty(dataType)) { emit("dataType", dataType); } emit("allowableValues", asStringList(schema.getEnum())); final StyleEnum style = parameter.getStyle(); if (ObjectHelper.isNotEmpty(style)) { if (style.equals(StyleEnum.FORM)) { // Guard against null explode value // See: https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.3.md#fixed-fields-10 if (Boolean.FALSE.equals(parameter.getExplode())) { emit("collectionFormat", CollectionFormat.csv); } else { emit("collectionFormat", CollectionFormat.multi); } } } if (ObjectHelper.isNotEmpty(schema.getDefault())) { final String value = StringHelper.removeLeadingAndEndingQuotes(schema.getDefault().toString()); emit("defaultValue", value); } if ("array".equals(dataType) && schema.getItems() != null) { emit("arrayType", schema.getItems().getType()); } } } if (parameter.getRequired() != null) { emit("required", parameter.getRequired()); } else { emit("required", Boolean.FALSE); } emit("description", parameter.getDescription()); emitter.emit("endParam"); return emitter; }
@Test public void shouldEmitCodeForOas3PathParameter() { final Builder method = MethodSpec.methodBuilder("configure"); final MethodBodySourceCodeEmitter emitter = new MethodBodySourceCodeEmitter(method); final OperationVisitor<?> visitor = new OperationVisitor<>(emitter, null, null, null, null); final Parameter parameter = new Parameter(); parameter.setName("param"); parameter.setIn("path"); visitor.emit(parameter); assertThat(method.build().toString()).isEqualTo("void configure() {\n" + " param()\n" + " .name(\"param\")\n" + " .type(org.apache.camel.model.rest.RestParamType.path)\n" + " .required(true)\n" + " .endParam()\n" + " }\n"); }
RequestQueue<WriteRequest> getWriteRequestQueue(FileIOChannel.ID channelID) { return this.writers[channelID.getThreadNum()].requestQueue; }
@Test void testExceptionInCallbackWrite() throws Exception { final AtomicBoolean handlerCalled = new AtomicBoolean(); WriteRequest regularRequest = new WriteRequest() { @Override public void requestDone(IOException ioex) { synchronized (handlerCalled) { handlerCalled.set(true); handlerCalled.notifyAll(); } } @Override public void write() {} }; WriteRequest exceptionThrower = new WriteRequest() { @Override public void requestDone(IOException ioex) { throw new RuntimeException(); } @Override public void write() {} }; RequestQueue<WriteRequest> rq = ioManager.getWriteRequestQueue(ioManager.createChannel()); // queue first an exception thrower, then a regular request. // we check that the regular request gets successfully handled rq.add(exceptionThrower); rq.add(regularRequest); synchronized (handlerCalled) { while (!handlerCalled.get()) { handlerCalled.wait(); } } }
@Override public long extract( final ConsumerRecord<Object, Object> record, final long previousTimestamp ) { return timestampExtractor.extract(record, previousTimestamp); }
@Test public void shouldCallInternalTimestampExtractorOnExtract() { // Given final MetadataTimestampExtractor metadataTimestampExtractor = new MetadataTimestampExtractor(timestampExtractor); // When metadataTimestampExtractor.extract(record, 1); // Then Mockito.verify(timestampExtractor, Mockito.times(1)) .extract(record, 1); }
public static DataSourceProvider tryGetDataSourceProviderOrNull(Configuration hdpConfig) { final String configuredPoolingType = MetastoreConf.getVar(hdpConfig, MetastoreConf.ConfVars.CONNECTION_POOLING_TYPE); return Iterables.tryFind(FACTORIES, factory -> { String poolingType = factory.getPoolingType(); return poolingType != null && poolingType.equalsIgnoreCase(configuredPoolingType); }).orNull(); }
@Test public void testCreateHikariCpDataSource() throws SQLException { MetastoreConf.setVar(conf, ConfVars.CONNECTION_POOLING_TYPE, HikariCPDataSourceProvider.HIKARI); // This is needed to prevent the HikariDataSource from trying to connect to the DB conf.set(HikariCPDataSourceProvider.HIKARI + ".initializationFailTimeout", "-1"); DataSourceProvider dsp = DataSourceProviderFactory.tryGetDataSourceProviderOrNull(conf); Assert.assertNotNull(dsp); DataSource ds = dsp.create(conf); Assert.assertTrue(ds instanceof HikariDataSource); }
public static Timestamp next(Timestamp timestamp) { if (timestamp.equals(Timestamp.MAX_VALUE)) { return timestamp; } final int nanos = timestamp.getNanos(); final long seconds = timestamp.getSeconds(); if (nanos + 1 >= NANOS_PER_SECOND) { return Timestamp.ofTimeSecondsAndNanos(seconds + 1, 0); } else { return Timestamp.ofTimeSecondsAndNanos(seconds, nanos + 1); } }
@Test public void testNextReturnsMaxWhenTimestampIsAlreadyMax() { assertEquals(Timestamp.MAX_VALUE, TimestampUtils.next(Timestamp.MAX_VALUE)); }
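next clamps at Timestamp.MAX_VALUE and otherwise carries a nanosecond overflow into the seconds field. The carry logic in isolation, with plain primitives instead of the Timestamp class:

public class NanoCarryDemo {
    static final long NANOS_PER_SECOND = 1_000_000_000L;

    // Advance a (seconds, nanos) pair by one nanosecond, carrying at the boundary.
    static long[] next(long seconds, int nanos) {
        if (nanos + 1 >= NANOS_PER_SECOND) {
            return new long[] {seconds + 1, 0};
        }
        return new long[] {seconds, nanos + 1};
    }

    public static void main(String[] args) {
        long[] carried = next(10, 999_999_999);
        System.out.println(carried[0] + "s " + carried[1] + "ns"); // 11s 0ns
    }
}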
public String parseString(String name) { String property = getProperties().getProperty(name); if (property == null) { throw new NullPointerException("No property found for name: " + name); } return property; }
@Test public void testParseString() { System.out.println("parseString"); String expResult; String result; Properties props = new Properties(); props.put("value1", "sTr1"); props.put("value2", "str_2"); props.put("empty", ""); props.put("str", "abc"); props.put("boolean", "true"); props.put("float", "24.98"); props.put("int", "12"); props.put("char", "a"); PropertyParser instance = new PropertyParser(props); expResult = "sTr1"; result = instance.parseString("value1"); assertEquals(expResult, result); expResult = "str_2"; result = instance.parseString("value2"); assertEquals(expResult, result); expResult = ""; result = instance.parseString("empty"); assertEquals(expResult, result); expResult = "abc"; result = instance.parseString("str"); assertEquals(expResult, result); expResult = "true"; result = instance.parseString("boolean"); assertEquals(expResult, result); expResult = "24.98"; result = instance.parseString("float"); assertEquals(expResult, result); expResult = "12"; result = instance.parseString("int"); assertEquals(expResult, result); expResult = "a"; result = instance.parseString("char"); assertEquals(expResult, result); try { instance.parseString("nonexistent"); fail("no exception"); } catch (NullPointerException e) { } }
public synchronized ValuesAndExtrapolations aggregate(SortedSet<Long> windowIndices, MetricDef metricDef) { return aggregate(windowIndices, metricDef, true); }
@Test public void testExtrapolationAdjacentAvgAtLeftEdge() { RawMetricValues rawValues = new RawMetricValues(NUM_WINDOWS_TO_KEEP, MIN_SAMPLES_PER_WINDOW, NUM_RAW_METRICS); prepareWindowMissingAtIndex(rawValues, 0); ValuesAndExtrapolations valuesAndExtrapolations = aggregate(rawValues, allWindowIndices(0)); assertEquals(0, valuesAndExtrapolations.metricValues().valuesFor((short) 0).get(0), EPSILON); assertEquals(0, valuesAndExtrapolations.metricValues().valuesFor((short) 1).get(0), EPSILON); assertEquals(0, valuesAndExtrapolations.metricValues().valuesFor((short) 2).get(0), EPSILON); assertEquals(1, valuesAndExtrapolations.extrapolations().size()); Assert.assertEquals(Extrapolation.NO_VALID_EXTRAPOLATION, valuesAndExtrapolations.extrapolations().get(0)); }
public int releaseMessageNotificationBatch() { int batch = getIntProperty("apollo.release-message.notification.batch", DEFAULT_RELEASE_MESSAGE_NOTIFICATION_BATCH); return checkInt(batch, 1, Integer.MAX_VALUE, DEFAULT_RELEASE_MESSAGE_NOTIFICATION_BATCH); }
@Test public void testReleaseMessageNotificationBatch() throws Exception { int someBatch = 20; when(environment.getProperty("apollo.release-message.notification.batch")).thenReturn(String.valueOf(someBatch)); assertEquals(someBatch, bizConfig.releaseMessageNotificationBatch()); }
@Override public double read() { return gaugeSource.read(); }
@Test public void whenCacheDynamicMetricSourceGcdReadsDefault() { NullableDynamicMetricsProvider metricsProvider = new NullableDynamicMetricsProvider(); WeakReference<SomeObject> someObjectWeakRef = new WeakReference<>(metricsProvider.someObject); metricsProvider.someObject.doubleField = 42.42D; metricsRegistry.registerDynamicMetricsProvider(metricsProvider); DoubleGauge doubleGauge = metricsRegistry.newDoubleGauge("foo.doubleField"); // needed to collect dynamic metrics and update the gauge created from them metricsRegistry.collect(mock(MetricsCollector.class)); assertEquals(42.42D, doubleGauge.read(), 10E-6); metricsProvider.someObject = null; System.gc(); // wait for someObject to get GCd - should have already happened assertTrueEventually(() -> assertNull(someObjectWeakRef.get())); assertEquals(DoubleGaugeImpl.DEFAULT_VALUE, doubleGauge.read(), 10E-6); }
public static <T> Iterable<T> nullToEmpty(Iterable<T> iterable) { return iterable == null ? Collections.emptyList() : iterable; }
@Test public void testIterableIsEmpty_whenNullUsed() { assertEquals(emptyList(), IterableUtil.nullToEmpty(null)); assertEquals(numbers, IterableUtil.nullToEmpty(numbers)); }
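nullToEmpty exists so callers can iterate without a null guard; a for-each over the empty list simply never enters its body. A tiny standalone usage sketch:

import java.util.Collections;
import java.util.List;

public class NullToEmptyDemo {
    static <T> Iterable<T> nullToEmpty(Iterable<T> iterable) {
        return iterable == null ? Collections.emptyList() : iterable;
    }

    public static void main(String[] args) {
        List<Integer> values = null;
        for (int v : nullToEmpty(values)) { // no NPE; the loop body never runs
            System.out.println(v);
        }
        System.out.println("done");
    }
}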
public static Map<String, String> configureFromPulsar1AuthParamString(String authParamsString) { Map<String, String> authParams = new HashMap<>(); if (isNotBlank(authParamsString)) { String[] params = authParamsString.split(","); for (String p : params) { // The value could be a file path, which could contain a colon like "C:\\path\\to\\file" on Windows. int index = p.indexOf(':'); if (index < 0) { continue; } String key = p.substring(0, index); if (!key.isEmpty()) { authParams.put(key, p.substring(index + 1)); } } } return authParams; }
@Test public void testConfigureAuthParamString() { Map<String, String> params = AuthenticationUtil.configureFromPulsar1AuthParamString( "key:value,path:C:\\path\\to\\file,null-key:,:null-value,:,key:value-2"); assertEquals(params.size(), 3); assertEquals(params.get("key"), "value-2"); assertEquals(params.get("path"), "C:\\path\\to\\file"); assertEquals(params.get("null-key"), ""); }
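Splitting on the first ':' only (indexOf rather than split) is what lets Windows paths with drive-letter colons survive as values. A stripped-down sketch of that parse; unlike the real method it does not also filter out empty keys:

import java.util.HashMap;
import java.util.Map;

public class AuthParamDemo {
    public static void main(String[] args) {
        Map<String, String> params = new HashMap<>();
        for (String p : "key:value,path:C:\\path\\to\\file".split(",")) {
            int index = p.indexOf(':'); // first colon only
            if (index < 0) {
                continue;
            }
            params.put(p.substring(0, index), p.substring(index + 1));
        }
        System.out.println(params.get("path")); // C:\path\to\file
    }
}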
public static String[] split(final String str, final String separatorChars) { if (str == null) { return null; } if (isBlank(str)) { return EMPTY_ARRAY; } if (isBlank(separatorChars)) { return str.split(" "); } return str.split(separatorChars); }
@Test public void split() { String str1 = null; String separator1 = "*"; String[] res1 = StringUtil.split(str1, separator1); assert res1 == null; String str2 = ""; String separator2 = "*"; String[] res2 = StringUtil.split(str2, separator2); Assert.assertArrayEquals(res2, new String[0]); String str3 = "abc def"; String separator3 = null; String[] res3 = StringUtil.split(str3, separator3); Assert.assertArrayEquals(res3, new String[]{"abc", "def"}); String str4 = "abc def"; String separator4 = " "; String[] res4 = StringUtil.split(str4, separator4); Assert.assertArrayEquals(res4, new String[]{"abc", "def"}); String str5 = "ab:cd:ef"; String separator5 = ":"; String[] res5 = StringUtil.split(str5, separator5); Assert.assertArrayEquals(res5, new String[]{"ab", "cd", "ef"}); }
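One caveat worth keeping in mind: String.split interprets its argument as a regular expression, and the utility above passes separatorChars through verbatim, so metacharacters such as '.' or '*' behave surprisingly. A tiny demonstration:

public class SplitRegexDemo {
    public static void main(String[] args) {
        System.out.println("ab:cd:ef".split(":").length); // 3 -- ':' is a plain character
        System.out.println("a.b.c".split("\\.").length);  // 3 -- escaped dot
        System.out.println("a.b.c".split(".").length);    // 0 -- '.' matches every char, all tokens empty
        // "a.b.c".split("*") would throw PatternSyntaxException: dangling metacharacter
    }
}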
static Set<Set<Integer>> computeStronglyConnectedComponents( final int numVertex, final List<List<Integer>> outEdges) { final Set<Set<Integer>> stronglyConnectedComponents = new HashSet<>(); // a vertex will be added into this stack when it is visited for the first time final Deque<Integer> visitingStack = new ArrayDeque<>(numVertex); final boolean[] onVisitingStack = new boolean[numVertex]; // stores the order that a vertex is visited for the first time, -1 indicates it is not // visited yet final int[] vertexIndices = new int[numVertex]; Arrays.fill(vertexIndices, -1); final AtomicInteger indexCounter = new AtomicInteger(0); final int[] vertexLowLinks = new int[numVertex]; for (int vertex = 0; vertex < numVertex; vertex++) { if (!isVisited(vertex, vertexIndices)) { dfs( vertex, outEdges, vertexIndices, vertexLowLinks, visitingStack, onVisitingStack, indexCounter, stronglyConnectedComponents); } } return stronglyConnectedComponents; }
@Test void testWithCycles() { final List<List<Integer>> edges = Arrays.asList( Arrays.asList(2, 3), Arrays.asList(0), Arrays.asList(1), Arrays.asList(4), Collections.emptyList()); final Set<Set<Integer>> result = computeStronglyConnectedComponents(5, edges); final Set<Set<Integer>> expected = new HashSet<>(); expected.add(new HashSet<>(Arrays.asList(0, 1, 2))); expected.add(Collections.singleton(3)); expected.add(Collections.singleton(4)); assertThat(result).isEqualTo(expected); }
@Override public void execute() { UpdateItemResponse result = ddbClient.updateItem(UpdateItemRequest.builder().tableName(determineTableName()) .key(determineKey()).attributeUpdates(determineUpdateValues()) .expected(determineUpdateCondition()).returnValues(determineReturnValues()).build()); addAttributesToResult(result.attributes()); }
@Test public void execute() { Map<String, AttributeValue> key = new HashMap<>(); key.put("1", AttributeValue.builder().s("Key_1").build()); exchange.getIn().setHeader(Ddb2Constants.KEY, key); Map<String, AttributeValueUpdate> attributeMap = new HashMap<>(); AttributeValueUpdate attributeValue = AttributeValueUpdate.builder() .value(AttributeValue.builder().s("new value").build()).action(AttributeAction.ADD).build(); attributeMap.put("name", attributeValue); exchange.getIn().setHeader(Ddb2Constants.UPDATE_VALUES, attributeMap); Map<String, ExpectedAttributeValue> expectedAttributeValueMap = new HashMap<>(); expectedAttributeValueMap.put("name", ExpectedAttributeValue.builder() .attributeValueList(AttributeValue.builder().s("expected value").build()).build()); exchange.getIn().setHeader(Ddb2Constants.UPDATE_CONDITION, expectedAttributeValueMap); exchange.getIn().setHeader(Ddb2Constants.RETURN_VALUES, "ALL_OLD"); command.execute(); assertEquals("DOMAIN1", ddbClient.updateItemRequest.tableName()); assertEquals(attributeMap, ddbClient.updateItemRequest.attributeUpdates()); assertEquals(key, ddbClient.updateItemRequest.key()); assertEquals(expectedAttributeValueMap, ddbClient.updateItemRequest.expected()); assertEquals(ReturnValue.ALL_OLD, ddbClient.updateItemRequest.returnValues()); assertEquals(AttributeValue.builder().s("attrValue").build(), exchange.getIn().getHeader(Ddb2Constants.ATTRIBUTES, Map.class).get("attrName")); }
@Override public CRTask deserialize(JsonElement json, Type type, JsonDeserializationContext context) throws JsonParseException { return determineJsonElementForDistinguishingImplementers(json, context, TYPE, ARTIFACT_ORIGIN); }
@Test public void shouldInstantiateATaskForTypeRake() { JsonObject jsonObject = new JsonObject(); jsonObject.addProperty("type", "rake"); taskTypeAdapter.deserialize(jsonObject, type, jsonDeserializationContext); verify(jsonDeserializationContext).deserialize(jsonObject, CRBuildTask.class); }
public void handleAssignment(final Map<TaskId, Set<TopicPartition>> activeTasks, final Map<TaskId, Set<TopicPartition>> standbyTasks) { log.info("Handle new assignment with:\n" + "\tNew active tasks: {}\n" + "\tNew standby tasks: {}\n" + "\tExisting active tasks: {}\n" + "\tExisting standby tasks: {}", activeTasks.keySet(), standbyTasks.keySet(), activeTaskIds(), standbyTaskIds()); topologyMetadata.addSubscribedTopicsFromAssignment( activeTasks.values().stream().flatMap(Collection::stream).collect(Collectors.toSet()), logPrefix ); final Map<TaskId, Set<TopicPartition>> activeTasksToCreate = new HashMap<>(activeTasks); final Map<TaskId, Set<TopicPartition>> standbyTasksToCreate = new HashMap<>(standbyTasks); final Map<Task, Set<TopicPartition>> tasksToRecycle = new HashMap<>(); final Set<Task> tasksToCloseClean = new TreeSet<>(Comparator.comparing(Task::id)); final Set<TaskId> tasksToLock = tasks.allTaskIds().stream() .filter(x -> activeTasksToCreate.containsKey(x) || standbyTasksToCreate.containsKey(x)) .collect(Collectors.toSet()); maybeLockTasks(tasksToLock); // first put aside those unrecognized tasks because of unknown named-topologies tasks.clearPendingTasksToCreate(); tasks.addPendingActiveTasksToCreate(pendingTasksToCreate(activeTasksToCreate)); tasks.addPendingStandbyTasksToCreate(pendingTasksToCreate(standbyTasksToCreate)); // first rectify all existing tasks: // 1. for tasks that are already owned, just update input partitions / resume and skip re-creating them // 2. for tasks that have changed active/standby status, just recycle and skip re-creating them // 3. otherwise, close them since they are no longer owned final Map<TaskId, RuntimeException> failedTasks = new LinkedHashMap<>(); if (stateUpdater == null) { handleTasksWithoutStateUpdater(activeTasksToCreate, standbyTasksToCreate, tasksToRecycle, tasksToCloseClean); } else { handleTasksWithStateUpdater( activeTasksToCreate, standbyTasksToCreate, tasksToRecycle, tasksToCloseClean, failedTasks ); failedTasks.putAll(collectExceptionsAndFailedTasksFromStateUpdater()); } final Map<TaskId, RuntimeException> taskCloseExceptions = closeAndRecycleTasks(tasksToRecycle, tasksToCloseClean); maybeUnlockTasks(tasksToLock); failedTasks.putAll(taskCloseExceptions); maybeThrowTaskExceptions(failedTasks); createNewTasks(activeTasksToCreate, standbyTasksToCreate); }
@Test public void shouldAssignMultipleTasksInStateUpdater() { final StreamTask activeTaskToClose = statefulTask(taskId03, taskId03ChangelogPartitions) .inState(State.RESTORING) .withInputPartitions(taskId03Partitions).build(); final StandbyTask standbyTaskToRecycle = standbyTask(taskId02, taskId02ChangelogPartitions) .inState(State.RUNNING) .withInputPartitions(taskId02Partitions).build(); final StreamTask recycledActiveTask = statefulTask(taskId02, taskId02ChangelogPartitions) .inState(State.CREATED) .withInputPartitions(taskId02Partitions).build(); final TasksRegistry tasks = mock(TasksRegistry.class); final TaskManager taskManager = setUpTaskManager(ProcessingMode.AT_LEAST_ONCE, tasks, true); when(stateUpdater.getTasks()).thenReturn(mkSet(activeTaskToClose, standbyTaskToRecycle)); final CompletableFuture<StateUpdater.RemovedTaskResult> futureForActiveTaskToClose = new CompletableFuture<>(); when(stateUpdater.remove(activeTaskToClose.id())).thenReturn(futureForActiveTaskToClose); futureForActiveTaskToClose.complete(new StateUpdater.RemovedTaskResult(activeTaskToClose)); when(activeTaskCreator.createActiveTaskFromStandby(standbyTaskToRecycle, taskId02Partitions, consumer)) .thenReturn(recycledActiveTask); final CompletableFuture<StateUpdater.RemovedTaskResult> futureForStandbyTaskToRecycle = new CompletableFuture<>(); when(stateUpdater.remove(standbyTaskToRecycle.id())).thenReturn(futureForStandbyTaskToRecycle); futureForStandbyTaskToRecycle.complete(new StateUpdater.RemovedTaskResult(standbyTaskToRecycle)); taskManager.handleAssignment( mkMap(mkEntry(standbyTaskToRecycle.id(), standbyTaskToRecycle.inputPartitions())), Collections.emptyMap() ); verify(tasks).addPendingTasksToInit(Collections.singleton(recycledActiveTask)); verify(activeTaskToClose).suspend(); verify(activeTaskToClose).closeClean(); verify(activeTaskCreator).closeAndRemoveTaskProducerIfNeeded(activeTaskToClose.id()); verify(standbyTaskCreator).createTasks(Collections.emptyMap()); verify(activeTaskCreator).createTasks(consumer, Collections.emptyMap()); }
static Method getGetter(final Class<?> clazz, final String propertyName) { final String getterName = "get" + Character.toUpperCase(propertyName.charAt(0)) + propertyName.substring(1); final String iserName = "is" + Character.toUpperCase(propertyName.charAt(0)) + propertyName.substring(1); try { return clazz.getMethod(getterName, NO_ARGS); } catch (NoSuchMethodException e) { // ignore for now - might be a boolean property } try { return clazz.getMethod(iserName, NO_ARGS); } catch (NoSuchMethodException e) { final String className = clazz.getName(); throw SarLogger.ROOT_LOGGER.propertyMethodNotFound("Get", propertyName, className); } }
@Test public void findBooleanGetter() throws Exception { final Method getter = ReflectionUtils.getGetter(Foo.class, "b"); assertNotNull(getter); assertEquals("isB", getter.getName()); }
public void sparsePrint(PrintStream stream) { if (mSeries.isEmpty()) { return; } long start = mSeries.firstKey(); stream.printf("Time series starts at %d with width %d.%n", start, mWidthNano); for (Map.Entry<Long, Integer> entry : mSeries.entrySet()) { stream.printf("%d %d%n", (entry.getKey() - start) / mWidthNano, entry.getValue()); } }
@Test public void sparsePrintTest() { TimeSeries timeSeries = new TimeSeries(); timeSeries.record(mBase); timeSeries.record(mBase + 8L * Constants.SECOND_NANO); ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); PrintStream printStream = new PrintStream(outputStream); timeSeries.sparsePrint(printStream); StringBuilder sb = new StringBuilder(); sb.append( String.format("Time series starts at %d with width %d.%n", mBase, Constants.SECOND_NANO)); sb.append(String.format("%d %d%n", 0, 1)); sb.append(String.format("%d %d%n", 8, 1)); Assert.assertEquals(sb.toString(), outputStream.toString()); }
@Override
public float readFloat() throws EOFException {
    return Float.intBitsToFloat(readInt());
}
@Test
public void testReadFloat() throws Exception {
    float actual = in.readFloat();
    int intBits = Bits.readInt(INIT_DATA, 0, byteOrder == BIG_ENDIAN);
    float expected = Float.intBitsToFloat(intBits);
    assertEquals(expected, actual, 0);
}
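readFloat is just an endianness-aware int read followed by a bit reinterpretation. A self-contained sketch of that round trip, assuming big-endian byte order and using ByteBuffer in place of the stream's own readInt:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class FloatBitsDemo {
    public static void main(String[] args) {
        float original = 3.14f;

        // Encode: float -> raw IEEE 754 bits -> four bytes (big-endian here).
        byte[] bytes = ByteBuffer.allocate(4)
            .order(ByteOrder.BIG_ENDIAN)
            .putInt(Float.floatToIntBits(original))
            .array();

        // Decode, mirroring readFloat: read the int back, then reinterpret the bits.
        int bits = ByteBuffer.wrap(bytes).order(ByteOrder.BIG_ENDIAN).getInt();
        float decoded = Float.intBitsToFloat(bits);

        System.out.println(original == decoded); // true: lossless round trip
    }
}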
static void setConfig(String configName) throws Exception {
    // Load fails fast on an invalid configuration, before any handler state is initialized.
    config = HandlerConfig.load(configName);
    initHandlers();
    initChains();
    initPaths();
}
@Test(expected = Exception.class)
public void invalidMethod_init_throws() throws Exception {
    Handler.setConfig("invalid-method");
}
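What the test exercises is the fail-fast ordering: a bad config name must propagate as an exception before any of the init steps run. A minimal sketch of that pattern; the load logic below is a hypothetical stand-in, not the real HandlerConfig loader.

public class FailFastConfigDemo {
    static String config;

    // Hypothetical loader: rejects an invalid configuration name up front.
    static String load(String name) throws Exception {
        if (name.startsWith("invalid")) {
            throw new Exception("cannot load config: " + name);
        }
        return name;
    }

    static void setConfig(String name) throws Exception {
        config = load(name);  // throws here, so no initialization step ever runs
        // initHandlers(); initChains(); initPaths(); would follow on success
    }

    public static void main(String[] args) {
        try {
            setConfig("invalid-method");
        } catch (Exception e) {
            System.out.println("rejected as expected: " + e.getMessage());
        }
    }
}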
@EventListener
void startup(ServerStartupEvent event) {
    storageProviderMetricsBinder.ifPresent(x -> LOGGER.debug("JobRunr StorageProvider MicroMeter Metrics enabled"));
    backgroundJobServerMetrics.ifPresent(x -> LOGGER.debug("JobRunr BackgroundJobServer MicroMeter Metrics enabled"));
}
@Test
void onStartOptionalsAreCalledToBootstrapBinders() {
    final JobRunrMetricsStarter jobRunrMetricsStarter = new JobRunrMetricsStarter(Optional.of(storageProviderMetricsBinder), Optional.of(backgroundJobServerMetricsBinder));
    ListAppender<ILoggingEvent> logger = LoggerAssert.initFor(jobRunrMetricsStarter);

    jobRunrMetricsStarter.startup(null);

    assertThat(logger)
        .hasDebugMessageContaining("JobRunr StorageProvider MicroMeter Metrics enabled")
        .hasDebugMessageContaining("JobRunr BackgroundJobServer MicroMeter Metrics enabled");
}
public static void logSQL(final QueryContext queryContext, final boolean showSimple, final ExecutionContext executionContext) {
    log("Logic SQL: {}", queryContext.getSql());
    if (showSimple) {
        logSimpleMode(executionContext.getExecutionUnits());
    } else {
        logNormalMode(executionContext.getExecutionUnits());
    }
}
@Test
void assertLogSimpleSQL() {
    SQLLogger.logSQL(queryContext, true, new ExecutionContext(queryContext, executionUnits, mock(RouteContext.class)));
    assertThat(appenderList.size(), is(2));
    assertTrue(appenderList.stream().allMatch(loggingEvent -> Level.INFO == loggingEvent.getLevel()));
    assertThat(appenderList.get(0).getFormattedMessage(), is("Logic SQL: SELECT * FROM t_user"));
    assertThat(appenderList.get(1).getFormattedMessage(), is("Actual SQL(simple): [db3, db2, db1] ::: 3"));
}
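The split between the two modes is a fan-out choice over the execution units: simple mode collapses them into one summary line (distinct data sources plus a unit count, as the test's "[db3, db2, db1] ::: 3" shows), while normal mode logs each unit. A minimal sketch of that shape, with a hypothetical Unit record standing in for the real execution units and the internals of logSimpleMode/logNormalMode:

import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class SqlLoggerDemo {
    // Hypothetical stand-in for an execution unit: a data source plus a routed SQL.
    record Unit(String dataSource, String sql) {}

    static void logSql(String logicSql, boolean simple, List<Unit> units) {
        System.out.println("Logic SQL: " + logicSql);
        if (simple) {
            // Collapse to distinct data sources and a unit count: one line total.
            Set<String> sources = new LinkedHashSet<>();
            units.forEach(u -> sources.add(u.dataSource()));
            System.out.println("Actual SQL(simple): " + sources + " ::: " + units.size());
        } else {
            // One line per routed unit.
            units.forEach(u -> System.out.println("Actual SQL: " + u.dataSource() + " ::: " + u.sql()));
        }
    }

    public static void main(String[] args) {
        logSql("SELECT * FROM t_user", true, List.of(
            new Unit("db3", "SELECT * FROM t_user_0"),
            new Unit("db2", "SELECT * FROM t_user_0"),
            new Unit("db1", "SELECT * FROM t_user_0")));
    }
}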
public boolean compatibleVersion(String acceptableVersionRange, String actualVersion) {
    V pluginVersion = parseVersion(actualVersion);
    // Treat a single version "1.4" as a left bound, equivalent to "[1.4,)"
    if (acceptableVersionRange.matches(VERSION_REGEX)) {
        return ge(pluginVersion, parseVersion(acceptableVersionRange));
    }
    // Otherwise ensure it is a version range with bounds
    Matcher matcher = INTERVAL_PATTERN.matcher(acceptableVersionRange);
    Preconditions.checkArgument(matcher.matches(), "invalid version range");
    String leftBound = matcher.group("left");
    String rightBound = matcher.group("right");
    Preconditions.checkArgument(
        leftBound != null || rightBound != null, "left and right bounds cannot both be empty");
    // '[' and ']' are inclusive bounds; '(' and ')' are exclusive.
    BiPredicate<V, V> leftComparator =
        acceptableVersionRange.startsWith("[") ? VersionChecker::ge : VersionChecker::gt;
    BiPredicate<V, V> rightComparator =
        acceptableVersionRange.endsWith("]") ? VersionChecker::le : VersionChecker::lt;
    if (leftBound != null && !leftComparator.test(pluginVersion, parseVersion(leftBound))) {
        return false;
    }
    if (rightBound != null && !rightComparator.test(pluginVersion, parseVersion(rightBound))) {
        return false;
    }
    return true;
}
@Test
public void testRange_invalid() {
    for (String rangeSpec : new String[] {"[]", "[,]", "(,]", "[,)", "(,)", "[1,2,3]", "[1]", "foo", "{,2.3)", ""}) {
        try {
            checker.compatibleVersion(rangeSpec, "1.3");
            Assert.fail("should have thrown an exception for " + rangeSpec);
        } catch (IllegalArgumentException ex) {
            // as expected
        }
    }
}
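The interval grammar maps bracket style to comparator strictness: '[' and ']' are inclusive, '(' and ')' exclusive. A simplified, self-contained rendering of the same semantics over plain dotted versions; the regex, the compare helper, and the class name are illustrative stand-ins for the original's INTERVAL_PATTERN, parseVersion, and generic V type.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class VersionRangeDemo {
    // Simplified stand-in for INTERVAL_PATTERN: bracketed range with optional bounds.
    private static final Pattern INTERVAL =
        Pattern.compile("([\\[(])(?<left>[0-9.]*),(?<right>[0-9.]*)([\\])])");

    // Numeric segment-by-segment comparison of dotted versions, e.g. "1.4" vs "1.10".
    static int compare(String a, String b) {
        String[] as = a.split("\\."), bs = b.split("\\.");
        for (int i = 0; i < Math.max(as.length, bs.length); i++) {
            int ai = i < as.length ? Integer.parseInt(as[i]) : 0;
            int bi = i < bs.length ? Integer.parseInt(bs[i]) : 0;
            if (ai != bi) return Integer.compare(ai, bi);
        }
        return 0;
    }

    static boolean inRange(String range, String version) {
        Matcher m = INTERVAL.matcher(range);
        if (!m.matches()) throw new IllegalArgumentException("invalid version range");
        String left = m.group("left"), right = m.group("right");
        if (left.isEmpty() && right.isEmpty()) throw new IllegalArgumentException("empty bounds");
        // Bracket style picks the comparator strictness, as in compatibleVersion.
        if (!left.isEmpty()) {
            int c = compare(version, left);
            if (range.startsWith("[") ? c < 0 : c <= 0) return false;
        }
        if (!right.isEmpty()) {
            int c = compare(version, right);
            if (range.endsWith("]") ? c > 0 : c >= 0) return false;
        }
        return true;
    }

    public static void main(String[] args) {
        System.out.println(inRange("[1.4,2.0)", "1.4")); // true  (inclusive left)
        System.out.println(inRange("[1.4,2.0)", "2.0")); // false (exclusive right)
        System.out.println(inRange("(1.4,)", "1.4"));    // false (exclusive left)
    }
}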