Columns: focal_method (string, length 13 to 60.9k), test_case (string, length 25 to 109k)
@BuildStep AdditionalBeanBuildItem produce(Capabilities capabilities, JobRunrBuildTimeConfiguration jobRunrBuildTimeConfiguration) { Set<Class<?>> additionalBeans = new HashSet<>(); additionalBeans.add(JobRunrProducer.class); additionalBeans.add(JobRunrStarter.class); additionalBeans.add(jsonMapper(capabilities)); additionalBeans.addAll(storageProvider(capabilities, jobRunrBuildTimeConfiguration)); return AdditionalBeanBuildItem.builder() .setUnremovable() .addBeanClasses(additionalBeans.toArray(new Class[0])) .build(); }
@Test void jobRunrProducerUsesMongoDBStorageProviderIfMongoDBClientCapabilityIsPresent() { lenient().when(capabilities.isPresent(Capability.MONGODB_CLIENT)).thenReturn(true); final AdditionalBeanBuildItem additionalBeanBuildItem = jobRunrExtensionProcessor.produce(capabilities, jobRunrBuildTimeConfiguration); assertThat(additionalBeanBuildItem.getBeanClasses()) .contains(JobRunrMongoDBStorageProviderProducer.class.getName()) .doesNotContain(JobRunrDocumentDBStorageProviderProducer.class.getName()); }
@Override public Path move(final Path file, final Path renamed, final TransferStatus status, final Delete.Callback callback, final ConnectionCallback connectionCallback) throws BackgroundException { try { final String target = new DefaultUrlProvider(session.getHost()).toUrl(renamed).find(DescriptiveUrl.Type.provider).getUrl(); if(session.getFeature(Lock.class) != null && status.getLockId() != null) { /* Indicate that the client has knowledge of that state token */ session.getClient().move(new DAVPathEncoder().encode(file), file.isDirectory() ? String.format("%s/", target) : target, status.isExists(), Collections.singletonMap(HttpHeaders.IF, String.format("(<%s>)", status.getLockId()))); } else { session.getClient().move(new DAVPathEncoder().encode(file), file.isDirectory() ? String.format("%s/", target) : target, status.isExists()); } /* Copy original file attributes */ return renamed.withAttributes(file.attributes()); } catch(SardineException e) { throw new DAVExceptionMappingService().map("Cannot rename {0}", e, file); } catch(IOException e) { throw new HttpExceptionMappingService().map(e, file); } }
@Test public void testMove() throws Exception { final Path test = new DAVTouchFeature(session).touch(new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus()); assertEquals(TransferStatus.UNKNOWN_LENGTH, test.attributes().getSize()); final TransferStatus status = new TransferStatus(); new DAVTimestampFeature(session).setTimestamp(test, status.withModified(5000L)); final PathAttributes attr = new DAVAttributesFinderFeature(session).find(test); final Path target = new DAVMoveFeature(session).move(test.withAttributes(status.getResponse()), new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus(), new Delete.DisabledCallback(), new DisabledConnectionCallback()); assertFalse(new DAVFindFeature(session).find(test)); assertTrue(new DAVFindFeature(session).find(target)); assertEquals(status.getResponse(), target.attributes()); assertEquals(attr, new DAVAttributesFinderFeature(session).find(target)); new DAVDeleteFeature(session).delete(Collections.<Path>singletonList(target), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
@Override public void setCapacity(int capacity) { get(setCapacityAsync(capacity)); }
@Test public void testSetCapacity() { RRingBuffer<Integer> buffer = redisson.getRingBuffer("test"); buffer.trySetCapacity(5); for (int i = 0; i < 10; i++) { buffer.add(i); } assertThat(buffer).containsExactly(5, 6, 7, 8, 9); buffer.setCapacity(3); assertThat(buffer).containsExactly(7, 8, 9); RRingBuffer<Integer> buffer2 = redisson.getRingBuffer("test2"); buffer2.setCapacity(3); for (int i = 0; i < 10; i++) { buffer2.add(i); } assertThat(buffer2).containsExactly(7, 8, 9); }
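A minimal sketch of the eviction behavior the test above exercises, using a plain java.util.ArrayDeque instead of Redisson (the class name and capacity handling here are illustrative, not part of the Redisson API): once the buffer is full, each add evicts the oldest element.

import java.util.ArrayDeque;

public class RingBufferEvictionSketch {
    public static void main(String[] args) {
        int capacity = 5;
        ArrayDeque<Integer> ring = new ArrayDeque<>();
        for (int i = 0; i < 10; i++) {
            if (ring.size() == capacity) {
                ring.pollFirst(); // evict the oldest element, as the ring buffer does
            }
            ring.addLast(i);
        }
        System.out.println(ring); // [5, 6, 7, 8, 9], matching the first assertion
    }
}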
@Override public Response request(Request request, long timeoutMills) throws NacosException { DefaultRequestFuture pushFuture = sendRequestInner(request, null); try { return pushFuture.get(timeoutMills); } catch (Exception e) { throw new NacosException(NacosException.SERVER_ERROR, e); } finally { RpcAckCallbackSynchronizer.clearFuture(getMetaInfo().getConnectionId(), pushFuture.getRequestId()); } }
@Test void testStatusRuntimeException() { Mockito.doReturn(new DefaultEventLoop()).when(channel).eventLoop(); Mockito.doThrow(new StatusRuntimeException(Status.CANCELLED)).when(streamObserver).onNext(Mockito.any()); Mockito.doReturn(true).when(streamObserver).isReady(); try { connection.request(new NotifySubscriberRequest(), 3000L); fail("expected ConnectionAlreadyClosedException"); } catch (Exception e) { assertTrue(e instanceof ConnectionAlreadyClosedException); assertTrue(e.getCause() instanceof StatusRuntimeException); } }
@VisibleForTesting public static JobGraph createJobGraph(StreamGraph streamGraph) { return new StreamingJobGraphGenerator( Thread.currentThread().getContextClassLoader(), streamGraph, null, Runnable::run) .createJobGraph(); }
@Test void testChainStartEndSetting() throws Exception { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); /* set parallelism to 2 to avoid chaining with source in case when available processors is 1 */ env.setParallelism(2); /* fromElements -> CHAIN(Map -> Print) */ env.fromData(1, 2, 3) .map( new MapFunction<Integer, Integer>() { @Override public Integer map(Integer value) throws Exception { return value; } }) .print(); JobGraph jobGraph = StreamingJobGraphGenerator.createJobGraph(env.getStreamGraph()); List<JobVertex> verticesSorted = jobGraph.getVerticesSortedTopologicallyFromSources(); JobVertex sourceVertex = verticesSorted.get(0); JobVertex mapPrintVertex = verticesSorted.get(1); assertThat(sourceVertex.getProducedDataSets().get(0).getResultType()) .isEqualTo(ResultPartitionType.PIPELINED_BOUNDED); assertThat(mapPrintVertex.getInputs().get(0).getSource().getResultType()) .isEqualTo(ResultPartitionType.PIPELINED_BOUNDED); StreamConfig sourceConfig = new StreamConfig(sourceVertex.getConfiguration()); StreamConfig mapConfig = new StreamConfig(mapPrintVertex.getConfiguration()); Map<Integer, StreamConfig> chainedConfigs = mapConfig.getTransitiveChainedTaskConfigs(getClass().getClassLoader()); StreamConfig printConfig = chainedConfigs.values().iterator().next(); assertThat(sourceConfig.isChainStart()).isTrue(); assertThat(sourceConfig.isChainEnd()).isTrue(); assertThat(mapConfig.isChainStart()).isTrue(); assertThat(mapConfig.isChainEnd()).isFalse(); assertThat(printConfig.isChainStart()).isFalse(); assertThat(printConfig.isChainEnd()).isTrue(); }
@Override public void onDataReceived(@NonNull final BluetoothDevice device, @NonNull final Data data) { super.onDataReceived(device, data); if (data.size() < 2) { onInvalidDataReceived(device, data); return; } /* Read the Op Code */ final int opCode = data.getIntValue(Data.FORMAT_UINT8, 0); /* Estimate the expected operand size based on the Op Code */ int expectedOperandSize; switch (opCode) { case OP_CODE_COMMUNICATION_INTERVAL_RESPONSE -> /* UINT8 */ expectedOperandSize = 1; case OP_CODE_CALIBRATION_VALUE_RESPONSE -> /* Calibration Value */ expectedOperandSize = 10; case OP_CODE_PATIENT_HIGH_ALERT_LEVEL_RESPONSE, OP_CODE_PATIENT_LOW_ALERT_LEVEL_RESPONSE, OP_CODE_HYPO_ALERT_LEVEL_RESPONSE, OP_CODE_HYPER_ALERT_LEVEL_RESPONSE, OP_CODE_RATE_OF_DECREASE_ALERT_LEVEL_RESPONSE, OP_CODE_RATE_OF_INCREASE_ALERT_LEVEL_RESPONSE -> /* SFLOAT */ expectedOperandSize = 2; case OP_CODE_RESPONSE_CODE -> /* Request Op Code (UINT8), Response Code Value (UINT8) */ expectedOperandSize = 2; default -> { onInvalidDataReceived(device, data); return; } } /* Verify packet length */ if (data.size() != 1 + expectedOperandSize && data.size() != 1 + expectedOperandSize + 2) { onInvalidDataReceived(device, data); return; } /* Verify CRC if present */ final boolean crcPresent = data.size() == 1 + expectedOperandSize + 2; /* opCode + expected operand + CRC */ if (crcPresent) { final int expectedCrc = data.getIntValue(Data.FORMAT_UINT16_LE, 1 + expectedOperandSize); final int actualCrc = CRC16.MCRF4XX(data.getValue(), 0, 1 + expectedOperandSize); if (expectedCrc != actualCrc) { onCGMSpecificOpsResponseReceivedWithCrcError(device, data); return; } } switch (opCode) { case OP_CODE_COMMUNICATION_INTERVAL_RESPONSE -> { final int interval = data.getIntValue(Data.FORMAT_UINT8, 1); onContinuousGlucoseCommunicationIntervalReceived(device, interval, crcPresent); return; } case OP_CODE_CALIBRATION_VALUE_RESPONSE -> { final float glucoseConcentrationOfCalibration = data.getFloatValue(Data.FORMAT_SFLOAT, 1); final int calibrationTime = data.getIntValue(Data.FORMAT_UINT16_LE, 3); final int calibrationTypeAndSampleLocation = data.getIntValue(Data.FORMAT_UINT8, 5); @SuppressLint("WrongConstant") final int calibrationType = calibrationTypeAndSampleLocation & 0x0F; final int calibrationSampleLocation = calibrationTypeAndSampleLocation >> 4; final int nextCalibrationTime = data.getIntValue(Data.FORMAT_UINT16_LE, 6); final int calibrationDataRecordNumber = data.getIntValue(Data.FORMAT_UINT16_LE, 8); final int calibrationStatus = data.getIntValue(Data.FORMAT_UINT8, 10); onContinuousGlucoseCalibrationValueReceived(device, glucoseConcentrationOfCalibration, calibrationTime, nextCalibrationTime, calibrationType, calibrationSampleLocation, calibrationDataRecordNumber, new CGMCalibrationStatus(calibrationStatus), crcPresent); return; } case OP_CODE_RESPONSE_CODE -> { final int requestCode = data.getIntValue(Data.FORMAT_UINT8, 1); /* ignore */ final int responseCode = data.getIntValue(Data.FORMAT_UINT8, 2); if (responseCode == CGM_RESPONSE_SUCCESS) { onCGMSpecificOpsOperationCompleted(device, requestCode, crcPresent); } else { onCGMSpecificOpsOperationError(device, requestCode, responseCode, crcPresent); } return; } } /* Read SFLOAT value */ final float value = data.getFloatValue(Data.FORMAT_SFLOAT, 1); switch (opCode) { case OP_CODE_PATIENT_HIGH_ALERT_LEVEL_RESPONSE -> onContinuousGlucosePatientHighAlertReceived(device, value, crcPresent); case OP_CODE_PATIENT_LOW_ALERT_LEVEL_RESPONSE -> onContinuousGlucosePatientLowAlertReceived(device, value, crcPresent); case OP_CODE_HYPO_ALERT_LEVEL_RESPONSE -> onContinuousGlucoseHypoAlertReceived(device, value, crcPresent); case OP_CODE_HYPER_ALERT_LEVEL_RESPONSE -> onContinuousGlucoseHyperAlertReceived(device, value, crcPresent); case OP_CODE_RATE_OF_DECREASE_ALERT_LEVEL_RESPONSE -> onContinuousGlucoseRateOfDecreaseAlertReceived(device, value, crcPresent); case OP_CODE_RATE_OF_INCREASE_ALERT_LEVEL_RESPONSE -> onContinuousGlucoseRateOfIncreaseAlertReceived(device, value, crcPresent); } }
@Test public void onCGMSpecificOpsOperationCompleted() { final Data data = new Data(new byte[] { 28, 2, 1}); callback.onDataReceived(null, data); assertTrue(success); assertFalse(secured); assertEquals(2, requestCode); }
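The 3-byte packet in the test above can be decoded by hand. Assuming, as the assertions suggest, that op code 28 is the Response Code op code and response value 1 means success, a standalone sketch:

public class ResponsePacketSketch {
    public static void main(String[] args) {
        byte[] packet = {28, 2, 1}; // no trailing CRC bytes, hence "secured" is false
        int opCode = packet[0] & 0xFF;       // 28: assumed OP_CODE_RESPONSE_CODE
        int requestCode = packet[1] & 0xFF;  // 2: the op code of the original request
        int responseCode = packet[2] & 0xFF; // 1: assumed CGM_RESPONSE_SUCCESS
        System.out.println(opCode + " " + requestCode + " " + responseCode); // 28 2 1
    }
}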
public static String[] splitOnCharacter(String value, String needle, int count) { String[] rc = new String[count]; rc[0] = value; for (int i = 1; i < count; i++) { String v = rc[i - 1]; int p = v.indexOf(needle); if (p < 0) { return rc; } rc[i - 1] = v.substring(0, p); rc[i] = v.substring(p + 1); } return rc; }
@Test public void testSplitOnCharacter() { String[] list = splitOnCharacter("foo", "'", 1); assertEquals(1, list.length); assertEquals("foo", list[0]); list = splitOnCharacter("foo,bar", ",", 2); assertEquals(2, list.length); assertEquals("foo", list[0]); assertEquals("bar", list[1]); list = splitOnCharacter("foo,bar,", ",", 3); assertEquals(3, list.length); assertEquals("foo", list[0]); assertEquals("bar", list[1]); list = splitOnCharacter(",foo,bar", ",", 3); assertEquals(3, list.length); assertEquals("foo", list[1]); assertEquals("bar", list[2]); list = splitOnCharacter(",foo,bar,", ",", 4); assertEquals(4, list.length); assertEquals("foo", list[1]); assertEquals("bar", list[2]); StringBuilder sb = new StringBuilder(); for (int i = 0; i < 100; i++) { sb.append(i); sb.append(","); } String value = sb.toString(); int count = StringHelper.countChar(value, ',') + 1; list = splitOnCharacter(value, ",", count); assertEquals(101, list.length); assertEquals("0", list[0]); assertEquals("50", list[50]); assertEquals("99", list[99]); }
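One edge case the test does not assert directly: a trailing separator leaves the final slot as an empty string rather than dropping it. A self-contained sketch (the method body is copied verbatim from above):

import java.util.Arrays;

public class SplitSketch {
    public static String[] splitOnCharacter(String value, String needle, int count) {
        String[] rc = new String[count];
        rc[0] = value;
        for (int i = 1; i < count; i++) {
            String v = rc[i - 1];
            int p = v.indexOf(needle);
            if (p < 0) {
                return rc;
            }
            rc[i - 1] = v.substring(0, p);
            rc[i] = v.substring(p + 1);
        }
        return rc;
    }

    public static void main(String[] args) {
        // The trailing separator yields an empty final element.
        System.out.println(Arrays.toString(splitOnCharacter("foo,bar,", ",", 3))); // [foo, bar, ]
    }
}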
public void replay( ConsumerGroupMemberMetadataKey key, ConsumerGroupMemberMetadataValue value ) { String groupId = key.groupId(); String memberId = key.memberId(); ConsumerGroup consumerGroup = getOrMaybeCreatePersistedConsumerGroup(groupId, value != null); Set<String> oldSubscribedTopicNames = new HashSet<>(consumerGroup.subscribedTopicNames().keySet()); if (value != null) { ConsumerGroupMember oldMember = consumerGroup.getOrMaybeCreateMember(memberId, true); consumerGroup.updateMember(new ConsumerGroupMember.Builder(oldMember) .updateWith(value) .build()); } else { ConsumerGroupMember oldMember = consumerGroup.getOrMaybeCreateMember(memberId, false); if (oldMember.memberEpoch() != LEAVE_GROUP_MEMBER_EPOCH) { throw new IllegalStateException("Received a tombstone record to delete member " + memberId + " but did not receive ConsumerGroupCurrentMemberAssignmentValue tombstone."); } if (consumerGroup.targetAssignment().containsKey(memberId)) { throw new IllegalStateException("Received a tombstone record to delete member " + memberId + " but did not receive ConsumerGroupTargetAssignmentMetadataValue tombstone."); } consumerGroup.removeMember(memberId); } updateGroupsByTopics(groupId, oldSubscribedTopicNames, consumerGroup.subscribedTopicNames().keySet()); }
@Test public void testConsumerGroupStates() { String groupId = "fooup"; String memberId1 = Uuid.randomUuid().toString(); Uuid fooTopicId = Uuid.randomUuid(); String fooTopicName = "foo"; MockPartitionAssignor assignor = new MockPartitionAssignor("range"); GroupMetadataManagerTestContext context = new GroupMetadataManagerTestContext.Builder() .withConsumerGroupAssignors(Collections.singletonList(assignor)) .withConsumerGroup(new ConsumerGroupBuilder(groupId, 10)) .build(); assertEquals(ConsumerGroup.ConsumerGroupState.EMPTY, context.consumerGroupState(groupId)); context.replay(GroupCoordinatorRecordHelpers.newConsumerGroupMemberSubscriptionRecord(groupId, new ConsumerGroupMember.Builder(memberId1) .setState(MemberState.STABLE) .setSubscribedTopicNames(Collections.singletonList(fooTopicName)) .build())); context.replay(GroupCoordinatorRecordHelpers.newConsumerGroupEpochRecord(groupId, 11)); assertEquals(ConsumerGroup.ConsumerGroupState.ASSIGNING, context.consumerGroupState(groupId)); context.replay(GroupCoordinatorRecordHelpers.newConsumerGroupTargetAssignmentRecord(groupId, memberId1, mkAssignment( mkTopicAssignment(fooTopicId, 1, 2, 3)))); context.replay(GroupCoordinatorRecordHelpers.newConsumerGroupTargetAssignmentEpochRecord(groupId, 11)); assertEquals(ConsumerGroup.ConsumerGroupState.RECONCILING, context.consumerGroupState(groupId)); context.replay(GroupCoordinatorRecordHelpers.newConsumerGroupCurrentAssignmentRecord(groupId, new ConsumerGroupMember.Builder(memberId1) .setState(MemberState.UNREVOKED_PARTITIONS) .setMemberEpoch(11) .setPreviousMemberEpoch(10) .setAssignedPartitions(mkAssignment(mkTopicAssignment(fooTopicId, 1, 2, 3))) .build())); assertEquals(ConsumerGroup.ConsumerGroupState.RECONCILING, context.consumerGroupState(groupId)); context.replay(GroupCoordinatorRecordHelpers.newConsumerGroupCurrentAssignmentRecord(groupId, new ConsumerGroupMember.Builder(memberId1) .setState(MemberState.STABLE) .setMemberEpoch(11) .setPreviousMemberEpoch(10) .setAssignedPartitions(mkAssignment(mkTopicAssignment(fooTopicId, 1, 2, 3))) .build())); assertEquals(ConsumerGroup.ConsumerGroupState.STABLE, context.consumerGroupState(groupId)); }
public CreateStreamCommand createStreamCommand(final KsqlStructuredDataOutputNode outputNode) { return new CreateStreamCommand( outputNode.getSinkName().get(), outputNode.getSchema(), outputNode.getTimestampColumn(), outputNode.getKsqlTopic().getKafkaTopicName(), Formats.from(outputNode.getKsqlTopic()), outputNode.getKsqlTopic().getKeyFormat().getWindowInfo(), Optional.of(outputNode.getOrReplace()), Optional.of(false) ); }
@Test public void shouldCreateStreamCommandWithSingleValueWrappingFromConfig() { /* Given: */ ksqlConfig = new KsqlConfig(ImmutableMap.of( KsqlConfig.KSQL_WRAP_SINGLE_VALUES, false )); final CreateStream statement = new CreateStream(SOME_NAME, ONE_KEY_ONE_VALUE, false, true, withProperties, false); /* When: */ final CreateStreamCommand cmd = createSourceFactory .createStreamCommand(statement, ksqlConfig); /* Then: */ assertThat(cmd.getFormats().getValueFeatures(), is(SerdeFeatures.of(SerdeFeature.UNWRAP_SINGLES))); }
public static LocalUri parse(String path) { if (path.startsWith(SCHEME)) { URI parsed = URI.create(path); path = parsed.getPath(); } if (!path.startsWith(SLASH)) { throw new IllegalArgumentException("Path must start at root /"); } StringTokenizer tok = new StringTokenizer(path, SLASH); LocalUri hpath = Root; while (tok.hasMoreTokens()) { hpath = hpath.append(tok.nextToken()); } return hpath; }
@Test public void testParse() { String path = "/example/some-id/instances/some-instance-id"; LocalUri hpath = LocalUri.Root.append("example").append("some-id").append("instances").append("some-instance-id"); LocalUri parsedHPath = LocalUri.parse(path); assertEquals(hpath, parsedHPath); }
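The parse method relies on StringTokenizer skipping empty tokens, which is why the leading slash (and any doubled slashes) never produce empty path segments. A minimal illustration:

import java.util.StringTokenizer;

public class TokenizerSketch {
    public static void main(String[] args) {
        StringTokenizer tok = new StringTokenizer("/example/some-id/instances/some-instance-id", "/");
        while (tok.hasMoreTokens()) {
            // prints: example, some-id, instances, some-instance-id
            System.out.println(tok.nextToken());
        }
    }
}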
public Stream<FactMapping> getFactMappingsByFactName(String factName) { return internalFilter(e -> e.getFactIdentifier().getName().equalsIgnoreCase(factName)); }
@Test public void getFactMappingsByFactName() { modelDescriptor.addFactMapping(FactIdentifier.create("test", String.class.getCanonicalName()), ExpressionIdentifier.create("test expression 0", EXPECT)); modelDescriptor.addFactMapping(FactIdentifier.create("test", String.class.getCanonicalName()), ExpressionIdentifier.create("test expression 1", EXPECT)); modelDescriptor.addFactMapping(FactIdentifier.create("TEST", String.class.getCanonicalName()), ExpressionIdentifier.create("test expression 2", EXPECT)); modelDescriptor.addFactMapping(FactIdentifier.create("Test", String.class.getCanonicalName()), ExpressionIdentifier.create("test expression 3", EXPECT)); modelDescriptor.addFactMapping(FactIdentifier.create("tEsT", String.class.getCanonicalName()), ExpressionIdentifier.create("test expression 4", EXPECT)); final Stream<FactMapping> retrieved = modelDescriptor.getFactMappingsByFactName("test"); assertThat(retrieved).isNotNull().hasSize(5); }
public ChmCommons.EntryType getEntryType() { return entryType; }
@Test public void testGetEntryType() { assertEquals(TestParameters.entryType, dle.getEntryType()); }
@Override public void close() throws InterruptedException { beginShutdown("KafkaEventQueue#close"); eventHandlerThread.join(); log.info("closed event queue."); }
@Test public void testCreateAndClose() throws Exception { KafkaEventQueue queue = new KafkaEventQueue(Time.SYSTEM, logContext, "testCreateAndClose"); queue.close(); }
public void schedule(ExecutableMethod<?, ?> method) { if (hasParametersOutsideOfJobContext(method.getTargetMethod())) { throw new IllegalStateException("Methods annotated with " + Recurring.class.getName() + " can only have zero parameters or a single parameter of type JobContext."); } String id = getId(method); String cron = getCron(method); String interval = getInterval(method); if (StringUtils.isNullOrEmpty(cron) && StringUtils.isNullOrEmpty(interval)) throw new IllegalArgumentException("Either cron or interval attribute is required."); if (isNotNullOrEmpty(cron) && isNotNullOrEmpty(interval)) throw new IllegalArgumentException("Both cron and interval attribute provided. Only one is allowed."); if (Recurring.RECURRING_JOB_DISABLED.equals(cron) || Recurring.RECURRING_JOB_DISABLED.equals(interval)) { if (id == null) { LOGGER.warn("You are trying to disable a recurring job using placeholders but did not define an id."); } else { jobScheduler.deleteRecurringJob(id); } } else { JobDetails jobDetails = getJobDetails(method); ZoneId zoneId = getZoneId(method); if (isNotNullOrEmpty(cron)) { jobScheduler.scheduleRecurrently(id, jobDetails, CronExpression.create(cron), zoneId); } else { jobScheduler.scheduleRecurrently(id, jobDetails, new Interval(interval), zoneId); } } }
@Test void beansWithMethodsAnnotatedWithRecurringCronAnnotationWillAutomaticallyBeRegistered() { final ExecutableMethod executableMethod = mock(ExecutableMethod.class); final Method method = getRequiredMethod(MyServiceWithRecurringJob.class, "myRecurringMethod"); when(executableMethod.getTargetMethod()).thenReturn(method); when(executableMethod.stringValue(Recurring.class, "id")).thenReturn(Optional.of("my-recurring-job")); when(executableMethod.stringValue(Recurring.class, "cron")).thenReturn(Optional.of("*/15 * * * *")); when(executableMethod.stringValue(Recurring.class, "interval")).thenReturn(Optional.empty()); when(executableMethod.stringValue(Recurring.class, "zoneId")).thenReturn(Optional.empty()); jobRunrRecurringJobScheduler.schedule(executableMethod); verify(jobScheduler).scheduleRecurrently(eq("my-recurring-job"), jobDetailsArgumentCaptor.capture(), eq(CronExpression.create("*/15 * * * *")), eq(ZoneId.systemDefault())); final JobDetails actualJobDetails = jobDetailsArgumentCaptor.getValue(); assertThat(actualJobDetails) .isCacheable() .hasClassName(MyServiceWithRecurringJob.class.getName()) .hasMethodName("myRecurringMethod") .hasNoArgs(); }
public static String nullToEmpty(String str) { return str == null ? "" : str; }
@Test public void nullToEmpty() { String string = "null"; Assert.assertEquals("null", StringUtil.nullToEmpty(string)); }
@Override public PageData<WidgetsBundle> findTenantWidgetsBundlesByTenantId(UUID tenantId, PageLink pageLink) { return DaoUtil.toPageData( widgetsBundleRepository .findTenantWidgetsBundlesByTenantId( tenantId, pageLink.getTextSearch(), DaoUtil.toPageable(pageLink))); }
@Test public void testFindWidgetsBundlesByTenantId() { UUID tenantId1 = Uuids.timeBased(); UUID tenantId2 = Uuids.timeBased(); /* Create a bunch of widget bundles */ for (int i = 0; i < 10; i++) { createWidgetBundles(3, tenantId1, "WB1_" + i + "_"); createWidgetBundles(5, tenantId2, "WB2_" + i + "_"); createSystemWidgetBundles(10, "WB_SYS_" + i + "_"); } widgetsBundles = widgetsBundleDao.find(TenantId.SYS_TENANT_ID); assertEquals(180, widgetsBundleDao.find(TenantId.SYS_TENANT_ID).size()); PageLink pageLink1 = new PageLink(40, 0, "WB"); PageData<WidgetsBundle> widgetsBundles1 = widgetsBundleDao.findTenantWidgetsBundlesByTenantId(tenantId1, pageLink1); assertEquals(30, widgetsBundles1.getData().size()); PageLink pageLink2 = new PageLink(40, 0, "WB"); PageData<WidgetsBundle> widgetsBundles2 = widgetsBundleDao.findTenantWidgetsBundlesByTenantId(tenantId2, pageLink2); assertEquals(40, widgetsBundles2.getData().size()); pageLink2 = pageLink2.nextPageLink(); PageData<WidgetsBundle> widgetsBundles3 = widgetsBundleDao.findTenantWidgetsBundlesByTenantId(tenantId2, pageLink2); assertEquals(10, widgetsBundles3.getData().size()); }
public <T> Map<String, Object> schemas(Class<? extends T> cls) { return this.schemas(cls, false); }
@SuppressWarnings("unchecked") @Test void dag() throws URISyntaxException { Helpers.runApplicationContext((applicationContext) -> { JsonSchemaGenerator jsonSchemaGenerator = applicationContext.getBean(JsonSchemaGenerator.class); Map<String, Object> generate = jsonSchemaGenerator.schemas(Dag.class); var definitions = (Map<String, Map<String, Object>>) generate.get("definitions"); var dag = definitions.get(Dag.class.getName()); assertThat((List<String>) dag.get("required"), not(contains("errors"))); }); }
public Set<String> getCaseInsensitivePKs() { Set<String> pks = new TreeSet<>(String.CASE_INSENSITIVE_ORDER); allIndexes.forEach((key, index) -> { if (index.getIndextype().value() == IndexType.PRIMARY.value()) { for (ColumnMeta col : index.getValues()) { pks.add(col.getColumnName()); } } }); if (pks.size() < 1) { throw new NotSupportYetException(String.format("%s needs to contain the primary key.", tableName)); } return Collections.unmodifiableSet(pks); }
@Test public void testGetCaseInsensitivePKs() { tableMeta.getColumnMeta("col2".trim()).setColumnName("CoL2"); Set<String> pks = tableMeta.getCaseInsensitivePKs(); assertEquals(2, pks.size()); assertTrue(pks.contains("col1")); assertTrue(pks.contains("CoL2")); }
public static double similar(String strA, String strB) { String newStrA, newStrB; if (strA.length() < strB.length()) { newStrA = removeSign(strB); newStrB = removeSign(strA); } else { newStrA = removeSign(strA); newStrB = removeSign(strB); } /* Use the longer string length as the denominator and the longest common substring length as the numerator to compute the similarity */ int temp = Math.max(newStrA.length(), newStrB.length()); if(0 == temp) { /* both strings are empty: similarity is 1, i.e. they are considered identical */ return 1; } final int commonLength = longestCommonSubstringLength(newStrA, newStrB); return NumberUtil.div(commonLength, temp); }
@Test public void similarTest(){ final double abd = TextSimilarity.similar("abd", "1111"); assertEquals(0, abd, 0); }
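Worked through for the test above: "abd" and "1111" share no common substring, so the numerator is 0, the denominator is max(3, 4) = 4, and the similarity is 0/4 = 0. A simplified LCS-length helper to reproduce the numerator (an assumption about what longestCommonSubstringLength computes, not Hutool's actual implementation):

public class SimilaritySketch {
    // Dynamic-programming longest-common-substring length.
    static int longestCommonSubstringLength(String a, String b) {
        int best = 0;
        int[][] dp = new int[a.length() + 1][b.length() + 1];
        for (int i = 1; i <= a.length(); i++) {
            for (int j = 1; j <= b.length(); j++) {
                if (a.charAt(i - 1) == b.charAt(j - 1)) {
                    dp[i][j] = dp[i - 1][j - 1] + 1;
                    best = Math.max(best, dp[i][j]);
                }
            }
        }
        return best;
    }

    public static void main(String[] args) {
        int common = longestCommonSubstringLength("abd", "1111");
        System.out.println(common);          // 0
        System.out.println((double) common / 4); // 0.0, matching the assertion
    }
}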
Integer getMaxSize( Collection<BeanInjectionInfo.Property> properties, Object obj ) { int max = Integer.MIN_VALUE; for ( BeanInjectionInfo.Property property: properties ) { max = Math.max( max, ( isCollection( property ) ? getCollectionSize( property, obj ) /* if not a collection, then a field of length one */ : 1 ) ); } return ( max != Integer.MIN_VALUE ) ? max : null; }
@Test public void getMaxSize_Collections() { BeanInjector bi = new BeanInjector( null ); BeanInjectionInfo bii = new BeanInjectionInfo( MetaBeanLevel1.class ); MetaBeanLevel1 mbl1 = new MetaBeanLevel1(); mbl1.setSub( new MetaBeanLevel2() ); mbl1.getSub().setFilenames( new String[] { "file1", "file2", "file3" } ); mbl1.getSub().setAscending( Arrays.asList( true, false, false, true ) ); mbl1.getSub().setSeparator( "/" ); assertEquals( Integer.valueOf( 4 ), bi.getMaxSize( bii.getProperties().values(), mbl1.getSub() ) ); }
public String getObjectPath() { return objectPath; }
@Test public void testObjectPath() { assertEquals("/QSYS.LIB/LIBRARY.LIB/QUEUE.DTAQ", jt400Configuration.getObjectPath()); }
@Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; /* This handles a tombstone message */ if (value == null) { return SchemaAndValue.NULL; } try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (config.schemasEnabled() && (!jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME) || !jsonValue.has(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); /* The deserialized data should either be an envelope object containing the schema and the payload, or the schema was stripped during serialization and we need to fill in an all-encompassing schema. */ if (!config.schemasEnabled()) { ObjectNode envelope = JSON_NODE_FACTORY.objectNode(); envelope.set(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME, null); envelope.set(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME, jsonValue); jsonValue = envelope; } Schema schema = asConnectSchema(jsonValue.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME)); return new SchemaAndValue( schema, convertToConnect(schema, jsonValue.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME), config) ); }
@Test public void decimalToConnectOptionalWithDefaultValue() { BigDecimal reference = new BigDecimal(new BigInteger("156"), 2); Schema schema = Decimal.builder(2).optional().defaultValue(reference).build(); String msg = "{ \"schema\": { \"type\": \"bytes\", \"name\": \"org.apache.kafka.connect.data.Decimal\", \"version\": 1, \"optional\": true, \"default\": \"AJw=\", \"parameters\": { \"scale\": \"2\" } }, \"payload\": null }"; SchemaAndValue schemaAndValue = converter.toConnectData(TOPIC, msg.getBytes()); assertEquals(schema, schemaAndValue.schema()); assertEquals(reference, schemaAndValue.value()); }
public static synchronized Date parseDate(String dateStr, PrimitiveType type) throws AnalysisException { Date date = null; Matcher matcher = DATETIME_FORMAT_REG.matcher(dateStr); if (!matcher.matches()) { throw new AnalysisException("Invalid date string: " + dateStr); } if (type == PrimitiveType.DATE) { ParsePosition pos = new ParsePosition(0); date = DATE_FORMAT.parse(dateStr, pos); if (pos.getIndex() != dateStr.length() || date == null) { throw new AnalysisException("Invalid date string: " + dateStr); } } else if (type == PrimitiveType.DATETIME) { try { date = DATETIME_FORMAT.parse(dateStr); } catch (ParseException e) { throw new AnalysisException("Invalid date string: " + dateStr); } } else { Preconditions.checkState(false, "error type: " + type); } return date; }
@Test public void testDateParse() { /* date */ List<String> validDateList = new LinkedList<>(); validDateList.add("2013-12-02"); validDateList.add("2013-12-02"); validDateList.add("2013-12-2"); validDateList.add("2013-12-2"); validDateList.add("9999-12-31"); validDateList.add("1900-01-01"); validDateList.add("2013-2-28"); validDateList.add("0000-01-01"); for (String validDate : validDateList) { try { TimeUtils.parseDate(validDate, PrimitiveType.DATE); } catch (AnalysisException e) { e.printStackTrace(); System.out.println(validDate); Assert.fail(); } } List<String> invalidDateList = new LinkedList<>(); invalidDateList.add("2013-12-02 "); invalidDateList.add(" 2013-12-02"); invalidDateList.add("20131-2-28"); invalidDateList.add("a2013-2-28"); invalidDateList.add("2013-22-28"); invalidDateList.add("2013-2-29"); invalidDateList.add("2013-2-28 2:3:4"); for (String invalidDate : invalidDateList) { try { TimeUtils.parseDate(invalidDate, PrimitiveType.DATE); Assert.fail(); } catch (AnalysisException e) { Assert.assertTrue(e.getMessage().contains("Invalid")); } } /* datetime */ List<String> validDateTimeList = new LinkedList<>(); validDateTimeList.add("2013-12-02 13:59:59"); validDateTimeList.add("2013-12-2 13:59:59"); validDateTimeList.add("2013-12-2 1:59:59"); validDateTimeList.add("2013-12-2 3:1:1"); validDateTimeList.add("9999-12-31 23:59:59"); validDateTimeList.add("1900-01-01 00:00:00"); validDateTimeList.add("2013-2-28 23:59:59"); validDateTimeList.add("2013-2-28 2:3:4"); validDateTimeList.add("2014-05-07 19:8:50"); validDateTimeList.add("0000-01-01 00:00:00"); for (String validDateTime : validDateTimeList) { try { TimeUtils.parseDate(validDateTime, PrimitiveType.DATETIME); } catch (AnalysisException e) { e.printStackTrace(); System.out.println(validDateTime); Assert.fail(); } } List<String> invalidDateTimeList = new LinkedList<>(); invalidDateTimeList.add("2013-12-02 12:12:10"); invalidDateTimeList.add(" 2013-12-02 12:12:10 "); invalidDateTimeList.add("20131-2-28 12:12:10"); invalidDateTimeList.add("a2013-2-28 12:12:10"); invalidDateTimeList.add("2013-22-28 12:12:10"); invalidDateTimeList.add("2013-2-29 12:12:10"); invalidDateTimeList.add("2013-2-28"); invalidDateTimeList.add("2013-13-01 12:12:12"); for (String invalidDateTime : invalidDateTimeList) { try { TimeUtils.parseDate(invalidDateTime, PrimitiveType.DATETIME); Assert.fail(); } catch (AnalysisException e) { Assert.assertTrue(e.getMessage().contains("Invalid")); } } }
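The synchronized modifier on parseDate suggests the shared SimpleDateFormat instances (DATE_FORMAT, DATETIME_FORMAT) are being protected, since SimpleDateFormat is not thread-safe. A sketch of the lock-free java.time alternative, which also accepts the single-digit months and days the test exercises:

import java.time.LocalDate;
import java.time.format.DateTimeFormatter;

public class DateParseSketch {
    // DateTimeFormatter is immutable and thread-safe, so no synchronization is needed.
    private static final DateTimeFormatter DATE = DateTimeFormatter.ofPattern("yyyy-M-d");

    public static void main(String[] args) {
        System.out.println(LocalDate.parse("2013-2-28", DATE)); // 2013-02-28
    }
}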
public void start() { worker.start(); }
@Test(timeout = 1000) public void catchNotification() throws IOException, InterruptedException { CountDownLatch catched = new CountDownLatch(1); AtomicReference<Selector> selectorRef = new AtomicReference<>(); Ctx ctx = new Ctx() { @Override public Selector createSelector() { selectorRef.set(super.createSelector()); return selectorRef.get(); } }; ctx.setNotificationExceptionHandler((t, e) -> { if (e instanceof ClosedSelectorException) { catched.countDown(); } }); Poller poller = new Poller(ctx, "test"); poller.start(); selectorRef.get().close(); catched.await(); }
@Override public KeyValueIterator<K, V> reverseRange(final K from, final K to) { final NextIteratorFunction<K, V, ReadOnlyKeyValueStore<K, V>> nextIteratorFunction = new NextIteratorFunction<K, V, ReadOnlyKeyValueStore<K, V>>() { @Override public KeyValueIterator<K, V> apply(final ReadOnlyKeyValueStore<K, V> store) { try { return store.reverseRange(from, to); } catch (final InvalidStateStoreException e) { throw new InvalidStateStoreException("State store is not available anymore and may have been migrated to another instance; please re-discover its location from the state metadata."); } } }; final List<ReadOnlyKeyValueStore<K, V>> stores = storeProvider.stores(storeName, storeType); return new DelegatingPeekingKeyValueIterator<>( storeName, new CompositeKeyValueIterator<>(stores.iterator(), nextIteratorFunction)); }
@Test public void shouldReturnValueOnReverseRangeNullFromKey() { stubOneUnderlying.put("0", "zero"); stubOneUnderlying.put("1", "one"); stubOneUnderlying.put("2", "two"); final LinkedList<KeyValue<String, String>> expectedContents = new LinkedList<>(); expectedContents.add(new KeyValue<>("1", "one")); expectedContents.add(new KeyValue<>("0", "zero")); try (final KeyValueIterator<String, String> iterator = theStore.reverseRange(null, "1")) { assertEquals(expectedContents, Utils.toList(iterator)); } }
public void getConfig(StorDistributionConfig.Group.Builder builder) { builder.index(index == null ? "invalid" : index); builder.name(name == null ? "invalid" : name); partitions.ifPresent(builder::partitions); for (StorageNode node : nodes) { StorDistributionConfig.Group.Nodes.Builder nb = new StorDistributionConfig.Group.Nodes.Builder(); nb.index(node.getDistributionKey()); nb.retired(node.isRetired()); builder.nodes.add(nb); } builder.capacity(getCapacity()); }
@Test void testSingleGroup() { ContentCluster cluster = parse( "<content id=\"storage\">\n" + " <redundancy>3</redundancy>" + " <documents/>" + " <group>\n" + " <node hostalias=\"mockhost\" distribution-key=\"0\"/>\n" + " <node hostalias=\"mockhost\" distribution-key=\"1\"/>\n" + " </group>\n" + "</content>" ); assertEquals("content", cluster.getStorageCluster().getChildren().get("0").getServicePropertyString("clustertype")); assertEquals("storage", cluster.getStorageCluster().getChildren().get("0").getServicePropertyString("clustername")); assertEquals("0", cluster.getStorageCluster().getChildren().get("0").getServicePropertyString("index")); assertEquals("content", cluster.getDistributorNodes().getChildren().get("0").getServicePropertyString("clustertype")); assertEquals("storage", cluster.getDistributorNodes().getChildren().get("0").getServicePropertyString("clustername")); assertEquals("0", cluster.getDistributorNodes().getChildren().get("0").getServicePropertyString("index")); StorDistributionConfig.Builder builder = new StorDistributionConfig.Builder(); cluster.getConfig(builder); StorDistributionConfig config = new StorDistributionConfig(builder); assertEquals(1, config.group().size()); assertEquals("invalid", config.group(0).index()); assertEquals("invalid", config.group(0).name()); assertEquals(2, config.group(0).nodes().size()); assertEquals(0, config.group(0).nodes(0).index()); assertEquals(1, config.group(0).nodes(1).index()); /* assertNotNull(cluster.getRootGroup().getNodes().get(0).getHost()); */ DistributionConfig.Builder distributionBuilder = new DistributionConfig.Builder(); cluster.getConfig(distributionBuilder); DistributionConfig.Cluster clusterConfig = distributionBuilder.build().cluster("storage"); assertEquals(1, clusterConfig.group().size()); assertEquals("invalid", clusterConfig.group(0).index()); assertEquals("invalid", clusterConfig.group(0).name()); assertEquals(2, clusterConfig.group(0).nodes().size()); assertEquals(0, clusterConfig.group(0).nodes(0).index()); assertEquals(1, clusterConfig.group(0).nodes(1).index()); }
public static Builder builder() { return new Builder(); }
@Test public void testBuilderDoesNotBuildInvalidRequests() { assertThatThrownBy(() -> GetNamespaceResponse.builder().withNamespace(null).build()) .isInstanceOf(NullPointerException.class) .hasMessage("Invalid namespace: null"); assertThatThrownBy(() -> GetNamespaceResponse.builder().setProperties(null).build()) .isInstanceOf(NullPointerException.class) .hasMessage("Invalid properties map: null"); Map<String, String> mapWithNullKey = Maps.newHashMap(); mapWithNullKey.put(null, "hello"); assertThatThrownBy(() -> GetNamespaceResponse.builder().setProperties(mapWithNullKey).build()) .isInstanceOf(IllegalArgumentException.class) .hasMessage("Invalid property: null"); Map<String, String> mapWithMultipleNullValues = Maps.newHashMap(); mapWithMultipleNullValues.put("a", null); mapWithMultipleNullValues.put("b", "b"); assertThatThrownBy( () -> GetNamespaceResponse.builder().setProperties(mapWithMultipleNullValues).build()) .isInstanceOf(IllegalArgumentException.class) .hasMessage("Invalid value for properties [a]: null"); }
public static RpcInvokeContext getContext() { RpcInvokeContext context = LOCAL.get(); if (context == null) { context = new RpcInvokeContext(); LOCAL.set(context); } return context; }
@Test public void getContext() throws Exception { RpcInvokeContext old = RpcInvokeContext.peekContext(); try { if (old != null) { RpcInvokeContext.removeContext(); } RpcInvokeContext context = RpcInvokeContext.peekContext(); Assert.assertTrue(context == null); context = RpcInvokeContext.getContext(); Assert.assertTrue(context != null); RpcInvokeContext.removeContext(); Assert.assertTrue(RpcInvokeContext.peekContext() == null); context = new RpcInvokeContext(); RpcInvokeContext.setContext(context); Assert.assertTrue(RpcInvokeContext.getContext() != null); Assert.assertNotEquals(RpcInvokeContext.getContext(), context); RpcInvokeContext.removeContext(); Assert.assertTrue(RpcInvokeContext.peekContext() == null); Assert.assertTrue(context != null); RpcInvokeContext.removeContext(); Assert.assertTrue(RpcInvokeContext.peekContext() == null); } finally { RpcInvokeContext.setContext(old); } }
public BackgroundException map(final IOException failure, final Path directory) { return super.map("Connection failed", failure, directory); }
@Test public void testPeerShutDownIncorrectly() { final DefaultIOExceptionMappingService s = new DefaultIOExceptionMappingService(); final SSLHandshakeException failure = new SSLHandshakeException("Remote host closed connection during handshake"); failure.initCause(new EOFException("SSL peer shut down incorrectly")); assertEquals(ConnectionRefusedException.class, s.map(failure).getClass()); }
@Override public Integer getLocalValue() { return this.max; }
@Test void testGet() { IntMaximum max = new IntMaximum(); assertThat(max.getLocalValue().intValue()).isEqualTo(Integer.MIN_VALUE); }
public void reset() { final long timestamp = System.currentTimeMillis(); if(log.isDebugEnabled()) { log.debug(String.format("Reset with timestamp %d", timestamp)); } this.reset(timestamp, transfer.getTransferred()); }
@Test public void testReset() throws Exception { final DownloadTransfer transfer = new DownloadTransfer(new Host(new TestProtocol()), new Path("/p", EnumSet.of(Path.Type.file)), new Local("/t")); final TransferSpeedometer s = new TransferSpeedometer(transfer); transfer.addSize(8L); assertEquals(8L, s.getStatus().getSize(), 0L); assertEquals(0L, s.getStatus().getTransferred(), 0L); assertEquals(0L, s.getStatus().getSpeed(), 0L); s.reset(); assertEquals(0L, s.getStatus().getSpeed(), 0L); transfer.addTransferred(4L); Thread.sleep(1000L); assertNotEquals(0L, s.getStatus().getSpeed(), 0L); s.reset(); assertEquals(0L, s.getStatus().getSpeed(), 0L); }
<T extends PipelineOptions> T as(Class<T> iface) { checkNotNull(iface); checkArgument(iface.isInterface(), "Not an interface: %s", iface); T existingOption = computedProperties.interfaceToProxyCache.getInstance(iface); if (existingOption == null) { synchronized (this) { /* double check */ existingOption = computedProperties.interfaceToProxyCache.getInstance(iface); if (existingOption == null) { Registration<T> registration = PipelineOptionsFactory.CACHE .get() .validateWellFormed(iface, computedProperties.knownInterfaces); List<PropertyDescriptor> propertyDescriptors = registration.getPropertyDescriptors(); Class<T> proxyClass = registration.getProxyClass(); existingOption = InstanceBuilder.ofType(proxyClass) .fromClass(proxyClass) .withArg(InvocationHandler.class, this) .build(); computedProperties = computedProperties.updated(iface, existingOption, propertyDescriptors); } } } return existingOption; }
@Test public void testGettingJLSDefaults() throws Exception { ProxyInvocationHandler handler = new ProxyInvocationHandler(Maps.newHashMap()); JLSDefaults proxy = handler.as(JLSDefaults.class); assertFalse(proxy.getBoolean()); assertEquals('\0', proxy.getChar()); assertEquals((byte) 0, proxy.getByte()); assertEquals((short) 0, proxy.getShort()); assertEquals(0, proxy.getInt()); assertEquals(0L, proxy.getLong()); assertEquals(0f, proxy.getFloat(), 0f); assertEquals(0d, proxy.getDouble(), 0d); assertNull(proxy.getObject()); }
@SuppressWarnings("Duplicates") public byte[] get(int index) { assert index < _numValues; int offsetBufferIndex = index >>> OFFSET_BUFFER_SHIFT_OFFSET; PinotDataBuffer offsetBuffer = _offsetBuffers.get(offsetBufferIndex); int offsetIndex = index & OFFSET_BUFFER_MASK; int previousValueEndOffset = offsetBuffer.getInt(offsetIndex << 2); int valueEndOffset = offsetBuffer.getInt((offsetIndex + 1) << 2); if (previousValueEndOffset == valueEndOffset) { return EMPTY_BYTES; } int valueBufferIndex = (valueEndOffset - 1) >>> VALUE_BUFFER_SHIFT_OFFSET; int startOffsetInValueBuffer; int valueLength; if ((previousValueEndOffset - 1) >>> VALUE_BUFFER_SHIFT_OFFSET != valueBufferIndex) { // The first value in the value buffer startOffsetInValueBuffer = 0; valueLength = valueEndOffset & VALUE_BUFFER_MASK; } else { // Not the first value in the value buffer startOffsetInValueBuffer = previousValueEndOffset & VALUE_BUFFER_MASK; valueLength = valueEndOffset - previousValueEndOffset; } byte[] value = new byte[valueLength]; _valueBuffers.get(valueBufferIndex).copyTo(startOffsetInValueBuffer, value); return value; }
@Test public void testGet() throws Exception { try (OffHeapMutableBytesStore offHeapMutableBytesStore = new OffHeapMutableBytesStore(_memoryManager, null)) { for (int i = 0; i < NUM_VALUES; i++) { offHeapMutableBytesStore.add(_values[i]); } for (int i = 0; i < NUM_VALUES; i++) { int index = RANDOM.nextInt(NUM_VALUES); assertTrue(Arrays.equals(offHeapMutableBytesStore.get(index), _values[index])); } } }
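The shift/mask arithmetic in get splits a global index into a buffer index (high bits) and a slot within that buffer (low bits). A standalone sketch with assumed constants (the real OFFSET_BUFFER_SHIFT_OFFSET and OFFSET_BUFFER_MASK values live in the store class):

public class IndexMathSketch {
    static final int SHIFT = 10;              // hypothetical: 1024 entries per buffer
    static final int MASK = (1 << SHIFT) - 1; // low 10 bits select the slot

    public static void main(String[] args) {
        int index = 2500;
        int bufferIndex = index >>> SHIFT; // 2: which offset buffer to read
        int slot = index & MASK;           // 452: position within that buffer
        System.out.println(bufferIndex + " / " + slot);
    }
}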
public static Read read() { return new AutoValue_RabbitMqIO_Read.Builder() .setQueueDeclare(false) .setExchangeDeclare(false) .setMaxReadTime(null) .setMaxNumRecords(Long.MAX_VALUE) .setUseCorrelationId(false) .build(); }
@Test public void testReadDeclaredTopicExchange() throws Exception { final int numRecords = 10; RabbitMqIO.Read read = RabbitMqIO.read().withExchange("DeclaredTopicExchange", "topic", "user.create.#"); final Supplier<String> publishRoutingKeyGen = new Supplier<String>() { private AtomicInteger counter = new AtomicInteger(0); @Override public String get() { int count = counter.getAndIncrement(); if (count % 2 == 0) { return "user.create." + count; } return "user.delete." + count; } }; ExchangeTestPlan plan = new ExchangeTestPlan(read, numRecords / 2, numRecords) { @Override public Supplier<String> publishRoutingKeyGen() { return publishRoutingKeyGen; } @Override public List<String> expectedResults() { return IntStream.range(0, numRecords) .filter(i -> i % 2 == 0) .mapToObj(RabbitMqTestUtils::generateRecord) .map(RabbitMqTestUtils::recordToString) .collect(Collectors.toList()); } }; doExchangeTest(plan); }
public static void validateRdwAid(byte[] aId) { for (final byte[] compare : RDW_AID) { if (Arrays.equals(compare, aId)) { return; } } logger.error("Driving licence has unknown aId: {}", Hex.toHexString(aId).toUpperCase()); throw new ClientException("Unknown aId"); }
@Test public void validateRdwAidUnsuccessful() { ClientException thrown = assertThrows(ClientException.class, () -> CardValidations.validateRdwAid(Hex.decode("SSSSSS"))); assertEquals("Unknown aId", thrown.getMessage()); }
public static <T> RestResult<T> success() { return RestResult.<T>builder().withCode(200).build(); }
@Test void testSuccessWithDefault() { RestResult<Object> restResult = RestResultUtils.success(); assertRestResult(restResult, 200, null, null, true); }
public ExitStatus(Options options) { this.options = options; }
@Test void wip_with_ambiguous_scenarios() { createWipRuntime(); bus.send(testCaseFinishedWithStatus(Status.AMBIGUOUS)); assertThat(exitStatus.exitStatus(), is(equalTo((byte) 0x0))); }
@Override public void sendSmsCode(SmsCodeSendReqDTO reqDTO) { SmsSceneEnum sceneEnum = SmsSceneEnum.getCodeByScene(reqDTO.getScene()); Assert.notNull(sceneEnum, "No configuration found for SMS code scene ({})", reqDTO.getScene()); /* Create the verification code */ String code = createSmsCode(reqDTO.getMobile(), reqDTO.getScene(), reqDTO.getCreateIp()); /* Send the verification code */ smsSendService.sendSingleSms(reqDTO.getMobile(), null, null, sceneEnum.getTemplateCode(), MapUtil.of("code", code)); }
@Test public void sendSmsCode_exceedDay() { /* mock data */ SmsCodeDO smsCodeDO = randomPojo(SmsCodeDO.class, o -> o.setMobile("15601691300").setTodayIndex(10).setCreateTime(LocalDateTime.now())); smsCodeMapper.insert(smsCodeDO); /* prepare request parameters */ SmsCodeSendReqDTO reqDTO = randomPojo(SmsCodeSendReqDTO.class, o -> { o.setMobile("15601691300"); o.setScene(SmsSceneEnum.MEMBER_LOGIN.getScene()); }); /* mock methods */ SqlConstants.init(DbType.MYSQL); when(smsCodeProperties.getSendFrequency()).thenReturn(Duration.ofMillis(0)); /* invoke and assert the expected exception */ assertServiceException(() -> smsCodeService.sendSmsCode(reqDTO), SMS_CODE_EXCEED_SEND_MAXIMUM_QUANTITY_PER_DAY); }
private static String getNameServiceId(Configuration conf, String addressKey) { String nameserviceId = conf.get(DFS_NAMESERVICE_ID); if (nameserviceId != null) { return nameserviceId; } Collection<String> nsIds = DFSUtilClient.getNameServiceIds(conf); if (1 == nsIds.size()) { return nsIds.toArray(new String[1])[0]; } String nnId = conf.get(DFS_HA_NAMENODE_ID_KEY); return getSuffixIDs(conf, addressKey, null, nnId, LOCAL_ADDRESS_MATCHER)[0]; }
@Test public void getNameServiceId() { HdfsConfiguration conf = new HdfsConfiguration(); conf.set(DFS_NAMESERVICE_ID, "nn1"); assertEquals("nn1", DFSUtil.getNamenodeNameServiceId(conf)); }
public static Builder forCurrentMagic(ProduceRequestData data) { return forMagic(RecordBatch.CURRENT_MAGIC_VALUE, data); }
@Test public void testV3AndAboveCannotUseMagicV1() { ByteBuffer buffer = ByteBuffer.allocate(256); MemoryRecordsBuilder builder = MemoryRecords.builder(buffer, RecordBatch.MAGIC_VALUE_V1, Compression.NONE, TimestampType.CREATE_TIME, 0L); builder.append(10L, null, "a".getBytes()); ProduceRequest.Builder requestBuilder = ProduceRequest.forCurrentMagic(new ProduceRequestData() .setTopicData(new ProduceRequestData.TopicProduceDataCollection(Collections.singletonList( new ProduceRequestData.TopicProduceData() .setName("test") .setPartitionData(Collections.singletonList(new ProduceRequestData.PartitionProduceData() .setIndex(0) .setRecords(builder.build())))) .iterator())) .setAcks((short) 1) .setTimeoutMs(5000)); assertThrowsForAllVersions(requestBuilder, InvalidRecordException.class); }
@Override @Deprecated public <KR, VR> KStream<KR, VR> transform(final org.apache.kafka.streams.kstream.TransformerSupplier<? super K, ? super V, KeyValue<KR, VR>> transformerSupplier, final String... stateStoreNames) { Objects.requireNonNull(transformerSupplier, "transformerSupplier can't be null"); final String name = builder.newProcessorName(TRANSFORM_NAME); return flatTransform(new TransformerSupplierAdapter<>(transformerSupplier), Named.as(name), stateStoreNames); }
@Test @SuppressWarnings("deprecation") public void shouldNotAllowNullTransformerSupplierOnTransformWithStores() { final NullPointerException exception = assertThrows( NullPointerException.class, () -> testStream.transform(null, "storeName")); assertThat(exception.getMessage(), equalTo("transformerSupplier can't be null")); }
public static L2ModificationInstruction modL2Dst(MacAddress addr) { checkNotNull(addr, "Dst l2 address cannot be null"); return new L2ModificationInstruction.ModEtherInstruction( L2ModificationInstruction.L2SubType.ETH_DST, addr); }
@Test public void testModL2DstMethod() { final Instruction instruction = Instructions.modL2Dst(mac1); final L2ModificationInstruction.ModEtherInstruction modEtherInstruction = checkAndConvert(instruction, Instruction.Type.L2MODIFICATION, L2ModificationInstruction.ModEtherInstruction.class); assertThat(modEtherInstruction.mac(), is(equalTo(mac1))); assertThat(modEtherInstruction.subtype(), is(equalTo(L2ModificationInstruction.L2SubType.ETH_DST))); }
private boolean setNodeState(OrchestratorContext context, HostName host, int storageNodeIndex, ClusterControllerNodeState wantedState, ContentService contentService, Condition condition, boolean throwOnFailure) { try { ClusterControllerClientTimeouts timeouts = context.getClusterControllerTimeouts(); Inspector response = client.send(strategy(hosts), Method.POST) .at("cluster", "v2", clusterName, contentService.nameInClusterController(), Integer.toString(storageNodeIndex)) .deadline(timeouts.readBudget()) .parameters(() -> deadline(timeouts)) .body(stateChangeRequestBytes(wantedState, condition, context.isProbe())) .throwing(retryOnRedirect) .read(SlimeUtils::jsonToSlime).get(); if ( ! response.field("wasModified").asBool()) { if (throwOnFailure) throw new HostStateChangeDeniedException(host, HostedVespaPolicy.SET_NODE_STATE_CONSTRAINT, "Failed to set state to " + wantedState + " in cluster controller: " + response.field("reason").asString()); return false; } return true; } catch (ResponseException e) { throw new HostStateChangeDeniedException(host, HostedVespaPolicy.SET_NODE_STATE_CONSTRAINT, "Failed setting node " + storageNodeIndex + " in cluster " + clusterName + " to state " + wantedState + ": " + e.getMessage()); } catch (UncheckedIOException e) { throw new HostStateChangeDeniedException(host, HostedVespaPolicy.CLUSTER_CONTROLLER_AVAILABLE_CONSTRAINT, String.format("Giving up setting %s for storage node with index %d in cluster %s: %s", wantedState, storageNodeIndex, clusterName, e.getMessage()), e.getCause()); } catch (UncheckedTimeoutException e) { throw new HostStateChangeDeniedException(host, HostedVespaPolicy.DEADLINE_CONSTRAINT, "Timeout while waiting for setNodeState(" + storageNodeIndex + ", " + wantedState + ") against " + hosts + ": " + e.getMessage(), e); } }
@Test public void verifySetNodeState() { OrchestratorContext context = OrchestratorContext.createContextForSingleAppOp(clock); wire.expect((url, body) -> { assertEquals("http://host1:19050/cluster/v2/cc/storage/2?timeout=9.6", url.asURI().toString()); assertEquals("{\"state\":{\"user\":{\"reason\":\"Orchestrator\",\"state\":\"down\"}},\"condition\":\"SAFE\"}", body); return "{ \"wasModified\": true }"; }, 200); client.setNodeState(context, host, 2, DOWN, ContentService.STORAGE_NODE, false); clock.advance(Duration.ofSeconds(9)); wire.expect((url, body) -> { assertEquals("http://host1:19050/cluster/v2/cc/storage/1?timeout=0.6", url.asURI().toString()); assertEquals("{\"state\":{\"user\":{\"reason\":\"Orchestrator\",\"state\":\"down\"}},\"condition\":\"SAFE\"}", body); return "{ \"wasModified\": false, \"reason\": \"because\" }"; }, 200); assertEquals("Changing the state of node would violate controller-set-node-state: Failed to set state to DOWN in cluster controller: because", assertThrows(HostStateChangeDeniedException.class, () -> client.setNodeState(context, host, 1, DOWN, ContentService.STORAGE_NODE, false)) .getMessage()); }
@Override public void delete(LookupTableDto nativeEntity) { lookupTableService.deleteAndPostEventImmutable(nativeEntity.id()); }
@Test @MongoDBFixtures("LookupTableFacadeTest.json") public void delete() { final Optional<LookupTableDto> lookupTableDto = lookupTableService.get("5adf24dd4b900a0fdb4e530d"); assertThat(lookupTableService.findAll()).hasSize(1); lookupTableDto.ifPresent(facade::delete); assertThat(lookupTableService.findAll()).isEmpty(); assertThat(lookupTableService.get("5adf24dd4b900a0fdb4e530d")).isEmpty(); }
@ApiOperation(value = "Query for historic activity instances", tags = {"History", "Query" }, notes = "All supported JSON parameter fields allowed are exactly the same as the parameters found for getting a collection of historic task instances, but passed in as JSON-body arguments rather than URL-parameters to allow for more advanced querying and preventing errors with request-uri’s that are too long.") @ApiResponses(value = { @ApiResponse(code = 200, message = "Indicates request was successful and the activities are returned"), @ApiResponse(code = 400, message = "Indicates an parameter was passed in the wrong format. The status-message contains additional information") }) @PostMapping(value = "/query/historic-activity-instances", produces = "application/json") public DataResponse<HistoricActivityInstanceResponse> queryActivityInstances(@RequestBody HistoricActivityInstanceQueryRequest queryRequest, @ApiParam(hidden = true) @RequestParam Map<String, String> allRequestParams) { return getQueryResponse(queryRequest, allRequestParams); }
@Test @Deployment public void testQueryActivityInstances() throws Exception { HashMap<String, Object> processVariables = new HashMap<>(); processVariables.put("stringVar", "Azerty"); processVariables.put("intVar", 67890); processVariables.put("booleanVar", false); ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess", processVariables); Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult(); taskService.complete(task.getId()); ProcessInstance processInstance2 = runtimeService.startProcessInstanceByKey("oneTaskProcess", processVariables); String url = RestUrls.createRelativeResourceUrl(RestUrls.URL_HISTORIC_ACTIVITY_INSTANCE_QUERY); ObjectNode requestNode = objectMapper.createObjectNode(); requestNode.put("activityId", "processTask"); assertResultsPresentInDataResponse(url, requestNode, 2, "processTask"); requestNode = objectMapper.createObjectNode(); requestNode.put("activityId", "processTask"); requestNode.put("finished", true); assertResultsPresentInDataResponse(url, requestNode, 1, "processTask"); requestNode = objectMapper.createObjectNode(); requestNode.put("activityId", "processTask"); requestNode.put("finished", false); assertResultsPresentInDataResponse(url, requestNode, 1, "processTask"); requestNode = objectMapper.createObjectNode(); requestNode.put("activityId", "processTask2"); assertResultsPresentInDataResponse(url, requestNode, 1, "processTask2"); requestNode = objectMapper.createObjectNode(); requestNode.put("activityId", "processTask3"); assertResultsPresentInDataResponse(url, requestNode, 0); requestNode = objectMapper.createObjectNode(); requestNode.put("activityName", "Process task"); assertResultsPresentInDataResponse(url, requestNode, 2, "processTask"); requestNode = objectMapper.createObjectNode(); requestNode.put("activityName", "Process task2"); assertResultsPresentInDataResponse(url, requestNode, 1, "processTask2"); requestNode = objectMapper.createObjectNode(); requestNode.put("activityName", "Process task3"); assertResultsPresentInDataResponse(url, requestNode, 0); requestNode = objectMapper.createObjectNode(); requestNode.put("activityType", "userTask"); assertResultsPresentInDataResponse(url, requestNode, 3, "processTask", "processTask2"); requestNode = objectMapper.createObjectNode(); requestNode.put("activityType", "startEvent"); assertResultsPresentInDataResponse(url, requestNode, 2, "theStart"); requestNode = objectMapper.createObjectNode(); requestNode.put("activityType", "receiveTask"); assertResultsPresentInDataResponse(url, requestNode, 0); requestNode = objectMapper.createObjectNode(); requestNode.put("processInstanceId", processInstance.getId()); assertResultsPresentInDataResponse(url, requestNode, 5, "theStart", "flow1", "processTask", "flow2", "processTask2"); requestNode = objectMapper.createObjectNode(); requestNode.put("processInstanceId", processInstance2.getId()); assertResultsPresentInDataResponse(url, requestNode, 3, "theStart", "flow1", "processTask"); requestNode = objectMapper.createObjectNode(); requestNode.put("processDefinitionId", processInstance.getProcessDefinitionId()); assertResultsPresentInDataResponse(url, requestNode, 8, "theStart", "flow1", "processTask", "flow2", "processTask2"); requestNode = objectMapper.createObjectNode(); requestNode.put("taskAssignee", "kermit"); assertResultsPresentInDataResponse(url, requestNode, 2, "processTask"); requestNode = objectMapper.createObjectNode(); requestNode.put("taskAssignee", "fozzie"); assertResultsPresentInDataResponse(url, requestNode, 1, "processTask2"); requestNode = objectMapper.createObjectNode(); requestNode.put("taskAssignee", "fozzie2"); assertResultsPresentInDataResponse(url, requestNode, 0); }
public static String hashForStringArray(String[] stringData) {
    checkNotNull(stringData);
    return computeHashFor(stringData);
}
@Test
public void hashForStringArrayDoesNotIgnoreNull() {
    String[] array1 = new String[]{"a", "b"};
    String[] array2 = new String[]{"a", "b", null};
    String[] array3 = new String[]{"a", null, "b"};

    // rehashing produces the same output twice
    assertThat(hashForStringArray(array1), is(hashForStringArray(array1)));
    assertThat(hashForStringArray(array2), is(hashForStringArray(array2)));
    assertThat(hashForStringArray(array3), is(hashForStringArray(array3)));

    // all 3 hashes are different
    assertThat(hashForStringArray(array1), is(not(hashForStringArray(array2))));
    assertThat(hashForStringArray(array1), is(not(hashForStringArray(array3))));
    assertThat(hashForStringArray(array2), is(not(hashForStringArray(array3))));
}
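Since array2 and array3 above hold the same elements in a different order yet hash differently, the digest is position-sensitive as well as null-sensitive. A minimal usage sketch (the class exposing hashForStringArray is project-specific, so the static import is an assumption):

// Assumes hashForStringArray's enclosing class is statically imported, as in the test above.
String[] ordered = {"a", "b"};
String[] reversed = {"b", "a"};
// Element position affects the digest, so these differ:
System.out.println(hashForStringArray(ordered).equals(hashForStringArray(reversed))); // false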
@Override
public void onCreating(AbstractJob job) {
    JobDetails jobDetails = job.getJobDetails();
    Optional<Job> jobAnnotation = getJobAnnotation(jobDetails);
    setJobName(job, jobAnnotation);
    setAmountOfRetries(job, jobAnnotation);
    setLabels(job, jobAnnotation);
}
@Test
void testDisplayNameWithAnnotationUsingJobParametersAndMDCVariables() {
    MDC.put("customer.id", "1");

    Job job = anEnqueuedJob()
            .withoutName()
            .withJobDetails(jobDetails()
                    .withClassName(TestService.class)
                    .withMethodName("doWorkWithAnnotation")
                    .withJobParameter(5)
                    .withJobParameter("John Doe"))
            .build();

    defaultJobFilter.onCreating(job);

    assertThat(job.getJobName()).isEqualTo("Doing some hard work for user John Doe (customerId: 1)");
}
static URI determineClasspathResourceUri(Path baseDir, String basePackagePath, Path resource) {
    String subPackageName = determineSubpackagePath(baseDir, resource);
    String resourceName = resource.getFileName().toString();
    String classpathResourcePath = of(basePackagePath, subPackageName, resourceName)
            .filter(value -> !value.isEmpty()) // default package.
            .collect(joining(RESOURCE_SEPARATOR_STRING));
    return classpathResourceUri(classpathResourcePath);
}
@Test
void determineFullyQualifiedResourceName() {
    Path baseDir = Paths.get("path", "to", "com", "example", "app");
    String basePackageName = "com/example/app";
    Path resourceFile = Paths.get("path", "to", "com", "example", "app", "app.feature");
    URI fqn = ClasspathSupport.determineClasspathResourceUri(baseDir, basePackageName, resourceFile);
    assertEquals(URI.create("classpath:com/example/app/app.feature"), fqn);
}
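The sub-package branch is easier to see with a resource nested below baseDir; a minimal sketch (same classes as the test above, directory layout hypothetical):

// The relative path under baseDir survives into the classpath URI.
Path baseDir = Paths.get("path", "to", "com", "example", "app");
Path nested = Paths.get("path", "to", "com", "example", "app", "nested", "other.feature");
URI uri = ClasspathSupport.determineClasspathResourceUri(baseDir, "com/example/app", nested);
// expected: classpath:com/example/app/nested/other.feature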
@Override
public void flush() {
    internal.flush();
}
@Test
public void shouldDelegateAndRecordMetricsOnFlush() {
    store.flush();
    verify(inner).flush();
    assertThat((Double) getMetric("flush-rate").metricValue(), greaterThan(0.0));
}
public static MagicDetector parse(MediaType mediaType, String type, String offset, String value, String mask) {
    int start = 0;
    int end = 0;
    if (offset != null) {
        int colon = offset.indexOf(':');
        if (colon == -1) {
            start = Integer.parseInt(offset);
            end = start;
        } else {
            start = Integer.parseInt(offset.substring(0, colon));
            end = Integer.parseInt(offset.substring(colon + 1));
        }
    }

    byte[] patternBytes = decodeValue(value, type);
    byte[] maskBytes = null;
    if (mask != null) {
        maskBytes = decodeValue(mask, type);
    }

    return new MagicDetector(mediaType, patternBytes, maskBytes, type.equals("regex"),
            type.equals("stringignorecase"), start, end);
}
@Test
public void testDetectString() throws Exception {
    String data = "abcdEFGhijklmnoPQRstuvwxyz0123456789";
    MediaType testMT = new MediaType("application", "test");
    Detector detector;

    // Check regular String matching
    detector = MagicDetector.parse(testMT, "string", "0:20", "abcd", null);
    assertDetect(detector, testMT, data.getBytes(US_ASCII));
    detector = MagicDetector.parse(testMT, "string", "0:20", "cdEFGh", null);
    assertDetect(detector, testMT, data.getBytes(US_ASCII));

    // Check Little Endian and Big Endian utf-16 strings
    detector = MagicDetector.parse(testMT, "unicodeLE", "0:20", "cdEFGh", null);
    assertDetect(detector, testMT, data.getBytes(UTF_16LE));
    detector = MagicDetector.parse(testMT, "unicodeBE", "0:20", "cdEFGh", null);
    assertDetect(detector, testMT, data.getBytes(UTF_16BE));

    // Check case ignoring String matching
    detector = MagicDetector.parse(testMT, "stringignorecase", "0:20", "BcDeFgHiJKlm", null);
    assertDetect(detector, testMT, data.getBytes(US_ASCII));
}
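parse also accepts the "regex" type that flips the regex flag in the constructor call above; a minimal sketch (assuming tika-core on the classpath, and that the pattern is applied within the 0:20 offset window):

import java.io.ByteArrayInputStream;
import org.apache.tika.detect.MagicDetector;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.mime.MediaType;

public class RegexMagicSketch {
    public static void main(String[] args) throws Exception {
        MediaType testMT = new MediaType("application", "test");
        // type "regex" makes the pattern bytes a regular expression rather than a literal
        MagicDetector detector = MagicDetector.parse(testMT, "regex", "0:20", "abcd[E-G]+", null);
        MediaType detected = detector.detect(
            new ByteArrayInputStream("abcdEFGhij".getBytes("US-ASCII")), new Metadata());
        System.out.println(detected); // expected: application/test
    }
}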
public List<Long> availableWindows() {
    return getWindowList(_oldestWindowIndex, _currentWindowIndex - 1);
}
@Test
public void testAddSamplesWithLargeInterval() {
    MetricSampleAggregator<String, IntegerEntity> aggregator =
            new MetricSampleAggregator<>(NUM_WINDOWS, WINDOW_MS, MIN_SAMPLES_PER_WINDOW, 0, _metricDef);
    // Populate samples for time windows indexed from 0 to NUM_WINDOWS.
    CruiseControlUnitTestUtils.populateSampleAggregator(NUM_WINDOWS + 1, MIN_SAMPLES_PER_WINDOW,
                                                        aggregator, ENTITY1, 0, WINDOW_MS, _metricDef);
    // Populate samples for time windows indexed from 4 * NUM_WINDOWS to 5 * NUM_WINDOWS - 1.
    CruiseControlUnitTestUtils.populateSampleAggregator(NUM_WINDOWS, MIN_SAMPLES_PER_WINDOW,
                                                        aggregator, ENTITY1, 4 * NUM_WINDOWS, WINDOW_MS, _metricDef);
    // If the aggregator rolls out time windows properly, windows indexed from 4 * NUM_WINDOWS - 1 to
    // 5 * NUM_WINDOWS - 1 are currently in memory, and windows indexed from 4 * NUM_WINDOWS - 1 to
    // 5 * NUM_WINDOWS - 2 should be returned from the query.
    List<Long> availableWindows = aggregator.availableWindows();
    assertEquals(NUM_WINDOWS, availableWindows.size());
    for (int i = 0; i < NUM_WINDOWS; i++) {
        assertEquals((i + 4 * NUM_WINDOWS) * WINDOW_MS, availableWindows.get(i).longValue());
    }
}
public void convertQueueHierarchy(FSQueue queue) {
    List<FSQueue> children = queue.getChildQueues();
    final String queueName = queue.getName();

    emitChildQueues(queueName, children);
    emitMaxAMShare(queueName, queue);
    emitMaxParallelApps(queueName, queue);
    emitMaxAllocations(queueName, queue);
    emitPreemptionDisabled(queueName, queue);

    emitChildCapacity(queue);
    emitMaximumCapacity(queueName, queue);
    emitSizeBasedWeight(queueName);
    emitOrderingPolicy(queueName, queue);
    checkMaxChildCapacitySetting(queue);
    emitDefaultUserLimitFactor(queueName, children);

    for (FSQueue childQueue : children) {
        convertQueueHierarchy(childQueue);
    }
}
@Test
public void testAutoCreateV2FlagsInWeightMode() {
    converter = builder.withPercentages(false).build();

    converter.convertQueueHierarchy(rootQueue);

    assertTrue("root autocreate v2 flag",
        csConfig.isAutoQueueCreationV2Enabled(ROOT));
    assertTrue("root.admins autocreate v2 flag",
        csConfig.isAutoQueueCreationV2Enabled(ADMINS));
    assertTrue("root.admins.alice autocreate v2 flag",
        csConfig.isAutoQueueCreationV2Enabled(ADMINS_ALICE));
    assertTrue("root.users autocreate v2 flag",
        csConfig.isAutoQueueCreationV2Enabled(USERS));
    assertTrue("root.misc autocreate v2 flag",
        csConfig.isAutoQueueCreationV2Enabled(MISC));

    // Leaf queue root.admins.alice is removed from the list below;
    // adding a reservation to a leaf changes its queueType to FSParentQueue.
    Set<String> leafs = Sets.difference(ALL_QUEUES,
        Sets.newHashSet("root", "root.admins", "root.users", "root.misc", "root.admins.alice"));
    for (String queue : leafs) {
        key = PREFIX + queue + ".auto-queue-creation-v2.enabled";
        assertEquals("Key " + key + " has different value", false,
            csConfig.isAutoQueueCreationV2Enabled(new QueuePath(queue)));
    }
}
@Override
public DescriptiveUrlBag toUrl(final Path file) {
    final DescriptiveUrlBag list = new DefaultUrlProvider(session.getHost()).toUrl(file);
    if (file.isFile()) {
        // Authenticated browser download using cookie-based Google account authentication in conjunction with ACL
        list.add(new DescriptiveUrl(URI.create(String.format("https://storage.cloud.google.com%s",
                URIEncoder.encode(file.getAbsolute()))),
                DescriptiveUrl.Type.authenticated,
                MessageFormat.format(LocaleFactory.localizedString("{0} URL"), LocaleFactory.localizedString("Authenticated"))));
        // Website configuration
        final Distribution distribution = new Distribution(Distribution.DOWNLOAD,
                URI.create(String.format("%s://%s.%s", Distribution.DOWNLOAD.getScheme(),
                        containerService.getContainer(file).getName(), session.getHost().getProtocol().getDefaultHostname())), false);
        distribution.setUrl(URI.create(String.format("%s://%s.%s", Distribution.DOWNLOAD.getScheme(),
                containerService.getContainer(file).getName(), session.getHost().getProtocol().getDefaultHostname())));
        list.addAll(new DistributionUrlProvider(distribution).toUrl(file));
    }
    // gsutil URI
    list.add(new DescriptiveUrl(URI.create(String.format("gs://%s%s",
            containerService.getContainer(file).getName(),
            file.isRoot() ? Path.DELIMITER : containerService.isContainer(file) ? Path.DELIMITER
                    : String.format("/%s", URIEncoder.encode(containerService.getKey(file))))),
            DescriptiveUrl.Type.provider,
            MessageFormat.format(LocaleFactory.localizedString("{0} URL"), session.getHost().getProtocol().getName())));
    return list;
}
@Test
public void testWebsiteConfiguration() {
    final GoogleStorageUrlProvider provider = new GoogleStorageUrlProvider(session);
    assertEquals("http://test.cyberduck.ch.storage.googleapis.com/f",
            provider.toUrl(new Path("test.cyberduck.ch/f", EnumSet.of(Path.Type.file)))
                    .find(DescriptiveUrl.Type.origin).getUrl());
}
@Nullable
@Override
public GenericRow decode(byte[] payload, GenericRow destination) {
    try {
        destination = (GenericRow) _decodeMethod.invoke(null, payload, destination);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    return destination;
}
@Test
public void testNestedMessageClass() throws Exception {
    ProtoBufCodeGenMessageDecoder messageDecoder = setupDecoder("complex_types.jar",
        "org.apache.pinot.plugin.inputformat.protobuf.ComplexTypes$TestMessage$NestedMessage",
        ImmutableSet.of(NESTED_STRING_FIELD, NESTED_INT_FIELD));
    ComplexTypes.TestMessage.NestedMessage nestedMessage = ComplexTypes.TestMessage.NestedMessage.newBuilder()
        .setNestedStringField("hello")
        .setNestedIntField(42)
        .build();
    GenericRow destination = new GenericRow();
    messageDecoder.decode(nestedMessage.toByteArray(), destination);
    assertNotNull(destination.getValue(NESTED_STRING_FIELD));
    assertNotNull(destination.getValue(NESTED_INT_FIELD));
    assertEquals(destination.getValue(NESTED_STRING_FIELD), "hello");
    assertEquals(destination.getValue(NESTED_INT_FIELD), 42);
}
@Override
public ScheduledFuture<?> schedule(Runnable command, long delay, TimeUnit unit) {
    Map<String, String> mdcContextMap = getMdcContextMap();
    return super.schedule(ContextPropagator.decorateRunnable(contextPropagators, () -> {
        try {
            setMDCContext(mdcContextMap);
            command.run();
        } finally {
            MDC.clear();
        }
    }), delay, unit);
}
@Test
public void testScheduleCallablePropagatesMDCContext() {
    MDC.put("key", "value");
    MDC.put("key2", "value2");
    final Map<String, String> contextMap = MDC.getCopyOfContextMap();

    final ScheduledFuture<Map<String, String>> scheduledFuture = this.schedulerService
        .schedule(MDC::getCopyOfContextMap, 0, TimeUnit.MILLISECONDS);

    waitAtMost(1, TimeUnit.SECONDS).until(matches(() ->
        assertThat(scheduledFuture.get()).hasSize(2).containsExactlyEntriesOf(contextMap)));
}
@Override
public String toString() {
    return "ConfigChangeItem{" +
            "key='" + key + '\'' +
            ", oldValue='" + oldValue + '\'' +
            ", newValue='" + newValue + '\'' +
            ", type=" + type +
            '}';
}
@Test
void testToString() {
    ConfigChangeItem item = new ConfigChangeItem("testKey", null, "testValue");
    item.setType(PropertyChangeType.ADDED);
    assertEquals("ConfigChangeItem{key='testKey', oldValue='null', newValue='testValue', type=ADDED}",
            item.toString());
}
public URI buildEncodedUri(String endpointUrl) {
    if (endpointUrl == null) {
        throw new RuntimeException("Url string cannot be null!");
    }
    if (endpointUrl.isEmpty()) {
        throw new RuntimeException("Url string cannot be empty!");
    }

    URI uri = UriComponentsBuilder.fromUriString(endpointUrl).build().encode().toUri();
    if (uri.getScheme() == null || uri.getScheme().isEmpty()) {
        throw new RuntimeException("Transport scheme(protocol) must be provided!");
    }

    boolean authorityNotValid = uri.getAuthority() == null || uri.getAuthority().isEmpty();
    boolean hostNotValid = uri.getHost() == null || uri.getHost().isEmpty();
    if (authorityNotValid || hostNotValid) {
        throw new RuntimeException("Url string is invalid!");
    }

    return uri;
}
@Test
public void testBuildUriWithSpecialSymbols() {
    Mockito.when(client.buildEncodedUri(any())).thenCallRealMethod();
    String url = "http://192.168.1.1/data?d={\"a\": 12}";
    String expected = "http://192.168.1.1/data?d=%7B%22a%22:%2012%7D";
    URI uri = client.buildEncodedUri(url);
    Assertions.assertEquals(expected, uri.toString());
}
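The validation branches are easiest to see with inputs that fail them; a minimal sketch (client as in the test above; the messages come from the method itself):

// Scheme present, space percent-encoded:
URI ok = client.buildEncodedUri("http://192.168.1.1/data?q=a b"); // ...?q=a%20b
// Null or "" fail fast with "Url string cannot be null!" / "Url string cannot be empty!".
// A URL without a scheme fails the scheme check:
try {
    client.buildEncodedUri("192.168.1.1/data");
} catch (RuntimeException e) {
    System.out.println(e.getMessage()); // "Transport scheme(protocol) must be provided!"
}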
public static KTableHolder<GenericKey> build(
    final KGroupedStreamHolder groupedStream,
    final StreamAggregate aggregate,
    final RuntimeBuildContext buildContext,
    final MaterializedFactory materializedFactory) {
    return build(
        groupedStream,
        aggregate,
        buildContext,
        materializedFactory,
        new AggregateParamsFactory()
    );
}
@Test
public void shouldBuildAggregatorParamsCorrectlyForWindowedAggregate() {
    for (final Runnable given : given()) {
        // Given:
        clearInvocations(groupedStream, timeWindowedStream, sessionWindowedStream, aggregated, aggregateParamsFactory);
        when(aggregateParamsFactory.create(any(), any(), any(), any(), anyBoolean(), any()))
            .thenReturn(aggregateParams);
        given.run();

        // When:
        windowedAggregate.build(planBuilder, planInfo);

        // Then:
        verify(aggregateParamsFactory)
            .create(INPUT_SCHEMA, NON_AGG_COLUMNS, functionRegistry, FUNCTIONS, true, KsqlConfig.empty());
    }
}
public TaskAcknowledgeResult acknowledgeCoordinatorState(
        OperatorInfo coordinatorInfo, @Nullable ByteStreamStateHandle stateHandle) {

    synchronized (lock) {
        if (disposed) {
            return TaskAcknowledgeResult.DISCARDED;
        }

        final OperatorID operatorId = coordinatorInfo.operatorId();
        OperatorState operatorState = operatorStates.get(operatorId);

        // sanity check for better error reporting
        if (!notYetAcknowledgedOperatorCoordinators.remove(operatorId)) {
            return operatorState != null && operatorState.getCoordinatorState() != null
                    ? TaskAcknowledgeResult.DUPLICATE
                    : TaskAcknowledgeResult.UNKNOWN;
        }

        if (operatorState == null) {
            operatorState = new OperatorState(
                    operatorId,
                    coordinatorInfo.currentParallelism(),
                    coordinatorInfo.maxParallelism());
            operatorStates.put(operatorId, operatorState);
        }
        if (stateHandle != null) {
            operatorState.setCoordinatorState(stateHandle);
        }

        return TaskAcknowledgeResult.SUCCESS;
    }
}
@Test
void testDuplicateAcknowledgeCoordinator() throws Exception {
    final OperatorInfo coordinator = new TestingOperatorInfo();
    final PendingCheckpoint checkpoint = createPendingCheckpointWithCoordinators(coordinator);

    checkpoint.acknowledgeCoordinatorState(coordinator, new TestingStreamStateHandle());
    final TaskAcknowledgeResult secondAck = checkpoint.acknowledgeCoordinatorState(coordinator, null);

    assertThat(secondAck).isEqualTo(TaskAcknowledgeResult.DUPLICATE);
}
public static void emptyCheck(String paramName, String value) {
    if (StringUtils.isEmpty(value)) {
        throw new IllegalArgumentException("The value of " + paramName + " can't be empty");
    }
}
@Test
public void testEmptyCheck() {
    assertThrows(IllegalArgumentException.class, () -> ValueValidationUtil.emptyCheck("param1", ""));
    assertThrows(IllegalArgumentException.class, () -> ValueValidationUtil.emptyCheck("param2", null));
    ValueValidationUtil.emptyCheck("param3", "nonEmpty");
}
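A typical call site for such a guard; a minimal sketch (the Endpoint class is hypothetical):

public final class Endpoint {
    private final String host;

    public Endpoint(String host) {
        // throws IllegalArgumentException("The value of host can't be empty") on null or ""
        ValueValidationUtil.emptyCheck("host", host);
        this.host = host;
    }
}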
@Private
public void handleEvent(JobHistoryEvent event) {
    synchronized (lock) {

        // If this is JobSubmitted Event, setup the writer
        if (event.getHistoryEvent().getEventType() == EventType.AM_STARTED) {
            try {
                AMStartedEvent amStartedEvent = (AMStartedEvent) event.getHistoryEvent();
                setupEventWriter(event.getJobID(), amStartedEvent);
            } catch (IOException ioe) {
                LOG.error("Error JobHistoryEventHandler in handleEvent: " + event, ioe);
                throw new YarnRuntimeException(ioe);
            }
        }

        // For all events
        // (1) Write it out
        // (2) Process it for JobSummary
        // (3) Process it for ATS (if enabled)
        MetaInfo mi = fileMap.get(event.getJobID());
        try {
            HistoryEvent historyEvent = event.getHistoryEvent();
            if (!(historyEvent instanceof NormalizedResourceEvent)) {
                mi.writeEvent(historyEvent);
            }
            processEventForJobSummary(event.getHistoryEvent(), mi.getJobSummary(), event.getJobID());
            if (LOG.isDebugEnabled()) {
                LOG.debug("In HistoryEventHandler " + event.getHistoryEvent().getEventType());
            }
        } catch (IOException e) {
            LOG.error("Error writing History Event: " + event.getHistoryEvent(), e);
            throw new YarnRuntimeException(e);
        }

        if (event.getHistoryEvent().getEventType() == EventType.JOB_SUBMITTED) {
            JobSubmittedEvent jobSubmittedEvent = (JobSubmittedEvent) event.getHistoryEvent();
            mi.getJobIndexInfo().setSubmitTime(jobSubmittedEvent.getSubmitTime());
            mi.getJobIndexInfo().setQueueName(jobSubmittedEvent.getJobQueueName());
        }

        // initialize the launchTime in the JobIndexInfo of MetaInfo
        if (event.getHistoryEvent().getEventType() == EventType.JOB_INITED) {
            JobInitedEvent jie = (JobInitedEvent) event.getHistoryEvent();
            mi.getJobIndexInfo().setJobStartTime(jie.getLaunchTime());
        }

        if (event.getHistoryEvent().getEventType() == EventType.JOB_QUEUE_CHANGED) {
            JobQueueChangeEvent jQueueEvent = (JobQueueChangeEvent) event.getHistoryEvent();
            mi.getJobIndexInfo().setQueueName(jQueueEvent.getJobQueueName());
        }

        // If this is JobFinishedEvent, close the writer and setup the job-index
        if (event.getHistoryEvent().getEventType() == EventType.JOB_FINISHED) {
            try {
                JobFinishedEvent jFinishedEvent = (JobFinishedEvent) event.getHistoryEvent();
                mi.getJobIndexInfo().setFinishTime(jFinishedEvent.getFinishTime());
                mi.getJobIndexInfo().setNumMaps(jFinishedEvent.getSucceededMaps());
                mi.getJobIndexInfo().setNumReduces(jFinishedEvent.getSucceededReduces());
                mi.getJobIndexInfo().setJobStatus(JobState.SUCCEEDED.toString());
                closeEventWriter(event.getJobID());
                processDoneFiles(event.getJobID());
            } catch (IOException e) {
                throw new YarnRuntimeException(e);
            }
        }

        // In case of JOB_ERROR, only process all the Done files (e.g. job
        // summary, job history file etc.) if it is the last AM retry.
        if (event.getHistoryEvent().getEventType() == EventType.JOB_ERROR) {
            try {
                JobUnsuccessfulCompletionEvent jucEvent = (JobUnsuccessfulCompletionEvent) event.getHistoryEvent();
                mi.getJobIndexInfo().setFinishTime(jucEvent.getFinishTime());
                mi.getJobIndexInfo().setNumMaps(jucEvent.getSucceededMaps());
                mi.getJobIndexInfo().setNumReduces(jucEvent.getSucceededReduces());
                mi.getJobIndexInfo().setJobStatus(jucEvent.getStatus());
                closeEventWriter(event.getJobID());
                if (context.isLastAMRetry()) {
                    processDoneFiles(event.getJobID());
                }
            } catch (IOException e) {
                throw new YarnRuntimeException(e);
            }
        }

        if (event.getHistoryEvent().getEventType() == EventType.JOB_FAILED
                || event.getHistoryEvent().getEventType() == EventType.JOB_KILLED) {
            try {
                JobUnsuccessfulCompletionEvent jucEvent = (JobUnsuccessfulCompletionEvent) event.getHistoryEvent();
                mi.getJobIndexInfo().setFinishTime(jucEvent.getFinishTime());
                mi.getJobIndexInfo().setNumMaps(jucEvent.getSucceededMaps());
                mi.getJobIndexInfo().setNumReduces(jucEvent.getSucceededReduces());
                mi.getJobIndexInfo().setJobStatus(jucEvent.getStatus());
                closeEventWriter(event.getJobID());
                processDoneFiles(event.getJobID());
            } catch (IOException e) {
                throw new YarnRuntimeException(e);
            }
        }
    }
}
@Test(timeout = 50000)
public void testTimelineEventHandling() throws Exception {
    TestParams t = new TestParams(RunningAppContext.class, false);
    Configuration conf = new YarnConfiguration();
    conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
    long currentTime = System.currentTimeMillis();
    try (MiniYARNCluster yarnCluster = new MiniYARNCluster(
            TestJobHistoryEventHandler.class.getSimpleName(), 1, 1, 1, 1)) {
        yarnCluster.init(conf);
        yarnCluster.start();
        Configuration confJHEH = new YarnConfiguration(conf);
        confJHEH.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true);
        confJHEH.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
            MiniYARNCluster.getHostname() + ":" + yarnCluster.getApplicationHistoryServer().getPort());
        JHEvenHandlerForTest jheh = new JHEvenHandlerForTest(t.mockAppContext, 0);
        jheh.init(confJHEH);
        jheh.start();
        TimelineStore ts = yarnCluster.getApplicationHistoryServer().getTimelineStore();

        handleEvent(jheh, new JobHistoryEvent(t.jobId, new AMStartedEvent(
            t.appAttemptId, 200, t.containerId, "nmhost", 3000, 4000, -1),
            currentTime - 10));
        jheh.getDispatcher().await();
        TimelineEntities entities = ts.getEntities("MAPREDUCE_JOB", null, null,
            null, null, null, null, null, null, null);
        Assert.assertEquals(1, entities.getEntities().size());
        TimelineEntity tEntity = entities.getEntities().get(0);
        Assert.assertEquals(t.jobId.toString(), tEntity.getEntityId());
        Assert.assertEquals(1, tEntity.getEvents().size());
        Assert.assertEquals(EventType.AM_STARTED.toString(),
            tEntity.getEvents().get(0).getEventType());
        Assert.assertEquals(currentTime - 10,
            tEntity.getEvents().get(0).getTimestamp());

        handleEvent(jheh, new JobHistoryEvent(t.jobId,
            new JobSubmittedEvent(TypeConverter.fromYarn(t.jobId), "name", "user", 200,
            "/foo/job.xml", new HashMap<JobACL, AccessControlList>(), "default"),
            currentTime + 10));
        jheh.getDispatcher().await();
        entities = ts.getEntities("MAPREDUCE_JOB", null, null, null,
            null, null, null, null, null, null);
        Assert.assertEquals(1, entities.getEntities().size());
        tEntity = entities.getEntities().get(0);
        Assert.assertEquals(t.jobId.toString(), tEntity.getEntityId());
        Assert.assertEquals(2, tEntity.getEvents().size());
        Assert.assertEquals(EventType.JOB_SUBMITTED.toString(),
            tEntity.getEvents().get(0).getEventType());
        Assert.assertEquals(EventType.AM_STARTED.toString(),
            tEntity.getEvents().get(1).getEventType());
        Assert.assertEquals(currentTime + 10,
            tEntity.getEvents().get(0).getTimestamp());
        Assert.assertEquals(currentTime - 10,
            tEntity.getEvents().get(1).getTimestamp());

        handleEvent(jheh, new JobHistoryEvent(t.jobId,
            new JobQueueChangeEvent(TypeConverter.fromYarn(t.jobId), "q2"),
            currentTime - 20));
        jheh.getDispatcher().await();
        entities = ts.getEntities("MAPREDUCE_JOB", null, null, null,
            null, null, null, null, null, null);
        Assert.assertEquals(1, entities.getEntities().size());
        tEntity = entities.getEntities().get(0);
        Assert.assertEquals(t.jobId.toString(), tEntity.getEntityId());
        Assert.assertEquals(3, tEntity.getEvents().size());
        Assert.assertEquals(EventType.JOB_SUBMITTED.toString(),
            tEntity.getEvents().get(0).getEventType());
        Assert.assertEquals(EventType.AM_STARTED.toString(),
            tEntity.getEvents().get(1).getEventType());
        Assert.assertEquals(EventType.JOB_QUEUE_CHANGED.toString(),
            tEntity.getEvents().get(2).getEventType());
        Assert.assertEquals(currentTime + 10,
            tEntity.getEvents().get(0).getTimestamp());
        Assert.assertEquals(currentTime - 10,
            tEntity.getEvents().get(1).getTimestamp());
        Assert.assertEquals(currentTime - 20,
            tEntity.getEvents().get(2).getTimestamp());

        handleEvent(jheh, new JobHistoryEvent(t.jobId,
            new JobFinishedEvent(TypeConverter.fromYarn(t.jobId), 0, 0, 0, 0, 0, 0, 0,
            new Counters(), new Counters(), new Counters()),
            currentTime));
        jheh.getDispatcher().await();
        entities = ts.getEntities("MAPREDUCE_JOB", null, null, null,
            null, null, null, null, null, null);
        Assert.assertEquals(1, entities.getEntities().size());
        tEntity = entities.getEntities().get(0);
        Assert.assertEquals(t.jobId.toString(), tEntity.getEntityId());
        Assert.assertEquals(4, tEntity.getEvents().size());
        Assert.assertEquals(EventType.JOB_SUBMITTED.toString(),
            tEntity.getEvents().get(0).getEventType());
        Assert.assertEquals(EventType.JOB_FINISHED.toString(),
            tEntity.getEvents().get(1).getEventType());
        Assert.assertEquals(EventType.AM_STARTED.toString(),
            tEntity.getEvents().get(2).getEventType());
        Assert.assertEquals(EventType.JOB_QUEUE_CHANGED.toString(),
            tEntity.getEvents().get(3).getEventType());
        Assert.assertEquals(currentTime + 10,
            tEntity.getEvents().get(0).getTimestamp());
        Assert.assertEquals(currentTime,
            tEntity.getEvents().get(1).getTimestamp());
        Assert.assertEquals(currentTime - 10,
            tEntity.getEvents().get(2).getTimestamp());
        Assert.assertEquals(currentTime - 20,
            tEntity.getEvents().get(3).getTimestamp());

        handleEvent(jheh, new JobHistoryEvent(t.jobId,
            new JobUnsuccessfulCompletionEvent(TypeConverter.fromYarn(t.jobId),
            0, 0, 0, 0, 0, 0, 0, JobStateInternal.KILLED.toString()),
            currentTime + 20));
        jheh.getDispatcher().await();
        entities = ts.getEntities("MAPREDUCE_JOB", null, null, null,
            null, null, null, null, null, null);
        Assert.assertEquals(1, entities.getEntities().size());
        tEntity = entities.getEntities().get(0);
        Assert.assertEquals(t.jobId.toString(), tEntity.getEntityId());
        Assert.assertEquals(5, tEntity.getEvents().size());
        Assert.assertEquals(EventType.JOB_KILLED.toString(),
            tEntity.getEvents().get(0).getEventType());
        Assert.assertEquals(EventType.JOB_SUBMITTED.toString(),
            tEntity.getEvents().get(1).getEventType());
        Assert.assertEquals(EventType.JOB_FINISHED.toString(),
            tEntity.getEvents().get(2).getEventType());
        Assert.assertEquals(EventType.AM_STARTED.toString(),
            tEntity.getEvents().get(3).getEventType());
        Assert.assertEquals(EventType.JOB_QUEUE_CHANGED.toString(),
            tEntity.getEvents().get(4).getEventType());
        Assert.assertEquals(currentTime + 20,
            tEntity.getEvents().get(0).getTimestamp());
        Assert.assertEquals(currentTime + 10,
            tEntity.getEvents().get(1).getTimestamp());
        Assert.assertEquals(currentTime,
            tEntity.getEvents().get(2).getTimestamp());
        Assert.assertEquals(currentTime - 10,
            tEntity.getEvents().get(3).getTimestamp());
        Assert.assertEquals(currentTime - 20,
            tEntity.getEvents().get(4).getTimestamp());

        handleEvent(jheh, new JobHistoryEvent(t.jobId,
            new TaskStartedEvent(t.taskID, 0, TaskType.MAP, "")));
        jheh.getDispatcher().await();
        entities = ts.getEntities("MAPREDUCE_TASK", null, null, null,
            null, null, null, null, null, null);
        Assert.assertEquals(1, entities.getEntities().size());
        tEntity = entities.getEntities().get(0);
        Assert.assertEquals(t.taskID.toString(), tEntity.getEntityId());
        Assert.assertEquals(1, tEntity.getEvents().size());
        Assert.assertEquals(EventType.TASK_STARTED.toString(),
            tEntity.getEvents().get(0).getEventType());
        Assert.assertEquals(TaskType.MAP.toString(),
            tEntity.getEvents().get(0).getEventInfo().get("TASK_TYPE"));

        handleEvent(jheh, new JobHistoryEvent(t.jobId,
            new TaskStartedEvent(t.taskID, 0, TaskType.REDUCE, "")));
        jheh.getDispatcher().await();
        entities = ts.getEntities("MAPREDUCE_TASK", null, null, null,
            null, null, null, null, null, null);
        Assert.assertEquals(1, entities.getEntities().size());
        tEntity = entities.getEntities().get(0);
        Assert.assertEquals(t.taskID.toString(), tEntity.getEntityId());
        Assert.assertEquals(2, tEntity.getEvents().size());
        Assert.assertEquals(EventType.TASK_STARTED.toString(),
            tEntity.getEvents().get(1).getEventType());
        Assert.assertEquals(TaskType.REDUCE.toString(),
            tEntity.getEvents().get(0).getEventInfo().get("TASK_TYPE"));
        Assert.assertEquals(TaskType.MAP.toString(),
            tEntity.getEvents().get(1).getEventInfo().get("TASK_TYPE"));
    }
}
@Override
public boolean overlap(final Window other) throws IllegalArgumentException {
    if (getClass() != other.getClass()) {
        throw new IllegalArgumentException("Cannot compare windows of different type. Other window has type "
            + other.getClass() + ".");
    }
    final TimeWindow otherWindow = (TimeWindow) other;
    return startMs < otherWindow.endMs && otherWindow.startMs < endMs;
}
@Test
public void shouldOverlapIfOtherWindowContainsThisWindow() {
    /*
     * This:  [-------)
     * Other: [------------------)
     */
    assertTrue(window.overlap(new TimeWindow(0, end)));
    assertTrue(window.overlap(new TimeWindow(0, end + 1)));
    assertTrue(window.overlap(new TimeWindow(0, 150)));

    assertTrue(window.overlap(new TimeWindow(start - 1, end)));
    assertTrue(window.overlap(new TimeWindow(start - 1, end + 1)));
    assertTrue(window.overlap(new TimeWindow(start - 1, 150)));

    assertTrue(window.overlap(new TimeWindow(start, end)));
    assertTrue(window.overlap(new TimeWindow(start, end + 1)));
    assertTrue(window.overlap(new TimeWindow(start, 150)));
}
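overlap compares half-open [startMs, endMs) intervals, so windows that merely touch at a boundary do not overlap; a minimal sketch (assuming kafka-streams on the classpath; the 50/100 bounds here are illustrative):

TimeWindow w = new TimeWindow(50, 100);
System.out.println(w.overlap(new TimeWindow(100, 150))); // false — adjacent on the right
System.out.println(w.overlap(new TimeWindow(0, 50)));    // false — adjacent on the left
System.out.println(w.overlap(new TimeWindow(99, 101)));  // true — one shared millisecond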
public long getActualStartTimeMs() {
    return actualStartTime;
}
@Test
void initCreate_noStartTime_setsCurrentTime() {
    AsyncInitializationWrapper init = new AsyncInitializationWrapper();
    long initTime = ManagementFactory.getRuntimeMXBean().getStartTime();
    assertEquals(initTime, init.getActualStartTimeMs());
}
public static ParsedCommand parse( // CHECKSTYLE_RULES.ON: CyclomaticComplexity
    final String sql, final Map<String, String> variables) {
    validateSupportedStatementType(sql);
    final String substituted;
    try {
        substituted = VariableSubstitutor.substitute(KSQL_PARSER.parse(sql).get(0), variables);
    } catch (ParseFailedException e) {
        throw new MigrationException(String.format(
            "Failed to parse the statement. Statement: %s. Reason: %s", sql, e.getMessage()));
    }
    final SqlBaseParser.SingleStatementContext statementContext = KSQL_PARSER.parse(substituted)
        .get(0).getStatement();
    final boolean isStatement = StatementType.get(statementContext.statement().getClass())
        == StatementType.STATEMENT;
    return new ParsedCommand(substituted,
        isStatement ? Optional.empty()
            : Optional.of(new AstBuilder(TypeRegistry.EMPTY).buildStatement(statementContext)));
}
@Test
public void shouldParseCreateAsStatement() {
    // When:
    List<CommandParser.ParsedCommand> commands = parse("CREATE STREAM FOO AS SELECT col1, col2 + 2 FROM BAR;");

    // Then:
    assertThat(commands.size(), is(1));
    assertThat(commands.get(0).getStatement().isPresent(), is(false));
    assertThat(commands.get(0).getCommand(), is("CREATE STREAM FOO AS SELECT col1, col2 + 2 FROM BAR;"));
}
public static ApiVersionCollection filterApis(
    RecordVersion minRecordVersion,
    ApiMessageType.ListenerType listenerType,
    boolean enableUnstableLastVersion,
    boolean clientTelemetryEnabled
) {
    ApiVersionCollection apiKeys = new ApiVersionCollection();
    for (ApiKeys apiKey : ApiKeys.apisForListener(listenerType)) {
        // Skip telemetry APIs if client telemetry is disabled.
        if ((apiKey == ApiKeys.GET_TELEMETRY_SUBSCRIPTIONS || apiKey == ApiKeys.PUSH_TELEMETRY) && !clientTelemetryEnabled)
            continue;

        if (apiKey.minRequiredInterBrokerMagic <= minRecordVersion.value) {
            apiKey.toApiVersion(enableUnstableLastVersion).ifPresent(apiKeys::add);
        }
    }
    return apiKeys;
}
@Test
public void testMetadataQuorumApisAreDisabled() {
    ApiVersionsResponse response = new ApiVersionsResponse.Builder()
        .setThrottleTimeMs(AbstractResponse.DEFAULT_THROTTLE_TIME)
        .setApiVersions(ApiVersionsResponse.filterApis(
            RecordVersion.current(), ListenerType.ZK_BROKER, true, true))
        .setSupportedFeatures(Features.emptySupportedFeatures())
        .setFinalizedFeatures(Collections.emptyMap())
        .setFinalizedFeaturesEpoch(ApiVersionsResponse.UNKNOWN_FINALIZED_FEATURES_EPOCH)
        .build();

    // Ensure that APIs needed for the KRaft mode are not exposed through ApiVersions until we are ready for them
    HashSet<ApiKeys> exposedApis = apiKeysInResponse(response);
    assertFalse(exposedApis.contains(ApiKeys.VOTE));
    assertFalse(exposedApis.contains(ApiKeys.BEGIN_QUORUM_EPOCH));
    assertFalse(exposedApis.contains(ApiKeys.END_QUORUM_EPOCH));
    assertFalse(exposedApis.contains(ApiKeys.DESCRIBE_QUORUM));
}
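The telemetry gate in filterApis can be exercised the same way; a minimal sketch (internal Kafka classes, signatures as shown in the focal method above):

// With clientTelemetryEnabled=false the two telemetry APIs are filtered out.
ApiVersionCollection keys = ApiVersionsResponse.filterApis(
    RecordVersion.current(), ListenerType.BROKER, true, false);
// keys contains no entry for GET_TELEMETRY_SUBSCRIPTIONS or PUSH_TELEMETRY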
@Deprecated
@Override
public void init(final ProcessorContext context, final StateStore root) {
    this.context = context instanceof InternalProcessorContext ? (InternalProcessorContext<?, ?>) context : null;
    taskId = context.taskId();
    initStoreSerde(context);
    streamsMetrics = (StreamsMetricsImpl) context.metrics();

    registerMetrics();
    final Sensor restoreSensor =
        StateStoreMetrics.restoreSensor(taskId.toString(), metricsScope, name(), streamsMetrics);

    // register and possibly restore the state from the logs
    maybeMeasureLatency(() -> super.init(context, root), time, restoreSensor);
}
@Test
public void shouldDelegateInit() {
    setUp();
    final MeteredSessionStore<String, String> outer = new MeteredSessionStore<>(
        innerStore,
        STORE_TYPE,
        Serdes.String(),
        Serdes.String(),
        new MockTime()
    );
    doNothing().when(innerStore).init((StateStoreContext) context, outer);
    outer.init((StateStoreContext) context, outer);
}
@Override
public void populateDisplayData(DisplayData.Builder builder) {
    builder
        .add(DisplayData.item("maxAttempts", maxAttempts).withLabel("maxAttempts"))
        .add(DisplayData.item("initialBackoff", initialBackoff).withLabel("initialBackoff"))
        .add(DisplayData.item("samplePeriod", samplePeriod).withLabel("samplePeriod"))
        .add(DisplayData.item("samplePeriodBucketSize", samplePeriodBucketSize).withLabel("samplePeriodBucketSize"))
        .add(DisplayData.item("overloadRatio", overloadRatio).withLabel("overloadRatio"))
        .add(DisplayData.item("throttleDuration", throttleDuration).withLabel("throttleDuration"))
        .add(DisplayData.item("batchInitialCount", batchInitialCount).withLabel("batchInitialCount"))
        .add(DisplayData.item("batchMaxCount", batchMaxCount).withLabel("batchMaxCount"))
        .add(DisplayData.item("batchMaxBytes", batchMaxBytes).withLabel("batchMaxBytes"))
        .add(DisplayData.item("batchTargetLatency", batchTargetLatency).withLabel("batchTargetLatency"))
        .add(DisplayData.item("hintMaxNumWorkers", hintMaxNumWorkers).withLabel("hintMaxNumWorkers"))
        .add(DisplayData.item("shouldReportDiagnosticMetrics", shouldReportDiagnosticMetrics)
            .withLabel("shouldReportDiagnosticMetrics"));
}
@Test
public void populateDisplayData() {
    //noinspection unchecked
    ArgumentCaptor<ItemSpec<?>> captor = ArgumentCaptor.forClass(DisplayData.ItemSpec.class);
    DisplayData.Builder builder = mock(DisplayData.Builder.class);
    when(builder.add(captor.capture())).thenReturn(builder);

    RpcQosOptions rpcQosOptions = RpcQosOptions.defaultOptions();
    rpcQosOptions.populateDisplayData(builder);

    List<String> actualKeys =
        captor.getAllValues().stream().map(ItemSpec::getKey).sorted().collect(Collectors.toList());
    List<String> expectedKeys = newArrayList(
        "batchInitialCount",
        "batchMaxBytes",
        "batchMaxCount",
        "batchTargetLatency",
        "hintMaxNumWorkers",
        "initialBackoff",
        "maxAttempts",
        "overloadRatio",
        "samplePeriod",
        "samplePeriodBucketSize",
        "shouldReportDiagnosticMetrics",
        "throttleDuration");
    assertEquals(expectedKeys, actualKeys);
}
public static String getMethodResourceName(Invoker<?> invoker, Invocation invocation) {
    return getMethodResourceName(invoker, invocation, false);
}
@Test
public void testGetResourceName() throws NoSuchMethodException {
    Invoker invoker = mock(Invoker.class);
    when(invoker.getInterface()).thenReturn(DemoService.class);
    Invocation invocation = mock(Invocation.class);
    Method method = DemoService.class.getDeclaredMethod("sayHello", String.class, int.class);
    when(invocation.getMethodName()).thenReturn(method.getName());
    when(invocation.getParameterTypes()).thenReturn(method.getParameterTypes());

    String resourceName = DubboUtils.getMethodResourceName(invoker, invocation);
    assertEquals("com.alibaba.csp.sentinel.adapter.dubbo3.provider.DemoService:sayHello(java.lang.String,int)", resourceName);
}
public static String printLogical(List<PlanFragment> fragments, FunctionAndTypeManager functionAndTypeManager, Session session) {
    Map<PlanFragmentId, PlanFragment> fragmentsById = Maps.uniqueIndex(fragments, PlanFragment::getId);
    PlanNodeIdGenerator idGenerator = new PlanNodeIdGenerator();

    StringBuilder output = new StringBuilder();
    output.append("digraph logical_plan {\n");

    for (PlanFragment fragment : fragments) {
        printFragmentNodes(output, fragment, idGenerator, functionAndTypeManager, session);
    }
    for (PlanFragment fragment : fragments) {
        fragment.getRoot().accept(new EdgePrinter(output, fragmentsById, idGenerator), null);
    }
    output.append("}\n");

    return output.toString();
}
@Test
public void testPrintLogicalForJoinNode() {
    ValuesNode valuesNode = new ValuesNode(Optional.empty(), new PlanNodeId("right"),
        ImmutableList.of(), ImmutableList.of(), Optional.empty());
    PlanNode node = new JoinNode(
        Optional.empty(),
        new PlanNodeId("join"),
        JoinType.INNER,
        TEST_TABLE_SCAN_NODE,    // Left: probe side
        valuesNode,              // Right: build side
        Collections.emptyList(), // no criteria
        ImmutableList.<VariableReferenceExpression>builder()
            .addAll(TEST_TABLE_SCAN_NODE.getOutputVariables())
            .addAll(valuesNode.getOutputVariables())
            .build(),
        Optional.empty(), // no filter
        Optional.empty(),
        Optional.empty(),
        Optional.of(JoinDistributionType.REPLICATED),
        ImmutableMap.of());

    String actual = printLogical(
        ImmutableList.of(createTestPlanFragment(0, node)),
        FUNCTION_AND_TYPE_MANAGER,
        testSessionBuilder().build());

    String expected = "digraph logical_plan {\n" +
        "subgraph cluster_0 {\n" +
        "label = \"SOURCE\"\n" +
        "plannode_1[label=\"{CrossJoin[REPLICATED]|Estimates: \\{rows: ? (0B), cpu: ?, memory: ?, network: ?\\}\n" +
        "}\", style=\"rounded, filled\", shape=record, fillcolor=orange];\n" +
        "plannode_2[label=\"{TableScan | [TableHandle \\{connectorId='connector_id', connectorHandle='com.facebook.presto.testing.TestingMetadata$TestingTableHandle@1af56f7', layout='Optional.empty'\\}]|Estimates: \\{rows: ? (0B), cpu: ?, memory: ?, network: ?\\}\n" +
        "}\", style=\"rounded, filled\", shape=record, fillcolor=deepskyblue];\n" +
        "plannode_3[label=\"{Values|Estimates: \\{rows: ? (0B), cpu: ?, memory: ?, network: ?\\}\n" +
        "}\", style=\"rounded, filled\", shape=record, fillcolor=deepskyblue];\n" +
        "}\n" +
        "plannode_1 -> plannode_3 [label = \"Build\"];\n" + // valuesNode should be the build side
        "plannode_1 -> plannode_2 [label = \"Probe\"];\n" + // TEST_TABLE_SCAN_NODE should be the probe side
        "}\n";
    assertEquals(actual, expected);
}
public static String sanitizeInput(String input) {
    // iterate through input and keep sanitizing until it's fully injection proof
    String sanitizedInput;
    String sanitizedInputTemp = input;
    while (true) {
        sanitizedInput = sanitizeInputOnce(sanitizedInputTemp);
        if (sanitizedInput.equals(sanitizedInputTemp)) {
            break;
        }
        sanitizedInputTemp = sanitizedInput;
    }
    return sanitizedInput;
}
@Test
public void testSanitizeInput() {
    // This function sanitizes the string: it removes ";", "|", "&&" and "..." from it.
    assertEquals("a", sanitizeInput("|a|"));     // removes pipe signs
    assertEquals("a", sanitizeInput("...a...")); // removes dots
    assertEquals("a", sanitizeInput(";a;"));     // removes semicolons
    assertEquals("a", sanitizeInput("&&a&&"));   // removes AMP signs
    assertEquals("a", sanitizeInput("|a..."));   // removes a pipe sign and dots together
    assertEquals("an apple", sanitizeInput("an &&apple"));  // removes an AMP sign between two words
    assertEquals("an apple", sanitizeInput("an ...apple")); // removes dots between two words
    // removes a pipe sign and dots between two words; the fourth dot is kept
    assertEquals("an apple.", sanitizeInput(";an |apple...."));
}
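The fixed-point loop in sanitizeInput matters when one removal splices together a new forbidden token; a sketch (sanitizeInputOnce's in-pass removal order is an assumption):

// Removing "|" from ".|.." forms "...", which a single pass may miss
// depending on its internal ordering; iterating to a fixed point guarantees
// the final result contains none of the forbidden tokens.
System.out.println(sanitizeInput(".|..")); // ""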
@Override
public BulkOperationResponse executeBulkOperation(final BulkOperationRequest bulkOperationRequest,
                                                  final C userContext,
                                                  final AuditParams params) {
    if (bulkOperationRequest.entityIds() == null || bulkOperationRequest.entityIds().isEmpty()) {
        throw new BadRequestException(NO_ENTITY_IDS_ERROR);
    }
    List<BulkOperationFailure> capturedFailures = new LinkedList<>();
    for (String entityId : bulkOperationRequest.entityIds()) {
        try {
            T entityModel = singleEntityOperationExecutor.execute(entityId, userContext);
            try {
                if (params != null) {
                    auditEventSender.success(getAuditActor(userContext), params.eventType(),
                            successAuditLogContextCreator.create(entityModel, params.entityClass()));
                }
            } catch (Exception auditLogStoreException) {
                // an exception while storing the audit log should not produce a failure report,
                // as the operation itself was successful
                LOG.error("Failed to store in the audit log information about successful entity removal via bulk action ",
                        auditLogStoreException);
            }
        } catch (Exception ex) {
            capturedFailures.add(new BulkOperationFailure(entityId, ex.getMessage()));
            try {
                if (params != null) {
                    auditEventSender.failure(getAuditActor(userContext), params.eventType(),
                            failureAuditLogContextCreator.create(params.entityIdInPathParam(), entityId));
                }
            } catch (Exception auditLogStoreException) {
                // an exception while storing the audit log should not replace the captured failure
                LOG.error("Failed to store in the audit log information about failed entity removal via bulk action ",
                        auditLogStoreException);
            }
        }
    }

    return new BulkOperationResponse(
            bulkOperationRequest.entityIds().size() - capturedFailures.size(),
            capturedFailures);
}
@Test
void returnsProperResponseOnPartiallySuccessfulBulkRemoval() throws Exception {
    mockUserContext();
    doThrow(new MongoException("MongoDB is striking against increasing retirement age"))
            .when(singleEntityOperationExecutor).execute(eq("1"), eq(context));

    final BulkOperationResponse bulkOperationResponse =
            toTest.executeBulkOperation(new BulkOperationRequest(List.of("1", "2", "3")), context, params);

    assertThat(bulkOperationResponse.successfullyPerformed()).isEqualTo(2);
    assertThat(bulkOperationResponse.failures())
            .hasSize(1)
            .containsExactly(new BulkOperationFailure("1", "MongoDB is striking against increasing retirement age"));

    verify(singleEntityOperationExecutor).execute("1", context);
    verify(singleEntityOperationExecutor).execute("2", context);
    verify(singleEntityOperationExecutor).execute("3", context);
    verifyNoMoreInteractions(singleEntityOperationExecutor);
    verify(auditEventSender, times(1)).failure(any(), eq(eventType), any());
    verify(auditEventSender, times(2)).success(any(), eq(eventType), any());
}
@GET
@Produces(MediaType.APPLICATION_JSON)
@Operation(summary = "Get prekey count",
    description = "Gets the number of one-time prekeys uploaded for this device and still available")
@ApiResponse(responseCode = "200", description = "Body contains the number of available one-time prekeys for the device.", useReturnTypeSchema = true)
@ApiResponse(responseCode = "401", description = "Account authentication check failed.")
public CompletableFuture<PreKeyCount> getStatus(@ReadOnly @Auth final AuthenticatedDevice auth,
    @QueryParam("identity") @DefaultValue("aci") final IdentityType identityType) {

    final CompletableFuture<Integer> ecCountFuture =
        keysManager.getEcCount(auth.getAccount().getIdentifier(identityType), auth.getAuthenticatedDevice().getId());
    final CompletableFuture<Integer> pqCountFuture =
        keysManager.getPqCount(auth.getAccount().getIdentifier(identityType), auth.getAuthenticatedDevice().getId());

    return ecCountFuture.thenCombine(pqCountFuture, PreKeyCount::new);
}
@Test
void putPrekeyWithInvalidSignature() {
    final ECSignedPreKey badSignedPreKey = KeysHelper.signedECPreKey(1, Curve.generateKeyPair());
    final SetKeysRequest setKeysRequest = new SetKeysRequest(List.of(), badSignedPreKey, null, null);
    Response response = resources.getJerseyTest()
        .target("/v2/keys")
        .queryParam("identity", "aci")
        .request()
        .header("Authorization", AuthHelper.getAuthHeader(AuthHelper.VALID_UUID, AuthHelper.VALID_PASSWORD))
        .put(Entity.entity(setKeysRequest, MediaType.APPLICATION_JSON_TYPE));

    assertThat(response.getStatus()).isEqualTo(422);
}
@Nonnull
public static String pathToString(@Nonnull Path path) {
    return path.toString();
}
@Test
void testPathToString() {
    assertEquals("foo" + File.separator + "bar.txt", StringUtil.pathToString(Paths.get("foo/bar.txt")));
}
public static <K, V> KvSwap<K, V> create() {
    return new KvSwap<>();
}
@Test
@Category(NeedsRunner.class)
public void testKvSwapEmpty() {
    PCollection<KV<String, Integer>> input = p.apply(
        Create.of(Arrays.asList(EMPTY_TABLE))
            .withCoder(KvCoder.of(StringUtf8Coder.of(), BigEndianIntegerCoder.of())));
    PCollection<KV<Integer, String>> output = input.apply(KvSwap.create());
    PAssert.that(output).empty();
    p.run();
}
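KvSwap in a non-empty pipeline; a minimal sketch (assuming beam-sdks-java-core; runner configuration omitted):

// KvSwap turns each KV<K, V> into KV<V, K>; coders are inferred for KV<String, Integer>.
Pipeline pipeline = Pipeline.create();
PCollection<KV<Integer, String>> swapped = pipeline
    .apply(Create.of(KV.of("a", 1), KV.of("b", 2)))
    .apply(KvSwap.<String, Integer>create());
// swapped contains KV.of(1, "a") and KV.of(2, "b")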
@SuppressWarnings("unchecked") private UDFType deserializeUDF() { return (UDFType) deserializeObjectFromKryo(udfSerializedBytes, (Class<Serializable>) getUDFClass()); }
@SuppressWarnings("unchecked") @Test public void testDeserializeUDF() throws Exception { // test deserialize udf GenericUDFMacro udfMacro = new GenericUDFMacro(); HiveFunctionWrapper<GenericUDFMacro> functionWrapper = new HiveFunctionWrapper<>(GenericUDFMacro.class, udfMacro); GenericUDFMacro deserializeUdfMacro = functionWrapper.createFunction(); assertThat(deserializeUdfMacro.getClass().getName()) .isEqualTo(GenericUDFMacro.class.getName()); // test deserialize udf loaded by user code class loader instead of current thread class // loader ClassLoader userClassLoader = FlinkUserCodeClassLoaders.create( new URL[] {udfJar.toURI().toURL()}, getClass().getClassLoader(), new Configuration()); Class<ScalarFunction> udfClass = (Class<ScalarFunction>) userClassLoader.loadClass(udfClassName); ScalarFunction udf = udfClass.newInstance(); HiveFunctionWrapper<ScalarFunction> functionWrapper1 = new HiveFunctionWrapper<>(udfClass, udf); ScalarFunction deserializedUdf = functionWrapper1.createFunction(); assertThat(deserializedUdf.getClass().getName()).isEqualTo(udfClassName); }
public static String post(String url, String username, String password, Map<String, Object> params) throws Exception {
    return post(url, username, password, JSON.toJSONString(params),
            CONNECT_TIMEOUT_DEFAULT_IN_MILL, SOCKET_TIMEOUT_DEFAULT_IN_MILL);
}
@Test
public void testSimpleCase() throws Exception {
    String url = "https://httpbin.org/post";
    Map<String, Object> params = new HashMap<String, Object>();
    params.put("foo", "bar");
    String rsp = HttpUtils.post(url, null, null, params);
    System.out.println(rsp);
    Assert.assertNotNull(rsp);
}
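Per the focal method, the Map overload just JSON-serializes the parameters (via fastjson) and delegates to the String-body overload with the default timeouts; an equivalent call sketch (the timeout values here are illustrative, not the actual defaults):

String rsp = HttpUtils.post("https://httpbin.org/post", null, null,
    "{\"foo\":\"bar\"}", 5000, 10000);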
public void sendTestEmail(String toAddress, String subject, String message) throws EmailException {
    try {
        EmailMessage emailMessage = new EmailMessage();
        emailMessage.setTo(toAddress);
        emailMessage.setSubject(subject);
        emailMessage.setPlainTextMessage(message + getServerBaseUrlFooter());
        send(emailMessage);
    } catch (EmailException e) {
        LOG.debug("Fail to send test email to {}: {}", toAddress, e);
        throw e;
    }
}
@Test
public void sendTestEmailShouldSanitizeLog() throws Exception {
    logTester.setLevel(LoggerLevel.TRACE);
    configure();

    underTest.sendTestEmail("user@nowhere", "Test Message from SonarQube",
        "This is a message \n containing line breaks \r that should be sanitized when logged.");

    assertThat(logTester.logs(Level.TRACE)).isNotEmpty()
        .contains("Sending email: This is a message _ containing line breaks _ that should be sanitized when logged.__Mail sent from: http://nemo.sonarsource.org");
}
public static void main(String[] args) {
    var sagaOrchestrator = new SagaOrchestrator(newSaga(), serviceDiscovery());

    Saga.Result goodOrder = sagaOrchestrator.execute("good_order");
    Saga.Result badOrder = sagaOrchestrator.execute("bad_order");
    Saga.Result crashedOrder = sagaOrchestrator.execute("crashed_order");

    LOGGER.info("orders: goodOrder is {}, badOrder is {}, crashedOrder is {}",
        goodOrder, badOrder, crashedOrder);
}
@Test
void shouldExecuteApplicationWithoutException() {
    assertDoesNotThrow(() -> SagaApplication.main(new String[]{}));
}
public static Resource file2Resource(String filename) throws MalformedURLException {
    Path file = Paths.get(filename);
    Resource resource = new UrlResource(file.toUri());
    if (resource.exists() || resource.isReadable()) {
        return resource;
    } else {
        log.error("File can not be read, fileName:{}", filename);
    }
    return null;
}
@Test
public void testFile2Resource() throws IOException {
    // Define the destination file path
    String destFilename = rootPath + System.getProperty("file.separator") + "resource.txt";
    logger.info("destFilename: " + destFilename);

    // Create the test resource
    File file = new File(destFilename);
    org.apache.commons.io.FileUtils.writeStringToFile(file, "test data", Charset.defaultCharset());

    // Invoke file2Resource on an existing file and expect a non-null resource
    Resource resource = FileUtils.file2Resource(file.toString());
    Assertions.assertNotNull(resource);

    // Invoke file2Resource on a missing file and expect null
    Resource resource1 = FileUtils.file2Resource(file + "abc");
    Assertions.assertNull(resource1);
}
public String getCurrentGtidLastCommit() {
    return gtidMap.get(CURRENT_GTID_LAST_COMMIT);
}
@Test
public void getCurrentGtidLastCommitOutputNull() {
    // Arrange
    final LogHeader objectUnderTest = new LogHeader(0);

    // Act
    final String actual = objectUnderTest.getCurrentGtidLastCommit();

    // Assert result
    Assert.assertNull(actual);
}
public static void addNumEntriesActiveMemTableMetric(final StreamsMetricsImpl streamsMetrics,
                                                     final RocksDBMetricContext metricContext,
                                                     final Gauge<BigInteger> valueProvider) {
    addMutableMetric(
        streamsMetrics,
        metricContext,
        valueProvider,
        NUMBER_OF_ENTRIES_ACTIVE_MEMTABLE,
        NUMBER_OF_ENTRIES_ACTIVE_MEMTABLE_DESCRIPTION
    );
}
@Test
public void shouldAddNumEntriesActiveMemTableMetric() {
    final String name = "num-entries-active-mem-table";
    final String description = "Total number of entries in the active memtable";
    runAndVerifyMutableMetric(
        name,
        description,
        () -> RocksDBMetrics.addNumEntriesActiveMemTableMetric(streamsMetrics, ROCKSDB_METRIC_CONTEXT, VALUE_PROVIDER)
    );
}
@SuppressWarnings("MethodLength") static void dissectControlRequest( final ArchiveEventCode eventCode, final MutableDirectBuffer buffer, final int offset, final StringBuilder builder) { int encodedLength = dissectLogHeader(CONTEXT, eventCode, buffer, offset, builder); HEADER_DECODER.wrap(buffer, offset + encodedLength); encodedLength += MessageHeaderDecoder.ENCODED_LENGTH; switch (eventCode) { case CMD_IN_CONNECT: CONNECT_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendConnect(builder); break; case CMD_IN_CLOSE_SESSION: CLOSE_SESSION_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendCloseSession(builder); break; case CMD_IN_START_RECORDING: START_RECORDING_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStartRecording(builder); break; case CMD_IN_STOP_RECORDING: STOP_RECORDING_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStopRecording(builder); break; case CMD_IN_REPLAY: REPLAY_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendReplay(builder); break; case CMD_IN_STOP_REPLAY: STOP_REPLAY_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStopReplay(builder); break; case CMD_IN_LIST_RECORDINGS: LIST_RECORDINGS_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendListRecordings(builder); break; case CMD_IN_LIST_RECORDINGS_FOR_URI: LIST_RECORDINGS_FOR_URI_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendListRecordingsForUri(builder); break; case CMD_IN_LIST_RECORDING: LIST_RECORDING_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendListRecording(builder); break; case CMD_IN_EXTEND_RECORDING: EXTEND_RECORDING_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendExtendRecording(builder); break; case CMD_IN_RECORDING_POSITION: RECORDING_POSITION_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendRecordingPosition(builder); break; case CMD_IN_TRUNCATE_RECORDING: TRUNCATE_RECORDING_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendTruncateRecording(builder); break; case CMD_IN_STOP_RECORDING_SUBSCRIPTION: STOP_RECORDING_SUBSCRIPTION_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStopRecordingSubscription(builder); break; case CMD_IN_STOP_POSITION: STOP_POSITION_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStopPosition(builder); break; case CMD_IN_FIND_LAST_MATCHING_RECORD: FIND_LAST_MATCHING_RECORDING_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendFindLastMatchingRecord(builder); break; case CMD_IN_LIST_RECORDING_SUBSCRIPTIONS: LIST_RECORDING_SUBSCRIPTIONS_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); 
appendListRecordingSubscriptions(builder); break; case CMD_IN_START_BOUNDED_REPLAY: BOUNDED_REPLAY_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStartBoundedReplay(builder); break; case CMD_IN_STOP_ALL_REPLAYS: STOP_ALL_REPLAYS_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStopAllReplays(builder); break; case CMD_IN_REPLICATE: REPLICATE_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendReplicate(builder); break; case CMD_IN_STOP_REPLICATION: STOP_REPLICATION_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStopReplication(builder); break; case CMD_IN_START_POSITION: START_POSITION_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStartPosition(builder); break; case CMD_IN_DETACH_SEGMENTS: DETACH_SEGMENTS_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendDetachSegments(builder); break; case CMD_IN_DELETE_DETACHED_SEGMENTS: DELETE_DETACHED_SEGMENTS_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendDeleteDetachedSegments(builder); break; case CMD_IN_PURGE_SEGMENTS: PURGE_SEGMENTS_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendPurgeSegments(builder); break; case CMD_IN_ATTACH_SEGMENTS: ATTACH_SEGMENTS_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendAttachSegments(builder); break; case CMD_IN_MIGRATE_SEGMENTS: MIGRATE_SEGMENTS_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendMigrateSegments(builder); break; case CMD_IN_AUTH_CONNECT: AUTH_CONNECT_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendAuthConnect(builder); break; case CMD_IN_KEEP_ALIVE: KEEP_ALIVE_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendKeepAlive(builder); break; case CMD_IN_TAGGED_REPLICATE: TAGGED_REPLICATE_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendTaggedReplicate(builder); break; case CMD_IN_START_RECORDING2: START_RECORDING_REQUEST2_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStartRecording2(builder); break; case CMD_IN_EXTEND_RECORDING2: EXTEND_RECORDING_REQUEST2_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendExtendRecording2(builder); break; case CMD_IN_STOP_RECORDING_BY_IDENTITY: STOP_RECORDING_BY_IDENTITY_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStopRecordingByIdentity(builder); break; case CMD_IN_PURGE_RECORDING: PURGE_RECORDING_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendPurgeRecording(builder); break; case CMD_IN_REPLICATE2: REPLICATE_REQUEST2_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); 
appendReplicate2(builder); break; case CMD_IN_REQUEST_REPLAY_TOKEN: REPLAY_TOKEN_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendReplayToken(builder); break; default: builder.append(": unknown command"); } }
@Test
void controlRequestMigrateSegments() {
    internalEncodeLogHeader(buffer, 0, 1000, 1000, () -> 500_000_000L);
    final MigrateSegmentsRequestEncoder requestEncoder = new MigrateSegmentsRequestEncoder();
    requestEncoder.wrapAndApplyHeader(buffer, LOG_HEADER_LENGTH, headerEncoder)
        .controlSessionId(7)
        .correlationId(6)
        .srcRecordingId(1)
        .dstRecordingId(21902);

    dissectControlRequest(CMD_IN_MIGRATE_SEGMENTS, buffer, 0, builder);

    assertEquals("[0.500000000] " + CONTEXT + ": " + CMD_IN_MIGRATE_SEGMENTS.name() + " [1000/1000]:" +
        " controlSessionId=7" +
        " correlationId=6" +
        " srcRecordingId=1" +
        " dstRecordingId=21902",
        builder.toString());
}
public static Catalog buildIcebergCatalog(String name, Map<String, String> options, Object conf) {
    String catalogImpl = options.get(CatalogProperties.CATALOG_IMPL);
    if (catalogImpl == null) {
        String catalogType =
            PropertyUtil.propertyAsString(options, ICEBERG_CATALOG_TYPE, ICEBERG_CATALOG_TYPE_HIVE);
        switch (catalogType.toLowerCase(Locale.ENGLISH)) {
            case ICEBERG_CATALOG_TYPE_HIVE:
                catalogImpl = ICEBERG_CATALOG_HIVE;
                break;
            case ICEBERG_CATALOG_TYPE_HADOOP:
                catalogImpl = ICEBERG_CATALOG_HADOOP;
                break;
            case ICEBERG_CATALOG_TYPE_REST:
                catalogImpl = ICEBERG_CATALOG_REST;
                break;
            case ICEBERG_CATALOG_TYPE_GLUE:
                catalogImpl = ICEBERG_CATALOG_GLUE;
                break;
            case ICEBERG_CATALOG_TYPE_NESSIE:
                catalogImpl = ICEBERG_CATALOG_NESSIE;
                break;
            case ICEBERG_CATALOG_TYPE_JDBC:
                catalogImpl = ICEBERG_CATALOG_JDBC;
                break;
            default:
                throw new UnsupportedOperationException("Unknown catalog type: " + catalogType);
        }
    } else {
        String catalogType = options.get(ICEBERG_CATALOG_TYPE);
        Preconditions.checkArgument(
            catalogType == null,
            "Cannot create catalog %s, both type and catalog-impl are set: type=%s, catalog-impl=%s",
            name, catalogType, catalogImpl);
    }
    return loadCatalog(catalogImpl, name, options, conf);
}
@Test
public void buildCustomCatalog_withTypeSet() {
    Map<String, String> options = Maps.newHashMap();
    options.put(CatalogProperties.CATALOG_IMPL, "CustomCatalog");
    options.put(CatalogUtil.ICEBERG_CATALOG_TYPE, "hive");
    Configuration hadoopConf = new Configuration();
    String name = "custom";

    assertThatThrownBy(() -> CatalogUtil.buildIcebergCatalog(name, options, hadoopConf))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage(
            "Cannot create catalog custom, both type and catalog-impl are set: type=hive, catalog-impl=CustomCatalog");
}
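Since the test above pins down the failure mode when both keys are set, a success-path sketch may help; the catalog name and warehouse path are made-up example values, and the literal option keys mirror the constants referenced in buildIcebergCatalog:

import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.iceberg.CatalogUtil;
import org.apache.iceberg.catalog.Catalog;

// Resolve by short name only: "hadoop" is mapped to the built-in
// Hadoop catalog implementation inside buildIcebergCatalog.
Map<String, String> options = new HashMap<>();
options.put("type", "hadoop");                      // ICEBERG_CATALOG_TYPE
options.put("warehouse", "file:///tmp/warehouse");  // example location
Catalog catalog = CatalogUtil.buildIcebergCatalog("demo", options, new Configuration());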
public boolean isTimeout() {
    return System.currentTimeMillis() - start > timeout;
}
@Test
public void testIsTimeOut() throws Exception {
    MessageFuture messageFuture = new MessageFuture();
    messageFuture.setTimeout(TIME_OUT_FIELD);
    assertThat(messageFuture.isTimeout()).isFalse();
    Thread.sleep(TIME_OUT_FIELD + 1);
    assertThat(messageFuture.isTimeout()).isTrue();
}
public void deleteKVConfig(final String namespace, final String key) {
    try {
        this.lock.writeLock().lockInterruptibly();
        try {
            HashMap<String, String> kvTable = this.configTable.get(namespace);
            if (null != kvTable) {
                String value = kvTable.remove(key);
                log.info("deleteKVConfig delete a config item, Namespace: {} Key: {} Value: {}",
                    namespace, key, value);
            }
        } finally {
            this.lock.writeLock().unlock();
        }
    } catch (InterruptedException e) {
        log.error("deleteKVConfig InterruptedException", e);
    }

    this.persist();
}
@Test
public void testDeleteKVConfig() {
    kvConfigManager.deleteKVConfig(NamesrvUtil.NAMESPACE_ORDER_TOPIC_CONFIG, "UnitTest");
    byte[] kvConfig = kvConfigManager.getKVListByNamespace(NamesrvUtil.NAMESPACE_ORDER_TOPIC_CONFIG);
    assertThat(kvConfig).isNull();
    String value = kvConfigManager.getKVConfig(NamesrvUtil.NAMESPACE_ORDER_TOPIC_CONFIG, "UnitTest");
    assertThat(value).isNull();
}
public static String getViewActiveVersionNode(final String databaseName, final String schemaName, final String viewName) {
    return String.join("/", getMetaDataNode(), databaseName, SCHEMAS_NODE, schemaName, VIEWS_NODE, viewName, ACTIVE_VERSION);
}
@Test
void assertGetViewActiveVersionNode() {
    assertThat(ViewMetaDataNode.getViewActiveVersionNode("foo_db", "foo_schema", "foo_view"),
        is("/metadata/foo_db/schemas/foo_schema/views/foo_view/active_version"));
}
@Subscribe
@AllowConcurrentEvents
public void handleIndexReopening(IndicesReopenedEvent event) {
    for (final String index : event.indices()) {
        if (!indexSetRegistry.isManagedIndex(index)) {
            LOG.debug("Not handling reopened index <{}> because it's not managed by any index set.", index);
            continue;
        }
        LOG.debug("Index \"{}\" has been reopened. Calculating index range.", index);

        checkIfHealthy(indices.waitForRecovery(index),
            (status) -> new RuntimeException("Not handling reopened index <" + index + ">, index is unhealthy: " + status));

        final IndexRange indexRange;
        try {
            indexRange = calculateRange(index);
            auditEventSender.success(AuditActor.system(nodeId), ES_INDEX_RANGE_CREATE, ImmutableMap.of("index_name", index));
        } catch (Exception e) {
            final String message = "Couldn't calculate index range for index \"" + index + "\"";
            LOG.error(message, e);
            auditEventSender.failure(AuditActor.system(nodeId), ES_INDEX_RANGE_CREATE, ImmutableMap.of("index_name", index));
            throw new RuntimeException(message, e);
        }

        save(indexRange);
    }
}
@Test
@MongoDBFixtures("MongoIndexRangeServiceTest.json")
public void testHandleIndexReopening() throws Exception {
    final DateTime begin = new DateTime(2016, 1, 1, 0, 0, DateTimeZone.UTC);
    final DateTime end = new DateTime(2016, 1, 15, 0, 0, DateTimeZone.UTC);

    when(indices.indexRangeStatsOfIndex("graylog_3")).thenReturn(IndexRangeStats.create(begin, end));
    when(indexSetRegistry.isManagedIndex("graylog_3")).thenReturn(true);
    when(indices.waitForRecovery("graylog_3")).thenReturn(HealthStatus.Green);

    localEventBus.post(IndicesReopenedEvent.create(Collections.singleton("graylog_3")));

    final SortedSet<IndexRange> indexRanges = indexRangeService.find(begin, end);
    assertThat(indexRanges).hasSize(1);
    assertThat(indexRanges.first().indexName()).isEqualTo("graylog_3");
    assertThat(indexRanges.first().begin()).isEqualTo(begin);
    assertThat(indexRanges.first().end()).isEqualTo(end);
}
public MutablePersistentHashSet<K> beginWrite() {
    return new MutablePersistentHashSet<>(this);
}
@Test
public void iterationTest() {
    Random random = new Random(8234890);
    PersistentHashSet.MutablePersistentHashSet<Integer> tree = new PersistentHashSet<Integer>().beginWrite();
    int[] p = genPermutation(random);
    HashSet<Integer> added = new HashSet<>();
    for (int i = 0; i < ENTRIES_TO_ADD; i++) {
        int size = tree.size();
        Assert.assertEquals(i, size);
        if ((size & 1023) == 0 || size < 100) {
            Collection<Integer> actual = new HashSet<>(size);
            for (Integer key : tree) {
                Assert.assertFalse(actual.contains(key));
                actual.add(key);
            }
            Assert.assertEquals(size, actual.size());
            for (Integer key : added) {
                Assert.assertTrue(actual.contains(key));
            }

            Iterator<Integer> treeItr = tree.iterator();
            actual.clear();
            for (int j = 0; j < size; j++) {
                Integer key = treeItr.next();
                Assert.assertFalse(actual.contains(key));
                actual.add(key);
            }
            Assert.assertEquals(size, actual.size());
            for (Integer key : added) {
                Assert.assertTrue(actual.contains(key));
            }
        }
        tree.add(p[i]);
        added.add(p[i]);
    }
}
public <T extends BaseRequest<T, R>, R extends BaseResponse> R execute(BaseRequest<T, R> request) {
    return api.send(request);
}
@Test
public void getStickerSet() {
    GetStickerSetResponse response = bot.execute(new GetStickerSet(stickerSet));
    StickerSet stickerSet = response.stickerSet();
    for (Sticker sticker : response.stickerSet().stickers()) {
        StickerTest.check(sticker, true, true);
    }

    // clean up stickers, max 120 allowed
    if (stickerSet.stickers().length > 50) {
        for (int i = stickerSet.stickers().length - 1; i > stickerSet.stickers().length - 10; i--) {
            bot.execute(new DeleteStickerFromSet(stickerSet.stickers()[i].fileId()));
        }
    }

    assertTrue(stickerSet.containsMasks());
    assertEquals(stickerSet.stickerType(), Sticker.Type.mask);
    assertEquals(TelegramBotTest.stickerSet, stickerSet.name());
    assertEquals("test1", stickerSet.title());
    assertFalse(stickerSet.isAnimated());

    Sticker sticker = stickerSet.stickers()[0];
    assertEquals(TelegramBotTest.stickerSet, sticker.setName());
    MaskPosition maskPosition = sticker.maskPosition();
    assertEquals(MaskPosition.Point.forehead.name(), maskPosition.point());
    assertEquals(0f, maskPosition.xShift(), 0);
    assertEquals(0f, maskPosition.yShift(), 0);
    assertEquals(1f, maskPosition.scale(), 0);
}
@Operation(summary = "get", description = "Get a cluster")
@GetMapping("/{id}")
public ResponseEntity<ClusterVO> get(@PathVariable Long id) {
    return ResponseEntity.success(clusterService.get(id));
}
@Test
void getReturnsNullDataForUnknownId() {
    Long id = 999L;
    when(clusterService.get(id)).thenReturn(null);

    ResponseEntity<ClusterVO> response = clusterController.get(id);

    // The controller wraps even a missing cluster in a success envelope,
    // so the call succeeds with a null payload rather than a not-found error.
    assertTrue(response.isSuccess());
    assertNull(response.getData());
}
public static Instruction pushVlan() {
    return new L2ModificationInstruction.ModVlanHeaderInstruction(
        L2ModificationInstruction.L2SubType.VLAN_PUSH,
        EthType.EtherType.VLAN.ethType());
}
@Test
public void testPushVlanMethod() {
    final Instruction instruction = Instructions.pushVlan();
    final L2ModificationInstruction.ModVlanHeaderInstruction pushHeaderInstruction =
        checkAndConvert(instruction,
            Instruction.Type.L2MODIFICATION,
            L2ModificationInstruction.ModVlanHeaderInstruction.class);
    assertThat(pushHeaderInstruction.ethernetType().toString(),
        is(EthType.EtherType.VLAN.toString()));
    assertThat(pushHeaderInstruction.subtype(),
        is(L2ModificationInstruction.L2SubType.VLAN_PUSH));
}
public static <T> T getBean(Class<T> interfaceClass, Class typeClass) {
    Object object = serviceMap.get(interfaceClass.getName() + "<" + typeClass.getName() + ">");
    if (object == null) {
        return null;
    }
    if (object instanceof Object[]) {
        return (T) Array.get(object, 0);
    } else {
        return (T) object;
    }
}
@Test
public void testInfoValidator() {
    Validator<Info> infoValidator = SingletonServiceFactory.getBean(Validator.class, Info.class);
    Info info = SingletonServiceFactory.getBean(Info.class);
    Assert.assertTrue(infoValidator.validate(info));
}
@Override
public String getSignVersion() {
    return "V4";
}
@Test
public void testGetSignVersion() {
    DefaultAuthSigner signer = new DefaultAuthSigner();
    String expectedVersion = "V4";

    String actualVersion = signer.getSignVersion();

    // Assert the returned version matches the expected version
    Assertions.assertEquals(expectedVersion, actualVersion);
}
@Override
public void cancel() {
    // we are already in the state canceling
}
@Test
void testCancelIsIgnored() throws Exception {
    try (MockStateWithExecutionGraphContext ctx = new MockStateWithExecutionGraphContext()) {
        Canceling canceling = createCancelingState(ctx, new StateTrackingMockExecutionGraph());
        canceling.cancel();
        ctx.assertNoStateTransition();
    }
}
private RemotingCommand resetMasterFlushOffset(ChannelHandlerContext ctx, RemotingCommand request)
    throws RemotingCommandException {
    final RemotingCommand response = RemotingCommand.createResponseCommand(null);

    // Only non-master brokers apply the new offset; a master ignores the
    // payload but still returns success.
    if (this.brokerController.getBrokerConfig().getBrokerId() != MixAll.MASTER_ID) {
        ResetMasterFlushOffsetHeader requestHeader =
            (ResetMasterFlushOffsetHeader) request.decodeCommandCustomHeader(ResetMasterFlushOffsetHeader.class);

        if (requestHeader.getMasterFlushOffset() != null) {
            this.brokerController.getMessageStore().setMasterFlushedOffset(requestHeader.getMasterFlushOffset());
        }
    }

    response.setCode(ResponseCode.SUCCESS);
    response.setRemark(null);
    return response;
}
@Test
public void testResetMasterFlushOffset() throws RemotingCommandException {
    ResetMasterFlushOffsetHeader requestHeader = new ResetMasterFlushOffsetHeader();
    RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.RESET_MASTER_FLUSH_OFFSET, requestHeader);
    RemotingCommand response = adminBrokerProcessor.processRequest(handlerContext, request);
    assertThat(response.getCode()).isEqualTo(ResponseCode.SUCCESS);

    requestHeader.setMasterFlushOffset(0L);
    request.makeCustomHeaderToNet();
    response = adminBrokerProcessor.processRequest(handlerContext, request);
    assertThat(response.getCode()).isEqualTo(ResponseCode.SUCCESS);
}
public static BigDecimal minus(Object minuend, Object subtrahend) {
    if (!minuend.getClass().equals(subtrahend.getClass())) {
        throw new IllegalStateException(
            String.format(
                "Unsupported operand type, the minuend type %s is different with subtrahend type %s.",
                minuend.getClass().getSimpleName(),
                subtrahend.getClass().getSimpleName()));
    }
    if (minuend instanceof Integer) {
        return BigDecimal.valueOf((int) minuend).subtract(BigDecimal.valueOf((int) subtrahend));
    } else if (minuend instanceof Long) {
        return BigDecimal.valueOf((long) minuend).subtract(BigDecimal.valueOf((long) subtrahend));
    } else if (minuend instanceof BigInteger) {
        return new BigDecimal(((BigInteger) minuend).subtract((BigInteger) subtrahend).toString());
    } else if (minuend instanceof BigDecimal) {
        return ((BigDecimal) minuend).subtract((BigDecimal) subtrahend);
    } else {
        throw new UnsupportedOperationException(
            String.format("Unsupported type %s for numeric minus.", minuend.getClass().getSimpleName()));
    }
}
@Test
public void testMinus() {
    assertEquals(BigDecimal.valueOf(9999), ObjectUtils.minus(10000, 1));
    assertEquals(BigDecimal.valueOf(4294967295L), ObjectUtils.minus(Integer.MAX_VALUE, Integer.MIN_VALUE));
    assertEquals(BigDecimal.valueOf(9999999999999L), ObjectUtils.minus(10000000000000L, 1L));
    assertEquals(new BigDecimal("18446744073709551615"), ObjectUtils.minus(Long.MAX_VALUE, Long.MIN_VALUE));
    assertEquals(new BigDecimal("99.12344"),
        ObjectUtils.minus(new BigDecimal("100.12345"), new BigDecimal("1.00001")));
}
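The two large expected values above are exactly 2^32 - 1 and 2^64 - 1: promoting both operands to BigDecimal before subtracting preserves differences that primitive arithmetic would wrap. A small illustration of the int case:

import java.math.BigDecimal;

// Primitive subtraction wraps: (2^31 - 1) - (-2^31) = 2^32 - 1, which
// does not fit in an int and comes back as -1.
int wrapped = Integer.MAX_VALUE - Integer.MIN_VALUE;
System.out.println(wrapped); // -1

// Promoting to BigDecimal first keeps the exact difference.
BigDecimal exact = BigDecimal.valueOf(Integer.MAX_VALUE)
        .subtract(BigDecimal.valueOf(Integer.MIN_VALUE));
System.out.println(exact); // 4294967295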
public final void containsEntry(@Nullable Object key, @Nullable Object value) {
    // TODO(kak): Can we share any of this logic w/ MapSubject.containsEntry()?
    checkNotNull(actual);
    if (!actual.containsEntry(key, value)) {
        Map.Entry<@Nullable Object, @Nullable Object> entry = immutableEntry(key, value);
        ImmutableList<Map.Entry<@Nullable Object, @Nullable Object>> entryList = ImmutableList.of(entry);
        // TODO(cpovirk): If the key is present but not with the right value, we could fail using
        // something like valuesForKey(key).contains(value). Consider whether this is worthwhile.
        if (hasMatchingToStringPair(actual.entries(), entryList)) {
            failWithoutActual(
                fact("expected to contain entry", entry),
                fact("an instance of", objectToTypeName(entry)),
                simpleFact("but did not"),
                fact(
                    "though it did contain",
                    countDuplicatesAndAddTypeInfo(
                        retainMatchingToString(actual.entries(), /* itemsToCheck = */ entryList))),
                fact("full contents", actualCustomStringRepresentationForPackageMembersToCall()));
        } else if (actual.containsKey(key)) {
            failWithoutActual(
                fact("expected to contain entry", entry),
                simpleFact("but did not"),
                fact("though it did contain values with that key", actual.asMap().get(key)),
                fact("full contents", actualCustomStringRepresentationForPackageMembersToCall()));
        } else if (actual.containsValue(value)) {
            Set<@Nullable Object> keys = new LinkedHashSet<>();
            for (Map.Entry<?, ?> actualEntry : actual.entries()) {
                if (Objects.equal(actualEntry.getValue(), value)) {
                    keys.add(actualEntry.getKey());
                }
            }
            failWithoutActual(
                fact("expected to contain entry", entry),
                simpleFact("but did not"),
                fact("though it did contain keys with that value", keys),
                fact("full contents", actualCustomStringRepresentationForPackageMembersToCall()));
        } else {
            failWithActual("expected to contain entry", immutableEntry(key, value));
        }
    }
}
@Test
public void containsEntry() {
    ImmutableMultimap<String, String> multimap = ImmutableMultimap.of("kurt", "kluever");
    assertThat(multimap).containsEntry("kurt", "kluever");
}
public static <U> U valueOrElse(Versioned<U> versioned, U defaultValue) {
    return versioned == null ? defaultValue : versioned.value();
}
@Test
public void testOrElse() {
    Versioned<String> vv = new Versioned<>("foo", 1);
    Versioned<String> nullVV = null;

    assertThat(Versioned.valueOrElse(vv, "bar"), is("foo"));
    assertThat(Versioned.valueOrElse(nullVV, "bar"), is("bar"));
}
@Override
public boolean acquirePermission(final int permits) {
    long timeoutInNanos = state.get().config.getTimeoutDuration().toNanos();
    State modifiedState = updateStateWithBackOff(permits, timeoutInNanos);
    boolean result = waitForPermissionIfNecessary(timeoutInNanos, modifiedState.nanosToWait);
    publishRateLimiterAcquisitionEvent(result, permits);
    return result;
}
@Test
public void reserveFewThenSkipCyclesBeforeRefresh() throws Exception {
    setup(Duration.ofNanos(CYCLE_IN_NANOS));
    setTimeOnNanos(CYCLE_IN_NANOS);

    boolean permission = rateLimiter.acquirePermission();
    then(permission).isTrue();
    then(metrics.getAvailablePermissions()).isZero();
    then(metrics.getNanosToWait()).isEqualTo(CYCLE_IN_NANOS);
    then(metrics.getNumberOfWaitingThreads()).isZero();

    AtomicReference<Boolean> firstReservedPermission = new AtomicReference<>(null);
    Thread firstCaller = new Thread(() -> firstReservedPermission.set(rateLimiter.acquirePermission()));
    firstCaller.setDaemon(true);
    firstCaller.start();
    awaitImpatiently()
        .atMost(5, SECONDS)
        .until(firstCaller::getState, equalTo(Thread.State.TIMED_WAITING));
    then(metrics.getAvailablePermissions()).isEqualTo(-1);
    then(metrics.getNanosToWait()).isEqualTo(CYCLE_IN_NANOS * 2);
    then(metrics.getNumberOfWaitingThreads()).isEqualTo(1);

    AtomicReference<Boolean> secondReservedPermission = new AtomicReference<>(null);
    Thread secondCaller = new Thread(() -> secondReservedPermission.set(rateLimiter.acquirePermission()));
    secondCaller.setDaemon(true);
    secondCaller.start();
    awaitImpatiently()
        .atMost(5, SECONDS)
        .until(secondCaller::getState, equalTo(Thread.State.TIMED_WAITING));
    then(metrics.getAvailablePermissions()).isEqualTo(-1);
    then(metrics.getNanosToWait()).isEqualTo(CYCLE_IN_NANOS * 2);
    then(metrics.getNumberOfWaitingThreads()).isEqualTo(2);

    setTimeOnNanos(CYCLE_IN_NANOS * 6 + 10);
    awaitImpatiently()
        .atMost(5, SECONDS)
        .until(firstReservedPermission::get, equalTo(true));
    awaitImpatiently()
        .atMost(5, SECONDS)
        .until(secondReservedPermission::get, equalTo(false));
    then(metrics.getAvailablePermissions()).isEqualTo(1);
    then(metrics.getNanosToWait()).isEqualTo(0L);
    then(metrics.getNumberOfWaitingThreads()).isZero();
}
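For orientation, a plain configuration of this limiter through the public Resilience4j API might look like the sketch below; the test above drives the implementation directly with a fake clock, whereas ordinary callers would go through RateLimiterConfig (the name and durations are illustrative values):

import io.github.resilience4j.ratelimiter.RateLimiter;
import io.github.resilience4j.ratelimiter.RateLimiterConfig;
import java.time.Duration;

// One permit per 100 ms cycle; acquirePermission() may park the caller
// for up to timeoutDuration waiting for a reserved permit, which is the
// TIMED_WAITING behavior the assertions above exercise.
RateLimiterConfig config = RateLimiterConfig.custom()
    .limitForPeriod(1)
    .limitRefreshPeriod(Duration.ofMillis(100))
    .timeoutDuration(Duration.ofMillis(250))
    .build();
RateLimiter limiter = RateLimiter.of("demo", config);
boolean permitted = limiter.acquirePermission();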
@Override
public double p(double[] x) {
    return Math.exp(logp(x));
}
@Test
public void testPdf() {
    System.out.println("pdf");
    MultivariateGaussianDistribution instance = new MultivariateGaussianDistribution(mu, Matrix.of(sigma));
    for (int i = 0; i < x.length; i++) {
        assertEquals(pdf[i], instance.p(x[i]), 1E-4);
    }
}
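Computing the density as exp(logp(x)) is the standard numerically robust route: the log-density of a multivariate Gaussian is a plain quadratic form, and exponentiating only at the end avoids overflow in the normalizing constant and underflow far from the mean. For reference, the textbook definition (not taken from the source):

\log p(x) = -\tfrac{1}{2}\left[k \log(2\pi) + \log\lvert\Sigma\rvert + (x - \mu)^{\top} \Sigma^{-1} (x - \mu)\right]

so p(x) = \exp(\log p(x)) recovers the usual density (2\pi)^{-k/2} \lvert\Sigma\rvert^{-1/2} \exp\!\left(-\tfrac{1}{2}(x-\mu)^{\top}\Sigma^{-1}(x-\mu)\right).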