Columns: focal_method (string, lengths 13 to 60.9k) and test_case (string, lengths 25 to 109k).
static CompletableFuture<Void> updateSubscriptions(Pattern topicsPattern, java.util.function.Consumer<String> topicsHashSetter, GetTopicsResult getTopicsResult, TopicsChangedListener topicsChangedListener, List<String> oldTopics, String subscriptionForLog) { topicsHashSetter.accept(getTopicsResult.getTopicsHash()); if (!getTopicsResult.isChanged()) { return CompletableFuture.completedFuture(null); } List<String> newTopics; if (getTopicsResult.isFiltered()) { newTopics = getTopicsResult.getNonPartitionedOrPartitionTopics(); } else { newTopics = getTopicsResult.filterTopics(topicsPattern).getNonPartitionedOrPartitionTopics(); } final List<CompletableFuture<?>> listenersCallback = new ArrayList<>(2); Set<String> topicsAdded = TopicList.minus(newTopics, oldTopics); Set<String> topicsRemoved = TopicList.minus(oldTopics, newTopics); if (log.isDebugEnabled()) { log.debug("Pattern consumer [{}] Recheck pattern consumer's topics. topicsAdded: {}, topicsRemoved: {}", subscriptionForLog, topicsAdded, topicsRemoved); } listenersCallback.add(topicsChangedListener.onTopicsAdded(topicsAdded)); listenersCallback.add(topicsChangedListener.onTopicsRemoved(topicsRemoved)); return FutureUtil.waitForAll(Collections.unmodifiableList(listenersCallback)); }
@Test public void testChangedFilteredResponse() { PatternMultiTopicsConsumerImpl.updateSubscriptions( Pattern.compile("tenant/my-ns/name-.*"), mockTopicsHashSetter, new GetTopicsResult(Arrays.asList( "persistent://tenant/my-ns/name-0", "persistent://tenant/my-ns/name-1", "persistent://tenant/my-ns/name-2"), "TOPICS_HASH", true, true), mockListener, Arrays.asList("persistent://tenant/my-ns/name-0"), ""); verify(mockListener).onTopicsAdded(Sets.newHashSet( "persistent://tenant/my-ns/name-1", "persistent://tenant/my-ns/name-2")); verify(mockListener).onTopicsRemoved(Collections.emptySet()); verify(mockTopicsHashSetter).accept("TOPICS_HASH"); }
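In updateSubscriptions above, TopicList.minus drives the recheck by diffing the topic lists in both directions. Conceptually it is a plain set difference; a minimal sketch (illustrative, not Pulsar's actual implementation):

static Set<String> minus(List<String> list1, List<String> list2) {
    // elements of list1 that do not appear in list2
    Set<String> result = new HashSet<>(list1);
    result.removeAll(list2);
    return result;
}

With oldTopics = [name-0] and newTopics = [name-0, name-1, name-2], minus(newTopics, oldTopics) yields the added set {name-1, name-2} and minus(oldTopics, newTopics) the empty removed set, matching the verifications in the test above.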
@Override public Serializable read(final MySQLBinlogColumnDef columnDef, final MySQLPacketPayload payload) { return payload.readStringFixByBytes(readLengthFromMeta(columnDef.getColumnMeta(), payload)); }
@Test void assertReadWithMeta1() { columnDef.setColumnMeta(1); when(payload.getByteBuf()).thenReturn(byteBuf); when(byteBuf.readUnsignedByte()).thenReturn((short) 0xff); when(payload.readStringFixByBytes(0xff)).thenReturn(new byte[255]); assertThat(new MySQLBlobBinlogProtocolValue().read(columnDef, payload), is(new byte[255])); }
@CheckForNull @Override public Set<Path> branchChangedFiles(String targetBranchName, Path rootBaseDir) { return Optional.ofNullable((branchChangedFilesWithFileMovementDetection(targetBranchName, rootBaseDir))) .map(GitScmProvider::extractAbsoluteFilePaths) .orElse(null); }
@Test public void branchChangedFiles_should_not_crash_if_branches_have_no_common_ancestors() throws GitAPIException, IOException { String fileName = "file-in-first-commit.xoo"; String renamedName = "file-renamed.xoo"; git.checkout().setOrphan(true).setName("b1").call(); Path file = worktree.resolve(fileName); Path renamed = file.resolveSibling(renamedName); addLineToFile(fileName, 1); Files.move(file, renamed); git.rm().addFilepattern(fileName).call(); commit(renamedName); Set<Path> files = newScmProvider().branchChangedFiles("main", worktree); // no shared history, so no diff assertThat(files).isNull(); }
ConcurrentPublication addPublication(final String channel, final int streamId) { clientLock.lock(); try { ensureActive(); ensureNotReentrant(); final long registrationId = driverProxy.addPublication(channel, streamId); stashedChannelByRegistrationId.put(registrationId, channel); awaitResponse(registrationId); return (ConcurrentPublication)resourceByRegIdMap.get(registrationId); } finally { clientLock.unlock(); } }
@Test void addPublicationShouldMapLogFile() { whenReceiveBroadcastOnMessage( ControlProtocolEvents.ON_PUBLICATION_READY, publicationReadyBuffer, (buffer) -> publicationReady.length()); conductor.addPublication(CHANNEL, STREAM_ID_1); verify(logBuffersFactory).map(SESSION_ID_1 + "-log"); }
public FEELFnResult<BigDecimal> invoke(@ParameterName("from") String from, @ParameterName("grouping separator") String group, @ParameterName("decimal separator") String decimal) { if ( from == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "from", "cannot be null")); } if ( group != null && !group.equals( " " ) && !group.equals( "." ) && !group.equals( "," ) ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "group", "not a valid one, can only be one of: dot ('.'), comma (','), space (' ') ")); } if ( decimal != null ) { if (!decimal.equals( "." ) && !decimal.equals( "," )) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "decimal", "not a valid one, can only be one of: dot ('.'), comma (',') ")); } else if (group != null && decimal.equals( group )) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "decimal", "cannot be the same as parameter 'group' ")); } } if ( group != null ) { from = from.replaceAll( "\\" + group, "" ); } if ( decimal != null ) { from = from.replaceAll( "\\" + decimal, "." ); } BigDecimal result = NumberEvalHelper.getBigDecimalOrNull(from ); if( from != null && result == null ) { // conversion failed return FEELFnResult.ofError( new InvalidParametersEvent(Severity.ERROR, "unable to calculate final number result" ) ); } else { return FEELFnResult.ofResult( result ); } }
@Test void invokeGroupEqualsDecimal() { FunctionTestUtil.assertResultError(numberFunction.invoke("1 000.1", ".", "."), InvalidParametersEvent.class); }
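The number function above normalizes a localized numeric string by first stripping the grouping separator and then rewriting the decimal separator to a dot before parsing. A sketch of the same two replaceAll steps on an example input:

String from = "1.000.000,01";
String group = ".";
String decimal = ",";
from = from.replaceAll("\\" + group, "");    // "1000000,01"
from = from.replaceAll("\\" + decimal, "."); // "1000000.01"
BigDecimal result = new BigDecimal(from);    // 1000000.01

The guard rejecting group equal to decimal, exercised by the test above, exists because stripping the grouping separator first would silently delete the decimal separator as well.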
@VisibleForTesting static SingleSegmentAssignment getNextSingleSegmentAssignment(Map<String, String> currentInstanceStateMap, Map<String, String> targetInstanceStateMap, int minAvailableReplicas, boolean lowDiskMode, Map<String, Integer> numSegmentsToOffloadMap, Map<Pair<Set<String>, Set<String>>, Set<String>> assignmentMap) { Map<String, String> nextInstanceStateMap = new TreeMap<>(); // Assign the segment the same way as other segments if the current and target instances are the same. We need this // to guarantee the mirror servers for replica-group based routing strategies. Set<String> currentInstances = currentInstanceStateMap.keySet(); Set<String> targetInstances = targetInstanceStateMap.keySet(); Pair<Set<String>, Set<String>> assignmentKey = Pair.of(currentInstances, targetInstances); Set<String> instancesToAssign = assignmentMap.get(assignmentKey); if (instancesToAssign != null) { Set<String> availableInstances = new TreeSet<>(); for (String instanceName : instancesToAssign) { String currentInstanceState = currentInstanceStateMap.get(instanceName); String targetInstanceState = targetInstanceStateMap.get(instanceName); if (currentInstanceState != null) { availableInstances.add(instanceName); // Use target instance state if available in case the state changes nextInstanceStateMap.put(instanceName, targetInstanceState != null ? targetInstanceState : currentInstanceState); } else { nextInstanceStateMap.put(instanceName, targetInstanceState); } } return new SingleSegmentAssignment(nextInstanceStateMap, availableInstances); } // Add all the common instances // Use target instance state in case the state changes for (Map.Entry<String, String> entry : targetInstanceStateMap.entrySet()) { String instanceName = entry.getKey(); if (currentInstanceStateMap.containsKey(instanceName)) { nextInstanceStateMap.put(instanceName, entry.getValue()); } } // Add current instances until the min available replicas achieved int numInstancesToKeep = minAvailableReplicas - nextInstanceStateMap.size(); if (numInstancesToKeep > 0) { // Sort instances by number of segments to offload, and keep the ones with the least segments to offload List<Triple<String, String, Integer>> instancesInfo = getSortedInstancesOnNumSegmentsToOffload(currentInstanceStateMap, nextInstanceStateMap, numSegmentsToOffloadMap); numInstancesToKeep = Integer.min(numInstancesToKeep, instancesInfo.size()); for (int i = 0; i < numInstancesToKeep; i++) { Triple<String, String, Integer> instanceInfo = instancesInfo.get(i); nextInstanceStateMap.put(instanceInfo.getLeft(), instanceInfo.getMiddle()); } } Set<String> availableInstances = new TreeSet<>(nextInstanceStateMap.keySet()); // After achieving the min available replicas, when low disk mode is enabled, only add new instances when all // current instances exist in the next assignment. // We want to first drop the extra instances as one step, then add the target instances as another step to avoid the // case where segments are first added to the instance before other segments are dropped from the instance, which // might cause the server to run out of disk. Note that even if segment addition and drop happen in the same step, // there is no guarantee that the server processes the segment drop before the segment addition. if (!lowDiskMode || currentInstanceStateMap.size() == nextInstanceStateMap.size()) { int numInstancesToAdd = targetInstanceStateMap.size() - nextInstanceStateMap.size(); if (numInstancesToAdd > 0) { // Sort instances by number of segments to offload, and add the ones with the least segments to offload List<Triple<String, String, Integer>> instancesInfo = getSortedInstancesOnNumSegmentsToOffload(targetInstanceStateMap, nextInstanceStateMap, numSegmentsToOffloadMap); for (int i = 0; i < numInstancesToAdd; i++) { Triple<String, String, Integer> instanceInfo = instancesInfo.get(i); nextInstanceStateMap.put(instanceInfo.getLeft(), instanceInfo.getMiddle()); } } } assignmentMap.put(assignmentKey, nextInstanceStateMap.keySet()); return new SingleSegmentAssignment(nextInstanceStateMap, availableInstances); }
@Test public void testDowntimeWithLowDiskMode() { // With common instance, first assignment should keep the common instance and remove the not common instance Map<String, String> currentInstanceStateMap = SegmentAssignmentUtils.getInstanceStateMap(Arrays.asList("host1", "host2"), ONLINE); Map<String, String> targetInstanceStateMap = SegmentAssignmentUtils.getInstanceStateMap(Arrays.asList("host1", "host3"), ONLINE); TableRebalancer.SingleSegmentAssignment assignment = getNextSingleSegmentAssignment(currentInstanceStateMap, targetInstanceStateMap, 0, true); assertEquals(assignment._instanceStateMap, Collections.singletonMap("host1", ONLINE)); assertEquals(assignment._availableInstances, Collections.singleton("host1")); // Second assignment should be the same as target assignment assignment = getNextSingleSegmentAssignment(assignment._instanceStateMap, targetInstanceStateMap, 0, true); assertEquals(assignment._instanceStateMap, targetInstanceStateMap); assertEquals(assignment._availableInstances, Collections.singleton("host1")); // Without common instance, first assignment should drop all instances targetInstanceStateMap = SegmentAssignmentUtils.getInstanceStateMap(Arrays.asList("host3", "host4"), ONLINE); assignment = getNextSingleSegmentAssignment(currentInstanceStateMap, targetInstanceStateMap, 0, true); assertTrue(assignment._instanceStateMap.isEmpty()); assertTrue(assignment._availableInstances.isEmpty()); // Second assignment should be the same as target assignment assignment = getNextSingleSegmentAssignment(assignment._instanceStateMap, targetInstanceStateMap, 0, true); assertEquals(assignment._instanceStateMap, targetInstanceStateMap); assertTrue(assignment._availableInstances.isEmpty()); // With increasing number of replicas, first assignment should drop all instances targetInstanceStateMap = SegmentAssignmentUtils.getInstanceStateMap(Arrays.asList("host3", "host4", "host5"), ONLINE); assignment = getNextSingleSegmentAssignment(currentInstanceStateMap, targetInstanceStateMap, 0, true); assertTrue(assignment._instanceStateMap.isEmpty()); assertTrue(assignment._availableInstances.isEmpty()); // Second assignment should be the same as target assignment assignment = getNextSingleSegmentAssignment(assignment._instanceStateMap, targetInstanceStateMap, 0, true); assertEquals(assignment._instanceStateMap, targetInstanceStateMap); assertTrue(assignment._availableInstances.isEmpty()); // With decreasing number of replicas, first assignment should drop all instances currentInstanceStateMap = SegmentAssignmentUtils.getInstanceStateMap(Arrays.asList("host1", "host2", "host3"), ONLINE); targetInstanceStateMap = SegmentAssignmentUtils.getInstanceStateMap(Arrays.asList("host4", "host5"), ONLINE); assignment = getNextSingleSegmentAssignment(currentInstanceStateMap, targetInstanceStateMap, 0, true); assertTrue(assignment._instanceStateMap.isEmpty()); assertTrue(assignment._availableInstances.isEmpty()); // Second assignment should be the same as target assignment assignment = getNextSingleSegmentAssignment(assignment._instanceStateMap, targetInstanceStateMap, 0, true); assertEquals(assignment._instanceStateMap, targetInstanceStateMap); assertTrue(assignment._availableInstances.isEmpty()); }
public String toCompactListString() { return id + COMMA + locType + COMMA + latOrY + COMMA + longOrX; }
@Test public void toCompactListStringNullList() { String s = toCompactListString((List<LayoutLocation>) null); assertEquals("not empty string", "", s); }
public static int readVarint(ByteBuffer buffer) { int value = readUnsignedVarint(buffer); return (value >>> 1) ^ -(value & 1); }
@Test public void testInvalidVarint() { // varint encoding has one overflow byte ByteBuffer buf = ByteBuffer.wrap(new byte[] {(byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, 0x01}); assertThrows(IllegalArgumentException.class, () -> ByteUtils.readVarint(buf)); }
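readVarint above undoes zigzag encoding after decoding the raw unsigned varint: (value >>> 1) ^ -(value & 1). A minimal round-trip sketch of the zigzag transform (method names here are illustrative, not Kafka's API):

// Zigzag interleaves signed values so small negatives stay small:
// 0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, 2 -> 4, ...
static int zigzagEncode(int n) {
    return (n << 1) ^ (n >> 31);  // arithmetic shift replicates the sign bit
}

static int zigzagDecode(int n) {
    return (n >>> 1) ^ -(n & 1);  // the expression used in readVarint above
}

The test feeds six bytes with continuation bits set; a 32-bit varint fits in at most five bytes, so the underlying unsigned-varint read rejects the overflow with IllegalArgumentException.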
@SuppressWarnings("unchecked") public static int compare(Comparable lhs, Comparable rhs) { assert lhs != null; assert rhs != null; if (lhs.getClass() == rhs.getClass()) { return lhs.compareTo(rhs); } if (lhs instanceof Number && rhs instanceof Number) { return Numbers.compare(lhs, rhs); } return lhs.compareTo(rhs); }
@Test(expected = Throwable.class) public void testIncompatibleTypesInCompare() { compare("string", 1); }
public static Class<?> defineClass( String className, Class<?> neighbor, ClassLoader loader, ProtectionDomain domain, byte[] bytecodes) { Preconditions.checkNotNull(loader); Preconditions.checkArgument(Platform.JAVA_VERSION >= 8); if (neighbor != null && Platform.JAVA_VERSION >= 9) { // classes in bytecode must be in same package as lookup class. MethodHandles.Lookup lookup = MethodHandles.lookup(); _JDKAccess.addReads(_JDKAccess.getModule(DefineClass.class), _JDKAccess.getModule(neighbor)); lookup = _Lookup.privateLookupIn(neighbor, lookup); return _Lookup.defineClass(lookup, bytecodes); } if (classloaderDefineClassHandle == null) { MethodHandles.Lookup lookup = _JDKAccess._trustedLookup(ClassLoader.class); try { classloaderDefineClassHandle = lookup.findVirtual( ClassLoader.class, "defineClass", MethodType.methodType( Class.class, String.class, byte[].class, int.class, int.class, ProtectionDomain.class)); } catch (NoSuchMethodException | IllegalAccessException e) { throw new RuntimeException(e); } } try { return (Class<?>) classloaderDefineClassHandle.invokeWithArguments( loader, className, bytecodes, 0, bytecodes.length, domain); } catch (Throwable e) { throw new RuntimeException(e); } }
@Test public void testDefineClass() throws ClassNotFoundException { String pkg = DefineClassTest.class.getPackage().getName(); CompileUnit unit = new CompileUnit( pkg, "A", ("package " + pkg + ";\n" + "public class A {\n" + " public static String hello() { return \"HELLO\"; }\n" + "}")); byte[] bytecodes = JaninoUtils.toBytecode(Thread.currentThread().getContextClassLoader(), unit) .values() .iterator() .next(); String className = pkg + ".A"; ClassLoaderUtils.ByteArrayClassLoader loader = new ClassLoaderUtils.ByteArrayClassLoader(Collections.singletonMap(className, bytecodes)); loader.loadClass(className); loader = new ClassLoaderUtils.ByteArrayClassLoader(Collections.singletonMap(className, bytecodes)); DefineClass.defineClass(className, DefineClassTest.class, loader, null, bytecodes); Class<?> clz = loader.loadClass(className); if (Platform.JAVA_VERSION >= 9) { Assert.assertEquals(clz.getClassLoader(), DefineClassTest.class.getClassLoader()); Assert.assertThrows( Exception.class, () -> DefineClass.defineClass( className, null, DefineClassTest.class.getClassLoader(), null, bytecodes)); } else { Assert.assertEquals(clz.getClassLoader(), loader); DefineClass.defineClass( className, null, DefineClassTest.class.getClassLoader(), null, bytecodes); } }
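On Java 9+, the neighbor path in defineClass relies on private lookups: the class bytes must declare the same package as the neighbor class, and the neighbor's module must be accessible to the caller (hence the addReads call before the lookup). The core of that path in plain JDK API, as a sketch:

static Class<?> defineInNeighborPackage(Class<?> neighbor, byte[] bytecodes)
        throws IllegalAccessException {
    // Java 9+: bytecodes must be in the same package as neighbor,
    // and the neighbor's module must be open/readable to the caller
    MethodHandles.Lookup lookup =
            MethodHandles.privateLookupIn(neighbor, MethodHandles.lookup());
    return lookup.defineClass(bytecodes);
}

This matches the two assertions in the test: with a neighbor, the class lands in the neighbor's class loader; with a null neighbor on Java 9+, the trusted-lookup fallback into ClassLoader.defineClass is expected to fail in this test's setup.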
@Override public Connection connect(String url, Properties info) throws SQLException { // calciteConnection is initialized with an empty Beam schema, // we need to populate it with pipeline options, load table providers, etc return JdbcConnection.initialize((CalciteConnection) super.connect(url, info)); }
@Test public void testInternalConnect_unbounded_limit() throws Exception { ReadOnlyTableProvider tableProvider = new ReadOnlyTableProvider( "test", ImmutableMap.of( "test", TestUnboundedTable.of( Schema.FieldType.INT32, "order_id", Schema.FieldType.INT32, "site_id", Schema.FieldType.INT32, "price", Schema.FieldType.DATETIME, "order_time") .timestampColumnIndex(3) .addRows(Duration.ZERO, 1, 1, 1, FIRST_DATE, 1, 2, 6, FIRST_DATE))); CalciteConnection connection = JdbcDriver.connect(tableProvider, PipelineOptionsFactory.create()); Statement statement = connection.createStatement(); ResultSet resultSet1 = statement.executeQuery("SELECT * FROM test LIMIT 1"); assertTrue(resultSet1.next()); assertFalse(resultSet1.next()); ResultSet resultSet2 = statement.executeQuery("SELECT * FROM test LIMIT 2"); assertTrue(resultSet2.next()); assertTrue(resultSet2.next()); assertFalse(resultSet2.next()); }
@Override void toHtml() throws IOException { writeHtmlHeader(); htmlCoreReport.toHtml(); writeHtmlFooter(); }
@Test public void testCounter() throws IOException { // counter with 3 requests setProperty(Parameter.WARNING_THRESHOLD_MILLIS, "500"); setProperty(Parameter.SEVERE_THRESHOLD_MILLIS, "1500"); setProperty(Parameter.ANALYTICS_ID, "123456789"); counter.addRequest("test1", 0, 0, 0, false, 1000); counter.addRequest("test2", 1000, 500, 500, false, 1000); counter.addRequest("test3", 100000, 50000, 50000, true, 10000); // request for businessFacadeCounter servicesCounter.addRequest("testServices", 100, 50, 50, false, -1); final StringBuilder sb = new StringBuilder(); for (int i = 0; i < 5010; i++) { // HtmlCounterReport.MAX_REQUEST_LENGTH = 5000 sb.append(i % 10); } final String longRequestName = sb.toString(); counter.addRequest(longRequestName, 0, 0, 0, false, 5000); collector.collectWithoutErrors(javaInformationsList); final HtmlReport htmlReport = new HtmlReport(collector, null, javaInformationsList, Period.TOUT, writer); htmlReport.toHtml("message 2", null); assertNotEmptyAndClear(writer); setProperty(Parameter.NO_DATABASE, Boolean.TRUE.toString()); collector.collectWithoutErrors(javaInformationsList); htmlReport.toHtml("message 2", null); assertNotEmptyAndClear(writer); setProperty(Parameter.NO_DATABASE, Boolean.FALSE.toString()); setProperty(Parameter.WARNING_THRESHOLD_MILLIS, "-1"); try { htmlReport.toHtml("message 2", null); } catch (final IllegalStateException e) { assertNotNull("ok", e); } setProperty(Parameter.WARNING_THRESHOLD_MILLIS, null); // case where counterReportsByCounterName.size() == 1 collector = new Collector("test", List.of(counter)); final HtmlReport htmlReport2 = new HtmlReport(collector, null, javaInformationsList, Period.TOUT, writer); htmlReport2.toHtml(null, null); }
T getFunction(final List<SqlArgument> arguments) { // first try to get the candidates without any implicit casting Optional<T> candidate = findMatchingCandidate(arguments, false); if (candidate.isPresent()) { return candidate.get(); } else if (!supportsImplicitCasts) { throw createNoMatchingFunctionException(arguments); } // if none were found (candidate isn't present) try again with implicit casting candidate = findMatchingCandidate(arguments, true); if (candidate.isPresent()) { return candidate.get(); } throw createNoMatchingFunctionException(arguments); }
@Test public void shouldFindFewerGenericsWithEarlierVariadic() { // Given: givenFunctions( function(EXPECTED, 0, INT_VARARGS, GenericType.of("A"), INT, INT), function(OTHER, 3, INT, GenericType.of("A"), GenericType.of("B"), INT_VARARGS) ); // When: final KsqlScalarFunction fun = udfIndex.getFunction(ImmutableList.of( SqlArgument.of(INTEGER), SqlArgument.of(INTEGER), SqlArgument.of(INTEGER), SqlArgument.of(INTEGER) )); // Then: assertThat(fun.name(), equalTo(EXPECTED)); }
@Override public void transform(Message message, DataType fromType, DataType toType) { final Map<String, Object> headers = message.getHeaders(); CloudEvent cloudEvent = CloudEvents.v1_0; headers.putIfAbsent(cloudEvent.mandatoryAttribute(CloudEvent.CAMEL_CLOUD_EVENT_ID).http(), headers.getOrDefault(CloudEvent.CAMEL_CLOUD_EVENT_ID, message.getExchange().getExchangeId())); headers.putIfAbsent(cloudEvent.mandatoryAttribute(CloudEvent.CAMEL_CLOUD_EVENT_VERSION).http(), headers.getOrDefault(CloudEvent.CAMEL_CLOUD_EVENT_VERSION, cloudEvent.version())); headers.putIfAbsent(cloudEvent.mandatoryAttribute(CloudEvent.CAMEL_CLOUD_EVENT_TYPE).http(), headers.getOrDefault(CloudEvent.CAMEL_CLOUD_EVENT_TYPE, CloudEvent.DEFAULT_CAMEL_CLOUD_EVENT_TYPE)); headers.putIfAbsent(cloudEvent.mandatoryAttribute(CloudEvent.CAMEL_CLOUD_EVENT_SOURCE).http(), headers.getOrDefault(CloudEvent.CAMEL_CLOUD_EVENT_SOURCE, CloudEvent.DEFAULT_CAMEL_CLOUD_EVENT_SOURCE)); if (headers.containsKey(CloudEvent.CAMEL_CLOUD_EVENT_SUBJECT)) { headers.putIfAbsent(cloudEvent.mandatoryAttribute(CloudEvent.CAMEL_CLOUD_EVENT_SUBJECT).http(), headers.get(CloudEvent.CAMEL_CLOUD_EVENT_SUBJECT)); } if (headers.containsKey(CloudEvent.CAMEL_CLOUD_EVENT_DATA_CONTENT_TYPE)) { headers.putIfAbsent(cloudEvent.mandatoryAttribute(CloudEvent.CAMEL_CLOUD_EVENT_DATA_CONTENT_TYPE).http(), headers.get(CloudEvent.CAMEL_CLOUD_EVENT_DATA_CONTENT_TYPE)); } headers.putIfAbsent(cloudEvent.mandatoryAttribute(CloudEvent.CAMEL_CLOUD_EVENT_TIME).http(), headers.getOrDefault(CloudEvent.CAMEL_CLOUD_EVENT_TIME, cloudEvent.getEventTime(message.getExchange()))); headers.putIfAbsent(Exchange.CONTENT_TYPE, headers.getOrDefault(CloudEvent.CAMEL_CLOUD_EVENT_CONTENT_TYPE, "application/json")); cloudEvent.attributes().stream().map(CloudEvent.Attribute::id).forEach(headers::remove); }
@Test void shouldSetDefaultCloudEventAttributes() throws Exception { Exchange exchange = new DefaultExchange(camelContext); exchange.getMessage().setBody(new ByteArrayInputStream("{}".getBytes(StandardCharsets.UTF_8))); transformer.transform(exchange.getMessage(), DataType.ANY, DataType.ANY); CloudEvent cloudEvent = CloudEvents.v1_0; assertTrue(exchange.getMessage().hasHeaders()); assertEquals(exchange.getExchangeId(), exchange.getMessage().getHeader(cloudEvent.mandatoryAttribute(CloudEvent.CAMEL_CLOUD_EVENT_ID).http())); assertEquals(cloudEvent.version(), exchange.getMessage().getHeader(cloudEvent.mandatoryAttribute(CloudEvent.CAMEL_CLOUD_EVENT_VERSION).http())); assertEquals(CloudEvent.DEFAULT_CAMEL_CLOUD_EVENT_TYPE, exchange.getMessage().getHeader(cloudEvent.mandatoryAttribute(CloudEvent.CAMEL_CLOUD_EVENT_TYPE).http())); assertNull(exchange.getMessage().getHeader(cloudEvent.mandatoryAttribute(CloudEvent.CAMEL_CLOUD_EVENT_SUBJECT).http())); assertEquals(CloudEvent.DEFAULT_CAMEL_CLOUD_EVENT_SOURCE, exchange.getMessage().getHeader(cloudEvent.mandatoryAttribute(CloudEvent.CAMEL_CLOUD_EVENT_SOURCE).http())); assertTrue(exchange.getMessage().getHeaders() .containsKey(cloudEvent.mandatoryAttribute(CloudEvent.CAMEL_CLOUD_EVENT_TIME).http())); assertEquals("application/json", exchange.getMessage().getHeader(Exchange.CONTENT_TYPE)); assertEquals("{}", exchange.getMessage().getBody(String.class)); }
@Override protected void decode(final ChannelHandlerContext ctx, final ByteBuf in, final List<Object> out) { MySQLPacketPayload payload = new MySQLPacketPayload(in, ctx.channel().attr(CommonConstants.CHARSET_ATTRIBUTE_KEY).get()); decodeCommandPacket(payload, out); }
@Test void assertDecodeOkPacket() { MySQLCommandPacketDecoder commandPacketDecoder = new MySQLCommandPacketDecoder(); List<Object> actual = new LinkedList<>(); commandPacketDecoder.decode(channelHandlerContext, mockOkPacket(), actual); assertPacketByType(actual, MySQLOKPacket.class); }
@Override public MergedResult decorate(final QueryResult queryResult, final SQLStatementContext sqlStatementContext, final EncryptRule rule) { SQLStatement sqlStatement = sqlStatementContext.getSqlStatement(); if (sqlStatement instanceof MySQLExplainStatement || sqlStatement instanceof MySQLShowColumnsStatement) { return new MergedEncryptShowColumnsMergedResult(queryResult, sqlStatementContext, rule); } if (sqlStatement instanceof MySQLShowCreateTableStatement) { return new MergedEncryptShowCreateTableMergedResult(globalRuleMetaData, queryResult, sqlStatementContext, rule); } return new TransparentMergedResult(queryResult); }
@Test void assertMergedResultWithOtherStatement() { sqlStatementContext = mock(SQLStatementContext.class); EncryptDALResultDecorator encryptDALResultDecorator = new EncryptDALResultDecorator(mock(RuleMetaData.class)); assertThat(encryptDALResultDecorator.decorate(mock(QueryResult.class), sqlStatementContext, rule), instanceOf(TransparentMergedResult.class)); assertThat(encryptDALResultDecorator.decorate(mock(MergedResult.class), sqlStatementContext, rule), instanceOf(MergedResult.class)); }
public static Builder builder() { return new Builder(); }
@Test public void testEqualsAndHashCode() { RuleData ruleData1 = RuleData.builder().id("id").name("name").pluginName("pluginName") .selectorId("selectorId").matchMode(1).sort(0).enabled(true).loged(true) .handle("handle").conditionDataList(new ArrayList<>()).build(); RuleData ruleData2 = RuleData.builder().id("id").name("name").pluginName("pluginName") .selectorId("selectorId").matchMode(1).sort(0).enabled(true).loged(true) .handle("handle").conditionDataList(new ArrayList<>()).build(); Set<RuleData> set = new HashSet<>(); set.add(ruleData1); set.add(ruleData2); assertThat(set, hasSize(1)); }
@Override public ImportResult importItem( UUID jobId, IdempotentImportExecutor idempotentExecutor, TokenSecretAuthData authData, VideosContainerResource data) throws Exception { if (data == null) { // Nothing to do return ImportResult.OK; } BackblazeDataTransferClient b2Client = b2ClientFactory.getOrCreateB2Client(jobId, authData); final LongAdder totalImportedFilesSizes = new LongAdder(); if (data.getVideos() != null && data.getVideos().size() > 0) { for (VideoModel video : data.getVideos()) { idempotentExecutor.importAndSwallowIOExceptions( video, v -> { ItemImportResult<String> fileImportResult = importSingleVideo(jobId, b2Client, v); if (fileImportResult.hasBytes()) { totalImportedFilesSizes.add(fileImportResult.getBytes()); } return fileImportResult; }); } } return ImportResult.OK.copyWithBytes(totalImportedFilesSizes.longValue()); }
@Test public void testEmptyVideos() throws Exception { VideosContainerResource data = mock(VideosContainerResource.class); when(data.getVideos()).thenReturn(new ArrayList<>()); BackblazeVideosImporter sut = new BackblazeVideosImporter(monitor, dataStore, streamProvider, clientFactory); ImportResult result = sut.importItem(UUID.randomUUID(), executor, authData, data); assertEquals(ImportResult.ResultType.OK, result.getType()); }
@Override public double read() { return gaugeSource.read(); }
@Test public void whenNotVisitedWithCachedValueReadsDefault() { DynamicMetricsProvider concreteProvider = (descriptor, context) -> context.collect(descriptor.withPrefix("foo"), "doubleField", INFO, COUNT, 42.42D); metricsRegistry.registerDynamicMetricsProvider(concreteProvider); DoubleGauge doubleGauge = metricsRegistry.newDoubleGauge("foo.doubleField"); // needed to collect dynamic metrics and update the gauge created from them metricsRegistry.collect(mock(MetricsCollector.class)); assertEquals(42.42D, doubleGauge.read(), 10E-6); // clears the cached metric source metricsRegistry.deregisterDynamicMetricsProvider(concreteProvider); metricsRegistry.collect(mock(MetricsCollector.class)); assertEquals(DoubleGaugeImpl.DEFAULT_VALUE, doubleGauge.read(), 10E-6); }
public <T extends BaseRequest<T, R>, R extends BaseResponse> R execute(BaseRequest<T, R> request) { return api.send(request); }
@Test public void deleteChatStickerSet() { BaseResponse response = bot.execute(new DeleteChatStickerSet(groupId)); assertFalse(response.isOk()); assertEquals(400, response.errorCode()); }
public CoercedExpressionResult coerce() { final Class<?> leftClass = left.getRawClass(); final Class<?> nonPrimitiveLeftClass = toNonPrimitiveType(leftClass); final Class<?> rightClass = right.getRawClass(); final Class<?> nonPrimitiveRightClass = toNonPrimitiveType(rightClass); boolean sameClass = leftClass == rightClass; boolean isUnificationExpression = left instanceof UnificationTypedExpression || right instanceof UnificationTypedExpression; if (sameClass || isUnificationExpression) { return new CoercedExpressionResult(left, right); } if (!canCoerce()) { throw new CoercedExpressionException(new InvalidExpressionErrorResult("Comparison operation requires compatible types. Found " + leftClass + " and " + rightClass)); } if ((nonPrimitiveLeftClass == Integer.class || nonPrimitiveLeftClass == Long.class) && nonPrimitiveRightClass == Double.class) { CastExpr castExpression = new CastExpr(PrimitiveType.doubleType(), this.left.getExpression()); return new CoercedExpressionResult( new TypedExpression(castExpression, double.class, left.getType()), right, false); } final boolean leftIsPrimitive = leftClass.isPrimitive() || Number.class.isAssignableFrom( leftClass ); final boolean canCoerceLiteralNumberExpr = canCoerceLiteralNumberExpr(leftClass); boolean rightAsStaticField = false; final Expression rightExpression = right.getExpression(); final TypedExpression coercedRight; if (leftIsPrimitive && canCoerceLiteralNumberExpr && rightExpression instanceof LiteralStringValueExpr) { final Expression coercedLiteralNumberExprToType = coerceLiteralNumberExprToType((LiteralStringValueExpr) right.getExpression(), leftClass); coercedRight = right.cloneWithNewExpression(coercedLiteralNumberExprToType); coercedRight.setType( leftClass ); } else if (shouldCoerceBToString(left, right)) { coercedRight = coerceToString(right); } else if (isNotBinaryExpression(right) && canBeNarrowed(leftClass, rightClass) && right.isNumberLiteral()) { coercedRight = castToClass(leftClass); } else if (leftClass == long.class && rightClass == int.class) { coercedRight = right.cloneWithNewExpression(new CastExpr(PrimitiveType.longType(), right.getExpression())); } else if (leftClass == Date.class && rightClass == String.class) { coercedRight = coerceToDate(right); rightAsStaticField = true; } else if (leftClass == LocalDate.class && rightClass == String.class) { coercedRight = coerceToLocalDate(right); rightAsStaticField = true; } else if (leftClass == LocalDateTime.class && rightClass == String.class) { coercedRight = coerceToLocalDateTime(right); rightAsStaticField = true; } else if (shouldCoerceBToMap()) { coercedRight = castToClass(toNonPrimitiveType(leftClass)); } else if (isBoolean(leftClass) && !isBoolean(rightClass)) { coercedRight = coerceBoolean(right); } else { coercedRight = right; } final TypedExpression coercedLeft; if (nonPrimitiveLeftClass == Character.class && shouldCoerceBToString(right, left)) { coercedLeft = coerceToString(left); } else { coercedLeft = left; } return new CoercedExpressionResult(coercedLeft, coercedRight, rightAsStaticField); }
@Test public void testStringToBooleanFalse() { final TypedExpression left = expr(THIS_PLACEHOLDER + ".getBooleanValue", Boolean.class); final TypedExpression right = expr("\"false\"", String.class); final CoercedExpression.CoercedExpressionResult coerce = new CoercedExpression(left, right, false).coerce(); assertThat(coerce.getCoercedRight()).isEqualTo(expr("false", Boolean.class)); }
public static long write(InputStream is, OutputStream os) throws IOException { return write(is, os, BUFFER_SIZE); }
@Test void testWrite4() throws Exception { assertThat((int) IOUtils.write(is, os), equalTo(TEXT.length())); }
public String nextString() throws IOException { int p = peeked; if (p == PEEKED_NONE) { p = doPeek(); } String result; if (p == PEEKED_UNQUOTED) { result = nextUnquotedValue(); } else if (p == PEEKED_SINGLE_QUOTED) { result = nextQuotedValue('\''); } else if (p == PEEKED_DOUBLE_QUOTED) { result = nextQuotedValue('"'); } else if (p == PEEKED_BUFFERED) { result = peekedString; peekedString = null; } else if (p == PEEKED_LONG) { result = Long.toString(peekedLong); } else if (p == PEEKED_NUMBER) { result = new String(buffer, pos, peekedNumberLength); pos += peekedNumberLength; } else { throw unexpectedTokenError("a string"); } peeked = PEEKED_NONE; pathIndices[stackSize - 1]++; return result; }
@Test public void testEscapeCharacterQuoteWithoutStrictMode() throws IOException { String json = "\"\\'\""; JsonReader reader = new JsonReader(reader(json)); assertThat(reader.nextString()).isEqualTo("'"); }
public static DeploymentDescriptor merge(List<DeploymentDescriptor> descriptorHierarchy, MergeMode mode) { if (descriptorHierarchy == null || descriptorHierarchy.isEmpty()) { throw new IllegalArgumentException("Descriptor hierarchy list cannot be empty"); } if (descriptorHierarchy.size() == 1) { return descriptorHierarchy.get(0); } Deque<DeploymentDescriptor> stack = new ArrayDeque<>(); descriptorHierarchy.forEach(stack::push); while (stack.size() > 1) { stack.push(merge(stack.pop(), stack.pop(), mode)); } // last element from the stack is the one that contains all merged descriptors return stack.pop(); }
@Test public void testDeploymentDesciptorMergeMergeCollectionsAvoidDuplicatesNamedObject() { DeploymentDescriptor primary = new DeploymentDescriptorImpl("org.jbpm.domain"); primary.getBuilder() .addWorkItemHandler(new NamedObjectModel("mvel", "Log", "new org.jbpm.process.instance.impl.demo.SystemOutWorkItemHandler()")); assertThat(primary).isNotNull(); assertThat(primary.getPersistenceUnit()).isEqualTo("org.jbpm.domain"); assertThat(primary.getAuditPersistenceUnit()).isEqualTo("org.jbpm.domain"); assertThat(primary.getAuditMode()).isEqualTo(AuditMode.JPA); assertThat(primary.getPersistenceMode()).isEqualTo(PersistenceMode.JPA); assertThat(primary.getRuntimeStrategy()).isEqualTo(RuntimeStrategy.SINGLETON); assertThat(primary.getMarshallingStrategies().size()).isEqualTo(0); assertThat(primary.getConfiguration().size()).isEqualTo(0); assertThat(primary.getEnvironmentEntries().size()).isEqualTo(0); assertThat(primary.getEventListeners().size()).isEqualTo(0); assertThat(primary.getGlobals().size()).isEqualTo(0); assertThat(primary.getTaskEventListeners().size()).isEqualTo(0); assertThat(primary.getWorkItemHandlers().size()).isEqualTo(1); DeploymentDescriptor secondary = new DeploymentDescriptorImpl("org.jbpm.domain"); secondary.getBuilder() .auditMode(AuditMode.JMS) .persistenceMode(PersistenceMode.JPA) .persistenceUnit(null) .auditPersistenceUnit("") .addWorkItemHandler(new NamedObjectModel("mvel", "Log", "new org.jbpm.process.instance.impl.demo.CustomSystemOutWorkItemHandler()")); assertThat(secondary).isNotNull(); assertThat(secondary.getPersistenceUnit()).isEqualTo(null); assertThat(secondary.getAuditPersistenceUnit()).isEqualTo(""); assertThat(secondary.getAuditMode()).isEqualTo(AuditMode.JMS); assertThat(secondary.getPersistenceMode()).isEqualTo(PersistenceMode.JPA); assertThat(secondary.getRuntimeStrategy()).isEqualTo(RuntimeStrategy.SINGLETON); assertThat(secondary.getMarshallingStrategies().size()).isEqualTo(0); assertThat(secondary.getConfiguration().size()).isEqualTo(0); assertThat(secondary.getEnvironmentEntries().size()).isEqualTo(0); assertThat(secondary.getEventListeners().size()).isEqualTo(0); assertThat(secondary.getGlobals().size()).isEqualTo(0); assertThat(secondary.getTaskEventListeners().size()).isEqualTo(0); assertThat(secondary.getWorkItemHandlers().size()).isEqualTo(1); // and now let's merge them DeploymentDescriptor outcome = DeploymentDescriptorMerger.merge(primary, secondary, MergeMode.MERGE_COLLECTIONS); assertThat(outcome).isNotNull(); assertThat(outcome.getPersistenceUnit()).isEqualTo("org.jbpm.domain"); assertThat(outcome.getAuditPersistenceUnit()).isEqualTo("org.jbpm.domain"); assertThat(outcome.getAuditMode()).isEqualTo(AuditMode.JMS); assertThat(outcome.getPersistenceMode()).isEqualTo(PersistenceMode.JPA); assertThat(outcome.getRuntimeStrategy()).isEqualTo(RuntimeStrategy.SINGLETON); assertThat(outcome.getMarshallingStrategies().size()).isEqualTo(0); assertThat(outcome.getConfiguration().size()).isEqualTo(0); assertThat(outcome.getEnvironmentEntries().size()).isEqualTo(0); assertThat(outcome.getEventListeners().size()).isEqualTo(0); assertThat(outcome.getGlobals().size()).isEqualTo(0); assertThat(outcome.getTaskEventListeners().size()).isEqualTo(0); assertThat(outcome.getWorkItemHandlers().size()).isEqualTo(1); // let's check if the secondary version is preserved NamedObjectModel model = outcome.getWorkItemHandlers().get(0); assertThat(model.getName()).isEqualTo("Log"); assertThat(model.getIdentifier()).isEqualTo("new org.jbpm.process.instance.impl.demo.CustomSystemOutWorkItemHandler()"); }
public static void jsonEscape(CharSequence in, WriteBuffer out) { int length = in.length(); if (length == 0) return; int afterReplacement = 0; for (int i = 0; i < length; i++) { char c = in.charAt(i); String replacement; if (c < 0x80) { replacement = REPLACEMENT_CHARS[c]; if (replacement == null) continue; } else if (c == '\u2028') { replacement = U2028; } else if (c == '\u2029') { replacement = U2029; } else { continue; } if (afterReplacement < i) { // write characters between the last replacement and now out.writeUtf8(in, afterReplacement, i); } out.writeUtf8(replacement, 0, replacement.length()); afterReplacement = i + 1; } if (afterReplacement < length) { out.writeUtf8(in, afterReplacement, length); } }
@Test void testJsonEscape() { WriteBuffer buffer = new WriteBuffer(buf, 0); jsonEscape(new String(new char[] {0, 'a', 1}), buffer); assertThat(buffer).hasToString("\\u0000a\\u0001"); buffer.pos = 0; jsonEscape(new String(new char[] {'"', '\\', '\t', '\b'}), buffer); assertThat(buffer).hasToString("\\\"\\\\\\t\\b"); buffer.pos = 0; jsonEscape(new String(new char[] {'\n', '\r', '\f'}), buffer); assertThat(buffer).hasToString("\\n\\r\\f"); buffer.pos = 0; jsonEscape("\u2028 and \u2029", buffer); assertThat(buffer).hasToString("\\u2028 and \\u2029"); buffer.pos = 0; jsonEscape("\"foo", buffer); assertThat(buffer).hasToString("\\\"foo"); }
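jsonEscape batches its writes: it scans for the next character that needs escaping, flushes the clean run before it, then writes the replacement. A simplified StringBuilder version of the same copy-on-replacement loop (a sketch; the REPLACEMENT_CHARS table is reduced to its common entries):

static String jsonEscape(CharSequence in) {
    StringBuilder out = new StringBuilder(in.length());
    int afterReplacement = 0;
    for (int i = 0; i < in.length(); i++) {
        char c = in.charAt(i);
        String replacement;
        switch (c) {
            case '"':  replacement = "\\\""; break;
            case '\\': replacement = "\\\\"; break;
            case '\n': replacement = "\\n"; break;
            case '\r': replacement = "\\r"; break;
            case '\t': replacement = "\\t"; break;
            case '\b': replacement = "\\b"; break;
            case '\f': replacement = "\\f"; break;
            case '\u2028': replacement = "\\u2028"; break;
            case '\u2029': replacement = "\\u2029"; break;
            default:
                if (c >= 0x20) continue;                 // no escaping needed
                replacement = String.format("\\u%04x", (int) c);
        }
        // flush the unescaped run, then the replacement
        out.append(in, afterReplacement, i).append(replacement);
        afterReplacement = i + 1;
    }
    return out.append(in, afterReplacement, in.length()).toString();
}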
public static <InputT, OutputT> MapElements<InputT, OutputT> via( final InferableFunction<InputT, OutputT> fn) { return new MapElements<>(fn, fn.getInputTypeDescriptor(), fn.getOutputTypeDescriptor()); }
@Test @Category(NeedsRunner.class) public void testMapWrappedLambda() throws Exception { PCollection<Integer> output = pipeline .apply(Create.of(1, 2, 3)) .apply(MapElements.via(new SimpleFunction<Integer, Integer>((Integer i) -> i * 2) {})); PAssert.that(output).containsInAnyOrder(6, 2, 4); pipeline.run(); }
@Override public boolean isInOneOf(Set<?> allowedPrincipals) { throw notAllowed("isInOneOf"); }
@Test(expected=UnsupportedOperationException.class) public void testInOneOf() { ctx.isInOneOf(null); }
public boolean isOther() { return type == Type.OTHER; }
@Test void testPojo() { JsValue jv = je.eval("new com.intuit.karate.core.SimplePojo()"); assertTrue(jv.isOther()); }
@Override protected String processLink(IExpressionContext context, String link) { if (link == null || !linkInSite(externalUrlSupplier.get(), link)) { return link; } if (StringUtils.isBlank(link)) { link = "/"; } if (isAssetsRequest(link)) { return PathUtils.combinePath(THEME_PREVIEW_PREFIX, theme.getName(), link); } // not assets link if (theme.isActive()) { return link; } return UriComponentsBuilder.fromUriString(link) .queryParam(ThemeContext.THEME_PREVIEW_PARAM_NAME, theme.getName()) .build().toString(); }
@Test void processTemplateLinkWithNoActive() { ThemeLinkBuilder themeLinkBuilder = new ThemeLinkBuilder(getTheme(false), externalUrlSupplier); String link = "/post"; String processed = themeLinkBuilder.processLink(null, link); assertThat(processed).isEqualTo("/post?preview-theme=test-theme"); processed = themeLinkBuilder.processLink(null, "/post?foo=bar"); assertThat(processed).isEqualTo("/post?foo=bar&preview-theme=test-theme"); }
public void clear() { throw e; }
@Test void require_that_clear_throws_exception() { assertThrows(NodeVector.ReadOnlyException.class, () -> new TestNodeVector("foo").clear()); }
public static CompositeData parseComposite(URI uri) throws URISyntaxException { CompositeData rc = new CompositeData(); rc.scheme = uri.getScheme(); String ssp = stripPrefix(uri.getRawSchemeSpecificPart().trim(), "//").trim(); parseComposite(uri, rc, ssp); rc.fragment = uri.getFragment(); return rc; }
@Test public void testCompositeWithParenthesisInParam() throws Exception { URI uri = new URI("failover://(test)?updateURIsURL=file:/C:/Dir(1)/a.csv"); CompositeData data = URISupport.parseComposite(uri); assertEquals(1, data.getComponents().length); assertEquals(1, data.getParameters().size()); assertTrue(data.getParameters().containsKey("updateURIsURL")); assertEquals("file:/C:/Dir(1)/a.csv", data.getParameters().get("updateURIsURL")); }
@Override public void addVisualizedAutoTrackActivity(Class<?> activity) { }
@Test public void addVisualizedAutoTrackActivity() { mSensorsAPI.addVisualizedAutoTrackActivity(EmptyActivity.class); Assert.assertFalse(mSensorsAPI.isVisualizedAutoTrackActivity(EmptyActivity.class)); }
public static <T> T createInstance(String userClassName, Class<T> xface, ClassLoader classLoader) { Class<?> theCls; try { theCls = Class.forName(userClassName, true, classLoader); } catch (ClassNotFoundException | NoClassDefFoundError cnfe) { throw new RuntimeException("User class must be in class path", cnfe); } if (!xface.isAssignableFrom(theCls)) { throw new RuntimeException(userClassName + " does not implement " + xface.getName()); } Class<T> tCls = (Class<T>) theCls.asSubclass(xface); T result; try { Constructor<T> meth = (Constructor<T>) constructorCache.get(theCls); if (null == meth) { meth = tCls.getDeclaredConstructor(); meth.setAccessible(true); constructorCache.put(theCls, meth); } result = meth.newInstance(); } catch (InstantiationException ie) { throw new RuntimeException("User class must be concrete", ie); } catch (NoSuchMethodException e) { throw new RuntimeException("User class must have a no-arg constructor", e); } catch (IllegalAccessException e) { throw new RuntimeException("User class must have a public constructor", e); } catch (InvocationTargetException e) { throw new RuntimeException("User class constructor throws exception", e); } return result; }
@Test public void testCreateTypedInstanceUnassignableClass() { try { createInstance(aImplementation.class.getName(), bInterface.class, classLoader); fail("Should fail to load a class that isn't assignable"); } catch (RuntimeException re) { assertEquals( aImplementation.class.getName() + " does not implement " + bInterface.class.getName(), re.getMessage()); } }
public ConsumerBuilder threads(Integer threads) { this.threads = threads; return getThis(); }
@Test void threads() { ConsumerBuilder builder = ConsumerBuilder.newBuilder(); builder.threads(100); Assertions.assertEquals(100, builder.build().getThreads()); }
public static String toString(String unicode) { if (StrUtil.isBlank(unicode)) { return unicode; } final int len = unicode.length(); StringBuilder sb = new StringBuilder(len); int i; int pos = 0; while ((i = StrUtil.indexOfIgnoreCase(unicode, "\\u", pos)) != -1) { sb.append(unicode, pos, i); // write the part before the Unicode escape pos = i; if (i + 5 < len) { char c; try { c = (char) Integer.parseInt(unicode.substring(i + 2, i + 6), 16); sb.append(c); pos = i + 6; // skip the whole Unicode escape } catch (NumberFormatException e) { // invalid Unicode escape, skip it sb.append(unicode, pos, i + 2); // write the "\\u" pos = i + 2; } } else { // not a complete Unicode escape, stop break; } } if (pos < len) { sb.append(unicode, pos, len); } return sb.toString(); }
@Test public void convertTest3() { String str = "aaa\\u111"; String res = UnicodeUtil.toString(str); assertEquals("aaa\\u111", res); }
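Usage of toString above, matching the behavior the test checks: well-formed \uXXXX escapes are decoded, while sequences that are too short or non-hex are preserved verbatim.

UnicodeUtil.toString("\\u4f60\\u597d"); // "你好" (both escapes decode)
UnicodeUtil.toString("aaa\\u111");      // "aaa\\u111" (fewer than 4 hex digits, left as-is)
UnicodeUtil.toString("\\uzzzz");        // "\\uzzzz" (NumberFormatException path keeps the text)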
FileDialogOperation createFileDialogOperation( SelectionOperation selectionOperation ) { switch ( selectionOperation ) { case FILE: return new FileDialogOperation( FileDialogOperation.SELECT_FILE, FileDialogOperation.ORIGIN_SPOON ); case FOLDER: return new FileDialogOperation( FileDialogOperation.SELECT_FOLDER, FileDialogOperation.ORIGIN_SPOON ); case FILE_OR_FOLDER: return new FileDialogOperation( FileDialogOperation.SELECT_FILE_FOLDER, FileDialogOperation.ORIGIN_SPOON ); case SAVE: return new FileDialogOperation( FileDialogOperation.SAVE, FileDialogOperation.ORIGIN_SPOON ); case SAVE_TO: return new FileDialogOperation( FileDialogOperation.SAVE_TO, FileDialogOperation.ORIGIN_SPOON ); case SAVE_TO_FILE_FOLDER: return new FileDialogOperation( FileDialogOperation.SAVE_TO_FILE_FOLDER, FileDialogOperation.ORIGIN_SPOON ); case OPEN: return new FileDialogOperation( FileDialogOperation.OPEN, FileDialogOperation.ORIGIN_SPOON ); default: throw new IllegalArgumentException( "Unexpected SelectionOperation: " + selectionOperation ); } }
@Test public void testCreateFileDialogOperation() { // TEST : SELECT file FileDialogOperation fdo1 = testInstance.createFileDialogOperation( SelectionOperation.FILE ); assertNotNull( fdo1 ); assertEquals( FileDialogOperation.SELECT_FILE, fdo1.getCommand() ); assertEquals( FileDialogOperation.ORIGIN_SPOON, fdo1.getOrigin() ); // TEST : SELECT folder FileDialogOperation fdo2 = testInstance.createFileDialogOperation( SelectionOperation.FOLDER ); assertNotNull( fdo2 ); assertEquals( FileDialogOperation.SELECT_FOLDER, fdo2.getCommand() ); assertEquals( FileDialogOperation.ORIGIN_SPOON, fdo2.getOrigin() ); // TEST : SELECT file or folder FileDialogOperation fdo3 = testInstance.createFileDialogOperation( SelectionOperation.FILE_OR_FOLDER ); assertNotNull( fdo3 ); assertEquals( FileDialogOperation.SELECT_FILE_FOLDER, fdo3.getCommand() ); assertEquals( FileDialogOperation.ORIGIN_SPOON, fdo3.getOrigin() ); }
@Override public CompletionStage<Void> write(int segment, MarshallableEntry<? extends K, ? extends V> entry) { return handler.write(segment, entry); }
@Test(groups = "stress") public void testConcurrentWrite() throws InterruptedException { final int THREADS = 8; final AtomicBoolean run = new AtomicBoolean(true); final AtomicInteger written = new AtomicInteger(); final CountDownLatch started = new CountDownLatch(THREADS); final CountDownLatch finished = new CountDownLatch(THREADS); for (int i = 0; i < THREADS; ++i) { final int thread = i; fork(() -> { try { started.countDown(); int i1 = 0; while (run.get()) { InternalCacheEntry entry = TestInternalCacheEntryFactory.create("k" + i1, "v" + i1); MarshallableEntry me = MarshalledEntryUtil.create(entry, getMarshaller()); try { store.write(me); ++i1; int prev; do { prev = written.get(); if ((prev & (1 << thread)) != 0) break; } while (written.compareAndSet(prev, prev | (1 << thread))); } catch (PersistenceException e) { // when the store is stopped, exceptions are thrown } } } catch (Exception e) { log.error("Failed", e); throw new RuntimeException(e); } finally { finished.countDown(); } }); } if (finished.await(1, TimeUnit.SECONDS)) { fail("Test shouldn't have finished yet"); } run.set(false); if (!finished.await(30, TimeUnit.SECONDS)) { fail("Test should have finished!"); } assertEquals("pre", (1 << THREADS) - 1, written.get()); }
public void assignBroker(NamespaceName nsname, BrokerStatus brkStatus, SortedSet<BrokerStatus> primaryCandidates, SortedSet<BrokerStatus> secondaryCandidates, SortedSet<BrokerStatus> sharedCandidates) { NamespaceIsolationPolicy nsPolicy = this.getPolicyByNamespace(nsname); BrokerAssignment brokerAssignment = this.getBrokerAssignment(nsPolicy, brkStatus.getBrokerAddress()); if (brokerAssignment == BrokerAssignment.primary) { // Only add to candidates if allowed by policy if (nsPolicy != null && nsPolicy.isPrimaryBrokerAvailable(brkStatus)) { primaryCandidates.add(brkStatus); } } else if (brokerAssignment == BrokerAssignment.secondary) { secondaryCandidates.add(brkStatus); } else if (brokerAssignment == BrokerAssignment.shared) { sharedCandidates.add(brkStatus); } }
@Test public void testBrokerAssignment() throws Exception { NamespaceIsolationPolicies policies = this.getDefaultTestPolicies(); NamespaceName ns = NamespaceName.get("pulsar/use/testns-1"); SortedSet<BrokerStatus> primaryCandidates = new TreeSet<>(); BrokerStatus primary = BrokerStatus.builder() .brokerAddress("prod1-broker1.messaging.use.example.com") .active(true) .loadFactor(0) .build(); BrokerStatus secondary = BrokerStatus.builder() .brokerAddress("prod1-broker4.messaging.use.example.com") .active(true) .loadFactor(0) .build(); BrokerStatus shared = BrokerStatus.builder() .brokerAddress("use.example.com") .active(true) .loadFactor(0) .build(); SortedSet<BrokerStatus> secondaryCandidates = new TreeSet<>(); SortedSet<BrokerStatus> sharedCandidates = new TreeSet<>(); policies.assignBroker(ns, primary, primaryCandidates, secondaryCandidates, sharedCandidates); assertEquals(primaryCandidates.size(), 1); assertEquals(secondaryCandidates.size(), 0); assertEquals(sharedCandidates.size(), 0); assertEquals(primary, primaryCandidates.first()); policies.assignBroker(ns, secondary, primaryCandidates, secondaryCandidates, sharedCandidates); assertEquals(primaryCandidates.size(), 1); assertEquals(secondaryCandidates.size(), 1); assertEquals(sharedCandidates.size(), 0); assertEquals(secondary, secondaryCandidates.first()); policies.assignBroker(NamespaceName.get("pulsar/use1/testns-1"), shared, primaryCandidates, secondaryCandidates, sharedCandidates); assertEquals(primaryCandidates.size(), 1); assertEquals(secondaryCandidates.size(), 1); assertEquals(sharedCandidates.size(), 1); assertEquals(shared, sharedCandidates.first()); }
public static <T> JSONSchema<T> of(SchemaDefinition<T> schemaDefinition) { SchemaReader<T> reader = schemaDefinition.getSchemaReaderOpt() .orElseGet(() -> new JacksonJsonReader<>(jsonMapper(), schemaDefinition.getPojo())); SchemaWriter<T> writer = schemaDefinition.getSchemaWriterOpt() .orElseGet(() -> new JacksonJsonWriter<>(jsonMapper())); return new JSONSchema<>(parseSchemaInfo(schemaDefinition, SchemaType.JSON), schemaDefinition.getPojo(), reader, writer); }
@Test public void testNotAllowNullSchema() throws JSONException { JSONSchema<Foo> jsonSchema = JSONSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).withAlwaysAllowNull(false).build()); Assert.assertEquals(jsonSchema.getSchemaInfo().getType(), SchemaType.JSON); Schema.Parser parser = new Schema.Parser(); String schemaJson = new String(jsonSchema.getSchemaInfo().getSchema()); assertJSONEqual(schemaJson, SCHEMA_JSON_NOT_ALLOW_NULL); Schema schema = parser.parse(schemaJson); for (String fieldName : FOO_FIELDS) { Schema.Field field = schema.getField(fieldName); Assert.assertNotNull(field); if (field.name().equals("field4")) { Assert.assertNotNull(field.schema().getTypes().get(1).getField("field1")); } if (field.name().equals("fieldUnableNull")) { Assert.assertNotNull(field.schema().getType()); } } }
@Override public <T> T clone(T object) { if (object instanceof String) { return object; } else if (object instanceof Collection) { Object firstElement = findFirstNonNullElement((Collection) object); if (firstElement != null && !(firstElement instanceof Serializable)) { JavaType type = TypeFactory.defaultInstance().constructParametricType(object.getClass(), firstElement.getClass()); return objectMapperWrapper.fromBytes(objectMapperWrapper.toBytes(object), type); } } else if (object instanceof Map) { Map.Entry firstEntry = this.findFirstNonNullEntry((Map) object); if (firstEntry != null) { Object key = firstEntry.getKey(); Object value = firstEntry.getValue(); if (!(key instanceof Serializable) || !(value instanceof Serializable)) { JavaType type = TypeFactory.defaultInstance().constructParametricType(object.getClass(), key.getClass(), value.getClass()); return (T) objectMapperWrapper.fromBytes(objectMapperWrapper.toBytes(object), type); } } } else if (object instanceof JsonNode) { return (T) ((JsonNode) object).deepCopy(); } if (object instanceof Serializable) { try { return (T) SerializationHelper.clone((Serializable) object); } catch (SerializationException e) { //it is possible that object itself implements java.io.Serializable, but underlying structure does not //in this case we switch to the other JSON marshaling strategy which doesn't use the Java serialization } } return jsonClone(object); }
@Test public void should_clone_non_serializable_object() { Object original = new NonSerializableObject("value"); Object cloned = serializer.clone(original); assertEquals(original, cloned); assertNotSame(original, cloned); }
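When the Serializable fast path above fails or does not apply, clone falls back to a JSON round trip. Under the assumption that objectMapperWrapper delegates to a Jackson ObjectMapper, the fallback is equivalent to this sketch:

@SuppressWarnings("unchecked")
static <T> T jsonClone(ObjectMapper mapper, T object) throws IOException {
    byte[] bytes = mapper.writeValueAsBytes(object);        // serialize to JSON
    return (T) mapper.readValue(bytes, object.getClass());  // rebuild a fresh instance
}

This is why the test can expect an equal but not-same copy of a non-Serializable object: equality comes from the round-tripped field values, while the identity is necessarily new.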
@Override public boolean supportsOrderByUnrelated() { return false; }
@Test void assertSupportsOrderByUnrelated() { assertFalse(metaData.supportsOrderByUnrelated()); }
public static Builder newIntegerColumnDefBuilder() { return new Builder(); }
@Test public void builder_build_throws_NPE_if_no_name_was_set() { IntegerColumnDef.Builder builder = newIntegerColumnDefBuilder(); assertThatThrownBy(builder::build) .isInstanceOf(NullPointerException.class) .hasMessage("Column name cannot be null"); }
static Class<?> getCommandClass(String request) { try { return Class.forName("com.iluwatar.front.controller." + request + "Command"); } catch (ClassNotFoundException e) { return UnknownCommand.class; } }
@Test void testGetCommandClassUnknown() { Class<?> commandClass = Dispatcher.getCommandClass("Unknown"); assertNotNull(commandClass); assertEquals(UnknownCommand.class, commandClass); }
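getCommandClass resolves commands purely by naming convention and never throws, so a dispatcher can always instantiate whatever comes back. A hedged instantiation sketch (assumes each *Command class has a no-arg constructor, as the fallback UnknownCommand does):

static Object newCommand(String request) throws ReflectiveOperationException {
    // "Unknown" resolves to UnknownCommand.class via the catch branch above
    return Dispatcher.getCommandClass(request).getDeclaredConstructor().newInstance();
}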
public void onChange(Multimap<QProfileName, ActiveRuleChange> changedProfiles, long startDate, long endDate) { if (config.getBoolean(DISABLE_NOTIFICATION_ON_BUILT_IN_QPROFILES).orElse(false)) { return; } BuiltInQPChangeNotificationBuilder builder = new BuiltInQPChangeNotificationBuilder(); changedProfiles.keySet().stream() .map(changedProfile -> { String profileName = changedProfile.getName(); Language language = languages.get(changedProfile.getLanguage()); Collection<ActiveRuleChange> activeRuleChanges = changedProfiles.get(changedProfile); int newRules = (int) activeRuleChanges.stream().map(ActiveRuleChange::getType).filter(ACTIVATED::equals).count(); int updatedRules = (int) activeRuleChanges.stream().map(ActiveRuleChange::getType).filter(UPDATED::equals).count(); int removedRules = (int) activeRuleChanges.stream().map(ActiveRuleChange::getType).filter(DEACTIVATED::equals).count(); return Profile.newBuilder() .setProfileName(profileName) .setLanguageKey(language.getKey()) .setLanguageName(language.getName()) .setNewRules(newRules) .setUpdatedRules(updatedRules) .setRemovedRules(removedRules) .setStartDate(startDate) .setEndDate(endDate) .build(); }) .forEach(builder::addProfile); notificationManager.scheduleForSending(builder.build()); }
@Test public void add_profile_to_notification_for_added_rules() { enableNotificationInGlobalSettings(); Multimap<QProfileName, ActiveRuleChange> profiles = ArrayListMultimap.create(); Languages languages = new Languages(); Tuple expectedTuple = addProfile(profiles, languages, ACTIVATED); BuiltInQualityProfilesUpdateListener underTest = new BuiltInQualityProfilesUpdateListener(notificationManager, languages, settings.asConfig()); underTest.onChange(profiles, 0, 1); ArgumentCaptor<Notification> notificationArgumentCaptor = ArgumentCaptor.forClass(Notification.class); verify(notificationManager).scheduleForSending(notificationArgumentCaptor.capture()); verifyNoMoreInteractions(notificationManager); assertThat(BuiltInQPChangeNotificationBuilder.parse(notificationArgumentCaptor.getValue()).getProfiles()) .extracting(Profile::getProfileName, Profile::getLanguageKey, Profile::getLanguageName, Profile::getNewRules) .containsExactlyInAnyOrder(expectedTuple); }
@Override protected InputStream dumpSnapshot() { Map<Service, ServiceMetadata> snapshot = metadataManager.getServiceMetadataSnapshot(); return new ByteArrayInputStream(serializer.serialize(snapshot)); }
@Test void testDumpSnapshot() { InputStream inputStream = serviceMetadataSnapshotOperation.dumpSnapshot(); assertNotNull(inputStream); }
Mono<Void> sendNotification(NotificationElement notificationElement) { var descriptor = notificationElement.descriptor(); var subscriber = notificationElement.subscriber(); final var notifierExtName = descriptor.getSpec().getNotifierExtName(); return notificationContextFrom(notificationElement) .flatMap(notificationContext -> notificationSender.sendNotification(notifierExtName, notificationContext) .onErrorResume(throwable -> { log.error( "Failed to send notification to subscriber [{}] through notifier [{}]", subscriber, descriptor.getSpec().getDisplayName(), throwable); return Mono.empty(); }) ) .then(); }
@Test public void testSendNotification() { var spyNotificationCenter = spy(notificationCenter); var context = mock(NotificationContext.class); doReturn(Mono.just(context)) .when(spyNotificationCenter).notificationContextFrom(any()); when(notificationSender.sendNotification(eq("fake-notifier-ext"), any())) .thenReturn(Mono.empty()); var element = mock(DefaultNotificationCenter.NotificationElement.class); var mockDescriptor = mock(NotifierDescriptor.class); when(element.descriptor()).thenReturn(mockDescriptor); when(element.subscriber()).thenReturn(mock(Subscriber.class)); var notifierDescriptorSpec = mock(NotifierDescriptor.Spec.class); when(mockDescriptor.getSpec()).thenReturn(notifierDescriptorSpec); when(notifierDescriptorSpec.getNotifierExtName()).thenReturn("fake-notifier-ext"); spyNotificationCenter.sendNotification(element).block(); verify(spyNotificationCenter).notificationContextFrom(any()); verify(notificationSender).sendNotification(any(), any()); }
@Override protected String getScheme() { return config.getScheme(); }
@Test public void testGetSchemeWithS3Options() { S3FileSystem s3FileSystem = new S3FileSystem(s3Options()); assertEquals("s3", s3FileSystem.getScheme()); }
@Override public Set<String> initialize() { try { checkpointFileCache.putAll(checkpointFile.read()); } catch (final IOException e) { throw new StreamsException("Failed to read checkpoints for global state stores", e); } final Set<String> changelogTopics = new HashSet<>(); for (final StateStore stateStore : topology.globalStateStores()) { final String sourceTopic = storeToChangelogTopic.get(stateStore.name()); changelogTopics.add(sourceTopic); stateStore.init((StateStoreContext) globalProcessorContext, stateStore); } // make sure each topic-partition from checkpointFileCache is associated with a global state store checkpointFileCache.keySet().forEach(tp -> { if (!changelogTopics.contains(tp.topic())) { log.error( "Encountered a topic-partition in the global checkpoint file not associated with any global" + " state store, topic-partition: {}, checkpoint file: {}. If this topic-partition is no longer valid," + " an application reset and state store directory cleanup will be required.", tp.topic(), checkpointFile ); throw new StreamsException("Encountered a topic-partition not associated with any global state store"); } }); return Collections.unmodifiableSet(globalStoreNames); }
@Test public void shouldReturnInitializedStoreNames() { final Set<String> storeNames = stateManager.initialize(); assertEquals(Utils.mkSet(storeName1, storeName2, storeName3, storeName4, storeName5), storeNames); }
@SuppressWarnings("unchecked") public static <T> NFAFactory<T> compileFactory( final Pattern<T, ?> pattern, boolean timeoutHandling) { if (pattern == null) { // return a factory for empty NFAs return new NFAFactoryImpl<>( 0, Collections.<String, Long>emptyMap(), Collections.<State<T>>emptyList(), timeoutHandling); } else { final NFAFactoryCompiler<T> nfaFactoryCompiler = new NFAFactoryCompiler<>(pattern); nfaFactoryCompiler.compileFactory(); return new NFAFactoryImpl<>( nfaFactoryCompiler.getWindowTime(), nfaFactoryCompiler.getWindowTimes(), nfaFactoryCompiler.getStates(), timeoutHandling); } }
@Test public void testCheckPatternWindowTimes() { expectedException.expect(MalformedPatternException.class); expectedException.expectMessage( "The window length between the previous and current event cannot be larger than the window length between the first and last event for a Pattern."); Pattern<Event, ?> pattern = Pattern.<Event>begin("start") .followedBy("middle") .within(Time.seconds(3), WithinType.PREVIOUS_AND_CURRENT) .followedBy("then") .within(Time.seconds(1), WithinType.PREVIOUS_AND_CURRENT) .followedBy("end") .within(Time.milliseconds(2)); NFACompiler.NFAFactoryCompiler<Event> factory = new NFACompiler.NFAFactoryCompiler<>(pattern); factory.compileFactory(); }
@ConstantFunction(name = "int_divide", argTypes = {LARGEINT, LARGEINT}, returnType = LARGEINT) public static ConstantOperator intDivideLargeInt(ConstantOperator first, ConstantOperator second) { return ConstantOperator.createLargeInt(first.getLargeInt().divide(second.getLargeInt())); }
@Test public void intDivideLargeInt() { assertEquals("1", ScalarOperatorFunctions.intDivideLargeInt(O_LI_100, O_LI_100).getLargeInt().toString()); }
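The LARGEINT arithmetic above bottoms out in java.math.BigInteger, so a JDK-only check of the division semantics the function inherits may help; divide() truncates toward zero.

import java.math.BigInteger;

class LargeIntDivideDemo {
    public static void main(String[] args) {
        System.out.println(new BigInteger("100").divide(new BigInteger("100"))); // 1, as asserted above
        System.out.println(new BigInteger("-7").divide(new BigInteger("2")));    // -3: truncation toward zero
    }
}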
public boolean isOpen() { return opened; }
@Test void isOpenTest() throws InterpreterException { InterpreterResult interpreterResult = new InterpreterResult(InterpreterResult.Code.SUCCESS, ""); when(interpreter.interpret(any(String.class), any(InterpreterContext.class))) .thenReturn(interpreterResult); LazyOpenInterpreter lazyOpenInterpreter = new LazyOpenInterpreter(interpreter); assertFalse(lazyOpenInterpreter.isOpen(), "Interpreter is not open"); InterpreterContext interpreterContext = mock(InterpreterContext.class); lazyOpenInterpreter.interpret("intp 1", interpreterContext); assertTrue(lazyOpenInterpreter.isOpen(), "Interpreter is open"); }
@Override public <T> Optional<T> valueAs(Class<T> type) { checkNotNull(type); return id.lastComponentAs(type); }
@Test public void testValueAs() { DiscreteResource resource = Resources.discrete(D1).resource(); Optional<DeviceId> volume = resource.valueAs(DeviceId.class); assertThat(volume.get(), is(D1)); }
public static void determineTimestamps(Configuration job, Map<URI, FileStatus> statCache) throws IOException { URI[] tarchives = JobContextImpl.getCacheArchives(job); if (tarchives != null) { FileStatus status = getFileStatus(job, tarchives[0], statCache); StringBuilder archiveFileSizes = new StringBuilder(String.valueOf(status.getLen())); StringBuilder archiveTimestamps = new StringBuilder(String.valueOf(status.getModificationTime())); for (int i = 1; i < tarchives.length; i++) { status = getFileStatus(job, tarchives[i], statCache); archiveFileSizes.append(","); archiveFileSizes.append(String.valueOf(status.getLen())); archiveTimestamps.append(","); archiveTimestamps.append(String.valueOf(status.getModificationTime())); } job.set(MRJobConfig.CACHE_ARCHIVES_SIZES, archiveFileSizes.toString()); setArchiveTimestamps(job, archiveTimestamps.toString()); } URI[] tfiles = JobContextImpl.getCacheFiles(job); if (tfiles != null) { FileStatus status = getFileStatus(job, tfiles[0], statCache); StringBuilder fileSizes = new StringBuilder(String.valueOf(status.getLen())); StringBuilder fileTimestamps = new StringBuilder(String.valueOf( status.getModificationTime())); for (int i = 1; i < tfiles.length; i++) { status = getFileStatus(job, tfiles[i], statCache); fileSizes.append(","); fileSizes.append(String.valueOf(status.getLen())); fileTimestamps.append(","); fileTimestamps.append(String.valueOf(status.getModificationTime())); } job.set(MRJobConfig.CACHE_FILES_SIZES, fileSizes.toString()); setFileTimestamps(job, fileTimestamps.toString()); } }
@Test public void testDetermineTimestamps() throws IOException { Job job = Job.getInstance(conf); job.addCacheFile(firstCacheFile.toUri()); job.addCacheFile(secondCacheFile.toUri()); Configuration jobConf = job.getConfiguration(); Map<URI, FileStatus> statCache = new HashMap<>(); ClientDistributedCacheManager.determineTimestamps(jobConf, statCache); FileStatus firstStatus = statCache.get(firstCacheFile.toUri()); FileStatus secondStatus = statCache.get(secondCacheFile.toUri()); assertNotNull(firstCacheFile + " was not found in the stats cache", firstStatus); assertNotNull(secondCacheFile + " was not found in the stats cache", secondStatus); assertEquals("Missing/extra entries found in the stats cache", 2, statCache.size()); String expected = firstStatus.getModificationTime() + "," + secondStatus.getModificationTime(); assertEquals(expected, jobConf.get(MRJobConfig.CACHE_FILE_TIMESTAMPS)); job = Job.getInstance(conf); job.addCacheFile(new Path(TEST_VISIBILITY_CHILD_DIR, "*").toUri()); jobConf = job.getConfiguration(); statCache.clear(); ClientDistributedCacheManager.determineTimestamps(jobConf, statCache); FileStatus thirdStatus = statCache.get(TEST_VISIBILITY_CHILD_DIR.toUri()); assertEquals("Missing/extra entries found in the stats cache", 1, statCache.size()); assertNotNull(TEST_VISIBILITY_CHILD_DIR + " was not found in the stats cache", thirdStatus); expected = Long.toString(thirdStatus.getModificationTime()); assertEquals("Incorrect timestamp for " + TEST_VISIBILITY_CHILD_DIR, expected, jobConf.get(MRJobConfig.CACHE_FILE_TIMESTAMPS)); }
private void tick() { scheduledFuture = executor.schedule(this::run, delayMillis, MILLISECONDS); }
@Test(enabled = false) public void testTick() throws Exception { int numberOfTicks = 2; long durationBetweenTicksInSeconds = 2; CountDownLatch latch = new CountDownLatch(3); Runnable runnable = latch::countDown; try (PeriodicTaskExecutor executor = new PeriodicTaskExecutor(SECONDS.toMillis(durationBetweenTicksInSeconds), 500, executorService, runnable, i -> i)) { executor.start(); Stopwatch stopwatch = Stopwatch.createStarted(); latch.await(10, SECONDS); stopwatch.stop(); assertEquals((numberOfTicks * durationBetweenTicksInSeconds), stopwatch.elapsed(SECONDS)); assertEquals(latch.getCount(), 0); // latch was counted down 3 times } }
public static RestartBackoffTimeStrategy.Factory createRestartBackoffTimeStrategyFactory( final RestartStrategies.RestartStrategyConfiguration jobRestartStrategyConfiguration, final Configuration jobConfiguration, final Configuration clusterConfiguration, final boolean isCheckpointingEnabled) { checkNotNull(jobRestartStrategyConfiguration); checkNotNull(jobConfiguration); checkNotNull(clusterConfiguration); return getJobRestartStrategyFactory(jobRestartStrategyConfiguration) .orElse( getRestartStrategyFactoryFromConfig(jobConfiguration) .orElse( (getRestartStrategyFactoryFromConfig(clusterConfiguration) .orElse( getDefaultRestartStrategyFactory( isCheckpointingEnabled))))); }
@Test void testFailureRateRestartStrategySpecifiedInExecutionConfig() { final Configuration conf = new Configuration(); conf.set(RestartStrategyOptions.RESTART_STRATEGY, FIXED_DELAY.getMainValue()); final RestartBackoffTimeStrategy.Factory factory = RestartBackoffTimeStrategyFactoryLoader.createRestartBackoffTimeStrategyFactory( RestartStrategies.failureRateRestart( 1, Duration.ofMillis(1000), Duration.ofMillis(1000)), conf, conf, false); assertThat(factory) .isInstanceOf( FailureRateRestartBackoffTimeStrategy .FailureRateRestartBackoffTimeStrategyFactory.class); }
@Override public Object get(PropertyKey key) { return get(key, ConfigurationValueOptions.defaults()); }
@Test public void noWhitespaceTrailingInSiteProperties() throws Exception { Properties siteProps = new Properties(); siteProps.setProperty(PropertyKey.MASTER_HOSTNAME.toString(), " host-1 "); siteProps.setProperty(PropertyKey.WEB_THREADS.toString(), "\t123\t"); File propsFile = mFolder.newFile(Constants.SITE_PROPERTIES); siteProps.store(new FileOutputStream(propsFile), "tmp site properties file"); // Avoid interference from system properties. Reset SITE_CONF_DIR to include the temp // site-properties file HashMap<String, String> sysProps = new HashMap<>(); sysProps.put(PropertyKey.SITE_CONF_DIR.toString(), mFolder.getRoot().getAbsolutePath()); sysProps.put(PropertyKey.TEST_MODE.toString(), "false"); try (Closeable p = new SystemPropertyRule(sysProps).toResource()) { resetConf(); assertEquals("host-1", mConfiguration.get(PropertyKey.MASTER_HOSTNAME)); assertEquals(123, mConfiguration.get(PropertyKey.WEB_THREADS)); } }
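The test above depends on raw site-property values being trimmed before type conversion; a minimal JDK-only sketch of that trim-then-parse step, with the ordering itself being the assumption:

class TrimThenParseSketch {
    public static void main(String[] args) {
        String rawHost = " host-1 ";
        String rawThreads = "\t123\t";
        System.out.println("[" + rawHost.trim() + "]");          // [host-1]
        System.out.println(Integer.parseInt(rawThreads.trim())); // 123
    }
}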
@Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Config config = (Config) o; return entries.equals(config.entries); }
@Test public void testEquals() { ConfigEntry ce0 = new ConfigEntry("abc", null, ConfigEntry.ConfigSource.DEFAULT_CONFIG, false, false, null, null, null); ConfigEntry ce1 = new ConfigEntry("abc", null, ConfigEntry.ConfigSource.DYNAMIC_BROKER_CONFIG, false, false, null, null, null); assertNotEquals(ce0, ce1); }
public static boolean isViewIgnored(View view) { try { // basic validation if (view == null) { return true; } // the ViewType is ignored List<Class<?>> mIgnoredViewTypeList = SensorsDataAPI.sharedInstance().getIgnoredViewTypeList(); if (mIgnoredViewTypeList != null) { for (Class<?> clazz : mIgnoredViewTypeList) { if (clazz.isAssignableFrom(view.getClass())) { return true; } } } // the View itself is ignored return "1".equals(view.getTag(R.id.sensors_analytics_tag_view_ignored)); } catch (Exception e) { SALog.printStackTrace(e); return true; } }
@Test public void isViewIgnored() { SensorsDataAPI sensorsDataAPI = SAHelper.initSensors(mApplication); TextView textView1 = new TextView(mApplication); textView1.setText("child1"); sensorsDataAPI.ignoreView(textView1); Assert.assertTrue(SAViewUtils.isViewIgnored(textView1)); }
HasRuleEngineProfile getRuleEngineProfileForEntityOrElseNull(TenantId tenantId, EntityId entityId, TbMsg tbMsg) { if (entityId.getEntityType().equals(EntityType.DEVICE)) { if (TbMsgType.ENTITY_DELETED.equals(tbMsg.getInternalType())) { try { Device deletedDevice = JacksonUtil.fromString(tbMsg.getData(), Device.class); if (deletedDevice == null) { return null; } return deviceProfileCache.get(tenantId, deletedDevice.getDeviceProfileId()); } catch (Exception e) { log.warn("[{}][{}] Failed to deserialize device: {}", tenantId, entityId, tbMsg, e); return null; } } else { return deviceProfileCache.get(tenantId, new DeviceId(entityId.getId())); } } else if (entityId.getEntityType().equals(EntityType.DEVICE_PROFILE)) { return deviceProfileCache.get(tenantId, new DeviceProfileId(entityId.getId())); } else if (entityId.getEntityType().equals(EntityType.ASSET)) { if (TbMsgType.ENTITY_DELETED.equals(tbMsg.getInternalType())) { try { Asset deletedAsset = JacksonUtil.fromString(tbMsg.getData(), Asset.class); if (deletedAsset == null) { return null; } return assetProfileCache.get(tenantId, deletedAsset.getAssetProfileId()); } catch (Exception e) { log.warn("[{}][{}] Failed to deserialize asset: {}", tenantId, entityId, tbMsg, e); return null; } } else { return assetProfileCache.get(tenantId, new AssetId(entityId.getId())); } } else if (entityId.getEntityType().equals(EntityType.ASSET_PROFILE)) { return assetProfileCache.get(tenantId, new AssetProfileId(entityId.getId())); } return null; }
@Test public void testGetRuleEngineProfileForUpdatedAndDeletedAsset() { AssetId assetId = new AssetId(UUID.randomUUID()); TenantId tenantId = new TenantId(UUID.randomUUID()); AssetProfileId assetProfileId = new AssetProfileId(UUID.randomUUID()); Asset asset = new Asset(assetId); asset.setAssetProfileId(assetProfileId); // asset updated TbMsg tbMsg = TbMsg.builder().internalType(TbMsgType.ENTITY_UPDATED).build(); ((DefaultTbClusterService) clusterService).getRuleEngineProfileForEntityOrElseNull(tenantId, assetId, tbMsg); verify(assetProfileCache, times(1)).get(tenantId, assetId); // asset deleted tbMsg = TbMsg.builder().internalType(TbMsgType.ENTITY_DELETED).data(JacksonUtil.toString(asset)).build(); ((DefaultTbClusterService) clusterService).getRuleEngineProfileForEntityOrElseNull(tenantId, assetId, tbMsg); verify(assetProfileCache, times(1)).get(tenantId, assetProfileId); }
public abstract ImmutableSet<String> objectClasses();
@Test void objectClasses() { final LDAPEntry entry = LDAPEntry.builder() .dn("cn=jane,ou=people,dc=example,dc=com") .base64UniqueId(Base64.encode("unique-id")) .addAttribute("foo", "bar") .build(); assertThat(entry.objectClasses()).isEmpty(); final LDAPEntry entry2 = LDAPEntry.builder() .dn("cn=jane,ou=people,dc=example,dc=com") .base64UniqueId(Base64.encode("unique-id")) .addAttribute("foo", "bar") .objectClasses(Collections.singleton("inetOrgPerson")) .build(); assertThat(entry2.objectClasses()).containsExactlyInAnyOrder("inetOrgPerson"); }
@Override public ProtobufSystemInfo.Section toProtobuf() { ProtobufSystemInfo.Section.Builder protobuf = ProtobufSystemInfo.Section.newBuilder(); protobuf.setName("System"); setAttribute(protobuf, "Server ID", server.getId()); setAttribute(protobuf, "Version", getVersion()); setAttribute(protobuf, "Edition", sonarRuntime.getEdition().getLabel()); setAttribute(protobuf, NCLOC.getName(), statisticsSupport.getLinesOfCode()); setAttribute(protobuf, "Container", containerSupport.isRunningInContainer()); setAttribute(protobuf, "External Users and Groups Provisioning", commonSystemInformation.getManagedInstanceProviderName()); setAttribute(protobuf, "External User Authentication", commonSystemInformation.getExternalUserAuthentication()); addIfNotEmpty(protobuf, "Accepted external identity providers", commonSystemInformation.getEnabledIdentityProviders()); addIfNotEmpty(protobuf, "External identity providers whose users are allowed to sign themselves up", commonSystemInformation.getAllowsToSignUpEnabledIdentityProviders()); setAttribute(protobuf, "High Availability", false); setAttribute(protobuf, "Official Distribution", officialDistribution.check()); setAttribute(protobuf, "Force authentication", commonSystemInformation.getForceAuthentication()); setAttribute(protobuf, "Home Dir", config.get(PATH_HOME.getKey()).orElse(null)); setAttribute(protobuf, "Data Dir", config.get(PATH_DATA.getKey()).orElse(null)); setAttribute(protobuf, "Temp Dir", config.get(PATH_TEMP.getKey()).orElse(null)); setAttribute(protobuf, "Processors", Runtime.getRuntime().availableProcessors()); return protobuf.build(); }
@Test public void return_nb_of_processors() { ProtobufSystemInfo.Section protobuf = underTest.toProtobuf(); assertThat(attribute(protobuf, "Processors").getLongValue()).isPositive(); }
@Override public void execute(Exchange exchange) throws SmppException { byte[] message = getShortMessage(exchange.getIn()); ReplaceSm replaceSm = createReplaceSmTempate(exchange); replaceSm.setShortMessage(message); if (log.isDebugEnabled()) { log.debug("Sending replacement command for a short message for exchange id '{}' and message id '{}'", exchange.getExchangeId(), replaceSm.getMessageId()); } try { session.replaceShortMessage( replaceSm.getMessageId(), TypeOfNumber.valueOf(replaceSm.getSourceAddrTon()), NumberingPlanIndicator.valueOf(replaceSm.getSourceAddrNpi()), replaceSm.getSourceAddr(), replaceSm.getScheduleDeliveryTime(), replaceSm.getValidityPeriod(), new RegisteredDelivery(replaceSm.getRegisteredDelivery()), replaceSm.getSmDefaultMsgId(), replaceSm.getShortMessage()); } catch (Exception e) { throw new SmppException(e); } if (log.isDebugEnabled()) { log.debug("Sent replacement command for a short message for exchange id '{}' and message id '{}'", exchange.getExchangeId(), replaceSm.getMessageId()); } Message rspMsg = ExchangeHelper.getResultMessage(exchange); rspMsg.setHeader(SmppConstants.ID, replaceSm.getMessageId()); }
@Test public void latin1DataCodingOverridesEightBitAlphabet() throws Exception { final int latin1DataCoding = 0x03; /* ISO-8859-1 (Latin1) */ byte[] body = { (byte) 0xFF, 'A', 'B', (byte) 0x00, (byte) 0xFF, (byte) 0x7F, 'C', (byte) 0xFF }; byte[] bodyNarrowed = { '?', 'A', 'B', '\0', '?', (byte) 0x7F, 'C', '?' }; Exchange exchange = new DefaultExchange( new DefaultCamelContext(), ExchangePattern.InOut); exchange.getIn().setHeader(SmppConstants.COMMAND, "ReplaceSm"); exchange.getIn().setHeader(SmppConstants.ALPHABET, Alphabet.ALPHA_8_BIT.value()); exchange.getIn().setHeader(SmppConstants.DATA_CODING, latin1DataCoding); exchange.getIn().setBody(body); command.execute(exchange); verify(session).replaceShortMessage((String) isNull(), eq(TypeOfNumber.UNKNOWN), eq(NumberingPlanIndicator.UNKNOWN), eq("1616"), (String) isNull(), (String) isNull(), eq(new RegisteredDelivery(SMSCDeliveryReceipt.SUCCESS_FAILURE)), eq((byte) 0), eq(bodyNarrowed)); }
@Override public void registerRemote(RemoteInstance remoteInstance) throws ServiceRegisterException { if (needUsingInternalAddr()) { remoteInstance = new RemoteInstance( new Address(config.getInternalComHost(), config.getInternalComPort(), true)); } this.selfAddress = remoteInstance.getAddress(); try { AgentClient agentClient = client.agentClient(); Registration registration = ImmutableRegistration.builder() .id(remoteInstance.getAddress().toString()) .name(serviceName) .address(remoteInstance.getAddress().getHost()) .port(remoteInstance.getAddress().getPort()) .check(Registration.RegCheck.grpc( remoteInstance.getAddress() .getHost() + ":" + remoteInstance .getAddress() .getPort(), 5 )) // gRPC health check every 5 seconds .build(); agentClient.register(registration); healthChecker.health(); } catch (Throwable e) { healthChecker.unHealth(e); throw new ServiceRegisterException(e.getMessage()); } }
@Test public void registerSelfRemote() { registerRemote(selfRemoteAddress); }
public static ResourceBundle getBundledResource(String basename) { return ResourceBundle.getBundle(basename, new UTF8Control()); }
@Test public void getBundleByClassname() { title("getBundleByClassname"); res = LionUtils.getBundledResource(LionUtils.class); assertNotNull("missing resource bundle", res); String v1 = res.getString("foo"); String v2 = res.getString("boo"); print("v1 is %s, v2 is %s", v1, v2); assertEquals("v1 value wrong", "bar", v1); assertEquals("v2 value wrong", "ghost", v2); }
@Override public void startTrackingPartition( ResourceID producingTaskExecutorId, ResultPartitionDeploymentDescriptor resultPartitionDeploymentDescriptor) { Preconditions.checkNotNull(producingTaskExecutorId); Preconditions.checkNotNull(resultPartitionDeploymentDescriptor); // non-releaseByScheduler partitions don't require explicit partition release calls. if (!resultPartitionDeploymentDescriptor.getPartitionType().isReleaseByScheduler()) { return; } final ResultPartitionID resultPartitionId = resultPartitionDeploymentDescriptor.getShuffleDescriptor().getResultPartitionID(); startTrackingPartition( producingTaskExecutorId, resultPartitionId, resultPartitionDeploymentDescriptor); }
@Test void testGetJobPartitionClusterPartition() { final TestingShuffleMaster shuffleMaster = new TestingShuffleMaster(); final Queue<ReleaseCall> releaseCalls = new ArrayBlockingQueue<>(4); final Queue<PromoteCall> promoteCalls = new ArrayBlockingQueue<>(4); final JobMasterPartitionTrackerImpl partitionTracker = new JobMasterPartitionTrackerImpl( new JobID(), shuffleMaster, resourceId -> Optional.of( createTaskExecutorGateway( resourceId, releaseCalls, promoteCalls))); final ResourceID taskExecutorId = ResourceID.generate(); final ResultPartitionID resultPartitionId1 = new ResultPartitionID(); final ResultPartitionID resultPartitionId2 = new ResultPartitionID(); final ResultPartitionDeploymentDescriptor clusterPartition = AbstractPartitionTrackerTest.createResultPartitionDeploymentDescriptor( resultPartitionId1, ResultPartitionType.BLOCKING_PERSISTENT, false); final ResultPartitionDeploymentDescriptor jobPartition = AbstractPartitionTrackerTest.createResultPartitionDeploymentDescriptor( resultPartitionId2, false); partitionTracker.startTrackingPartition(taskExecutorId, clusterPartition); partitionTracker.startTrackingPartition(taskExecutorId, jobPartition); assertThat(partitionTracker.getAllTrackedNonClusterPartitions()) .containsExactly(jobPartition); assertThat(partitionTracker.getAllTrackedClusterPartitions()) .containsExactly(clusterPartition); }
@ConstantFunction(name = "concat_ws", argTypes = {VARCHAR, VARCHAR}, returnType = VARCHAR) public static ConstantOperator concat_ws(ConstantOperator split, ConstantOperator... values) { Preconditions.checkArgument(values.length > 0); if (split.isNull()) { return ConstantOperator.createNull(Type.VARCHAR); } final StringBuilder resultBuilder = new StringBuilder(); for (int i = 0; i < values.length - 1; i++) { if (values[i].isNull()) { continue; } resultBuilder.append(values[i].getVarchar()).append(split.getVarchar()); } resultBuilder.append(values[values.length - 1].getVarchar()); return ConstantOperator.createVarchar(resultBuilder.toString()); }
@Test public void concat_ws() { ConstantOperator[] arg = {ConstantOperator.createVarchar("1"), ConstantOperator.createVarchar("2"), ConstantOperator.createVarchar("3")}; ConstantOperator result = ScalarOperatorFunctions.concat_ws(ConstantOperator.createVarchar(","), arg); assertEquals(Type.VARCHAR, result.getType()); assertEquals("1,2,3", result.getVarchar()); }
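To make the separator placement in concat_ws explicit, a hedged JDK-only re-implementation of the same null-skipping loop follows; names are illustrative, and at least one value is required just as the Preconditions check enforces.

class ConcatWsSketch {
    static String concatWs(String sep, String... values) {
        // Precondition mirrored from the original: values.length > 0.
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < values.length - 1; i++) {
            if (values[i] == null) continue;      // nulls are skipped, not rendered
            sb.append(values[i]).append(sep);
        }
        sb.append(values[values.length - 1]);     // last element appended unconditionally
        return sb.toString();
    }

    public static void main(String[] args) {
        System.out.println(concatWs(",", "1", null, "3")); // "1,3"
        System.out.println(concatWs(",", "1", "2", "3"));  // "1,2,3", as the test asserts
    }
}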
public static NotFoundException roleNotFound(String roleName) { return new NotFoundException( "role not found for roleName:%s, please check apollo portal DB table 'Role'", roleName ); }
@Test void roleNotFound() { NotFoundException exception = NotFoundException.roleNotFound("CreateApplication+SystemRole"); assertEquals(exception.getMessage(), "role not found for roleName:CreateApplication+SystemRole, please check apollo portal DB table 'Role'"); }
public static void main(String[] args) throws Throwable { if (!parseInputArgs(args)) { usage(); System.exit(EXIT_FAILED); } if (sHelp) { usage(); System.exit(EXIT_SUCCEEDED); } try { dumpJournal(); } catch (Exception exc) { System.out.printf("Journal tool failed: %s%n", exc); } }
@Test public void relativeLocalJournalInput() throws Throwable { String journalPath = "fileA"; JournalTool.main(new String[]{"-inputDir", journalPath}); String inputUri = Whitebox.getInternalState(JournalTool.class, "sInputDir"); Assert.assertEquals( Paths.get(System.getProperty("user.dir"), journalPath).toString(), inputUri); }
public static String getTablesPath(final String databaseName, final String schemaName) { return String.join("/", getSchemaDataPath(databaseName, schemaName), TABLES_NODE); }
@Test void assertGetTablesPath() { assertThat(ShardingSphereDataNode.getTablesPath("db_name", "db_schema"), is("/statistics/databases/db_name/schemas/db_schema/tables")); }
@Override public void onWorkflowFinalized(Workflow workflow) { WorkflowSummary summary = StepHelper.retrieveWorkflowSummary(objectMapper, workflow.getInput()); WorkflowRuntimeSummary runtimeSummary = retrieveWorkflowRuntimeSummary(workflow); String reason = workflow.getReasonForIncompletion(); LOG.info( "Workflow {} with execution_id [{}] is finalized with internal state [{}] and reason [{}]", summary.getIdentity(), workflow.getWorkflowId(), workflow.getStatus(), reason); metrics.counter( MetricConstants.WORKFLOW_STATUS_LISTENER_CALL_BACK_METRIC, getClass(), TYPE_TAG, "onWorkflowFinalized", MetricConstants.STATUS_TAG, workflow.getStatus().name()); if (reason != null && workflow.getStatus() == Workflow.WorkflowStatus.FAILED && reason.startsWith(MaestroStartTask.DEDUP_FAILURE_PREFIX)) { LOG.info( "Workflow {} with execution_id [{}] has not actually started, thus skip onWorkflowFinalized.", summary.getIdentity(), workflow.getWorkflowId()); return; // special case doing nothing } WorkflowInstance.Status instanceStatus = instanceDao.getWorkflowInstanceStatus( summary.getWorkflowId(), summary.getWorkflowInstanceId(), summary.getWorkflowRunId()); if (instanceStatus == null || (instanceStatus.isTerminal() && workflow.getStatus().isTerminal())) { LOG.info( "Workflow {} with execution_id [{}] does not exist or already " + "in a terminal state [{}] with internal state [{}], thus skip onWorkflowFinalized.", summary.getIdentity(), workflow.getWorkflowId(), instanceStatus, workflow.getStatus()); return; } Map<String, Task> realTaskMap = TaskHelper.getUserDefinedRealTaskMap(workflow); // cancel internally failed tasks realTaskMap.values().stream() .filter(task -> !StepHelper.retrieveStepStatus(task.getOutputData()).isTerminal()) .forEach(task -> maestroTask.cancel(workflow, task, null)); WorkflowRuntimeOverview overview = TaskHelper.computeOverview( objectMapper, summary, runtimeSummary.getRollupBase(), realTaskMap); try { validateAndUpdateOverview(overview, summary); switch (workflow.getStatus()) { case TERMINATED: // stopped due to stop request if (reason != null && reason.startsWith(FAILURE_REASON_PREFIX)) { update(workflow, WorkflowInstance.Status.FAILED, summary, overview); } else { update(workflow, WorkflowInstance.Status.STOPPED, summary, overview); } break; case TIMED_OUT: update(workflow, WorkflowInstance.Status.TIMED_OUT, summary, overview); break; default: // other status (FAILED, COMPLETED, PAUSED, RUNNING) to be handled here. Optional<Task.Status> done = TaskHelper.checkProgress(realTaskMap, summary, overview, true); switch (done.orElse(Task.Status.IN_PROGRESS)) { /** * This is a special status to indicate that the workflow has succeeded. Check {@link * TaskHelper#checkProgress} for more details. 
*/ case FAILED_WITH_TERMINAL_ERROR: WorkflowInstance.Status nextStatus = AggregatedViewHelper.deriveAggregatedStatus( instanceDao, summary, WorkflowInstance.Status.SUCCEEDED, overview); if (!nextStatus.isTerminal()) { throw new MaestroInternalError( "Invalid status: [%s], expecting a terminal one", nextStatus); } update(workflow, nextStatus, summary, overview); break; case FAILED: case CANCELED: // due to step failure update(workflow, WorkflowInstance.Status.FAILED, summary, overview); break; case TIMED_OUT: update(workflow, WorkflowInstance.Status.TIMED_OUT, summary, overview); break; // all other status are invalid default: metrics.counter( MetricConstants.WORKFLOW_STATUS_LISTENER_CALL_BACK_METRIC, getClass(), TYPE_TAG, "invalidStatusOnWorkflowFinalized"); throw new MaestroInternalError( "Invalid status [%s] onWorkflowFinalized", workflow.getStatus()); } break; } } catch (MaestroInternalError | IllegalArgumentException e) { // non-retryable error and still fail the instance LOG.warn("onWorkflowFinalized is failed with a non-retryable error", e); metrics.counter( MetricConstants.WORKFLOW_STATUS_LISTENER_CALL_BACK_METRIC, getClass(), TYPE_TAG, "nonRetryableErrorOnWorkflowFinalized"); update( workflow, WorkflowInstance.Status.FAILED, summary, overview, Details.create( e.getMessage(), "onWorkflowFinalized is failed with non-retryable error.")); } }
@Test public void testNonTerminalStatusOnWorkflowFinalizedError() { StepRuntimeState state = new StepRuntimeState(); state.setStatus(StepInstance.Status.RUNNING); when(stepInstanceDao.getAllStepStates(any(), anyLong(), anyLong())) .thenReturn(singletonMap("foo", state)); when(workflow.getStatus()).thenReturn(Workflow.WorkflowStatus.TERMINATED); when(instanceDao.getWorkflowInstanceStatus(eq("test-workflow-id"), anyLong(), anyLong())) .thenReturn(WorkflowInstance.Status.IN_PROGRESS); AssertHelper.assertThrows( "Retry nonterminal status", MaestroRetryableError.class, "step status is invalid and will retry termination", () -> statusListener.onWorkflowFinalized(workflow)); Assert.assertEquals( 1L, metricRepo .getCounter( MetricConstants.WORKFLOW_STATUS_LISTENER_CALL_BACK_METRIC, MaestroWorkflowStatusListener.class, "type", "nonTerminalStatusOnWorkflowFinalized") .count()); }
public Clock clock() { return new Clock() { @Override public long currentTimeMicroseconds() { return System.currentTimeMillis() * 1000; } @Override public String toString() { return "System.currentTimeMillis()"; } }; }
@Test void clock_hasNiceToString_jre7() { Platform platform = new Platform.Jre7(); assertThat(platform.clock()).hasToString("System.currentTimeMillis()"); }
@Override public Checksum compute(final InputStream in, final TransferStatus status) throws BackgroundException { return new Checksum(HashAlgorithm.md5, this.digest("MD5", this.normalize(in, status), status)); }
@Test public void testComputeEmptyString() throws Exception { assertEquals("d41d8cd98f00b204e9800998ecf8427e", new MD5ChecksumCompute().compute(IOUtils.toInputStream("", Charset.defaultCharset()), new TransferStatus()).hash); assertEquals("d41d8cd98f00b204e9800998ecf8427e", new MD5ChecksumCompute().compute(new NullInputStream(0L), new TransferStatus().withLength(0)).hash); }
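As a cross-check of the constant asserted above, the MD5 digest of zero input bytes can be reproduced with the JDK alone:

import java.security.MessageDigest;

class Md5EmptyDemo {
    public static void main(String[] args) throws Exception {
        byte[] d = MessageDigest.getInstance("MD5").digest(new byte[0]);
        StringBuilder sb = new StringBuilder();
        for (byte b : d) sb.append(String.format("%02x", b));
        System.out.println(sb); // d41d8cd98f00b204e9800998ecf8427e
    }
}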
@Override public boolean remove(Object objectToRemove) { return remove(objectToRemove, objectToRemove.hashCode()); }
@Test(expected = NullPointerException.class) public void testRemoveNull() { final OAHashSet<Integer> set = new OAHashSet<>(8); set.remove(null); }
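The expected NullPointerException comes from hashing the argument before any table lookup; a minimal sketch of that fail-fast shape, with the probing elided:

class OpenAddressingRemoveSketch {
    static boolean remove(Object o) {
        int hash = o.hashCode(); // throws NullPointerException when o == null, before any probing
        return removeWithHash(o, hash);
    }

    static boolean removeWithHash(Object o, int hash) { return false; } // table probing elided

    public static void main(String[] args) {
        remove(null); // fails fast with NPE, which is exactly what the test asserts
    }
}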
@Override public CloseableIterator<ScannerReport.Measure> readComponentMeasures(int componentRef) { ensureInitialized(); return delegate.readComponentMeasures(componentRef); }
@Test public void verify_readComponentMeasures_returns_measures() { writer.appendComponentMeasure(COMPONENT_REF, MEASURE); try (CloseableIterator<ScannerReport.Measure> measures = underTest.readComponentMeasures(COMPONENT_REF)) { assertThat(measures.next()).isEqualTo(MEASURE); assertThat(measures.hasNext()).isFalse(); } }
public synchronized <KIn, VIn> Topology addReadOnlyStateStore(final StoreBuilder<?> storeBuilder, final String sourceName, final TimestampExtractor timestampExtractor, final Deserializer<KIn> keyDeserializer, final Deserializer<VIn> valueDeserializer, final String topic, final String processorName, final ProcessorSupplier<KIn, VIn, Void, Void> stateUpdateSupplier) { storeBuilder.withLoggingDisabled(); internalTopologyBuilder.addSource(AutoOffsetReset.EARLIEST, sourceName, timestampExtractor, keyDeserializer, valueDeserializer, topic); internalTopologyBuilder.addProcessor(processorName, stateUpdateSupplier, sourceName); internalTopologyBuilder.addStateStore(storeBuilder, processorName); internalTopologyBuilder.connectSourceStoreAndTopic(storeBuilder.name(), topic); return this; }
@Test public void readOnlyStateStoresShouldNotLog() { final String sourceName = "source"; final String storeName = "store"; final String topicName = "topic"; final String processorName = "processor"; final KeyValueStoreBuilder<?, ?> storeBuilder = mock(KeyValueStoreBuilder.class); when(storeBuilder.name()).thenReturn(storeName); topology.addReadOnlyStateStore( storeBuilder, sourceName, null, null, null, topicName, processorName, new MockProcessorSupplier<>()); final StoreFactory stateStoreFactory = topology.internalTopologyBuilder.stateStores().get(storeName); assertThat(stateStoreFactory.loggingEnabled(), equalTo(false)); }
public static void verifyGroupId(final String groupId) { if (StringUtils.isBlank(groupId)) { throw new IllegalArgumentException("Blank groupId"); } if (!GROUP_ID_PATTER.matcher(groupId).matches()) { throw new IllegalArgumentException( "Invalid group id, it should start with a character 'a'-'z' or 'A'-'Z'," + " followed by digits, English letters, '-' or '_'."); } }
@Test(expected = IllegalArgumentException.class) public void testVerifyGroupId4() { Utils.verifyGroupId("*test"); }
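Since the GROUP_ID_PATTER constant is not shown above, here is a hedged sketch assuming a regex equivalent to the documented rule (a leading letter, then letters, digits, '-' or '_'); the exact pattern is an assumption.

import java.util.regex.Pattern;

class GroupIdCheckSketch {
    // Assumed equivalent of GROUP_ID_PATTER, reconstructed from the error message.
    private static final Pattern P = Pattern.compile("^[a-zA-Z][0-9a-zA-Z_-]*$");

    static boolean isValid(String id) {
        return id != null && !id.trim().isEmpty() && P.matcher(id).matches();
    }

    public static void main(String[] args) {
        System.out.println(isValid("myGroup_1")); // true
        System.out.println(isValid("*test"));     // false, matching the rejected input above
    }
}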
protected org.graylog2.plugin.indexer.searches.timeranges.TimeRange restrictTimeRange(final org.graylog2.plugin.indexer.searches.timeranges.TimeRange timeRange) { final DateTime originalFrom = timeRange.getFrom(); final DateTime to = timeRange.getTo(); final DateTime from; final SearchesClusterConfig config = clusterConfigService.get(SearchesClusterConfig.class); if (config == null || Period.ZERO.equals(config.queryTimeRangeLimit())) { from = originalFrom; } else { final DateTime limitedFrom = to.minus(config.queryTimeRangeLimit()); from = limitedFrom.isAfter(originalFrom) ? limitedFrom : originalFrom; } return AbsoluteRange.create(from, to); }
@Test public void restrictTimeRangeReturnsGivenTimeRangeWithinLimit() { when(clusterConfigService.get(SearchesClusterConfig.class)).thenReturn(SearchesClusterConfig.createDefault() .toBuilder() .queryTimeRangeLimit(queryLimitPeriod) .build()); final DateTime from = new DateTime(2015, 1, 15, 12, 0, DateTimeZone.UTC); final DateTime to = from.plusHours(1); final TimeRange timeRange = AbsoluteRange.create(from, to); final TimeRange restrictedTimeRange = searchResource.restrictTimeRange(timeRange); assertThat(restrictedTimeRange).isNotNull(); assertThat(restrictedTimeRange.getFrom()).isEqualTo(from); assertThat(restrictedTimeRange.getTo()).isEqualTo(to); }
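The clamping rule under test reduces to taking the later of the requested start and (to - limit); a minimal sketch over epoch millis, with the millisecond representation being an assumption for illustration:

class TimeRangeClampSketch {
    // The later of the requested start and (to - limit) wins; within the limit the range is untouched.
    static long effectiveFrom(long fromMillis, long toMillis, long limitMillis) {
        return Math.max(toMillis - limitMillis, fromMillis);
    }

    public static void main(String[] args) {
        long to = 10_000, limit = 5_000;
        System.out.println(effectiveFrom(7_000, to, limit)); // 7000: within the limit, unchanged
        System.out.println(effectiveFrom(1_000, to, limit)); // 5000: clamped to to - limit
    }
}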
@Override @MethodNotAvailable public void loadAll(boolean replaceExistingValues) { throw new MethodNotAvailableException(); }
@Test(expected = MethodNotAvailableException.class) public void testLoadAllWithListener() { adapter.loadAll(Collections.emptySet(), true, null); }
@SuppressWarnings("unchecked") @VisibleForTesting Schema<T> initializeSchema() throws ClassNotFoundException { if (StringUtils.isEmpty(this.pulsarSinkConfig.getTypeClassName())) { return (Schema<T>) Schema.BYTES; } Class<?> typeArg = Reflections.loadClass(this.pulsarSinkConfig.getTypeClassName(), functionClassLoader); if (Void.class.equals(typeArg)) { // return type is 'void', so there's no schema to check return null; } ConsumerConfig consumerConfig = new ConsumerConfig(); consumerConfig.setSchemaProperties(pulsarSinkConfig.getSchemaProperties()); if (!StringUtils.isEmpty(pulsarSinkConfig.getSchemaType())) { if (GenericRecord.class.isAssignableFrom(typeArg)) { consumerConfig.setSchemaType(SchemaType.AUTO_CONSUME.toString()); SchemaType configuredSchemaType = SchemaType.valueOf(pulsarSinkConfig.getSchemaType()); if (SchemaType.AUTO_CONSUME != configuredSchemaType) { log.info("The configured schema type {} is not able to write GenericRecords." + " So overwrite the schema type to be {}", configuredSchemaType, SchemaType.AUTO_CONSUME); } } else { consumerConfig.setSchemaType(pulsarSinkConfig.getSchemaType()); } return (Schema<T>) topicSchema.getSchema(pulsarSinkConfig.getTopic(), typeArg, consumerConfig, false); } else { consumerConfig.setSchemaType(pulsarSinkConfig.getSerdeClassName()); return (Schema<T>) topicSchema.getSchema(pulsarSinkConfig.getTopic(), typeArg, consumerConfig, false, functionClassLoader); } }
@Test public void testExplicitDefaultSerDe() throws PulsarClientException { PulsarSinkConfig pulsarConfig = getPulsarConfigs(); // set type to String pulsarConfig.setTypeClassName(String.class.getName()); pulsarConfig.setSerdeClassName(TopicSchema.DEFAULT_SERDE); PulsarSink pulsarSink = new PulsarSink(getPulsarClient(), pulsarConfig, new HashMap<>(), mock(ComponentStatsManager.class), Thread.currentThread().getContextClassLoader(), producerCache); try { pulsarSink.initializeSchema(); } catch (Exception ex) { ex.printStackTrace(); fail(); } }
private static Map<String, Object> flatten(Map<String, Object> map) { return map.entrySet().stream().flatMap(JsonUtils::flatten) .collect(LinkedHashMap::new, (m, e) -> m.put("/" + e.getKey(), e.getValue()), LinkedHashMap::putAll); }
@Test public void testFlatten() throws IOException { JsonIndexConfig jsonIndexConfig = new JsonIndexConfig(); { JsonNode jsonNode = JsonUtils.stringToJsonNode("null"); List<Map<String, String>> flattenedRecords = JsonUtils.flatten(jsonNode, jsonIndexConfig); assertTrue(flattenedRecords.isEmpty()); } { JsonNode jsonNode = JsonUtils.stringToJsonNode("123"); List<Map<String, String>> flattenedRecords = JsonUtils.flatten(jsonNode, jsonIndexConfig); assertEquals(flattenedRecords.size(), 1); assertEquals(flattenedRecords.get(0), Collections.singletonMap("", "123")); } { JsonNode jsonNode = JsonUtils.stringToJsonNode("[]"); List<Map<String, String>> flattenedRecords = JsonUtils.flatten(jsonNode, jsonIndexConfig); assertTrue(flattenedRecords.isEmpty()); } { JsonNode jsonNode = JsonUtils.stringToJsonNode("[1,2,3]"); List<Map<String, String>> flattenedRecords = JsonUtils.flatten(jsonNode, jsonIndexConfig); assertEquals(flattenedRecords.size(), 3); Map<String, String> flattenedRecord0 = flattenedRecords.get(0); assertEquals(flattenedRecord0.size(), 2); assertEquals(flattenedRecord0.get(".$index"), "0"); assertEquals(flattenedRecord0.get("."), "1"); Map<String, String> flattenedRecord1 = flattenedRecords.get(1); assertEquals(flattenedRecord1.size(), 2); assertEquals(flattenedRecord1.get(".$index"), "1"); assertEquals(flattenedRecord1.get("."), "2"); Map<String, String> flattenedRecord2 = flattenedRecords.get(2); assertEquals(flattenedRecord2.size(), 2); assertEquals(flattenedRecord2.get(".$index"), "2"); assertEquals(flattenedRecord2.get("."), "3"); } { JsonNode jsonNode = JsonUtils.stringToJsonNode("[1,[2,3],[4,[5,6]]]]"); List<Map<String, String>> flattenedRecords = JsonUtils.flatten(jsonNode, jsonIndexConfig); assertEquals(flattenedRecords.size(), 6); Map<String, String> flattenedRecord0 = flattenedRecords.get(0); assertEquals(flattenedRecord0.size(), 2); assertEquals(flattenedRecord0.get(".$index"), "0"); assertEquals(flattenedRecord0.get("."), "1"); Map<String, String> flattenedRecord1 = flattenedRecords.get(1); assertEquals(flattenedRecord1.size(), 3); assertEquals(flattenedRecord1.get(".$index"), "1"); assertEquals(flattenedRecord1.get("..$index"), "0"); assertEquals(flattenedRecord1.get(".."), "2"); Map<String, String> flattenedRecord2 = flattenedRecords.get(2); assertEquals(flattenedRecord2.size(), 3); assertEquals(flattenedRecord2.get(".$index"), "1"); assertEquals(flattenedRecord2.get("..$index"), "1"); assertEquals(flattenedRecord2.get(".."), "3"); Map<String, String> flattenedRecord3 = flattenedRecords.get(3); assertEquals(flattenedRecord3.size(), 3); assertEquals(flattenedRecord3.get(".$index"), "2"); assertEquals(flattenedRecord3.get("..$index"), "0"); assertEquals(flattenedRecord3.get(".."), "4"); Map<String, String> flattenedRecord4 = flattenedRecords.get(4); assertEquals(flattenedRecord4.size(), 4); assertEquals(flattenedRecord4.get(".$index"), "2"); assertEquals(flattenedRecord4.get("..$index"), "1"); assertEquals(flattenedRecord4.get("...$index"), "0"); assertEquals(flattenedRecord4.get("..."), "5"); Map<String, String> flattenedRecord5 = flattenedRecords.get(5); assertEquals(flattenedRecord5.size(), 4); assertEquals(flattenedRecord5.get(".$index"), "2"); assertEquals(flattenedRecord5.get("..$index"), "1"); assertEquals(flattenedRecord5.get("...$index"), "1"); assertEquals(flattenedRecord5.get("..."), "6"); } { JsonNode jsonNode = JsonUtils.stringToJsonNode("{}"); List<Map<String, String>> flattenedRecords = JsonUtils.flatten(jsonNode, jsonIndexConfig); 
assertTrue(flattenedRecords.isEmpty()); } { JsonNode jsonNode = JsonUtils.stringToJsonNode("{\"key\":{}}"); List<Map<String, String>> flattenedRecords = JsonUtils.flatten(jsonNode, jsonIndexConfig); assertTrue(flattenedRecords.isEmpty()); } { JsonNode jsonNode = JsonUtils.stringToJsonNode("[{},{},{}]"); List<Map<String, String>> flattenedRecords = JsonUtils.flatten(jsonNode, jsonIndexConfig); assertTrue(flattenedRecords.isEmpty()); } { JsonNode jsonNode = JsonUtils.stringToJsonNode("{\"key\":[]}"); List<Map<String, String>> flattenedRecords = JsonUtils.flatten(jsonNode, jsonIndexConfig); assertTrue(flattenedRecords.isEmpty()); } { JsonNode jsonNode = JsonUtils.stringToJsonNode("{\"name\":\"adam\",\"age\":20}"); List<Map<String, String>> flattenedRecords = JsonUtils.flatten(jsonNode, jsonIndexConfig); assertEquals(flattenedRecords.size(), 1); Map<String, String> flattenedRecord = flattenedRecords.get(0); assertEquals(flattenedRecord.size(), 2); assertEquals(flattenedRecord.get(".name"), "adam"); assertEquals(flattenedRecord.get(".age"), "20"); } { JsonNode jsonNode = JsonUtils.stringToJsonNode( "[{\"country\":\"us\",\"street\":\"main st\",\"number\":1},{\"country\":\"ca\",\"street\":\"second st\"," + "\"number\":2}]"); List<Map<String, String>> flattenedRecords = JsonUtils.flatten(jsonNode, jsonIndexConfig); assertEquals(flattenedRecords.size(), 2); for (Map<String, String> flattenedRecord : flattenedRecords) { assertEquals(flattenedRecord.size(), 4); assertTrue(flattenedRecord.containsKey(".$index")); assertTrue(flattenedRecord.containsKey("..country")); assertTrue(flattenedRecord.containsKey("..street")); assertTrue(flattenedRecord.containsKey("..number")); } Map<String, String> flattenedRecord0 = flattenedRecords.get(0); assertEquals(flattenedRecord0.get(".$index"), "0"); assertEquals(flattenedRecord0.get("..country"), "us"); assertEquals(flattenedRecord0.get("..street"), "main st"); assertEquals(flattenedRecord0.get("..number"), "1"); Map<String, String> flattenedRecord1 = flattenedRecords.get(1); assertEquals(flattenedRecord1.get(".$index"), "1"); assertEquals(flattenedRecord1.get("..country"), "ca"); assertEquals(flattenedRecord1.get("..street"), "second st"); assertEquals(flattenedRecord1.get("..number"), "2"); } { JsonNode jsonNode = JsonUtils.stringToJsonNode( "{\"name\":\"adam\",\"addresses\":[{\"country\":\"us\",\"street\":\"main st\",\"number\":1}," + "{\"country\":\"ca\",\"street\":\"second st\",\"number\":2}]}"); List<Map<String, String>> flattenedRecords = JsonUtils.flatten(jsonNode, jsonIndexConfig); assertEquals(flattenedRecords.size(), 2); for (Map<String, String> flattenedRecord : flattenedRecords) { assertEquals(flattenedRecord.size(), 5); assertEquals(flattenedRecord.get(".name"), "adam"); assertTrue(flattenedRecord.containsKey(".addresses.$index")); assertTrue(flattenedRecord.containsKey(".addresses..country")); assertTrue(flattenedRecord.containsKey(".addresses..street")); assertTrue(flattenedRecord.containsKey(".addresses..number")); } Map<String, String> flattenedRecord0 = flattenedRecords.get(0); assertEquals(flattenedRecord0.get(".addresses.$index"), "0"); assertEquals(flattenedRecord0.get(".addresses..country"), "us"); assertEquals(flattenedRecord0.get(".addresses..street"), "main st"); assertEquals(flattenedRecord0.get(".addresses..number"), "1"); Map<String, String> flattenedRecord1 = flattenedRecords.get(1); assertEquals(flattenedRecord1.get(".addresses.$index"), "1"); assertEquals(flattenedRecord1.get(".addresses..country"), "ca"); 
assertEquals(flattenedRecord1.get(".addresses..street"), "second st"); assertEquals(flattenedRecord1.get(".addresses..number"), "2"); } { JsonNode jsonNode = JsonUtils.stringToJsonNode( "{\"name\":\"adam\",\"age\":20,\"addresses\":[{\"country\":\"us\",\"street\":\"main st\",\"number\":1}," + "{\"country\":\"ca\",\"street\":\"second st\",\"number\":2}],\"skills\":[\"english\"," + "\"programming\"]}"); List<Map<String, String>> flattenedRecords = JsonUtils.flatten(jsonNode, jsonIndexConfig); assertEquals(flattenedRecords.size(), 4); for (Map<String, String> flattenedRecord : flattenedRecords) { assertEquals(flattenedRecord.size(), 8); assertEquals(flattenedRecord.get(".name"), "adam"); assertEquals(flattenedRecord.get(".age"), "20"); assertTrue(flattenedRecord.containsKey(".addresses.$index")); assertTrue(flattenedRecord.containsKey(".addresses..country")); assertTrue(flattenedRecord.containsKey(".addresses..street")); assertTrue(flattenedRecord.containsKey(".addresses..number")); assertTrue(flattenedRecord.containsKey(".skills.$index")); assertTrue(flattenedRecord.containsKey(".skills.")); } Map<String, String> flattenedRecord0 = flattenedRecords.get(0); assertEquals(flattenedRecord0.get(".addresses.$index"), "0"); assertEquals(flattenedRecord0.get(".addresses..country"), "us"); assertEquals(flattenedRecord0.get(".addresses..street"), "main st"); assertEquals(flattenedRecord0.get(".addresses..number"), "1"); assertEquals(flattenedRecord0.get(".skills.$index"), "0"); assertEquals(flattenedRecord0.get(".skills."), "english"); Map<String, String> flattenedRecord3 = flattenedRecords.get(3); assertEquals(flattenedRecord3.get(".addresses.$index"), "1"); assertEquals(flattenedRecord3.get(".addresses..country"), "ca"); assertEquals(flattenedRecord3.get(".addresses..street"), "second st"); assertEquals(flattenedRecord3.get(".addresses..number"), "2"); assertEquals(flattenedRecord3.get(".skills.$index"), "1"); assertEquals(flattenedRecord3.get(".skills."), "programming"); } { JsonNode jsonNode = JsonUtils.stringToJsonNode( "{\"name\":\"bob\",\"age\":null,\"addresses\":[{\"country\":\"us\",\"street\":\"main st\"}],\"skills\":[]," + "\"hobbies\":[null]}"); List<Map<String, String>> flattenedRecords = JsonUtils.flatten(jsonNode, jsonIndexConfig); assertEquals(flattenedRecords.size(), 1); Map<String, String> flattenedRecord = flattenedRecords.get(0); assertEquals(flattenedRecord.size(), 4); assertEquals(flattenedRecord.get(".name"), "bob"); assertEquals(flattenedRecord.get(".addresses.$index"), "0"); assertEquals(flattenedRecord.get(".addresses..country"), "us"); assertEquals(flattenedRecord.get(".addresses..street"), "main st"); } { JsonNode jsonNode = JsonUtils.stringToJsonNode( "{\"name\":\"bob\",\"age\":null,\"addresses\":[{\"country\":\"us\",\"street\":\"main st\"}],\"skills\":[]," + "\"hobbies\":[null," + "\"football\"]}"); List<Map<String, String>> flattenedRecords = JsonUtils.flatten(jsonNode, jsonIndexConfig); assertEquals(flattenedRecords.size(), 1); Map<String, String> flattenedRecord = flattenedRecords.get(0); assertEquals(flattenedRecord.size(), 6); assertEquals(flattenedRecord.get(".name"), "bob"); assertEquals(flattenedRecord.get(".addresses.$index"), "0"); assertEquals(flattenedRecord.get(".addresses..country"), "us"); assertEquals(flattenedRecord.get(".addresses..street"), "main st"); assertEquals(flattenedRecord.get(".hobbies.$index"), "1"); assertEquals(flattenedRecord.get(".hobbies."), "football"); } { JsonNode jsonNode = JsonUtils.stringToJsonNode( 
"{\"name\":\"charles\",\"addresses\":[{\"country\":\"us\",\"street\":\"main st\",\"types\":[\"home\"," + "\"office\"]}," + "{\"country\":\"ca\",\"street\":\"second st\"}]}"); List<Map<String, String>> flattenedRecords = JsonUtils.flatten(jsonNode, jsonIndexConfig); assertEquals(flattenedRecords.size(), 3); Map<String, String> flattenedRecord0 = flattenedRecords.get(0); assertEquals(flattenedRecord0.size(), 6); assertEquals(flattenedRecord0.get(".name"), "charles"); assertEquals(flattenedRecord0.get(".addresses.$index"), "0"); assertEquals(flattenedRecord0.get(".addresses..country"), "us"); assertEquals(flattenedRecord0.get(".addresses..street"), "main st"); assertEquals(flattenedRecord0.get(".addresses..types.$index"), "0"); assertEquals(flattenedRecord0.get(".addresses..types."), "home"); Map<String, String> flattenedRecord1 = flattenedRecords.get(1); assertEquals(flattenedRecord1.size(), 6); assertEquals(flattenedRecord1.get(".name"), "charles"); assertEquals(flattenedRecord1.get(".addresses.$index"), "0"); assertEquals(flattenedRecord1.get(".addresses..country"), "us"); assertEquals(flattenedRecord1.get(".addresses..street"), "main st"); assertEquals(flattenedRecord1.get(".addresses..types.$index"), "1"); assertEquals(flattenedRecord1.get(".addresses..types."), "office"); Map<String, String> flattenedRecord2 = flattenedRecords.get(2); assertEquals(flattenedRecord2.size(), 4); assertEquals(flattenedRecord2.get(".name"), "charles"); assertEquals(flattenedRecord2.get(".addresses.$index"), "1"); assertEquals(flattenedRecord2.get(".addresses..country"), "ca"); assertEquals(flattenedRecord2.get(".addresses..street"), "second st"); } }
@Delete(uri = "{namespace}/{id}") @ExecuteOn(TaskExecutors.IO) @Operation(tags = {"Flows"}, summary = "Delete a flow") @ApiResponse(responseCode = "204", description = "On success") public HttpResponse<Void> delete( @Parameter(description = "The flow namespace") @PathVariable String namespace, @Parameter(description = "The flow id") @PathVariable String id ) { Optional<Flow> flow = flowRepository.findById(tenantService.resolveTenant(), namespace, id); if (flow.isPresent()) { flowRepository.delete(flow.get()); return HttpResponse.status(HttpStatus.NO_CONTENT); } else { return HttpResponse.status(HttpStatus.NOT_FOUND); } }
@Test void deleteFlowsByIds(){ postFlow("flow-a","io.kestra.tests.delete", "a"); postFlow("flow-b","io.kestra.tests.delete", "b"); postFlow("flow-c","io.kestra.tests.delete", "c"); List<IdWithNamespace> ids = List.of( new IdWithNamespace("io.kestra.tests.delete", "flow-a"), new IdWithNamespace("io.kestra.tests.delete", "flow-b"), new IdWithNamespace("io.kestra.tests.delete", "flow-c") ); HttpResponse<BulkResponse> response = client .toBlocking() .exchange(DELETE("/api/v1/flows/delete/by-ids", ids), BulkResponse.class); assertThat(response.getBody().get().getCount(), is(3)); HttpClientResponseException flowA = assertThrows(HttpClientResponseException.class, () -> { client.toBlocking().retrieve(HttpRequest.GET("/api/v1/flows/io.kestra.tests.delete/flow-a")); }); HttpClientResponseException flowB = assertThrows(HttpClientResponseException.class, () -> { client.toBlocking().retrieve(HttpRequest.GET("/api/v1/flows/io.kestra.tests.delete/flow-b")); }); HttpClientResponseException flowC = assertThrows(HttpClientResponseException.class, () -> { client.toBlocking().retrieve(HttpRequest.GET("/api/v1/flows/io.kestra.tests.delete/flow-c")); }); assertThat(flowA.getStatus(), is(HttpStatus.NOT_FOUND)); assertThat(flowB.getStatus(), is(HttpStatus.NOT_FOUND)); assertThat(flowC.getStatus(), is(HttpStatus.NOT_FOUND)); }
public static Map<String, String> deserialize2Map(String jsonStr) { try { if (StringUtils.hasText(jsonStr)) { Map<String, Object> temp = OM.readValue(jsonStr, Map.class); Map<String, String> result = new HashMap<>(); temp.forEach((key, value) -> { result.put(String.valueOf(key), String.valueOf(value)); }); return result; } return new HashMap<>(); } catch (JsonProcessingException e) { LOG.error( "Json to map failed. check if the format of the json string[{}] is correct.", jsonStr, e); throw new RuntimeException("Json to map failed.", e); } }
@Test public void testDeserializeBlankIntoEmptyMap() { Map<String, String> map = JacksonUtils.deserialize2Map(""); assertThat(map).isNotNull(); assertThat(map).isEmpty(); }
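A hedged usage sketch of deserialize2Map, assuming the JacksonUtils class above is on the classpath; note that non-string JSON values come back stringified because the copy loop applies String.valueOf:

class Deserialize2MapDemo {
    public static void main(String[] args) {
        // Numbers and booleans are stringified on the way out.
        System.out.println(JacksonUtils.deserialize2Map("{\"a\": 1, \"b\": true}")); // {a=1, b=true}
        // Blank input short-circuits to an empty map, so callers never need a null check.
        System.out.println(JacksonUtils.deserialize2Map("").isEmpty());              // true
    }
}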
public LineString extractPart(double startDistance, double endDistance) { LineString result = new LineString(); for (int i = 0; i < this.segments.size(); startDistance -= this.segments.get(i).length(), endDistance -= this.segments.get(i).length(), i++) { LineSegment segment = this.segments.get(i); // Skip first segments that we don't need double length = segment.length(); if (length < startDistance) { continue; } Point startPoint = null, endPoint = null; if (startDistance >= 0) { // This will be our starting point startPoint = segment.pointAlongLineSegment(startDistance); } if (endDistance < length) { // this will be our ending point endPoint = segment.pointAlongLineSegment(endDistance); } if (startPoint != null && endPoint == null) { // This ist the starting segment, end will come in a later segment result.segments.add(new LineSegment(startPoint, segment.end)); } else if (startPoint == null && endPoint == null) { // Center segment between start and end segment, add completely result.segments.add(segment); } else if (startPoint == null && endPoint != null) { // End segment, start was in earlier segment result.segments.add(new LineSegment(segment.start, endPoint)); } else if (startPoint != null && endPoint != null) { // Start and end on same segment result.segments.add(new LineSegment(startPoint, endPoint)); } if (endPoint != null) break; } return result; }
@Test public void extractPartTest() { Point point1 = new Point(0, 0); Point point2 = new Point(1, 0); Point point3 = new Point(1, 1); LineString lineString = new LineString(); lineString.segments.add(new LineSegment(point1, point2)); lineString.segments.add(new LineSegment(point2, point3)); LineString extract1 = new LineString(); extract1.segments.add(new LineSegment(new Point(0, 0), new Point(0.5, 0))); LineString extract2 = new LineString(); extract2.segments.add(new LineSegment(new Point(0.5, 0), new Point(1, 0))); extract2.segments.add(new LineSegment(new Point(1, 0), new Point(1, 0.5))); LineString extract3 = new LineString(); extract3.segments.add(new LineSegment(new Point(1, 0.5), new Point(1, 1))); Assert.assertEquals(extract1, lineString.extractPart(0, 0.5)); Assert.assertEquals(extract2, lineString.extractPart(0.5, 1.5)); Assert.assertEquals(extract3, lineString.extractPart(1.5, 2)); }
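The extractPart arithmetic above leans on pointAlongLineSegment; a minimal sketch of that step as plain linear interpolation (the method's internals are not shown, so this is an assumed equivalent):

class PointAlongSketch {
    // Interpolates the point at distance d from (x1, y1) toward (x2, y2).
    static double[] along(double x1, double y1, double x2, double y2, double d) {
        double len = Math.hypot(x2 - x1, y2 - y1);
        double t = d / len; // fraction of the segment covered
        return new double[]{x1 + t * (x2 - x1), y1 + t * (y2 - y1)};
    }

    public static void main(String[] args) {
        // Midpoint of the first unit segment in the test: distance 0.5 along length 1.
        double[] p = along(0, 0, 1, 0, 0.5);
        System.out.println(p[0] + "," + p[1]); // 0.5,0.0, matching extract1's endpoint
    }
}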
public boolean submitProcessingErrors(Message message) { return submitProcessingErrorsInternal(message, message.processingErrors()); }
@Test public void submitProcessingErrors_nothingSubmittedAndMessageNotFilteredOut_ifSubmissionDisabledAndDuplicatesAreKept() throws Exception { // given final Message msg = Mockito.mock(Message.class); when(msg.getMessageId()).thenReturn("msg-x"); when(msg.supportsFailureHandling()).thenReturn(true); when(msg.processingErrors()).thenReturn(List.of( new Message.ProcessingError(() -> "Cause 1", "Message 1", "Details 1"), new Message.ProcessingError(() -> "Cause 2", "Message 2", "Details 2") )); when(failureHandlingConfiguration.submitProcessingFailures()).thenReturn(false); when(failureHandlingConfiguration.keepFailedMessageDuplicate()).thenReturn(true); // when final boolean notFilterOut = underTest.submitProcessingErrors(msg); // then assertThat(notFilterOut).isTrue(); verifyNoInteractions(failureSubmissionQueue); }
public static Key of(String key, ApplicationId appId) { return new StringKey(key, appId); }
@Test public void longKeyCompare() { Key longKey1 = Key.of(LONG_KEY_1, NetTestTools.APP_ID); Key copyOfLongKey1 = Key.of(LONG_KEY_1, NetTestTools.APP_ID); Key longKey2 = Key.of(LONG_KEY_2, NetTestTools.APP_ID); Key copyOfLongKey2 = Key.of(LONG_KEY_2, NetTestTools.APP_ID); Key longKey3 = Key.of(LONG_KEY_3, NetTestTools.APP_ID); Key copyOfLongKey3 = Key.of(LONG_KEY_3, NetTestTools.APP_ID); assertThat(longKey1, comparesEqualTo(copyOfLongKey1)); assertThat(longKey1, lessThan(longKey2)); assertThat(longKey1, lessThan(longKey3)); assertThat(longKey2, greaterThan(longKey1)); assertThat(longKey2, comparesEqualTo(copyOfLongKey2)); assertThat(longKey2, lessThan(longKey3)); assertThat(longKey3, greaterThan(longKey1)); assertThat(longKey3, greaterThan(longKey2)); assertThat(longKey3, comparesEqualTo(copyOfLongKey3)); }
public static byte[] compress(String urlString) throws MalformedURLException {
    byte[] compressedBytes = null;
    if (urlString != null) {
        // The compressed bytes can't be longer than the original string, so size the buffer accordingly.
        byte[] byteBuffer = new byte[urlString.length()];
        int byteBufferIndex = 0;
        Pattern urlPattern = Pattern.compile(EDDYSTONE_URL_REGEX);
        Matcher urlMatcher = urlPattern.matcher(urlString);
        if (urlMatcher.matches()) {
            // www.
            String wwwdot = urlMatcher.group(EDDYSTONE_URL_WWW_GROUP);
            boolean haswww = (wwwdot != null);
            // Protocol.
            String rawProtocol = urlMatcher.group(EDDYSTONE_URL_PROTOCOL_GROUP);
            String protocol = rawProtocol.toLowerCase();
            if (protocol.equalsIgnoreCase(URL_PROTOCOL_HTTP)) {
                byteBuffer[byteBufferIndex] = (haswww ? EDDYSTONE_URL_PROTOCOL_HTTP_WWW : EDDYSTONE_URL_PROTOCOL_HTTP);
            } else {
                byteBuffer[byteBufferIndex] = (haswww ? EDDYSTONE_URL_PROTOCOL_HTTPS_WWW : EDDYSTONE_URL_PROTOCOL_HTTPS);
            }
            byteBufferIndex++;
            // Fully-qualified domain name (FQDN). This includes the hostname and any other components after the dots
            // but BEFORE the first single slash in the URL.
            byte[] hostnameBytes = urlMatcher.group(EDDYSTONE_URL_FQDN_GROUP).getBytes();
            String rawHostname = new String(hostnameBytes);
            String hostname = rawHostname.toLowerCase();
            String[] domains = hostname.split(Pattern.quote("."));
            boolean consumedSlash = false;
            if (domains != null) {
                // Write the hostname/subdomains prior to the last one. If there's only one (e.g. http://localhost)
                // then that's the only thing to write out.
                byte[] periodBytes = {'.'};
                int writableDomainsCount = (domains.length == 1 ? 1 : domains.length - 1);
                for (int domainIndex = 0; domainIndex < writableDomainsCount; domainIndex++) {
                    // Write out a leading period, if necessary.
                    if (domainIndex > 0) {
                        System.arraycopy(periodBytes, 0, byteBuffer, byteBufferIndex, periodBytes.length);
                        byteBufferIndex += periodBytes.length;
                    }
                    byte[] domainBytes = domains[domainIndex].getBytes();
                    int domainLength = domainBytes.length;
                    System.arraycopy(domainBytes, 0, byteBuffer, byteBufferIndex, domainLength);
                    byteBufferIndex += domainLength;
                }
                // Is the TLD one that we can encode?
                if (domains.length > 1) {
                    String tld = "." + domains[domains.length - 1];
                    String slash = urlMatcher.group(EDDYSTONE_URL_SLASH_GROUP);
                    String encodableTLDCandidate = (slash == null ? tld : tld + slash);
                    byte encodedTLDByte = encodedByteForTopLevelDomain(encodableTLDCandidate);
                    if (encodedTLDByte != TLD_NOT_ENCODABLE) {
                        byteBuffer[byteBufferIndex++] = encodedTLDByte;
                        consumedSlash = (slash != null);
                    } else {
                        byte[] tldBytes = tld.getBytes();
                        int tldLength = tldBytes.length;
                        System.arraycopy(tldBytes, 0, byteBuffer, byteBufferIndex, tldLength);
                        byteBufferIndex += tldLength;
                    }
                }
            }
            // Optional slash.
            if (!consumedSlash) {
                String slash = urlMatcher.group(EDDYSTONE_URL_SLASH_GROUP);
                if (slash != null) {
                    int slashLength = slash.length();
                    System.arraycopy(slash.getBytes(), 0, byteBuffer, byteBufferIndex, slashLength);
                    byteBufferIndex += slashLength;
                }
            }
            // Path.
            String path = urlMatcher.group(EDDYSTONE_URL_PATH_GROUP);
            if (path != null) {
                int pathLength = path.length();
                System.arraycopy(path.getBytes(), 0, byteBuffer, byteBufferIndex, pathLength);
                byteBufferIndex += pathLength;
            }
            // Copy the result.
            compressedBytes = new byte[byteBufferIndex];
            System.arraycopy(byteBuffer, 0, compressedBytes, 0, compressedBytes.length);
        } else {
            throw new MalformedURLException();
        }
    } else {
        throw new MalformedURLException();
    }
    return compressedBytes;
}
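The byte layout produced above follows the published Eddystone-URL encoding: one scheme-prefix byte, the raw hostname characters, optionally a single expansion byte for a recognizable TLD (which may swallow the following slash), then the remaining path characters verbatim. A worked example, assuming the standard Eddystone code points (0x00 for "http://www." in the prefix table, and 0x00 for ".com/" in the separate expansion table):

// "http://www.example.com/home" would compress to:
//   0x00                          scheme prefix for "http://www."
//   'e' 'x' 'a' 'm' 'p' 'l' 'e'   hostname bytes, copied as-is
//   0x00                          TLD expansion for ".com/" (slash consumed)
//   'h' 'o' 'm' 'e'               remaining path bytes, copied as-is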
@Test
public void testCompressWithDotCoTLD() throws MalformedURLException {
    String testURL = "http://google.co";
    byte[] expectedBytes = {0x02, 'g', 'o', 'o', 'g', 'l', 'e', '.', 'c', 'o'};
    assertTrue(Arrays.equals(expectedBytes, UrlBeaconUrlCompressor.compress(testURL)));
}
@Override public ConfigDef config() { return CONFIG_DEF; }
@Test public void testConfig() { TopicNameMatches<SourceRecord> predicate = new TopicNameMatches<>(); predicate.config().validate(Collections.singletonMap("pattern", "my-prefix-.*")); List<ConfigValue> configs = predicate.config().validate(Collections.singletonMap("pattern", "*")); List<String> errorMsgs = configs.get(0).errorMessages(); assertEquals(1, errorMsgs.size()); assertTrue(errorMsgs.get(0).contains("Invalid regex")); }
@Override public String service() { // MethodDescriptor.getServiceName() is not in our floor version: gRPC 1.2 return GrpcParser.service(methodDescriptor.getFullMethodName()); }
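GrpcParser.service is not shown here; a minimal sketch of the likely behavior, given that gRPC full method names take the form "package.Service/Method" (the test below expects "helloworld.Greeter" from "helloworld.Greeter/SayHello") — the fallback for a missing slash is an assumption:

static String service(String fullMethodName) {
    // Everything before the last '/' is the fully-qualified service name.
    int slash = fullMethodName.lastIndexOf('/');
    return slash == -1 ? fullMethodName : fullMethodName.substring(0, slash);
}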
@Test void service() { assertThat(request.service()).isEqualTo("helloworld.Greeter"); }
@VisibleForTesting public ProcessContinuation run( RestrictionTracker<OffsetRange, Long> tracker, OutputReceiver<PartitionRecord> receiver, ManualWatermarkEstimator<Instant> watermarkEstimator, InitialPipelineState initialPipelineState) throws Exception { LOG.debug("DNP: Watermark: " + watermarkEstimator.getState()); LOG.debug("DNP: CurrentTracker: " + tracker.currentRestriction().getFrom()); if (tracker.currentRestriction().getFrom() == 0L) { if (!tracker.tryClaim(0L)) { LOG.error( "Could not claim initial DetectNewPartition restriction. No partitions are outputted."); return ProcessContinuation.stop(); } watermarkEstimator.setWatermark(initialPipelineState.getStartTime()); if (initialPipelineState.isResume()) { resumeFromPreviousPipelineAction.run(receiver); } else { generateInitialPartitionsAction.run(receiver, initialPipelineState.getStartTime()); } return ProcessContinuation.resume(); } // Create a new partition reconciler every run to reset the state each time. partitionReconciler = new PartitionReconciler(metadataTableDao, metrics); orphanedMetadataCleaner = new OrphanedMetadataCleaner(); // Calculating the new value of watermark is a resource intensive process. We have to do a full // scan of the metadata table and then ensure we're not missing partitions and then calculate // the low watermark. This is usually a fairly fast process even with thousands of partitions. // However, sometimes this may take so long that the runner checkpoints before the watermark is // calculated. Because the checkpoint takes place before tryClaim, this forces the DoFn to // restart, wasting the resources spent calculating the watermark. On restart, we will try to // calculate the watermark again. The problem causing the slow watermark calculation can persist // leading to a crash loop. In order to ensure we persist the calculated watermark, we calculate // the watermark after successful tryClaim. Then we write to the metadata table the new // watermark. On the start of each run we read the watermark and update the DoFn's watermark. DetectNewPartitionsState detectNewPartitionsState = metadataTableDao.readDetectNewPartitionsState(); if (detectNewPartitionsState != null) { watermarkEstimator.setWatermark(detectNewPartitionsState.getWatermark()); } // Terminate if endTime <= watermark that means all partitions have read up to or beyond // watermark. We no longer need to manage splits and merges, we can terminate. if (endTime != null && !watermarkEstimator.currentWatermark().isBefore(endTime)) { tracker.tryClaim(tracker.currentRestriction().getTo()); return ProcessContinuation.stop(); } if (!tracker.tryClaim(tracker.currentRestriction().getFrom())) { LOG.warn("DNP: Checkpointing, stopping this run: " + tracker.currentRestriction()); return ProcessContinuation.stop(); } // Read StreamPartitions to calculate watermark. List<StreamPartitionWithWatermark> streamPartitionsWithWatermark = null; if (shouldUpdateWatermark(tracker.currentRestriction().getFrom(), detectNewPartitionsState)) { streamPartitionsWithWatermark = metadataTableDao.readStreamPartitionsWithWatermark(); } // Process NewPartitions and track the ones successfully outputted. 
List<NewPartition> newPartitions = metadataTableDao.readNewPartitions(); List<ByteStringRange> outputtedNewPartitions = new ArrayList<>(); for (NewPartition newPartition : newPartitions) { if (processNewPartitionsAction.processNewPartition(newPartition, receiver)) { outputtedNewPartitions.add(newPartition.getPartition()); } else if (streamPartitionsWithWatermark != null) { // streamPartitionsWithWatermark is not null on runs that we update watermark. We only run // reconciliation when we update watermark. Only add incompleteNewPartitions if // reconciliation is being run partitionReconciler.addIncompleteNewPartitions(newPartition); orphanedMetadataCleaner.addIncompleteNewPartitions(newPartition); } } // Process the watermark using read StreamPartitions and NewPartitions. if (streamPartitionsWithWatermark != null) { Optional<Instant> maybeWatermark = getNewWatermark(streamPartitionsWithWatermark, newPartitions); maybeWatermark.ifPresent(metadataTableDao::updateDetectNewPartitionWatermark); // Only start reconciling after the pipeline has been running for a while. if (tracker.currentRestriction().getFrom() > 50) { // Using NewPartitions and StreamPartitions, evaluate partitions that are possibly not being // streamed. This isn't perfect because there may be partitions moving between // StreamPartitions and NewPartitions while scanning the metadata table. Also, this does not // include NewPartitions marked as deleted from a previous DNP run not yet processed by // RCSP. List<ByteStringRange> existingPartitions = streamPartitionsWithWatermark.stream() .map(StreamPartitionWithWatermark::getPartition) .collect(Collectors.toList()); existingPartitions.addAll(outputtedNewPartitions); List<ByteStringRange> missingStreamPartitions = getMissingPartitionsFromEntireKeySpace(existingPartitions); orphanedMetadataCleaner.addMissingPartitions(missingStreamPartitions); partitionReconciler.addMissingPartitions(missingStreamPartitions); processReconcilerPartitions( receiver, watermarkEstimator, initialPipelineState.getStartTime()); cleanUpOrphanedMetadata(); } } return ProcessContinuation.resume().withResumeDelay(Duration.millis(100)); }
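The shouldUpdateWatermark check is not shown. Based on the comment in the test below ("We update watermark every 2 iterations only if it's been more than 10s since the last update"), a plausible sketch — the state accessor name is hypothetical and the real implementation may differ:

private boolean shouldUpdateWatermark(long restrictionFrom, DetectNewPartitionsState state) {
    // Hypothetical: only attempt an update on every other restriction claim.
    if (restrictionFrom % 2 != 0) {
        return false;
    }
    // Hypothetical accessor: skip if the stored watermark was refreshed within the last 10s.
    return state == null
            || state.getWatermarkLastUpdated()
                    .isBefore(Instant.now().minus(Duration.standardSeconds(10)));
}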
@Test public void testDoNotUpdateWatermarkLessThan10s() throws Exception { // We update watermark every 2 iterations only if it's been more than 10s since the last update. OffsetRange offsetRange = new OffsetRange(2, Long.MAX_VALUE); when(tracker.currentRestriction()).thenReturn(offsetRange); when(tracker.tryClaim(offsetRange.getFrom())).thenReturn(true); assertEquals( DoFn.ProcessContinuation.resume().withResumeDelay(Duration.millis(100)), action.run( tracker, receiver, watermarkEstimator, new InitialPipelineState(startTime, false))); assertEquals(startTime, watermarkEstimator.currentWatermark()); assertNull(metadataTableDao.readDetectNewPartitionsState()); // Manually set the watermark of DNP to start time with a timestamp of 1s prior. RowMutation rowMutation = RowMutation.create( MetadataTableAdminDao.DEFAULT_METADATA_TABLE_NAME, metadataTableDao .getChangeStreamNamePrefix() .concat(MetadataTableAdminDao.DETECT_NEW_PARTITION_SUFFIX)) .setCell( MetadataTableAdminDao.CF_WATERMARK, MetadataTableAdminDao.QUALIFIER_DEFAULT, Instant.now().minus(Duration.standardSeconds(1)).getMillis() * 1000L, startTime.getMillis()); dataClient.mutateRow(rowMutation); // Create a partition covering the entire keyspace with watermark after endTime. ByteStringRange partition1 = ByteStringRange.create("", ""); Instant watermark1 = endTime.plus(Duration.millis(100)); PartitionRecord partitionRecord1 = new PartitionRecord( partition1, watermark1, UniqueIdGenerator.getNextId(), watermark1, Collections.emptyList(), null); metadataTableDao.lockAndRecordPartition(partitionRecord1); // Watermark doesn't get updated because the last time watermark was updated was less than 10s. assertEquals( DoFn.ProcessContinuation.resume().withResumeDelay(Duration.millis(100)), action.run( tracker, receiver, watermarkEstimator, new InitialPipelineState(startTime, false))); assertEquals(startTime, watermarkEstimator.currentWatermark()); assertEquals(startTime, metadataTableDao.readDetectNewPartitionsState().getWatermark()); // On the 2nd run, watermark estimator is still the same since watermark isn't updated. assertEquals( DoFn.ProcessContinuation.resume().withResumeDelay(Duration.millis(100)), action.run( tracker, receiver, watermarkEstimator, new InitialPipelineState(startTime, false))); assertEquals(startTime, watermarkEstimator.currentWatermark()); }
public static void mergeParams( Map<String, ParamDefinition> params, Map<String, ParamDefinition> paramsToMerge, MergeContext context) { if (paramsToMerge == null) { return; } Stream.concat(params.keySet().stream(), paramsToMerge.keySet().stream()) .forEach( name -> { ParamDefinition paramToMerge = paramsToMerge.get(name); if (paramToMerge == null) { return; } if (paramToMerge.getType() == ParamType.MAP && paramToMerge.isLiteral()) { Map<String, ParamDefinition> baseMap = mapValueOrEmpty(params, name); Map<String, ParamDefinition> toMergeMap = mapValueOrEmpty(paramsToMerge, name); mergeParams( baseMap, toMergeMap, MergeContext.copyWithParentMode( context, params.getOrDefault(name, paramToMerge).getMode())); params.put( name, buildMergedParamDefinition( name, paramToMerge, params.get(name), context, baseMap)); } else if (paramToMerge.getType() == ParamType.STRING_MAP && paramToMerge.isLiteral()) { Map<String, String> baseMap = stringMapValueOrEmpty(params, name); Map<String, String> toMergeMap = stringMapValueOrEmpty(paramsToMerge, name); baseMap.putAll(toMergeMap); params.put( name, buildMergedParamDefinition( name, paramToMerge, params.get(name), context, baseMap)); } else { params.put( name, buildMergedParamDefinition( name, paramToMerge, params.get(name), context, paramToMerge.getValue())); } }); }
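To make the three merge branches concrete, here is their behavior on small inputs (shapes abbreviated; the full ParamDefinition JSON forms appear in the test below):

// Literal MAP + MAP: merged recursively, key by key, so sibling keys survive.
//   base {m: {a: "1"}}   + toMerge {m: {b: "2"}}    -> {m: {a: "1", b: "2"}}
// Literal STRING_MAP + STRING_MAP: flat putAll, so toMerge wins on key collisions.
//   base {s: {x: "old"}} + toMerge {s: {x: "new"}}  -> {s: {x: "new"}}
// Everything else: the merged definition takes toMerge's value outright.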
@Test public void testMergeNestedMapNoOverwrite() throws JsonProcessingException { Map<String, ParamDefinition> allParams = parseParamDefMap( "{'tomergemap': {'type': 'MAP', 'source': 'SYSTEM', 'value': {'tomerge1': {'type': 'STRING','value': 'hello', 'meta': {'source': 'DEFINITION'}}}}}"); Map<String, ParamDefinition> paramsToMerge = parseParamDefMap( "{'tomergemap': {'type': 'MAP', 'value': {'tomerge2':{'type': 'STRING', 'value': 'goodbye'}}}}"); ParamsMergeHelper.mergeParams(allParams, paramsToMerge, definitionContext); assertEquals(1, allParams.size()); MapParamDefinition tomergemap = allParams.get("tomergemap").asMapParamDef(); assertEquals("hello", tomergemap.getValue().get("tomerge1").getValue()); assertEquals( ParamSource.DEFINITION, tomergemap.getValue().get("tomerge1").asStringParamDef().getSource()); assertEquals("goodbye", tomergemap.getValue().get("tomerge2").getValue()); assertEquals( ParamSource.DEFINITION, tomergemap.getValue().get("tomerge2").asStringParamDef().getSource()); // update to definition since map was merged assertEquals(ParamSource.DEFINITION, tomergemap.getSource()); }
@SuppressWarnings("unchecked") public static <T> T[] distinct(T[] array) { if (isEmpty(array)) { return array; } final Set<T> set = new LinkedHashSet<>(array.length, 1); Collections.addAll(set, array); return toArray(set, (Class<T>) getComponentType(array)); }
@Test public void distinctTest() { String[] array = {"aa", "bb", "cc", "dd", "bb", "dd"}; String[] distinct = ArrayUtil.distinct(array); assertArrayEquals(new String[]{"aa", "bb", "cc", "dd"}, distinct); }
public static String generateRandomAlphanumericPassword(int length) { char[][] pairs = {{'a', 'z'}, {'A', 'Z'}, {'0', '9'}}; RandomStringGenerator pwdGenerator = new RandomStringGenerator.Builder() .usingRandom(LazySecureRandom.INSTANCE::nextInt) .withinRange(pairs) .build(); return pwdGenerator.generate(length); }
@Test public void testGenerateRandomAlphanumericPassword20() { assertThat(JOrphanUtils.generateRandomAlphanumericPassword(20), Matchers.matchesPattern("[A-Za-z0-9]{20}")); }
@Override public Serde<GenericKey> create( final FormatInfo format, final PersistenceSchema schema, final KsqlConfig ksqlConfig, final Supplier<SchemaRegistryClient> schemaRegistryClientFactory, final String loggerNamePrefix, final ProcessingLogContext processingLogContext, final Optional<TrackedCallback> tracker ) { return createInner( format, schema, ksqlConfig, schemaRegistryClientFactory, loggerNamePrefix, processingLogContext, tracker ); }
@Test
public void shouldReturnTimeWindowedSerdeForNonSessionWindowed() {
    // When:
    final Serde<Windowed<GenericKey>> result = factory
        .create(format, TIMED_WND, schema, config, srClientFactory, LOGGER_PREFIX, processingLogCxt, Optional.empty());

    // Then:
    assertThat(result, is(instanceOf(TimeWindowedSerde.class)));
}
public static byte[] checkPassword(String passwdString) {
    if (Strings.isNullOrEmpty(passwdString)) {
        return EMPTY_PASSWORD;
    }
    byte[] passwd;
    passwdString = passwdString.toUpperCase();
    passwd = passwdString.getBytes(StandardCharsets.UTF_8);
    if (passwd.length != SCRAMBLE_LENGTH_HEX_LENGTH || passwd[0] != PVERSION41_CHAR) {
        throw ErrorReportException.report(ErrorCode.ERR_PASSWD_LENGTH, 41);
    }
    for (int i = 1; i < passwd.length; ++i) {
        if (!((passwd[i] <= '9' && passwd[i] >= '0') || (passwd[i] >= 'A' && passwd[i] <= 'F'))) {
            throw ErrorReportException.report(ErrorCode.ERR_PASSWD_LENGTH, 41);
        }
    }
    return passwd;
}
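What this validates is the classic MySQL 4.1 password hash format: '*' followed by 40 uppercase hex digits, i.e. SHA-1 applied twice to the plaintext. A minimal sketch of producing such a string with only the JDK (not code from this codebase):

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

static String makeScrambledPassword(String plain) throws NoSuchAlgorithmException {
    MessageDigest sha1 = MessageDigest.getInstance("SHA-1");
    byte[] stage2 = sha1.digest(sha1.digest(plain.getBytes(StandardCharsets.UTF_8)));
    StringBuilder sb = new StringBuilder("*");
    for (byte b : stage2) {
        sb.append(String.format("%02X", b)); // two uppercase hex digits per byte
    }
    return sb.toString(); // e.g. "*2470C0C06DEE42FD1618BB99005ADCA2EC9D1E19" for "password"
}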
@Test(expected = ErrorReportException.class)
public void testCheckPasswdFail2() {
    Assert.assertNotNull(MysqlPassword.checkPassword("*9A6EC51164108A8D3DA3BE3F35A56F6499B6FC32"));
    MysqlPassword.checkPassword("*9A6EC51164108A8D3DA3BE3F35A56F6499B6FC3H");
    Assert.fail("No exception thrown");
}
@Override public boolean canDeserialize(String topic, Target type) { return topic.equals(TOPIC); }
@Test void canOnlyDeserializeConsumerOffsetsTopic() { var serde = new ConsumerOffsetsSerde(); assertThat(serde.canDeserialize(ConsumerOffsetsSerde.TOPIC, Serde.Target.KEY)).isTrue(); assertThat(serde.canDeserialize(ConsumerOffsetsSerde.TOPIC, Serde.Target.VALUE)).isTrue(); assertThat(serde.canDeserialize("anyOtherTopic", Serde.Target.KEY)).isFalse(); assertThat(serde.canDeserialize("anyOtherTopic", Serde.Target.VALUE)).isFalse(); }
@Override public TransactionRuleConfiguration build() { return new TransactionRuleConfiguration(TransactionType.LOCAL.name(), null, new Properties()); }
@Test void assertBuild() { TransactionRuleConfiguration actual = new DefaultTransactionRuleConfigurationBuilder().build(); assertThat(actual.getDefaultType(), is(TransactionType.LOCAL.name())); assertNull(actual.getProviderType()); assertThat(actual.getProps(), is(new Properties())); }
public static void validateTableName(TableConfig tableConfig) { String tableName = tableConfig.getTableName(); int dotCount = StringUtils.countMatches(tableName, '.'); if (dotCount > 1) { throw new IllegalStateException("Table name: '" + tableName + "' containing more than one '.' is not allowed"); } if (StringUtils.containsWhitespace(tableName)) { throw new IllegalStateException("Table name: '" + tableName + "' containing space is not allowed"); } }
@Test
public void testTableName() {
    String[] malformedTableNames = {"test.test.table", "test table"};
    for (String tableName : malformedTableNames) {
        TableConfig tableConfig = new TableConfigBuilder(TableType.OFFLINE).setTableName(tableName).build();
        try {
            TableConfigUtils.validateTableName(tableConfig);
            Assert.fail("Should fail for malformed table name : " + tableName);
        } catch (IllegalStateException e) {
            // expected
        }
    }
    String[] allowedTableNames = {"test.table", "testTable"};
    for (String tableName : allowedTableNames) {
        TableConfig tableConfig = new TableConfigBuilder(TableType.OFFLINE).setTableName(tableName).build();
        TableConfigUtils.validateTableName(tableConfig);
    }
}
public static void batchSetInstanceIdIfEmpty(List<Instance> instances, String groupedServiceName) { if (null != instances) { for (Instance instance : instances) { setInstanceIdIfEmpty(instance, groupedServiceName); } } }
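The per-instance helper setInstanceIdIfEmpty is not shown. A hypothetical sketch of what it plausibly does — derive a deterministic id from the instance's coordinates when none is set; the id format here is an assumption, not necessarily what Nacos actually generates:

static void setInstanceIdIfEmpty(Instance instance, String groupedServiceName) {
    if (instance != null && (instance.getInstanceId() == null || instance.getInstanceId().isEmpty())) {
        // Assumed format: ip#port#clusterName#groupedServiceName
        instance.setInstanceId(instance.getIp() + "#" + instance.getPort() + "#"
                + instance.getClusterName() + "#" + groupedServiceName);
    }
}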
@Test void testBatchSetInstanceIdIfEmpty() { final List<Instance> instances = new ArrayList<>(); Instance instance1 = new Instance(); instance1.setServiceName("test"); Instance instance2 = new Instance(); instance2.setServiceName("test"); Instance instance3 = new Instance(); instance3.setServiceName("test"); instances.add(instance1); instances.add(instance2); instances.add(instance3); InstanceUtil.batchSetInstanceIdIfEmpty(instances, "test"); assertNotNull(instance1.getInstanceId()); assertNotNull(instance2.getInstanceId()); assertNotNull(instance3.getInstanceId()); }
@Bean @ConfigurationProperties(prefix = "shenyu.sync.nacos") public NacosConfig nacosConfig() { return new NacosConfig(); }
@Test public void nacosConfigTest() { assertNotNull(nacosConfig); }
public void deleteRole(String role, String userName) { rolePersistService.deleteRole(role, userName); }
@Test void deleteRole() { try { nacosRoleService.deleteRole("role-admin"); } catch (Exception e) { assertNull(e); } }