focal_method: string (length 13 to 60.9k)
test_case: string (length 25 to 109k)
public static boolean isNullOrEmpty(Collection<?> collection) { return collection == null || collection.isEmpty(); }
@Test public void testIsNullOrEmpty() { assertTrue(Tools.isNullOrEmpty(null)); assertTrue(Tools.isNullOrEmpty(Collections.emptyList())); }
public void forEachNormalised(BiConsumer<? super String, ? super String> entryConsumer) { for (int i = 0; i < size(); i++) { entryConsumer.accept(name(i), value(i)); } }
@Test void forEachNormalised() { Headers headers = new Headers(); headers.add("Via", "duct"); headers.add("Cookie", "this=that"); headers.add("Cookie", "frizzle=Frazzle"); Map<String, List<String>> result = new LinkedHashMap<>(); headers.forEachNormalised((k, v) -> result.computeIfAbsent(k, discard -> new ArrayList<>()).add(v)); Truth.assertThat(result) .containsExactly( "via", Collections.singletonList("duct"), "cookie", Arrays.asList("this=that", "frizzle=Frazzle")) .inOrder(); }
public String decode(byte[] val) { return codecs[0].decode(val, 0, val.length); }
@Test public void testDecodeKoreanPersonName() { assertEquals(KOREAN_PERSON_NAME, ksx1001().decode(KOREAN_PERSON_NAME_BYTES)); }
public static String next() { return next(false); }
@Test @Disabled public void nextTest() { Console.log(ObjectId.next()); }
@Override public boolean find(final Path file, final ListProgressListener listener) throws BackgroundException { if(file.isRoot()) { return true; } try { attributes.find(file, listener); return true; } catch(NotfoundException e) { return false; } catch(AccessDeniedException e) { // Object is inaccessible to current user, but does exist. return true; } }
@Test public void testFindCommonPrefix() throws Exception { final Path container = new Path("cyberduck-test-eu", EnumSet.of(Path.Type.directory, Path.Type.volume)); assertTrue(new GoogleStorageFindFeature(session).find(container)); final String prefix = new AlphanumericRandomStringService().random(); final Path test = new GoogleStorageTouchFeature(session).touch( new Path(new Path(container, prefix, EnumSet.of(Path.Type.directory)), new AsciiRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus()); assertTrue(new GoogleStorageFindFeature(session).find(test)); assertFalse(new GoogleStorageFindFeature(session).find(new Path(test.getAbsolute(), EnumSet.of(Path.Type.directory)))); assertTrue(new GoogleStorageFindFeature(session).find(new Path(container, prefix, EnumSet.of(Path.Type.directory)))); assertTrue(new GoogleStorageFindFeature(session).find(new Path(container, prefix, EnumSet.of(Path.Type.directory, Path.Type.placeholder)))); assertTrue(new GoogleStorageObjectListService(session).list(new Path(container, prefix, EnumSet.of(Path.Type.directory)), new DisabledListProgressListener()).contains(test)); new GoogleStorageDeleteFeature(session).delete(Collections.singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback()); assertFalse(new GoogleStorageFindFeature(session).find(test)); assertFalse(new GoogleStorageFindFeature(session).find(new Path(container, prefix, EnumSet.of(Path.Type.directory)))); final PathCache cache = new PathCache(1); final Path directory = new Path(container, prefix, EnumSet.of(Path.Type.directory, Path.Type.placeholder)); assertFalse(new CachingFindFeature(session, cache, new GoogleStorageFindFeature(session)).find(directory)); assertFalse(cache.isCached(directory)); assertFalse(new GoogleStorageFindFeature(session).find(new Path(container, prefix, EnumSet.of(Path.Type.directory, Path.Type.placeholder)))); }
@VisibleForTesting void addQueues(String args, SchedConfUpdateInfo updateInfo) { if (args == null) { return; } ArrayList<QueueConfigInfo> queueConfigInfos = new ArrayList<>(); for (String arg : args.split(";")) { queueConfigInfos.add(getQueueConfigInfo(arg)); } updateInfo.setAddQueueInfo(queueConfigInfos); }
@Test(timeout = 10000) public void testAddQueues() { SchedConfUpdateInfo schedUpdateInfo = new SchedConfUpdateInfo(); cli.addQueues("root.a:a1=aVal1,a2=aVal2,a3=", schedUpdateInfo); Map<String, String> paramValues = new HashMap<>(); List<QueueConfigInfo> addQueueInfo = schedUpdateInfo.getAddQueueInfo(); paramValues.put("a1", "aVal1"); paramValues.put("a2", "aVal2"); paramValues.put("a3", null); validateQueueConfigInfo(addQueueInfo, 0, "root.a", paramValues); schedUpdateInfo = new SchedConfUpdateInfo(); cli.addQueues("root.b:b1=bVal1;root.c:c1=cVal1", schedUpdateInfo); addQueueInfo = schedUpdateInfo.getAddQueueInfo(); assertEquals(2, addQueueInfo.size()); paramValues.clear(); paramValues.put("b1", "bVal1"); validateQueueConfigInfo(addQueueInfo, 0, "root.b", paramValues); paramValues.clear(); paramValues.put("c1", "cVal1"); validateQueueConfigInfo(addQueueInfo, 1, "root.c", paramValues); }
@Override public Set<String> initialize() { try { checkpointFileCache.putAll(checkpointFile.read()); } catch (final IOException e) { throw new StreamsException("Failed to read checkpoints for global state globalStores", e); } final Set<String> changelogTopics = new HashSet<>(); for (final StateStore stateStore : topology.globalStateStores()) { final String sourceTopic = storeToChangelogTopic.get(stateStore.name()); changelogTopics.add(sourceTopic); stateStore.init((StateStoreContext) globalProcessorContext, stateStore); } // make sure each topic-partition from checkpointFileCache is associated with a global state store checkpointFileCache.keySet().forEach(tp -> { if (!changelogTopics.contains(tp.topic())) { log.error( "Encountered a topic-partition in the global checkpoint file not associated with any global" + " state store, topic-partition: {}, checkpoint file: {}. If this topic-partition is no longer valid," + " an application reset and state store directory cleanup will be required.", tp.topic(), checkpointFile ); throw new StreamsException("Encountered a topic-partition not associated with any global state store"); } }); return Collections.unmodifiableSet(globalStoreNames); }
@Test public void shouldThrowStreamsExceptionForOldTopicPartitions() throws IOException { final HashMap<TopicPartition, Long> expectedOffsets = new HashMap<>(); expectedOffsets.put(t1, 1L); expectedOffsets.put(t2, 1L); expectedOffsets.put(t3, 1L); expectedOffsets.put(t4, 1L); // add an old topic (a topic not associated with any global state store) final HashMap<TopicPartition, Long> startOffsets = new HashMap<>(expectedOffsets); final TopicPartition tOld = new TopicPartition("oldTopic", 1); startOffsets.put(tOld, 1L); // start with a checkpoint file will all topic-partitions: expected and old (not // associated with any global state store). final OffsetCheckpoint checkpoint = new OffsetCheckpoint(checkpointFile); checkpoint.write(startOffsets); // initialize will throw exception final StreamsException e = assertThrows(StreamsException.class, () -> stateManager.initialize()); assertThat(e.getMessage(), equalTo("Encountered a topic-partition not associated with any global state store")); }
public static <T extends TypedSPI> T getService(final Class<T> serviceInterface, final Object type) { return getService(serviceInterface, type, new Properties()); }
@Test void assertGetServiceWithoutProperties() { assertThat(TypedSPILoader.getService(TypedSPIFixture.class, "TYPED.FIXTURE"), instanceOf(TypedSPIFixtureImpl.class)); }
public void parseStepParameter( Map<String, Map<String, Object>> allStepOutputData, Map<String, Parameter> workflowParams, Map<String, Parameter> stepParams, Parameter param, String stepId) { parseStepParameter( allStepOutputData, workflowParams, stepParams, param, stepId, new HashSet<>()); }
@Test public void testParseStepParameterUsingParamsToGetWorkflowParams() { StringParameter bar = StringParameter.builder().name("bar").expression("params.get('foo') + '-1'").build(); paramEvaluator.parseStepParameter( Collections.emptyMap(), Collections.singletonMap("foo", StringParameter.builder().evaluatedResult("123").build()), Collections.emptyMap(), bar, "step1"); assertEquals("123-1", bar.getEvaluatedResult()); bar = StringParameter.builder().name("bar").expression("params['foo'] + '-1'").build(); paramEvaluator.parseStepParameter( Collections.emptyMap(), Collections.singletonMap("foo", StringParameter.builder().evaluatedResult("123").build()), Collections.emptyMap(), bar, "step1"); assertEquals("123-1", bar.getEvaluatedResult()); BooleanParameter bat = BooleanParameter.builder().name("bar").expression("params.get('bar') == null").build(); paramEvaluator.parseStepParameter( Collections.emptyMap(), Collections.singletonMap("foo", StringParameter.builder().evaluatedResult("123").build()), Collections.emptyMap(), bat, "step1"); assertTrue(bat.getEvaluatedResult()); bat = BooleanParameter.builder().name("bar").expression("params['bar'] != null").build(); paramEvaluator.parseStepParameter( Collections.emptyMap(), Collections.singletonMap("foo", StringParameter.builder().evaluatedResult("123").build()), Collections.emptyMap(), bat, "step1"); assertFalse(bat.getEvaluatedResult()); }
public PaginatedList<StreamDestinationFilterRuleDTO> findPaginatedForStream( String streamId, String queryString, Bson sort, int perPage, int page, Predicate<String> permissionSelector ) { final var query = parseQuery(queryString); return paginationHelper.filter(and(eq(FIELD_STREAM_ID, streamId), query)) .sort(sort) .perPage(perPage) .page(page, dto -> permissionSelector.test(dto.id())); }
@Test @MongoDBFixtures("StreamDestinationFilterServiceTest-2024-07-01-1.json") void findPaginatedForStream() { final var result = service.findPaginatedForStream("54e3deadbeefdeadbeef1000", "", Sorts.ascending("title"), 10, 1, id -> true); assertThat(result.delegate()).hasSize(3); }
static boolean isEndpointAvailable(String url) { return !RestClient.create(url, 1) .withRequestTimeoutSeconds(1) .withRetries(1) .withHeader("Metadata", "True") .get() .getBody() .isEmpty(); }
@Test public void isEndpointAvailable() { // given String endpoint = "/some-endpoint"; String url = String.format("http://localhost:%d%s", wireMockRule.port(), endpoint); stubFor(get(urlEqualTo(endpoint)).willReturn(aResponse().withStatus(HttpURLConnection.HTTP_OK).withBody("some-body"))); // when boolean isAvailable = AzureDiscoveryStrategyFactory.isEndpointAvailable(url); // then assertTrue(isAvailable); }
public StatementExecutorResponse execute( final ConfiguredStatement<? extends Statement> statement, final KsqlExecutionContext executionContext, final KsqlSecurityContext securityContext ) { final String commandRunnerWarningString = commandRunnerWarning.get(); if (!commandRunnerWarningString.equals("")) { throw new KsqlServerException("Failed to handle Ksql Statement." + System.lineSeparator() + commandRunnerWarningString); } final InjectorWithSideEffects injector = InjectorWithSideEffects.wrap( injectorFactory.apply(executionContext, securityContext.getServiceContext())); final ConfiguredStatementWithSideEffects<?> injectedWithSideEffects = injector.injectWithSideEffects(statement); try { return executeInjected( injectedWithSideEffects.getStatement(), statement, executionContext, securityContext); } catch (Exception e) { injector.revertSideEffects(injectedWithSideEffects); throw e; } }
@Test public void shouldAbortOnError_ProducerFencedException() { // When: doThrow(new ProducerFencedException("Error!")).when(transactionalProducer).commitTransaction(); final KsqlStatementException e = assertThrows( KsqlStatementException.class, () -> distributor.execute(CONFIGURED_STATEMENT, executionContext, securityContext) ); assertThat(e.getMessage(), containsString("Could not write the statement into the command topic.")); assertThat(e.getUnloggedMessage(), containsString("Could not write the statement " + "'statement' into the command topic.")); assertThat(e.getSqlStatement(), containsString("statement")); // Then: verify(queue).abortCommand(IDGEN.getCommandId(CONFIGURED_STATEMENT.getStatement())); }
@Override public ConsumerConnection examineConsumerConnectionInfo( String consumerGroup) throws InterruptedException, MQBrokerException, RemotingException, MQClientException { return defaultMQAdminExtImpl.examineConsumerConnectionInfo(consumerGroup); }
@Test public void testExamineConsumerConnectionInfo() throws InterruptedException, RemotingException, MQClientException, MQBrokerException { ConsumerConnection consumerConnection = defaultMQAdminExt.examineConsumerConnectionInfo("default-consumer-group"); assertThat(consumerConnection.getConsumeType()).isEqualTo(ConsumeType.CONSUME_PASSIVELY); assertThat(consumerConnection.getMessageModel()).isEqualTo(MessageModel.CLUSTERING); consumerConnection = defaultMQAdminExt.examineConsumerConnectionInfo("default-consumer-group", "127.0.0.1:10911"); assertThat(consumerConnection.getConsumeType()).isEqualTo(ConsumeType.CONSUME_PASSIVELY); assertThat(consumerConnection.getMessageModel()).isEqualTo(MessageModel.CLUSTERING); }
public static WriteRequest convertToWriteRequest(Log log) { return WriteRequest.newBuilder().setKey(log.getKey()).setGroup(log.getGroup()) .setData(log.getData()) .setType(log.getType()) .setOperation(log.getOperation()) .putAllExtendInfo(log.getExtendInfoMap()) .build(); }
@Test void testConvertToWriteRequest() { ByteString data = ByteString.copyFrom("data".getBytes()); Log log = Log.newBuilder().setKey("key").setGroup("group").setData(data).setOperation("o").putExtendInfo("k", "v").build(); WriteRequest writeRequest = ProtoMessageUtil.convertToWriteRequest(log); assertEquals(1, writeRequest.getExtendInfoCount()); assertEquals(data, writeRequest.getData()); assertEquals("key", writeRequest.getKey()); assertEquals("group", writeRequest.getGroup()); assertEquals("o", writeRequest.getOperation()); }
public static void insert( final UnsafeBuffer termBuffer, final int termOffset, final UnsafeBuffer packet, final int length) { if (0 == termBuffer.getInt(termOffset)) { termBuffer.putBytes(termOffset + HEADER_LENGTH, packet, HEADER_LENGTH, length - HEADER_LENGTH); termBuffer.putLong(termOffset + 24, packet.getLong(24)); termBuffer.putLong(termOffset + 16, packet.getLong(16)); termBuffer.putLong(termOffset + 8, packet.getLong(8)); termBuffer.putLongOrdered(termOffset, packet.getLong(0)); } }
@Test void shouldInsertLastFrameIntoBuffer() { final int frameLength = BitUtil.align(256, FRAME_ALIGNMENT); final int srcOffset = 0; final int tail = TERM_BUFFER_CAPACITY - frameLength; final int termOffset = tail; final UnsafeBuffer packet = new UnsafeBuffer(ByteBuffer.allocate(frameLength)); packet.putShort(typeOffset(srcOffset), (short)PADDING_FRAME_TYPE, LITTLE_ENDIAN); packet.putInt(srcOffset, frameLength, LITTLE_ENDIAN); TermRebuilder.insert(termBuffer, termOffset, packet, frameLength); verify(termBuffer).putBytes( tail + HEADER_LENGTH, packet, srcOffset + HEADER_LENGTH, frameLength - HEADER_LENGTH); }
@Override public void process(Exchange exchange) throws Exception { final String msg = exchange.getIn().getBody(String.class); final String sendTo = exchange.getIn().getHeader(IrcConstants.IRC_SEND_TO, String.class); if (connection == null || !connection.isConnected()) { reconnect(); } if (connection == null || !connection.isConnected()) { throw new RuntimeCamelException("Lost connection" + (connection == null ? "" : " to " + connection.getHost())); } if (msg != null) { if (isMessageACommand(msg)) { LOG.debug("Sending command: {}", msg); connection.send(msg); } else if (sendTo != null) { LOG.debug("Sending to: {} message: {}", sendTo, msg); connection.doPrivmsg(sendTo, msg); } else { for (IrcChannel channel : getEndpoint().getConfiguration().getChannelList()) { LOG.debug("Sending to: {} message: {}", channel, msg); connection.doPrivmsg(channel.getName(), msg); } } } }
@Test public void processTestException() { when(exchange.getIn()).thenReturn(message); when(message.getBody(String.class)).thenReturn("PART foo"); when(message.getHeader(IrcConstants.IRC_TARGET, String.class)).thenReturn("bottest"); when(connection.isConnected()).thenReturn(false); assertThrows(RuntimeCamelException.class, () -> producer.process(exchange)); }
public Optional<UserDto> authenticate(HttpRequest request) { return extractCredentialsFromHeader(request) .flatMap(credentials -> Optional.ofNullable(authenticate(credentials, request))); }
@Test public void authenticate_from_basic_http_header_with_password_containing_semi_colon() { String password = "!ascii-only:-)@"; when(request.getHeader(AUTHORIZATION_HEADER)).thenReturn("Basic " + toBase64(A_LOGIN + ":" + password)); when(credentialsAuthentication.authenticate(new Credentials(A_LOGIN, password), request, BASIC)).thenReturn(USER); underTest.authenticate(request); verify(credentialsAuthentication).authenticate(new Credentials(A_LOGIN, password), request, BASIC); verifyNoMoreInteractions(authenticationEvent); }
public String getGroup() { return group; }
@Test void testGetGroup() { String group = metadataOperation.getGroup(); assertNull(group); }
public static Writer writerForAppendable(Appendable appendable) { return appendable instanceof Writer ? (Writer) appendable : new AppendableWriter(appendable); }
@Test public void testWriterForAppendable() throws IOException { StringBuilder stringBuilder = new StringBuilder(); Writer writer = Streams.writerForAppendable(stringBuilder); writer.append('a'); writer.append('\u1234'); writer.append("test"); writer.append(null); // test custom null handling mandated by `append` writer.append("abcdef", 2, 4); writer.append(null, 1, 3); // test custom null handling mandated by `append` writer.append(','); writer.write('a'); writer.write('\u1234'); // Should only consider the 16 low-order bits writer.write(0x4321_1234); writer.append(','); writer.write("chars".toCharArray()); assertThrows(NullPointerException.class, () -> writer.write((char[]) null)); writer.write("chars".toCharArray(), 1, 2); assertThrows(NullPointerException.class, () -> writer.write((char[]) null, 1, 2)); writer.append(','); writer.write("string"); assertThrows(NullPointerException.class, () -> writer.write((String) null)); writer.write("string", 1, 2); assertThrows(NullPointerException.class, () -> writer.write((String) null, 1, 2)); String actualOutput = stringBuilder.toString(); assertThat(actualOutput).isEqualTo("a\u1234testnullcdul,a\u1234\u1234,charsha,stringtr"); writer.flush(); writer.close(); // flush() and close() calls should have had no effect assertThat(stringBuilder.toString()).isEqualTo(actualOutput); }
@UdafFactory(description = "Compute average of column with type Long.", aggregateSchema = "STRUCT<SUM bigint, COUNT bigint>") public static TableUdaf<Long, Struct, Double> averageLong() { return getAverageImplementation( 0L, STRUCT_LONG, (sum, newValue) -> sum.getInt64(SUM) + newValue, (sum, count) -> sum.getInt64(SUM) / count, (sum1, sum2) -> sum1.getInt64(SUM) + sum2.getInt64(SUM), (sum, valueToUndo) -> sum.getInt64(SUM) - valueToUndo); }
@Test public void shouldUndoSummedCountedValues() { final TableUdaf<Long, Struct, Double> udaf = AverageUdaf.averageLong(); Struct agg = udaf.initialize(); final Long[] values = new Long[] {1L, 1L, 1L, 1L, 1L}; for (final Long thisValue : values) { agg = udaf.aggregate(thisValue, agg); } agg = udaf.undo(1L, agg); assertThat(4L, equalTo(agg.getInt64(COUNT))); assertThat(4L, equalTo(agg.getInt64(SUM))); }
@Udf public Double random() { return Math.random(); }
@Test public void shouldReturnDistinctValueEachInvocation() { int capacity = 1000; final Set<Double> outputs = new HashSet<>(capacity); for (int i = 0; i < capacity; i++) { outputs.add(udf.random()); } assertThat(outputs, hasSize(capacity)); assertThat(outputs, everyItem(greaterThanOrEqualTo(0.0))); assertThat(outputs, everyItem(lessThan(1.0))); }
void regionUnfinished(SchedulingPipelinedRegion region) { for (ConsumerRegionGroupExecutionView executionView : executionViewByRegion.getOrDefault(region, Collections.emptySet())) { executionView.regionUnfinished(region); } }
@Test void testUnfinishedWrongRegion() { consumerRegionGroupExecutionViewMaintainer.regionUnfinished(producerRegion); assertThat(consumerRegionGroupExecutionView.isFinished()).isFalse(); }
public static String decode(InputStream in) { StringBuilder strBuild = new StringBuilder(); Collection<? extends Certificate> certificates = readCertificates(in); if (certificates != null) { for (Certificate cert : certificates) { CertificateManager certificateManager = new CertificateManager(cert); strBuild.append(certificateManager.generateText()); } } return strBuild.toString(); }
@Test public void decodeNotCertificateFile() throws IOException { try (InputStream in = new FileInputStream(emptyPath)) { String result = CertificateManager.decode(in); assertThat(result).isEmpty(); } }
void flush() { producer.flush(); }
@Test public void shouldForwardCallToFlush() { streamsProducerWithMock.flush(); verify(mockedProducer).flush(); }
@Override public int inferParallelism(Context dynamicParallelismContext) { FileEnumerator fileEnumerator; List<HiveTablePartition> partitions; if (dynamicFilterPartitionKeys != null) { fileEnumerator = new HiveSourceDynamicFileEnumerator.Provider( tablePath.getFullName(), dynamicFilterPartitionKeys, partitionBytes, hiveVersion, jobConfWrapper) .create(); if (dynamicParallelismContext.getDynamicFilteringInfo().isPresent()) { DynamicFilteringInfo dynamicFilteringInfo = dynamicParallelismContext.getDynamicFilteringInfo().get(); if (dynamicFilteringInfo instanceof DynamicFilteringEvent) { ((HiveSourceDynamicFileEnumerator) fileEnumerator) .setDynamicFilteringData( ((DynamicFilteringEvent) dynamicFilteringInfo).getData()); } } partitions = ((HiveSourceDynamicFileEnumerator) fileEnumerator).getFinalPartitions(); } else { fileEnumerator = getEnumeratorFactory().create(); partitions = ((HiveSourceFileEnumerator) fileEnumerator).getPartitions(); } return new HiveDynamicParallelismInferenceFactory( tablePath, jobConfWrapper.conf(), dynamicParallelismContext.getParallelismInferenceUpperBound()) .create() .infer( () -> HiveSourceFileEnumerator.getNumFiles( partitions, jobConfWrapper.conf()), () -> HiveSourceFileEnumerator.createInputSplits( 0, partitions, jobConfWrapper.conf(), true) .size()) .limit(limit); }
@Test void testDynamicParallelismInferenceWithFiltering() throws Exception { ObjectPath tablePath = new ObjectPath("default", "hiveTbl3"); createTable(tablePath, hiveCatalog, true); HiveSource<RowData> hiveSource = createHiveSourceWithPartition(tablePath, new Configuration(), -1, keys); DynamicParallelismInference.Context context = genDynamicParallelismContext(10, Arrays.asList(1, 2)); assertThat(hiveSource.inferParallelism(context)).isEqualTo(2); hiveCatalog.dropTable(tablePath, false); }
public void withLock(final List<String> e164s, final Runnable task, final Executor lockAcquisitionExecutor) { if (e164s.isEmpty()) { throw new IllegalArgumentException("List of e164s to lock must not be empty"); } final List<LockItem> lockItems = new ArrayList<>(e164s.size()); try { // Offload the acquire/release tasks to the dedicated lock acquisition executor. The lock client performs blocking // operations while holding locks which forces thread pinning when this method runs on a virtual thread. // https://github.com/awslabs/amazon-dynamodb-lock-client/issues/97 CompletableFuture.runAsync(() -> { for (final String e164 : e164s) { try { lockItems.add(lockClient.acquireLock(AcquireLockOptions.builder(e164) .withAcquireReleasedLocksConsistently(true) .build())); } catch (final InterruptedException e) { throw new CompletionException(e); } } }, lockAcquisitionExecutor).join(); task.run(); } finally { CompletableFuture.runAsync(() -> { for (final LockItem lockItem : lockItems) { lockClient.releaseLock(ReleaseLockOptions.builder(lockItem) .withBestEffort(true) .build()); } }, lockAcquisitionExecutor).join(); } }
@Test void withLockTaskThrowsException() throws InterruptedException { assertThrows(RuntimeException.class, () -> accountLockManager.withLock(List.of(FIRST_NUMBER, SECOND_NUMBER), () -> { throw new RuntimeException(); }, executor)); verify(lockClient, times(2)).acquireLock(any()); verify(lockClient, times(2)).releaseLock(any(ReleaseLockOptions.class)); }
@Override public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException { if (!(request instanceof HttpServletRequest)) { super.doFilter(request, response, chain); return; } final HttpServletRequest httpRequest = (HttpServletRequest) request; registerSessionIfNeeded(httpRequest); super.doFilter(request, response, chain); // if this is a logout, take the session destruction into account immediately unregisterSessionIfNeeded(httpRequest); }
@Test public void testWithRequestSessionIdValid() throws IOException, ServletException { final int sessionCount = SessionListener.getSessionCount(); final HttpServletRequest request = createNiceMock(HttpServletRequest.class); final HttpServletResponse response = createNiceMock(HttpServletResponse.class); final FilterChain chain = createNiceMock(FilterChain.class); expect(request.getRequestURI()).andReturn(CONTEXT_PATH + TEST_REQUEST).anyTimes(); expect(request.getContextPath()).andReturn(CONTEXT_PATH).anyTimes(); expect(request.isRequestedSessionIdValid()).andReturn(true).anyTimes(); expect(request.getLocale()).andReturn(Locale.FRANCE).anyTimes(); replay(request); replay(response); replay(chain); pluginMonitoringFilter.doFilter(request, response, chain); assertEquals("sessionCount", sessionCount, SessionListener.getSessionCount()); verify(request); verify(response); verify(chain); }
public static void setValue( Object object, Field field, String value ) { try { Method setMethod = getDeclaredMethod( object.getClass(), SET_PREFIX + StringUtils.capitalize( field.getName() ), String.class ); setMethod.invoke( object, value ); } catch ( NoSuchMethodException | InvocationTargetException | IllegalAccessException ignore ) { // ignore } }
@Test public void testSetValue() throws NoSuchFieldException { TestConnectionWithBucketsDetailsChild testConnectionDetails = new TestConnectionWithBucketsDetailsChild(); testConnectionDetails.setPassword( PASSWORD ); testConnectionDetails.setPassword3( PASSWORD3 ); EncryptUtils.setValue( testConnectionDetails, testConnectionDetails.getClass().getDeclaredField( "password3" ), PASSWORD2 ); Assert.assertEquals( testConnectionDetails.getPassword3(), PASSWORD2 ); }
@Override public boolean find(final Path file, final ListProgressListener listener) { return session.getClient().existsAndIsAccessible(file.getAbsolute()); }
@Test public void testFindFileNotFound() throws Exception { final MantaFindFeature f = new MantaFindFeature(session); assertFalse(f.find(new Path( new MantaAccountHomeInfo(session.getHost().getCredentials().getUsername(), session.getHost().getDefaultPath()).getAccountPrivateRoot(), UUID.randomUUID().toString(), EnumSet.of(Path.Type.file)))); }
public RuntimeOptionsBuilder parse(Class<?> clazz) { RuntimeOptionsBuilder args = new RuntimeOptionsBuilder(); for (Class<?> classWithOptions = clazz; hasSuperClass( classWithOptions); classWithOptions = classWithOptions.getSuperclass()) { CucumberOptions options = requireNonNull(optionsProvider).getOptions(classWithOptions); if (options != null) { addDryRun(options, args); addMonochrome(options, args); addTags(classWithOptions, options, args); addPlugins(options, args); addPublish(options, args); addName(options, args); addSnippets(options, args); addGlue(options, args); addFeatures(options, args); addObjectFactory(options, args); addUuidGenerator(options, args); } } addDefaultFeaturePathIfNoFeaturePathIsSpecified(args, clazz); addDefaultGlueIfNoOverridingGlueIsSpecified(args, clazz); return args; }
@Test void create_with_snippets() { RuntimeOptions runtimeOptions = parser().parse(Snippets.class).build(); assertThat(runtimeOptions.getSnippetType(), is(equalTo(SnippetType.CAMELCASE))); }
@Override public <T> T deserialize(byte[] data, Class<T> clazz) { return JacksonUtils.toObj(data, clazz); }
@Test @SuppressWarnings("checkstyle:linelength") void testDeserialize() { String example = "{\"adWeightMap\":{},\"defaultPushCacheMillis\":10000,\"clientBeatInterval\":5000,\"defaultCacheMillis\":3000,\"distroThreshold\":0.7,\"healthCheckEnabled\":true,\"autoChangeHealthCheckEnabled\":true,\"distroEnabled\":true,\"enableStandalone\":true,\"pushEnabled\":true,\"checkTimes\":3,\"httpHealthParams\":{\"max\":5000,\"min\":500,\"factor\":0.85},\"tcpHealthParams\":{\"max\":5000,\"min\":1000,\"factor\":0.75},\"mysqlHealthParams\":{\"max\":3000,\"min\":2000,\"factor\":0.65},\"incrementalList\":[],\"serverStatusSynchronizationPeriodMillis\":2000,\"serviceStatusSynchronizationPeriodMillis\":5000,\"disableAddIP\":false,\"sendBeatOnly\":false,\"lightBeatEnabled\":true,\"doubleWriteEnabled\":true,\"limitedUrlMap\":{},\"distroServerExpiredMillis\":10000,\"pushGoVersion\":\"0.1.0\",\"pushJavaVersion\":\"0.1.0\",\"pushPythonVersion\":\"0.4.3\",\"pushCVersion\":\"1.0.12\",\"pushCSharpVersion\":\"0.9.0\",\"enableAuthentication\":false,\"defaultInstanceEphemeral\":true,\"healthCheckWhiteList\":[],\"name\":\"00-00---000-NACOS_SWITCH_DOMAIN-000---00-00\"}"; SwitchDomain actual = serializer.deserialize(ByteUtils.toBytes(example), SwitchDomain.class); assertEquals(10000, actual.getDefaultPushCacheMillis()); assertEquals(5000, actual.getClientBeatInterval()); assertEquals(3000, actual.getDefaultCacheMillis()); assertTrue(actual.isDistroEnabled()); }
@Override public String toString() { if (columns.isEmpty()) { return ""; } StringJoiner result = new StringJoiner(", ", ", ", ""); columns.forEach(result::add); return result.toString(); }
@Test void assertToStringWithEmptyColumn() { assertThat(new InsertColumnsToken(0, Collections.emptyList()).toString(), is("")); }
public XATopicConnection xaTopicConnection(XATopicConnection connection) { return TracingXAConnection.create(connection, this); }
@Test void xaTopicConnection_wrapsInput() { assertThat(jmsTracing.xaTopicConnection(mock(XATopicConnection.class))) .isInstanceOf(TracingXAConnection.class); }
public static ThreadPoolExecutor newCachedThreadPool(int corePoolSize, int maximumPoolSize) { return new ThreadPoolExecutor(corePoolSize, maximumPoolSize, DateUtils.MILLISECONDS_PER_MINUTE, TimeUnit.MILLISECONDS, new SynchronousQueue<Runnable>()); }
@Test public void newCachedThreadPool3() throws Exception { BlockingQueue<Runnable> queue = new SynchronousQueue<Runnable>(); RejectedExecutionHandler handler = new RejectedExecutionHandler() { @Override public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) { } }; ThreadFactory factory = new NamedThreadFactory("xxx"); ThreadPoolExecutor executor = ThreadPoolUtils.newCachedThreadPool(10, 20, queue, factory, handler); Assert.assertEquals(executor.getCorePoolSize(), 10); Assert.assertEquals(executor.getMaximumPoolSize(), 20); Assert.assertEquals(executor.getQueue(), queue); Assert.assertEquals(executor.getThreadFactory(), factory); Assert.assertEquals(executor.getRejectedExecutionHandler(), handler); }
public T maxInitialLineLength(int value) { if (value <= 0) { throw new IllegalArgumentException("maxInitialLineLength must be strictly positive"); } this.maxInitialLineLength = value; return get(); }
@Test void maxInitialLineLength() { checkDefaultMaxInitialLineLength(conf); conf.maxInitialLineLength(123); assertThat(conf.maxInitialLineLength()).as("initial line length").isEqualTo(123); checkDefaultMaxHeaderSize(conf); checkDefaultMaxChunkSize(conf); checkDefaultValidateHeaders(conf); checkDefaultInitialBufferSize(conf); checkDefaultAllowDuplicateContentLengths(conf); }
@CanIgnoreReturnValue @SuppressWarnings("deprecation") // TODO(b/134064106): design an alternative to no-arg check() public final Ordered containsExactly() { return check().about(iterableEntries()).that(checkNotNull(actual).entries()).containsExactly(); }
@Test public void containsExactlyNoArg() { ImmutableMultimap<Integer, String> actual = ImmutableMultimap.of(); assertThat(actual).containsExactly(); assertThat(actual).containsExactly().inOrder(); expectFailureWhenTestingThat(ImmutableMultimap.of(42, "Answer", 42, "6x7")).containsExactly(); assertFailureKeys("expected to be empty", "but was"); }
public Map<String, Object> getKsqlStreamConfigProps(final String applicationId) { final Map<String, Object> map = new HashMap<>(getKsqlStreamConfigProps()); map.put( MetricCollectors.RESOURCE_LABEL_PREFIX + StreamsConfig.APPLICATION_ID_CONFIG, applicationId ); // Streams client metrics aren't used in Confluent deployment possiblyConfigureConfluentTelemetry(map); return Collections.unmodifiableMap(map); }
@Test public void shouldSetStreamsConfigAdminClientProperties() { final KsqlConfig ksqlConfig = new KsqlConfig( Collections.singletonMap(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, 3)); final Object result = ksqlConfig.getKsqlStreamConfigProps().get( AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG); assertThat(result, equalTo(3)); }
@Override public Object pageListService(String namespaceId, String groupName, String serviceName, int pageNo, int pageSize, String instancePattern, boolean ignoreEmptyService) throws NacosException { ObjectNode result = JacksonUtils.createEmptyJsonNode(); List<ServiceView> serviceViews = new LinkedList<>(); Collection<Service> services = patternServices(namespaceId, groupName, serviceName); if (ignoreEmptyService) { services = services.stream().filter(each -> 0 != serviceStorage.getData(each).ipCount()) .collect(Collectors.toList()); } result.put(FieldsConstants.COUNT, services.size()); services = doPage(services, pageNo - 1, pageSize); for (Service each : services) { ServiceMetadata serviceMetadata = metadataManager.getServiceMetadata(each).orElseGet(ServiceMetadata::new); ServiceView serviceView = new ServiceView(); serviceView.setName(each.getName()); serviceView.setGroupName(each.getGroup()); serviceView.setClusterCount(serviceStorage.getClusters(each).size()); serviceView.setIpCount(serviceStorage.getData(each).ipCount()); serviceView.setHealthyInstanceCount(countHealthyInstance(serviceStorage.getData(each))); serviceView.setTriggerFlag(isProtectThreshold(serviceView, serviceMetadata) ? "true" : "false"); serviceViews.add(serviceView); } result.set(FieldsConstants.SERVICE_LIST, JacksonUtils.transferToJsonNode(serviceViews)); return result; }
@Test void testPageListService() throws NacosException { ServiceInfo serviceInfo = new ServiceInfo(); serviceInfo.setHosts(Collections.singletonList(new Instance())); Mockito.when(serviceStorage.getData(Mockito.any())).thenReturn(serviceInfo); ServiceMetadata metadata = new ServiceMetadata(); metadata.setProtectThreshold(0.75F); Mockito.when(metadataManager.getServiceMetadata(Mockito.any())).thenReturn(Optional.of(metadata)); ObjectNode obj = (ObjectNode) catalogServiceV2Impl.pageListService("A", "B", "C", 1, 10, null, false); assertEquals(1, obj.get(FieldsConstants.COUNT).asInt()); }
public void close(final boolean closeQueries) { primaryContext.getQueryRegistry().close(closeQueries); try { cleanupService.stopAsync().awaitTerminated( this.primaryContext.getKsqlConfig() .getLong(KsqlConfig.KSQL_QUERY_CLEANUP_SHUTDOWN_TIMEOUT_MS), TimeUnit.MILLISECONDS); } catch (final TimeoutException e) { log.warn("Timed out while closing cleanup service. " + "External resources for the following applications may be orphaned: {}", cleanupService.pendingApplicationIds() ); } engineMetrics.close(); aggregateMetricsCollector.shutdown(); }
@Test public void shouldHardDeleteSchemaOnEngineCloseForTransientQueries() throws IOException, RestClientException { // Given: setupKsqlEngineWithSharedRuntimeDisabled(); final QueryMetadata query = KsqlEngineTestUtil.executeQuery( serviceContext, ksqlEngine, "select * from test1 EMIT CHANGES;", ksqlConfig, Collections.emptyMap() ); final String internalTopic1Val = KsqlConstants.getSRSubject( query.getQueryApplicationId() + "-subject1" + KsqlConstants.STREAMS_CHANGELOG_TOPIC_SUFFIX, false); final String internalTopic2Val = KsqlConstants.getSRSubject( query.getQueryApplicationId() + "-subject3" + KsqlConstants.STREAMS_REPARTITION_TOPIC_SUFFIX, false); final String internalTopic1Key = KsqlConstants.getSRSubject( query.getQueryApplicationId() + "-subject1" + KsqlConstants.STREAMS_CHANGELOG_TOPIC_SUFFIX, true); final String internalTopic2Key = KsqlConstants.getSRSubject( query.getQueryApplicationId() + "-subject3" + KsqlConstants.STREAMS_REPARTITION_TOPIC_SUFFIX, true); when(schemaRegistryClient.getAllSubjects()).thenReturn( Arrays.asList( internalTopic1Val, internalTopic1Key, "subject2", internalTopic2Val, internalTopic2Key)); query.start(); // When: query.close(); // Then: awaitCleanupComplete(); verify(schemaRegistryClient, times(4)).deleteSubject(any()); verify(schemaRegistryClient).deleteSubject(internalTopic1Val, true); verify(schemaRegistryClient).deleteSubject(internalTopic2Val, true); verify(schemaRegistryClient).deleteSubject(internalTopic1Key, true); verify(schemaRegistryClient).deleteSubject(internalTopic2Key, true); verify(schemaRegistryClient, never()).deleteSubject("subject2"); }
@Override public CompletableFuture<Void> asyncAddTaskExecutionLogs(List<TaskExecLog> logs) { return CompletableFuture.runAsync(() -> addTaskExecutionLogs(logs)); }
@Test public void asyncAddTaskExecutionLogsTest() throws Exception { List<TaskExecLog> logs = new ArrayList<>(); logs.add(createLog(TEST_TASK_ID_2, "log1")); logs.add(createLog(TEST_TASK_ID_2, "log2")); logs.add(createLog(TEST_TASK_ID_2, "log3")); dao.asyncAddTaskExecutionLogs(logs).get(); List<TaskExecLog> indexedLogs = tryFindResults(() -> dao.getTaskExecutionLogs(TEST_TASK_ID_2), 3); assertEquals(3, indexedLogs.size()); assertTrue("Not all logs were indexed", indexedLogs.containsAll(logs)); }
@Override public SmsSendRespDTO sendSms(Long sendLogId, String mobile, String apiTemplateId, List<KeyValue<String, Object>> templateParams) throws Throwable { // 1. Execute the request // Reference: https://cloud.tencent.com/document/product/382/55981 TreeMap<String, Object> body = new TreeMap<>(); body.put("PhoneNumberSet", new String[]{mobile}); body.put("SmsSdkAppId", getSdkAppId()); body.put("SignName", properties.getSignature()); body.put("TemplateId", apiTemplateId); body.put("TemplateParamSet", ArrayUtils.toArray(templateParams, param -> String.valueOf(param.getValue()))); JSONObject response = request("SendSms", body); // 2. Parse the response JSONObject responseResult = response.getJSONObject("Response"); JSONObject error = responseResult.getJSONObject("Error"); if (error != null) { return new SmsSendRespDTO().setSuccess(false) .setApiRequestId(responseResult.getStr("RequestId")) .setApiCode(error.getStr("Code")) .setApiMsg(error.getStr("Message")); } JSONObject responseData = responseResult.getJSONArray("SendStatusSet").getJSONObject(0); return new SmsSendRespDTO().setSuccess(Objects.equals(API_CODE_SUCCESS, responseData.getStr("Code"))) .setApiRequestId(responseResult.getStr("RequestId")) .setSerialNo(responseData.getStr("SerialNo")) .setApiMsg(responseData.getStr("Message")); }
@Test public void testDoSendSms_fail() throws Throwable { try (MockedStatic<HttpUtils> httpUtilsMockedStatic = mockStatic(HttpUtils.class)) { // Prepare parameters Long sendLogId = randomLongId(); String mobile = randomString(); String apiTemplateId = randomString(); List<KeyValue<String, Object>> templateParams = Lists.newArrayList( new KeyValue<>("1", 1234), new KeyValue<>("2", "login")); // Mock the method httpUtilsMockedStatic.when(() -> HttpUtils.post(anyString(), anyMap(), anyString())) .thenReturn("{\n" + " \"Response\": {\n" + " \"SendStatusSet\": [\n" + " {\n" + " \"SerialNo\": \"5000:1045710669157053657849499619\",\n" + " \"PhoneNumber\": \"+8618511122233\",\n" + " \"Fee\": 1,\n" + " \"SessionContext\": \"test\",\n" + " \"Code\": \"ERROR\",\n" + " \"Message\": \"send success\",\n" + " \"IsoCode\": \"CN\"\n" + " },\n" + " ],\n" + " \"RequestId\": \"a0aabda6-cf91-4f3e-a81f-9198114a2279\"\n" + " }\n" + "}"); // Invoke SmsSendRespDTO result = smsClient.sendSms(sendLogId, mobile, apiTemplateId, templateParams); // Assert assertFalse(result.getSuccess()); assertEquals("5000:1045710669157053657849499619", result.getSerialNo()); assertEquals("a0aabda6-cf91-4f3e-a81f-9198114a2279", result.getApiRequestId()); assertEquals("send success", result.getApiMsg()); } }
@ConstantFunction(name = "bitShiftRight", argTypes = {TINYINT, BIGINT}, returnType = TINYINT) public static ConstantOperator bitShiftRightTinyInt(ConstantOperator first, ConstantOperator second) { return ConstantOperator.createTinyInt((byte) (first.getTinyInt() >> second.getBigint())); }
@Test public void bitShiftRightTinyInt() { assertEquals(1, ScalarOperatorFunctions.bitShiftRightTinyInt(O_TI_10, O_BI_3).getTinyInt()); }
public static LinkDescription combine(BasicLinkConfig cfg, LinkDescription descr) { if (cfg == null) { return descr; } Link.Type type = descr.type(); if (cfg.isTypeConfigured()) { if (cfg.type() != type) { type = cfg.type(); } } SparseAnnotations sa = combine(cfg, descr.annotations()); return new DefaultLinkDescription(descr.src(), descr.dst(), type, sa); }
@Test public void testDescOps() { LinkDescription desc = BasicLinkOperator.combine(BLC, LD); assertEquals(String.valueOf(NTIME), desc.annotations().value(AnnotationKeys.LATENCY)); assertEquals("true", desc.annotations().value(AnnotationKeys.DURABLE)); }
public List<String> split(String in) { final StringBuilder result = new StringBuilder(); final char[] chars = in.toCharArray(); for (int i = 0; i < chars.length; i++) { final char c = chars[i]; if (CHAR_OPERATORS.contains(String.valueOf(c))) { if (i < chars.length - 2 && CHAR_OPERATORS.contains(String.valueOf(chars[i + 1])) && !("(".equals(String.valueOf(chars[i + 1])) || ")".equals(String.valueOf(chars[i + 1])))) { result.append(" ").append(c).append(chars[i + 1]).append(" "); i++; } else { result.append(" ").append(c).append(" "); } } else { result.append(c); } } final String[] tokens = result.toString().split(SPLIT_EXPRESSION); final List<String> list = new ArrayList<>(); for (int i = 0; i < tokens.length; i++) { tokens[i] = tokens[i].trim(); if (!tokens[i].equals("")) { list.add(tokens[i]); } } return list; }
@Test public void split1() { List<String> tokens = parser.split("a and b"); assertEquals(Arrays.asList("a", "and", "b"), tokens); }
@Override protected int command() { if (!validateConfigFilePresent()) { return 1; } final MigrationConfig config; try { config = MigrationConfig.load(getConfigFile()); } catch (KsqlException | MigrationException e) { LOGGER.error(e.getMessage()); return 1; } return command( config, MigrationsUtil::getKsqlClient, getMigrationsDir(getConfigFile(), config), Clock.systemDefaultZone() ); }
@Test public void shouldResetPropertiesBetweenMigrations() throws Exception { // Given: command = PARSER.parse("-a"); createMigrationFile(1, NAME, migrationsDir, "SET 'cat'='pat';"); createMigrationFile(2, NAME, migrationsDir, COMMAND); when(versionQueryResult.get()).thenReturn(ImmutableList.of()); givenAppliedMigration(1, NAME, MigrationState.MIGRATED); // When: final int result = command.command(config, (cfg, headers) -> ksqlClient, migrationsDir, Clock.fixed( Instant.ofEpochMilli(1000), ZoneId.systemDefault())); // Then: assertThat(result, is(0)); final InOrder inOrder = inOrder(ksqlClient); inOrder.verify(ksqlClient).executeStatement(COMMAND, ImmutableMap.of()); inOrder.verify(ksqlClient).close(); inOrder.verifyNoMoreInteractions(); }
static void validateConnectors(KafkaMirrorMaker2 kafkaMirrorMaker2) { if (kafkaMirrorMaker2.getSpec() == null) { throw new InvalidResourceException(".spec section is required for KafkaMirrorMaker2 resource"); } else { if (kafkaMirrorMaker2.getSpec().getClusters() == null || kafkaMirrorMaker2.getSpec().getMirrors() == null) { throw new InvalidResourceException(".spec.clusters and .spec.mirrors sections are required in KafkaMirrorMaker2 resource"); } else { Set<String> existingClusterAliases = kafkaMirrorMaker2.getSpec().getClusters().stream().map(KafkaMirrorMaker2ClusterSpec::getAlias).collect(Collectors.toSet()); Set<String> errorMessages = new HashSet<>(); String connectCluster = kafkaMirrorMaker2.getSpec().getConnectCluster(); for (KafkaMirrorMaker2MirrorSpec mirror : kafkaMirrorMaker2.getSpec().getMirrors()) { if (mirror.getSourceCluster() == null) { errorMessages.add("Each MirrorMaker 2 mirror definition has to specify the source cluster alias"); } else if (!existingClusterAliases.contains(mirror.getSourceCluster())) { errorMessages.add("Source cluster alias " + mirror.getSourceCluster() + " is used in a mirror definition, but cluster with this alias does not exist in cluster definitions"); } if (mirror.getTargetCluster() == null) { errorMessages.add("Each MirrorMaker 2 mirror definition has to specify the target cluster alias"); } else if (!existingClusterAliases.contains(mirror.getTargetCluster())) { errorMessages.add("Target cluster alias " + mirror.getTargetCluster() + " is used in a mirror definition, but cluster with this alias does not exist in cluster definitions"); } if (!mirror.getTargetCluster().equals(connectCluster)) { errorMessages.add("Connect cluster alias (currently set to " + connectCluster + ") has to be the same as the target cluster alias " + mirror.getTargetCluster()); } } if (!errorMessages.isEmpty()) { throw new InvalidResourceException("KafkaMirrorMaker2 resource validation failed: " + errorMessages); } } } }
@Test public void testFailingValidation() { // Missing spec KafkaMirrorMaker2 kmm2WithoutSpec = new KafkaMirrorMaker2Builder(KMM2) .withSpec(null) .build(); InvalidResourceException ex = assertThrows(InvalidResourceException.class, () -> KafkaMirrorMaker2Connectors.validateConnectors(kmm2WithoutSpec)); assertThat(ex.getMessage(), is(".spec section is required for KafkaMirrorMaker2 resource")); // Missing clusters KafkaMirrorMaker2 kmm2WithoutClusters = new KafkaMirrorMaker2Builder(KMM2) .withNewSpec() .withMirrors(List.of()) .endSpec() .build(); ex = assertThrows(InvalidResourceException.class, () -> KafkaMirrorMaker2Connectors.validateConnectors(kmm2WithoutClusters)); assertThat(ex.getMessage(), is(".spec.clusters and .spec.mirrors sections are required in KafkaMirrorMaker2 resource")); // Missing mirrors KafkaMirrorMaker2 kmm2WithoutMirrors = new KafkaMirrorMaker2Builder(KMM2) .withNewSpec() .withClusters(List.of()) .endSpec() .build(); ex = assertThrows(InvalidResourceException.class, () -> KafkaMirrorMaker2Connectors.validateConnectors(kmm2WithoutMirrors)); assertThat(ex.getMessage(), is(".spec.clusters and .spec.mirrors sections are required in KafkaMirrorMaker2 resource")); // Missing alias KafkaMirrorMaker2 kmm2WrongAlias = new KafkaMirrorMaker2Builder(KMM2) .editSpec() .editMirror(0) .withSourceCluster(null) .withTargetCluster("wrong-target") .endMirror() .endSpec() .build(); ex = assertThrows(InvalidResourceException.class, () -> KafkaMirrorMaker2Connectors.validateConnectors(kmm2WrongAlias)); assertThat(ex.getMessage(), is("KafkaMirrorMaker2 resource validation failed: " + "[Each MirrorMaker 2 mirror definition has to specify the source cluster alias, " + "Target cluster alias wrong-target is used in a mirror definition, but cluster with this alias does not exist in cluster definitions, " + "Connect cluster alias (currently set to target) has to be the same as the target cluster alias wrong-target]")); }
@Override public ObjectNode encode(Criterion criterion, CodecContext context) { EncodeCriterionCodecHelper encoder = new EncodeCriterionCodecHelper(criterion, context); return encoder.encode(); }
@Test public void matchIPDscpTest() { Criterion criterion = Criteria.matchIPDscp((byte) 63); ObjectNode result = criterionCodec.encode(criterion, context); assertThat(result, matchesCriterion(criterion)); }
@Override public void validatePostList(Collection<Long> ids) { if (CollUtil.isEmpty(ids)) { return; } // Get the post information List<PostDO> posts = postMapper.selectBatchIds(ids); Map<Long, PostDO> postMap = convertMap(posts, PostDO::getId); // Validate ids.forEach(id -> { PostDO post = postMap.get(id); if (post == null) { throw exception(POST_NOT_FOUND); } if (!CommonStatusEnum.ENABLE.getStatus().equals(post.getStatus())) { throw exception(POST_NOT_ENABLE, post.getName()); } }); }
@Test public void testValidatePostList_success() { // Mock data PostDO postDO = randomPostDO().setStatus(CommonStatusEnum.ENABLE.getStatus()); postMapper.insert(postDO); // Prepare parameters List<Long> ids = singletonList(postDO.getId()); // Invoke; no assertion needed postService.validatePostList(ids); }
@Override public Throwable getException() { return exception; }
@Test void testAppResponseWithNormalException() { NullPointerException npe = new NullPointerException(); AppResponse appResponse = new AppResponse(npe); StackTraceElement[] stackTrace = appResponse.getException().getStackTrace(); Assertions.assertNotNull(stackTrace); Assertions.assertTrue(stackTrace.length > 1); }
@Override public Result invoke(Invoker<?> invoker, Invocation invocation) throws RpcException { String key = invoker.getInterface().getName() + "." + RpcUtils.getMethodName(invocation); if (!LOGGED.contains(key)) { LOGGED.add(key); if (invoker.getUrl().getMethodParameter(RpcUtils.getMethodName(invocation), DEPRECATED_KEY, false)) { LOGGER.error( COMMON_UNSUPPORTED_INVOKER, "", "", "The service method " + invoker.getInterface().getName() + "." + getMethodSignature(invocation) + " is DEPRECATED! Declare from " + invoker.getUrl()); } } return invoker.invoke(invocation); }
@Test void testDeprecatedFilter() { URL url = URL.valueOf("test://test:11/test?group=dubbo&version=1.1&echo." + DEPRECATED_KEY + "=true"); LogUtil.start(); deprecatedFilter.invoke(new MyInvoker<DemoService>(url), new MockInvocation()); assertEquals( 1, LogUtil.findMessage( "The service method org.apache.dubbo.rpc.support.DemoService.echo(String) is DEPRECATED")); LogUtil.stop(); }
@Override public String lock(final Path file) throws BackgroundException { try { return session.getClient().lock(new DAVPathEncoder().encode(file)); } catch(SardineException e) { throw new DAVExceptionMappingService().map("Failure to write attributes of {0}", e, file); } catch(IOException e) { throw new DefaultIOExceptionMappingService().map(e, file); } }
@Test public void testLockNotSupported() throws Exception { final TransferStatus status = new TransferStatus(); final Local local = new Local(System.getProperty("java.io.tmpdir"), new AlphanumericRandomStringService().random()); final byte[] content = "test".getBytes(StandardCharsets.UTF_8); final OutputStream out = local.getOutputStream(false); IOUtils.write(content, out); out.close(); status.setLength(content.length); final Path test = new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); final HttpUploadFeature upload = new DAVUploadFeature(session); upload.upload(test, local, new BandwidthThrottle(BandwidthThrottle.UNLIMITED), new DisabledStreamListener(), status, new DisabledConnectionCallback()); String lock = null; try { lock = new DAVLockFeature(session).lock(test); } catch(InteroperabilityException e) { // Expected } local.delete(); new DAVDeleteFeature(session).delete(Collections.singletonMap(test, new TransferStatus().withLockId(lock)), new DisabledPasswordCallback(), new Delete.DisabledCallback()); }
public static boolean isMobile(CharSequence value) { return isMatchRegex(MOBILE, value); }
@Test public void isMobileTest() { final boolean m1 = Validator.isMobile("13900221432"); assertTrue(m1); final boolean m2 = Validator.isMobile("015100221432"); assertTrue(m2); final boolean m3 = Validator.isMobile("+8618600221432"); assertTrue(m3); final boolean m4 = Validator.isMobile("19312341234"); assertTrue(m4); }
public static KiePMMLExtension getKiePMMLExtension(Extension extension) { return new KiePMMLExtension(extension.getExtender(), extension.getName(), extension.getValue(), extension.getContent()); }
@Test void getKiePMMLExtension() { // TODO {@gcardosi} }
public GoConfigHolder loadConfigHolder(final String content, Callback callback) throws Exception { CruiseConfig configForEdit; CruiseConfig config; LOGGER.debug("[Config Save] Loading config holder"); configForEdit = deserializeConfig(content); if (callback != null) callback.call(configForEdit); config = preprocessAndValidate(configForEdit); return new GoConfigHolder(config, configForEdit); }
@Test void shouldNotAllowParamsToBeUsedInNames() { String content = (""" <cruise schemaVersion='%d'> <server /> <pipelines> <pipeline name='dev'> <params> <param name='command'>ls</param> </params> <materials> <svn url ="svnurl"/> </materials> <stage name='stage#{command}ab'> <jobs> <job name='job1'> <tasks> <exec command='/bin/#{command}##{b}' args='#{dir}'/> </tasks> </job> </jobs> </stage> </pipeline> </pipelines> </cruise>""").formatted(CONFIG_SCHEMA_VERSION); assertThatThrownBy(() -> xmlLoader.loadConfigHolder(content)) .as("Should not allow params in stage name") .hasMessageContaining("\"stage#{command}ab\" should conform to the pattern - [a-zA-Z0-9_\\-]{1}[a-zA-Z0-9_\\-.]*"); }
@Override public int run(String[] args) throws Exception { try { webServiceClient = WebServiceClient.getWebServiceClient().createClient(); return runCommand(args); } finally { if (yarnClient != null) { yarnClient.close(); } if (webServiceClient != null) { webServiceClient.destroy(); } } }
@Test(timeout = 5000L) public void testUnknownApplicationAttemptId() throws Exception { YarnClient mockYarnClient = createMockYarnClientUnknownApp(); LogsCLI cli = new LogsCLIForTest(mockYarnClient); cli.setConf(conf); ApplicationId appId = ApplicationId.newInstance(0, 1); int exitCode = cli.run(new String[] {"-applicationAttemptId", ApplicationAttemptId.newInstance(appId, 1).toString() }); // Error since no logs present for the app. assertTrue(exitCode != 0); assertTrue(sysErrStream.toString().contains( "Unable to get ApplicationState.")); }
public DeleteGranularity deleteGranularity() { String valueAsString = confParser .stringConf() .option(SparkWriteOptions.DELETE_GRANULARITY) .tableProperty(TableProperties.DELETE_GRANULARITY) .defaultValue(TableProperties.DELETE_GRANULARITY_DEFAULT) .parse(); return DeleteGranularity.fromString(valueAsString); }
@Test public void testDeleteGranularityTableProperty() { Table table = validationCatalog.loadTable(tableIdent); table .updateProperties() .set(TableProperties.DELETE_GRANULARITY, DeleteGranularity.FILE.toString()) .commit(); SparkWriteConf writeConf = new SparkWriteConf(spark, table, ImmutableMap.of()); DeleteGranularity value = writeConf.deleteGranularity(); assertThat(value).isEqualTo(DeleteGranularity.FILE); }
public static InRawPredicateEvaluator newRawValueBasedEvaluator(InPredicate inPredicate, DataType dataType) { switch (dataType) { case INT: { int[] intValues = inPredicate.getIntValues(); IntSet matchingValues = new IntOpenHashSet(HashUtil.getMinHashSetSize(intValues.length)); for (int value : intValues) { matchingValues.add(value); } return new IntRawValueBasedInPredicateEvaluator(inPredicate, matchingValues); } case LONG: { long[] longValues = inPredicate.getLongValues(); LongSet matchingValues = new LongOpenHashSet(HashUtil.getMinHashSetSize(longValues.length)); for (long value : longValues) { matchingValues.add(value); } return new LongRawValueBasedInPredicateEvaluator(inPredicate, matchingValues); } case FLOAT: { float[] floatValues = inPredicate.getFloatValues(); FloatSet matchingValues = new FloatOpenHashSet(HashUtil.getMinHashSetSize(floatValues.length)); for (float value : floatValues) { matchingValues.add(value); } return new FloatRawValueBasedInPredicateEvaluator(inPredicate, matchingValues); } case DOUBLE: { double[] doubleValues = inPredicate.getDoubleValues(); DoubleSet matchingValues = new DoubleOpenHashSet(HashUtil.getMinHashSetSize(doubleValues.length)); for (double value : doubleValues) { matchingValues.add(value); } return new DoubleRawValueBasedInPredicateEvaluator(inPredicate, matchingValues); } case BIG_DECIMAL: { BigDecimal[] bigDecimalValues = inPredicate.getBigDecimalValues(); // NOTE: Use TreeSet because BigDecimal's compareTo() is not consistent with equals() // E.g. compareTo(3.0, 3) returns 0 but equals(3.0, 3) returns false TreeSet<BigDecimal> matchingValues = new TreeSet<>(Arrays.asList(bigDecimalValues)); return new BigDecimalRawValueBasedInPredicateEvaluator(inPredicate, matchingValues); } case BOOLEAN: { int[] booleanValues = inPredicate.getBooleanValues(); IntSet matchingValues = new IntOpenHashSet(HashUtil.getMinHashSetSize(booleanValues.length)); for (int value : booleanValues) { matchingValues.add(value); } return new IntRawValueBasedInPredicateEvaluator(inPredicate, matchingValues); } case TIMESTAMP: { long[] timestampValues = inPredicate.getTimestampValues(); LongSet matchingValues = new LongOpenHashSet(HashUtil.getMinHashSetSize(timestampValues.length)); for (long value : timestampValues) { matchingValues.add(value); } return new LongRawValueBasedInPredicateEvaluator(inPredicate, matchingValues); } case STRING: case JSON: { List<String> stringValues = inPredicate.getValues(); Set<String> matchingValues = new ObjectOpenHashSet<>(HashUtil.getMinHashSetSize(stringValues.size())); // NOTE: Add value-by-value to avoid overhead for (String value : stringValues) { //noinspection UseBulkOperation matchingValues.add(value); } return new StringRawValueBasedInPredicateEvaluator(inPredicate, matchingValues); } case BYTES: { ByteArray[] bytesValues = inPredicate.getBytesValues(); Set<ByteArray> matchingValues = new ObjectOpenHashSet<>(HashUtil.getMinHashSetSize(bytesValues.length)); // NOTE: Add value-by-value to avoid overhead //noinspection ManualArrayToCollectionCopy for (ByteArray value : bytesValues) { //noinspection UseBulkOperation matchingValues.add(value); } return new BytesRawValueBasedInPredicateEvaluator(inPredicate, matchingValues); } default: throw new IllegalStateException("Unsupported data type: " + dataType); } }
@Test void canBeVisited() { // Given a visitor MultiValueVisitor<Integer> valueLengthVisitor = Mockito.spy(createValueLengthVisitor()); // When int predicate is used InPredicate predicate = new InPredicate(ExpressionContext.forIdentifier("ident"), Lists.newArrayList("1", "2")); InPredicateEvaluatorFactory.InRawPredicateEvaluator intEvaluator = InPredicateEvaluatorFactory.newRawValueBasedEvaluator(predicate, FieldSpec.DataType.INT); // Only the int[] method is called int length = intEvaluator.accept(valueLengthVisitor); Assert.assertEquals(length, 2); Mockito.verify(valueLengthVisitor).visitInt(new int[] {2, 1}); Mockito.verifyNoMoreInteractions(valueLengthVisitor); // And given a string predicate InPredicateEvaluatorFactory.InRawPredicateEvaluator strEvaluator = InPredicateEvaluatorFactory.newRawValueBasedEvaluator(predicate, FieldSpec.DataType.STRING); // Only the string[] method is called length = strEvaluator.accept(valueLengthVisitor); Assert.assertEquals(length, 2); Mockito.verify(valueLengthVisitor).visitString(new String[] {"2", "1"}); Mockito.verifyNoMoreInteractions(valueLengthVisitor); }
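A minimal, dependency-free sketch (plain JDK; the class name is illustrative) of why the BIG_DECIMAL branch above uses a TreeSet rather than a hash set: BigDecimal.compareTo() treats 3.0 and 3 as equal while equals() does not, so a hash-based lookup would miss numerically equal values that differ only in scale.

import java.math.BigDecimal;
import java.util.HashSet;
import java.util.Set;
import java.util.TreeSet;

public class BigDecimalSetDemo {
    public static void main(String[] args) {
        BigDecimal a = new BigDecimal("3.0");
        BigDecimal b = new BigDecimal("3");
        System.out.println(a.compareTo(b)); // 0: numerically equal
        System.out.println(a.equals(b));    // false: different scale
        Set<BigDecimal> hashSet = new HashSet<>();
        hashSet.add(a);
        System.out.println(hashSet.contains(b)); // false - hash lookup uses equals()/hashCode()
        Set<BigDecimal> treeSet = new TreeSet<>();
        treeSet.add(a);
        System.out.println(treeSet.contains(b)); // true - tree lookup uses compareTo()
    }
}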
public int doWork(final long nowMs) { return delegateResolver.doWork(nowMs); }
@Test void doWorkShouldCallActualMethod() { final NameResolver delegateResolver = mock(NameResolver.class); final NanoClock clock = mock(NanoClock.class); final DutyCycleTracker maxTime = mock(DutyCycleTracker.class); final TimeTrackingNameResolver resolver = new TimeTrackingNameResolver(delegateResolver, clock, maxTime); final long nowMs = 1111; resolver.doWork(nowMs); verify(delegateResolver).doWork(nowMs); verifyNoMoreInteractions(delegateResolver); verifyNoInteractions(clock, maxTime); }
@Override protected ServerSocketFactory getServerSocketFactory() throws Exception { if (socketFactory == null) { SSLContext sslContext = getSsl().createContext(this); SSLParametersConfiguration parameters = getSsl().getParameters(); parameters.setContext(getContext()); socketFactory = new ConfigurableSSLServerSocketFactory( parameters, sslContext.getServerSocketFactory()); } return socketFactory; }
@Test public void testGetServerSocketFactory() throws Exception { ServerSocketFactory socketFactory = receiver.getServerSocketFactory(); assertNotNull(socketFactory); assertTrue(ssl.isContextCreated()); assertTrue(parameters.isContextInjected()); }
@Override public QuoteCharacter getQuoteCharacter() { return QuoteCharacter.QUOTE; }
@Test void assertGetQuoteCharacter() { assertThat(dialectDatabaseMetaData.getQuoteCharacter(), is(QuoteCharacter.QUOTE)); }
public static void getSemanticPropsSingleFromString( SingleInputSemanticProperties result, String[] forwarded, String[] nonForwarded, String[] readSet, TypeInformation<?> inType, TypeInformation<?> outType) { getSemanticPropsSingleFromString( result, forwarded, nonForwarded, readSet, inType, outType, false); }
@Test void testNonForwardedInvalidTypes5() { String[] nonForwardedFields = {"int1"}; SingleInputSemanticProperties sp = new SingleInputSemanticProperties(); assertThatThrownBy( () -> SemanticPropUtil.getSemanticPropsSingleFromString( sp, null, nonForwardedFields, null, pojoType, pojo2Type)) .isInstanceOf(InvalidSemanticAnnotationException.class); }
public static Object applyLogicalType(Schema.Field field, Object value) { if (field == null || field.schema() == null) { return value; } Schema fieldSchema = resolveUnionSchema(field.schema()); return applySchemaTypeLogic(fieldSchema, value); }
@Test public void testApplyLogicalTypeReturnsSameValueWhenFieldIsNull() { String value = "d7738003-1472-4f63-b0f1-b5e69c8b93e9"; Object result = AvroSchemaUtil.applyLogicalType(null, value); Assert.assertTrue(result instanceof String); Assert.assertSame(value, result); }
@Override public void validate(Context context) { if (!context.deployState().isHosted() || context.deployState().zone().system().isPublic()) return; if (context.deployState().getProperties().allowDisableMtls()) return; context.model().getContainerClusters().forEach((id, cluster) -> { Http http = cluster.getHttp(); if (http != null) { if (http.getAccessControl().isPresent()) { verifyNoExclusions(id, http.getAccessControl().get(), context); } } }); }
@Test public void validator_warns_excludes_in_cloud() throws IOException, SAXException { StringBuffer logOutput = new StringBuffer(); DeployState deployState = createDeployState(zone(CloudName.YAHOO, SystemName.main), logOutput, false); VespaModel model = new VespaModel( MapConfigModelRegistry.createFromList(new ModelBuilderAddingAccessControlFilter()), deployState); ValidationTester.validate(new AccessControlFilterExcludeValidator(), model, deployState); assertTrue(logOutput.toString().contains("Application cluster container-cluster-with-access-control excludes paths from access control, this is not allowed and should be removed.")); }
@Override public Optional<String> resolveQueryFailure(QueryStats controlQueryStats, QueryException queryException, Optional<QueryObjectBundle> test) { return mapMatchingPrestoException(queryException, CONTROL_CHECKSUM, ImmutableSet.of(COMPILER_ERROR, GENERATED_BYTECODE_TOO_LARGE), e -> Optional.of("Checksum query too large")); }
@Test public void testResolveGeneratedBytecodeTooLarge() { assertEquals( getFailureResolver().resolveQueryFailure( CONTROL_QUERY_STATS, new PrestoQueryException( new RuntimeException(), false, CONTROL_CHECKSUM, Optional.of(GENERATED_BYTECODE_TOO_LARGE), EMPTY_STATS), Optional.empty()), Optional.of("Checksum query too large")); }
@Override public Object getKey() { return serializationService.toObject(key); }
@Test public void getKey_caching() { QueryableEntry entry = createEntry("key", "value"); assertThat(entry.getKey()).isNotSameAs(entry.getKey()); }
@Override public MapTileArea computeFromSource(final MapTileArea pSource, final MapTileArea pReuse) { final MapTileArea out = pReuse != null ? pReuse : new MapTileArea(); if (pSource.size() == 0) { out.reset(); return out; } final int sourceZoom = pSource.getZoom(); int destZoom = sourceZoom + mZoomDelta; if (destZoom < 0 || destZoom > MapTileIndex.mMaxZoomLevel) { out.reset(); return out; } if (mZoomDelta <= 0) { out.set(destZoom, pSource.getLeft() >> -mZoomDelta, pSource.getTop() >> -mZoomDelta, pSource.getRight() >> -mZoomDelta, pSource.getBottom() >> -mZoomDelta); return out; } out.set(destZoom, pSource.getLeft() << mZoomDelta, pSource.getTop() << mZoomDelta, ((1 + pSource.getRight()) << mZoomDelta) - 1, ((1 + pSource.getBottom()) << mZoomDelta) - 1 ); return out; }
@Test public void testWorld() { final MapTileArea src = new MapTileArea(); final MapTileArea dst = new MapTileArea(); long size; int mapTileUpperBound; for (int zoom = 0; zoom <= TileSystem.getMaximumZoomLevel(); zoom++) { mapTileUpperBound = getMapTileUpperBound(zoom); size = ((long) mapTileUpperBound) * mapTileUpperBound; if (size >= Integer.MAX_VALUE) { return; } for (int i = 0; i <= 1; i++) { final Rect rect = new Rect(); switch (i) { case 0: // the world rect.left = 0; // remember: don't use the rect.set() syntax for unit test (cf. ProjectionTest) rect.top = 0; rect.right = mapTileUpperBound - 1; rect.bottom = mapTileUpperBound - 1; break; case 1: // top-left quarter of the world if (zoom == 0) { // top-left quarter makes no sense in zoom 0 continue; } rect.left = 0; rect.top = 0; rect.right = mapTileUpperBound / 2 - 1; rect.bottom = mapTileUpperBound / 2 - 1; break; } src.set(zoom, rect); final long srcSize = src.size(); for (int zoomDelta = 0; zoomDelta <= TileSystem.getMaximumZoomLevel(); zoomDelta++) { final int newZoom = zoom + zoomDelta; if (newZoom < 0 || newZoom > TileSystem.getMaximumZoomLevel()) { continue; } mapTileUpperBound = getMapTileUpperBound(newZoom); size = ((long) mapTileUpperBound) * mapTileUpperBound; if (size >= Integer.MAX_VALUE) { return; } final MapTileAreaZoomComputer computer = new MapTileAreaZoomComputer(zoomDelta); computer.computeFromSource(src, dst); final long dstSize = dst.size(); final String message = "zoom=" + zoom + ", delta=" + zoomDelta; if (zoomDelta == 0) { Assert.assertEquals(message, srcSize, dstSize); } else if (zoomDelta < 0) { Assert.assertEquals(message, srcSize * (1 >> -zoomDelta) * (1 >> -zoomDelta), dstSize); } else { Assert.assertEquals(message, srcSize * (1 << zoomDelta) * (1 << zoomDelta), dstSize); } } } } }
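A small standalone sketch of the shift arithmetic computeFromSource applies when zooming in (mZoomDelta > 0): every tile splits into 2^delta by 2^delta tiles, so the area's size grows by (1 << delta) squared, which is exactly the factor the test asserts. The class name is illustrative.

public class ZoomShiftDemo {
    public static void main(String[] args) {
        // Source area at zoom 3: tiles x in [2..3], y in [2..3] (a 2x2 block).
        int left = 2, top = 2, right = 3, bottom = 3;
        int delta = 2; // zoom in by two levels
        int dLeft = left << delta;                 // 8
        int dTop = top << delta;                   // 8
        int dRight = ((1 + right) << delta) - 1;   // 15
        int dBottom = ((1 + bottom) << delta) - 1; // 15
        // The 2x2 block becomes an 8x8 block: size scales by (1 << delta)^2.
        System.out.println((dRight - dLeft + 1) * (dBottom - dTop + 1)); // 64 = 4 * 16
    }
}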
@VisibleForTesting public Account updateLastSeen(Account account, Device device) { // compute a non-negative integer between 0 and 86400. long n = Util.ensureNonNegativeLong(account.getUuid().getLeastSignificantBits()); final long lastSeenOffsetSeconds = n % ChronoUnit.DAYS.getDuration().toSeconds(); // produce a truncated timestamp which is either today at UTC midnight // or yesterday at UTC midnight, based on per-user randomized offset used. final long todayInMillisWithOffset = Util.todayInMillisGivenOffsetFromNow(clock, Duration.ofSeconds(lastSeenOffsetSeconds).negated()); // only update the device's last seen time when it falls behind the truncated timestamp. // this ensures a few things: // (1) each account will only update last-seen at most once per day // (2) these updates will occur throughout the day rather than all occurring at UTC midnight. if (device.getLastSeen() < todayInMillisWithOffset) { Metrics.summary(DAYS_SINCE_LAST_SEEN_DISTRIBUTION_NAME, IS_PRIMARY_DEVICE_TAG, String.valueOf(device.isPrimary())) .record(Duration.ofMillis(todayInMillisWithOffset - device.getLastSeen()).toDays()); return accountsManager.updateDeviceLastSeen(account, device, Util.todayInMillis(clock)); } return account; }
@Test void testNeverWriteYesterday() { clock.pin(Instant.ofEpochMilli(today)); final Device device = oldAccount.getDevices().stream().findFirst().get(); accountAuthenticator.updateLastSeen(oldAccount, device); verify(accountsManager).updateDeviceLastSeen(eq(oldAccount), eq(device), anyLong()); assertThat(device.getLastSeen()).isEqualTo(today); }
@Override public void start() throws Exception { LOG.debug("Start leadership runner for job {}.", getJobID()); leaderElection.startLeaderElection(this); }
@Test void testJobInformationOperationsDuringInitialization() throws Exception { final JobManagerRunner jobManagerRunner = newJobMasterServiceLeadershipRunnerBuilder() .withSingleJobMasterServiceProcess( TestingJobMasterServiceProcess.newBuilder() .setIsInitializedAndRunningSupplier(() -> false) .build()) .build(); jobManagerRunner.start(); // assert initializing while waiting for leadership assertInitializingStates(jobManagerRunner); // assign leadership leaderElection.isLeader(UUID.randomUUID()); // assert initializing while not yet initialized assertInitializingStates(jobManagerRunner); }
@Override public <T> T clone(T object) { if (object instanceof String) { return object; } else if (object instanceof Collection) { Object firstElement = findFirstNonNullElement((Collection) object); if (firstElement != null && !(firstElement instanceof Serializable)) { JavaType type = TypeFactory.defaultInstance().constructParametricType(object.getClass(), firstElement.getClass()); return objectMapperWrapper.fromBytes(objectMapperWrapper.toBytes(object), type); } } else if (object instanceof Map) { Map.Entry firstEntry = this.findFirstNonNullEntry((Map) object); if (firstEntry != null) { Object key = firstEntry.getKey(); Object value = firstEntry.getValue(); if (!(key instanceof Serializable) || !(value instanceof Serializable)) { JavaType type = TypeFactory.defaultInstance().constructParametricType(object.getClass(), key.getClass(), value.getClass()); return (T) objectMapperWrapper.fromBytes(objectMapperWrapper.toBytes(object), type); } } } else if (object instanceof JsonNode) { return (T) ((JsonNode) object).deepCopy(); } if (object instanceof Serializable) { try { return (T) SerializationHelper.clone((Serializable) object); } catch (SerializationException e) { //it is possible that object itself implements java.io.Serializable, but underlying structure does not //in this case we switch to the other JSON marshaling strategy which doesn't use the Java serialization } } return jsonClone(object); }
@Test public void should_clone_serializable_object() { Object original = new SerializableObject("value"); Object cloned = serializer.clone(original); assertEquals(original, cloned); assertNotSame(original, cloned); }
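The Serializable fast path above delegates to SerializationHelper.clone; a rough, self-contained equivalent using only java.io looks like the sketch below (an illustration, not the library implementation). If any reachable field is not serializable, writeObject throws, which is the situation where clone() falls back to the JSON strategy.

import java.io.*;

public final class SerializationCloneDemo {
    @SuppressWarnings("unchecked")
    static <T extends Serializable> T deepClone(T object) throws IOException, ClassNotFoundException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (ObjectOutputStream out = new ObjectOutputStream(bytes)) {
            out.writeObject(object); // fails if any reachable field is not Serializable
        }
        try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray()))) {
            return (T) in.readObject();
        }
    }

    public static void main(String[] args) throws Exception {
        String[] original = {"a", "b"};
        String[] copy = deepClone(original);
        System.out.println(copy != original && copy[0].equals(original[0])); // true: distinct but equal
    }
}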
public static String getDefaultServerPort() { return serverPort; }
@Test void testGetDefaultServerPort() { String actual = ParamUtil.getDefaultServerPort(); assertEquals("8848", actual); }
@Override public Map<String, Metric> getMetrics() { final Map<String, Metric> gauges = new HashMap<>(); gauges.put("total.init", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getInit() + mxBean.getNonHeapMemoryUsage().getInit()); gauges.put("total.used", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getUsed() + mxBean.getNonHeapMemoryUsage().getUsed()); gauges.put("total.max", (Gauge<Long>) () -> mxBean.getNonHeapMemoryUsage().getMax() == -1 ? -1 : mxBean.getHeapMemoryUsage().getMax() + mxBean.getNonHeapMemoryUsage().getMax()); gauges.put("total.committed", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getCommitted() + mxBean.getNonHeapMemoryUsage().getCommitted()); gauges.put("heap.init", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getInit()); gauges.put("heap.used", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getUsed()); gauges.put("heap.max", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getMax()); gauges.put("heap.committed", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getCommitted()); gauges.put("heap.usage", new RatioGauge() { @Override protected Ratio getRatio() { final MemoryUsage usage = mxBean.getHeapMemoryUsage(); return Ratio.of(usage.getUsed(), usage.getMax()); } }); gauges.put("non-heap.init", (Gauge<Long>) () -> mxBean.getNonHeapMemoryUsage().getInit()); gauges.put("non-heap.used", (Gauge<Long>) () -> mxBean.getNonHeapMemoryUsage().getUsed()); gauges.put("non-heap.max", (Gauge<Long>) () -> mxBean.getNonHeapMemoryUsage().getMax()); gauges.put("non-heap.committed", (Gauge<Long>) () -> mxBean.getNonHeapMemoryUsage().getCommitted()); gauges.put("non-heap.usage", new RatioGauge() { @Override protected Ratio getRatio() { final MemoryUsage usage = mxBean.getNonHeapMemoryUsage(); return Ratio.of(usage.getUsed(), usage.getMax() == -1 ? usage.getCommitted() : usage.getMax()); } }); for (final MemoryPoolMXBean pool : memoryPools) { final String poolName = name("pools", WHITESPACE.matcher(pool.getName()).replaceAll("-")); gauges.put(name(poolName, "usage"), new RatioGauge() { @Override protected Ratio getRatio() { MemoryUsage usage = pool.getUsage(); return Ratio.of(usage.getUsed(), usage.getMax() == -1 ? usage.getCommitted() : usage.getMax()); } }); gauges.put(name(poolName, "max"), (Gauge<Long>) () -> pool.getUsage().getMax()); gauges.put(name(poolName, "used"), (Gauge<Long>) () -> pool.getUsage().getUsed()); gauges.put(name(poolName, "committed"), (Gauge<Long>) () -> pool.getUsage().getCommitted()); // Only register GC usage metrics if the memory pool supports usage statistics. if (pool.getCollectionUsage() != null) { gauges.put(name(poolName, "used-after-gc"), (Gauge<Long>) () -> pool.getCollectionUsage().getUsed()); } gauges.put(name(poolName, "init"), (Gauge<Long>) () -> pool.getUsage().getInit()); } return Collections.unmodifiableMap(gauges); }
@Test public void hasAGaugeForWeirdMemoryPoolCommitted() { final Gauge gauge = (Gauge) gauges.getMetrics().get("pools.Weird-Pool.committed"); assertThat(gauge.getValue()) .isEqualTo(100L); }
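For reference, a self-contained sketch of the computation behind the heap.usage RatioGauge, reading the same MemoryMXBean directly. Heap max is normally defined, while non-heap max may be -1, which is why the non-heap gauge above falls back to committed.

import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;
import java.lang.management.MemoryUsage;

public class HeapUsageDemo {
    public static void main(String[] args) {
        MemoryMXBean mxBean = ManagementFactory.getMemoryMXBean();
        MemoryUsage heap = mxBean.getHeapMemoryUsage();
        // Mirrors the heap.usage RatioGauge: used / max.
        double usage = (double) heap.getUsed() / heap.getMax();
        System.out.printf("heap used=%d max=%d usage=%.3f%n", heap.getUsed(), heap.getMax(), usage);
    }
}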
@Subscribe public void inputCreated(InputCreated inputCreatedEvent) { final String inputId = inputCreatedEvent.id(); LOG.debug("Input created: {}", inputId); final Input input; try { input = inputService.find(inputId); } catch (NotFoundException e) { LOG.warn("Received InputCreated event but could not find input {}", inputId, e); return; } final IOState<MessageInput> inputState = inputRegistry.getInputState(inputId); if (inputState != null) { inputRegistry.remove(inputState); } if (input.isGlobal() || this.nodeId.getNodeId().equals(input.getNodeId())) { startInput(input); } }
@Test public void inputCreatedStartsGlobalInputOnOtherNode() throws Exception { final String inputId = "input-id"; final Input input = mock(Input.class); when(inputService.find(inputId)).thenReturn(input); when(input.getNodeId()).thenReturn(OTHER_NODE_ID); when(input.isGlobal()).thenReturn(true); final MessageInput messageInput = mock(MessageInput.class); when(inputService.getMessageInput(input)).thenReturn(messageInput); listener.inputCreated(InputCreated.create(inputId)); verify(inputLauncher, times(1)).launch(messageInput); }
public static Builder in(Table table) { return new Builder(table); }
@TestTemplate public void testBasicBehavior() { table.newAppend().appendFile(FILE_A).appendFile(FILE_B).commit(); Iterable<DataFile> files = FindFiles.in(table).collect(); assertThat(pathSet(files)).isEqualTo(pathSet(FILE_A, FILE_B)); }
public static DataPermission get() { return DATA_PERMISSIONS.get().peekLast(); }
@Test public void testGet() { // mock the DataPermission objects DataPermission dataPermission01 = mock(DataPermission.class); DataPermissionContextHolder.add(dataPermission01); DataPermission dataPermission02 = mock(DataPermission.class); DataPermissionContextHolder.add(dataPermission02); // invoke DataPermission result = DataPermissionContextHolder.get(); // assert the latest permission is returned assertSame(result, dataPermission02); }
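A minimal sketch of the holder pattern get() relies on: a ThreadLocal deque where add() appends to the tail and get() peeks the tail, so the most recently added permission wins. Class and method names here are illustrative, not the real DataPermissionContextHolder.

import java.util.ArrayDeque;
import java.util.Deque;

public class ContextHolderDemo {
    private static final ThreadLocal<Deque<String>> STACK =
            ThreadLocal.withInitial(ArrayDeque::new);

    static void add(String permission) { STACK.get().addLast(permission); }
    static String get() { return STACK.get().peekLast(); }
    static String remove() { return STACK.get().pollLast(); }

    public static void main(String[] args) {
        add("first");
        add("second");
        System.out.println(get()); // second - the latest addition wins
        remove();
        System.out.println(get()); // first
    }
}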
@SuppressWarnings("JavaUtilDate") protected LocalDate convertDateValue(Object value) { if (value instanceof Number) { int days = ((Number) value).intValue(); return DateTimeUtil.dateFromDays(days); } else if (value instanceof String) { return LocalDate.parse((String) value); } else if (value instanceof LocalDate) { return (LocalDate) value; } else if (value instanceof Date) { int days = (int) (((Date) value).getTime() / 1000 / 60 / 60 / 24); return DateTimeUtil.dateFromDays(days); } throw new RuntimeException("Cannot convert date: " + value); }
@Test public void testDateConversion() { Table table = mock(Table.class); when(table.schema()).thenReturn(SIMPLE_SCHEMA); RecordConverter converter = new RecordConverter(table, config); LocalDate expected = LocalDate.of(2023, 11, 15); List<Object> inputList = ImmutableList.of( "2023-11-15", expected.toEpochDay(), expected, new Date(Duration.ofDays(expected.toEpochDay()).toMillis())); inputList.forEach( input -> { Temporal ts = converter.convertDateValue(input); assertThat(ts).isEqualTo(expected); }); }
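A quick standalone check of the Date branch arithmetic in convertDateValue: millis-since-epoch divided down to whole days recovers LocalDate.toEpochDay for UTC midnights (plain JDK; the class name is illustrative).

import java.time.LocalDate;
import java.util.Date;

public class EpochDayDemo {
    public static void main(String[] args) {
        LocalDate expected = LocalDate.of(2023, 11, 15);
        Date date = new Date(expected.toEpochDay() * 24L * 60 * 60 * 1000);
        int days = (int) (date.getTime() / 1000 / 60 / 60 / 24); // same division as convertDateValue
        System.out.println(days == expected.toEpochDay());       // true
        System.out.println(LocalDate.ofEpochDay(days));          // 2023-11-15
    }
}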
public synchronized BeamFnStateClient forApiServiceDescriptor( ApiServiceDescriptor apiServiceDescriptor) throws IOException { // We specifically are synchronized so that we only create one GrpcStateClient at a time // preventing a race where multiple GrpcStateClient objects might be constructed at the same // for the same ApiServiceDescriptor. BeamFnStateClient rval; synchronized (cache) { rval = cache.get(apiServiceDescriptor); } if (rval == null) { // We can't be synchronized on cache while constructing the GrpcStateClient since if the // connection fails, onError may be invoked from the gRPC thread which will invoke // closeAndCleanUp that clears the cache. rval = new GrpcStateClient(apiServiceDescriptor); synchronized (cache) { cache.put(apiServiceDescriptor, rval); } } return rval; }
@Test public void testRequestResponses() throws Exception { BeamFnStateClient client = clientCache.forApiServiceDescriptor(apiServiceDescriptor); CompletableFuture<StateResponse> successfulResponse = client.handle(StateRequest.newBuilder().setInstructionId(SUCCESS)); CompletableFuture<StateResponse> unsuccessfulResponse = client.handle(StateRequest.newBuilder().setInstructionId(FAIL)); // Wait for the client to connect. StreamObserver<StateResponse> outboundServerObserver = outboundServerObservers.take(); // Ensure the client doesn't break when sent garbage. outboundServerObserver.onNext(StateResponse.newBuilder().setId("UNKNOWN ID").build()); // We expect to receive and handle two requests handleServerRequest(outboundServerObserver, values.take()); handleServerRequest(outboundServerObserver, values.take()); // Ensure that the successful and unsuccessful responses were propagated. assertNotNull(successfulResponse.get()); try { unsuccessfulResponse.get(); fail("Expected unsuccessful response"); } catch (ExecutionException e) { assertThat(e.toString(), containsString(TEST_ERROR)); } }
int parseAndConvert(String[] args) throws Exception { Options opts = createOptions(); int retVal = 0; try { if (args.length == 0) { LOG.info("Missing command line arguments"); printHelp(opts); return 0; } CommandLine cliParser = new GnuParser().parse(opts, args); if (cliParser.hasOption(CliOption.HELP.shortSwitch)) { printHelp(opts); return 0; } FSConfigToCSConfigConverter converter = prepareAndGetConverter(cliParser); converter.convert(converterParams); String outputDir = converterParams.getOutputDirectory(); boolean skipVerification = cliParser.hasOption(CliOption.SKIP_VERIFICATION.shortSwitch); if (outputDir != null && !skipVerification) { validator.validateConvertedConfig( converterParams.getOutputDirectory()); } } catch (ParseException e) { String msg = "Options parsing failed: " + e.getMessage(); logAndStdErr(e, msg); printHelp(opts); retVal = -1; } catch (PreconditionException e) { String msg = "Cannot start FS config conversion due to the following" + " precondition error: " + e.getMessage(); handleException(e, msg); retVal = -1; } catch (UnsupportedPropertyException e) { String msg = "Unsupported property/setting encountered during FS config " + "conversion: " + e.getMessage(); handleException(e, msg); retVal = -1; } catch (ConversionException | IllegalArgumentException e) { String msg = "Fatal error during FS config conversion: " + e.getMessage(); handleException(e, msg); retVal = -1; } catch (VerificationException e) { Throwable cause = e.getCause(); String msg = "Verification failed: " + e.getCause().getMessage(); conversionOptions.handleVerificationFailure(cause, msg); retVal = -1; } conversionOptions.handleParsingFinished(); return retVal; }
@Test public void testMissingRulesConfiguration() throws Exception { setupFSConfigConversionFiles(true); FSConfigToCSConfigArgumentHandler argumentHandler = createArgumentHandler(); argumentHandler.parseAndConvert(getDefaultArgumentsAsArray()); }
public static double longitudeToPixelX(double longitude, byte zoomLevel, int tileSize) { long mapSize = getMapSize(zoomLevel, tileSize); return (longitude + 180) / 360 * mapSize; }
@Test public void longitudeToPixelXTest() { for (int tileSize : TILE_SIZES) { for (byte zoomLevel = ZOOM_LEVEL_MIN; zoomLevel <= ZOOM_LEVEL_MAX; ++zoomLevel) { long mapSize = MercatorProjection.getMapSize(zoomLevel, tileSize); double pixelX = MercatorProjection.longitudeToPixelX(LatLongUtils.LONGITUDE_MIN, mapSize); Assert.assertEquals(0, pixelX, 0); pixelX = MercatorProjection.longitudeToPixelXWithScaleFactor(LatLongUtils.LONGITUDE_MIN, MercatorProjection.zoomLevelToScaleFactor(zoomLevel), tileSize); Assert.assertEquals(0, pixelX, 0); pixelX = MercatorProjection.longitudeToPixelX(0, mapSize); Assert.assertEquals((float) mapSize / 2, pixelX, 0); mapSize = MercatorProjection.getMapSizeWithScaleFactor(MercatorProjection.zoomLevelToScaleFactor(zoomLevel), tileSize); pixelX = MercatorProjection.longitudeToPixelXWithScaleFactor(0, MercatorProjection.zoomLevelToScaleFactor(zoomLevel), tileSize); Assert.assertEquals((float) mapSize / 2, pixelX, 0); pixelX = MercatorProjection.longitudeToPixelX(LatLongUtils.LONGITUDE_MAX, mapSize); Assert.assertEquals(mapSize, pixelX, 0); pixelX = MercatorProjection.longitudeToPixelXWithScaleFactor(LatLongUtils.LONGITUDE_MAX, MercatorProjection.zoomLevelToScaleFactor(zoomLevel), tileSize); Assert.assertEquals(mapSize, pixelX, 0); } } }
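A worked example of the formula, assuming getMapSize(zoomLevel, tileSize) == tileSize << zoomLevel, which matches the test's use of the map-size bound: at zoom 1 with 256-pixel tiles the map is 512 pixels wide, so -180 degrees maps to pixel 0, 0 degrees to 256, and +180 degrees to 512.

public class MercatorXDemo {
    static double longitudeToPixelX(double longitude, long mapSize) {
        return (longitude + 180) / 360 * mapSize; // same linear mapping as the focal method
    }

    public static void main(String[] args) {
        long mapSize = 256L << 1; // tileSize << zoomLevel, here zoom 1
        System.out.println(longitudeToPixelX(-180, mapSize)); // 0.0
        System.out.println(longitudeToPixelX(0, mapSize));    // 256.0
        System.out.println(longitudeToPixelX(180, mapSize));  // 512.0
    }
}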
public Map<String, Parameter> generateMergedWorkflowParams( WorkflowInstance instance, RunRequest request) { Workflow workflow = instance.getRuntimeWorkflow(); Map<String, ParamDefinition> allParamDefs = new LinkedHashMap<>(); Map<String, ParamDefinition> defaultWorkflowParams = defaultParamManager.getDefaultWorkflowParams(); // merge workflow params for start if (request.isFreshRun()) { // merge default workflow params ParamsMergeHelper.mergeParams( allParamDefs, defaultWorkflowParams, ParamsMergeHelper.MergeContext.workflowCreate(ParamSource.SYSTEM_DEFAULT, request)); // merge defined workflow params if (workflow.getParams() != null) { ParamsMergeHelper.mergeParams( allParamDefs, workflow.getParams(), ParamsMergeHelper.MergeContext.workflowCreate(ParamSource.DEFINITION, request)); } } // merge workflow params from previous instance for restart if (!request.isFreshRun() && instance.getParams() != null) { Map<String, ParamDefinition> previousParamDefs = instance.getParams().entrySet().stream() .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().toDefinition())); // remove reserved params, which should be injected again by the system. for (String paramName : Constants.RESERVED_PARAM_NAMES) { previousParamDefs.remove(paramName); } ParamsMergeHelper.mergeParams( allParamDefs, previousParamDefs, ParamsMergeHelper.MergeContext.workflowCreate(ParamSource.SYSTEM, false)); } // merge run params if (request.getRunParams() != null) { ParamSource source = getParamSource(request.getInitiator(), request.isFreshRun()); ParamsMergeHelper.mergeParams( allParamDefs, request.getRunParams(), ParamsMergeHelper.MergeContext.workflowCreate(source, request)); } // merge user provided restart run params getUserRestartParam(request) .ifPresent( userRestartParams -> { ParamSource source = getParamSource(request.getInitiator(), request.isFreshRun()); ParamsMergeHelper.mergeParams( allParamDefs, userRestartParams, ParamsMergeHelper.MergeContext.workflowCreate(source, request)); }); // cleanup any placeholder params and convert to params return ParamsMergeHelper.convertToParameters(ParamsMergeHelper.cleanupParams(allParamDefs)); }
@Test public void testWorkflowParamRunParamsUpstreamInitiatorRestartMerge() { Map<String, ParamDefinition> restartParams = singletonMap( "TARGET_RUN_DATE", ParamDefinition.buildParamDefinition("TARGET_RUN_DATE", 1001).toBuilder() .mode(ParamMode.MUTABLE) .meta(singletonMap(Constants.METADATA_SOURCE_KEY, "RESTART")) .build()); Map<String, Parameter> instanceParams = new LinkedHashMap<>(); instanceParams.put("RUN_TS", buildParam("RUN_TS", 123L)); instanceParams.put( "TARGET_RUN_DATE", LongParameter.builder() .name("TARGET_RUN_DATE") .value(1000L) .evaluatedResult(1000L) .evaluatedTime(123L) .mode(ParamMode.MUTABLE_ON_START) .build()); workflowInstance.setParams(instanceParams); Initiator.Type[] initiators = new Initiator.Type[] { Initiator.Type.FOREACH, Initiator.Type.SUBWORKFLOW, Initiator.Type.TEMPLATE }; for (Initiator.Type initiator : initiators) { RunRequest request = RunRequest.builder() .initiator(UpstreamInitiator.withType(initiator)) .currentPolicy(RunPolicy.RESTART_FROM_SPECIFIC) .restartConfig( RestartConfig.builder() .restartPolicy(RunPolicy.RESTART_FROM_SPECIFIC) .restartPath( Collections.singletonList( new RestartConfig.RestartNode( workflowSummary.getWorkflowId(), workflowSummary.getWorkflowInstanceId(), "step1"))) .restartParams(restartParams) .build()) .build(); Map<String, Parameter> params = paramsManager.generateMergedWorkflowParams(workflowInstance, request); Assert.assertEquals( Long.valueOf(1001L), params.get("TARGET_RUN_DATE").asLongParam().getValue()); } }
@Nullable public static <T> T getWithoutException(CompletableFuture<T> future) { if (isCompletedNormally(future)) { try { return future.get(); } catch (InterruptedException | ExecutionException ignored) { } } return null; }
@Test void testGetWithoutExceptionWithoutFinishing() { final CompletableFuture<Integer> completableFuture = new CompletableFuture<>(); assertThat(FutureUtils.getWithoutException(completableFuture)).isNull(); }
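A self-contained sketch of the three cases the helper distinguishes, approximating isCompletedNormally as done-and-not-exceptional (the real check may also treat cancellation separately).

import java.util.concurrent.CompletableFuture;

public class GetWithoutExceptionDemo {
    static <T> T getWithoutException(CompletableFuture<T> future) {
        if (future.isDone() && !future.isCompletedExceptionally()) {
            return future.join();
        }
        return null; // pending or exceptional futures yield null
    }

    public static void main(String[] args) {
        System.out.println(getWithoutException(CompletableFuture.completedFuture(42))); // 42
        System.out.println(getWithoutException(new CompletableFuture<Integer>()));      // null (pending)
        CompletableFuture<Integer> failed = new CompletableFuture<>();
        failed.completeExceptionally(new RuntimeException("boom"));
        System.out.println(getWithoutException(failed));                                // null (exceptional)
    }
}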
public static Time parseTime(final String str) { try { return new Time(LocalTime.parse(str).toNanoOfDay() / 1000000); } catch (DateTimeParseException e) { throw new KsqlException("Failed to parse time '" + str + "': " + e.getMessage() + TIME_HELP_MESSAGE, e ); } }
@Test public void shouldNotParseTime() { // When: final KsqlException e = assertThrows( KsqlException.class, () -> SqlTimeTypes.parseTime("foo") ); // Then assertThat(e.getMessage(), containsString( "Required format is: \"HH:mm:ss.SSS\"")); }
public static final String getTrimTypeCode( int i ) { if ( i < 0 || i >= trimTypeCode.length ) { return trimTypeCode[0]; } return trimTypeCode[i]; }
@Test public void testGetTrimTypeCode() { assertEquals( ValueMetaBase.getTrimTypeCode( ValueMetaInterface.TRIM_TYPE_NONE ), "none" ); assertEquals( ValueMetaBase.getTrimTypeCode( ValueMetaInterface.TRIM_TYPE_LEFT ), "left" ); assertEquals( ValueMetaBase.getTrimTypeCode( ValueMetaInterface.TRIM_TYPE_RIGHT ), "right" ); assertEquals( ValueMetaBase.getTrimTypeCode( ValueMetaInterface.TRIM_TYPE_BOTH ), "both" ); }
@Override public BasicTypeDefine reconvert(Column column) { BasicTypeDefine.BasicTypeDefineBuilder builder = BasicTypeDefine.builder() .name(column.getName()) .nullable(column.isNullable()) .comment(column.getComment()) .defaultValue(column.getDefaultValue()); switch (column.getDataType().getSqlType()) { case BOOLEAN: builder.columnType(XUGU_BOOLEAN); builder.dataType(XUGU_BOOLEAN); break; case TINYINT: builder.columnType(XUGU_TINYINT); builder.dataType(XUGU_TINYINT); break; case SMALLINT: builder.columnType(XUGU_SMALLINT); builder.dataType(XUGU_SMALLINT); break; case INT: builder.columnType(XUGU_INTEGER); builder.dataType(XUGU_INTEGER); break; case BIGINT: builder.columnType(XUGU_BIGINT); builder.dataType(XUGU_BIGINT); break; case FLOAT: builder.columnType(XUGU_FLOAT); builder.dataType(XUGU_FLOAT); break; case DOUBLE: builder.columnType(XUGU_DOUBLE); builder.dataType(XUGU_DOUBLE); break; case DECIMAL: DecimalType decimalType = (DecimalType) column.getDataType(); long precision = decimalType.getPrecision(); int scale = decimalType.getScale(); if (precision <= 0) { precision = DEFAULT_PRECISION; scale = DEFAULT_SCALE; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which is precision less than 0, " + "it will be converted to decimal({},{})", column.getName(), decimalType.getPrecision(), decimalType.getScale(), precision, scale); } else if (precision > MAX_PRECISION) { scale = (int) Math.max(0, scale - (precision - MAX_PRECISION)); precision = MAX_PRECISION; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which exceeds the maximum precision of {}, " + "it will be converted to decimal({},{})", column.getName(), decimalType.getPrecision(), decimalType.getScale(), MAX_PRECISION, precision, scale); } if (scale < 0) { scale = 0; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which is scale less than 0, " + "it will be converted to decimal({},{})", column.getName(), decimalType.getPrecision(), decimalType.getScale(), precision, scale); } else if (scale > MAX_SCALE) { scale = MAX_SCALE; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which exceeds the maximum scale of {}, " + "it will be converted to decimal({},{})", column.getName(), decimalType.getPrecision(), decimalType.getScale(), MAX_SCALE, precision, scale); } builder.columnType(String.format("%s(%s,%s)", XUGU_NUMERIC, precision, scale)); builder.dataType(XUGU_NUMERIC); builder.precision(precision); builder.scale(scale); break; case BYTES: if (column.getColumnLength() == null || column.getColumnLength() <= 0) { builder.columnType(XUGU_BLOB); builder.dataType(XUGU_BLOB); } else if (column.getColumnLength() <= MAX_BINARY_LENGTH) { builder.columnType(XUGU_BINARY); builder.dataType(XUGU_BINARY); } else { builder.columnType(XUGU_BLOB); builder.dataType(XUGU_BLOB); } break; case STRING: if (column.getColumnLength() == null || column.getColumnLength() <= 0) { builder.columnType(String.format("%s(%s)", XUGU_VARCHAR, MAX_VARCHAR_LENGTH)); builder.dataType(XUGU_VARCHAR); } else if (column.getColumnLength() <= MAX_VARCHAR_LENGTH) { builder.columnType( String.format("%s(%s)", XUGU_VARCHAR, column.getColumnLength())); builder.dataType(XUGU_VARCHAR); } else { builder.columnType(XUGU_CLOB); builder.dataType(XUGU_CLOB); } break; case DATE: builder.columnType(XUGU_DATE); builder.dataType(XUGU_DATE); break; case TIME: builder.dataType(XUGU_TIME); if (column.getScale() != null && column.getScale() > 0) { Integer timeScale = column.getScale(); if (timeScale > MAX_TIME_SCALE) { timeScale = MAX_TIME_SCALE; log.warn( "The time column {} type time({}) is out of range, " + "which exceeds the maximum scale of {}, " + "it will be converted to time({})", column.getName(), column.getScale(), MAX_TIME_SCALE, timeScale); } builder.columnType(String.format("%s(%s)", XUGU_TIME, timeScale)); builder.scale(timeScale); } else { builder.columnType(XUGU_TIME); } break; case TIMESTAMP: if (column.getScale() == null || column.getScale() <= 0) { builder.columnType(XUGU_TIMESTAMP); } else { int timestampScale = column.getScale(); if (column.getScale() > MAX_TIMESTAMP_SCALE) { timestampScale = MAX_TIMESTAMP_SCALE; log.warn( "The timestamp column {} type timestamp({}) is out of range, " + "which exceeds the maximum scale of {}, " + "it will be converted to timestamp({})", column.getName(), column.getScale(), MAX_TIMESTAMP_SCALE, timestampScale); } builder.columnType(String.format("TIMESTAMP(%s)", timestampScale)); builder.scale(timestampScale); } builder.dataType(XUGU_TIMESTAMP); break; default: throw CommonError.convertToConnectorTypeError( DatabaseIdentifier.XUGU, column.getDataType().getSqlType().name(), column.getName()); } return builder.build(); }
@Test public void testReconvertBoolean() { Column column = PhysicalColumn.builder() .name("test") .dataType(BasicType.BOOLEAN_TYPE) .nullable(true) .defaultValue(true) .comment("test") .build(); BasicTypeDefine typeDefine = XuguTypeConverter.INSTANCE.reconvert(column); Assertions.assertEquals(column.getName(), typeDefine.getName()); Assertions.assertEquals(XuguTypeConverter.XUGU_BOOLEAN, typeDefine.getColumnType()); Assertions.assertEquals(XuguTypeConverter.XUGU_BOOLEAN, typeDefine.getDataType()); Assertions.assertEquals(column.isNullable(), typeDefine.isNullable()); Assertions.assertEquals(column.getDefaultValue(), typeDefine.getDefaultValue()); Assertions.assertEquals(column.getComment(), typeDefine.getComment()); }
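A standalone sketch of the DECIMAL clamping rules in reconvert, with placeholder limits: the real MAX_PRECISION, MAX_SCALE, and DEFAULT_* constants live in XuguTypeConverter, and the values below are illustrative only.

public class DecimalClampDemo {
    static final int MAX_PRECISION = 38; // illustrative; see XuguTypeConverter for the real limits
    static final int MAX_SCALE = 38;
    static final int DEFAULT_PRECISION = 38;
    static final int DEFAULT_SCALE = 18;

    static String clamp(long precision, int scale) {
        if (precision <= 0) {                       // non-positive precision falls back to defaults
            precision = DEFAULT_PRECISION;
            scale = DEFAULT_SCALE;
        } else if (precision > MAX_PRECISION) {     // excess precision is taken out of the scale first
            scale = (int) Math.max(0, scale - (precision - MAX_PRECISION));
            precision = MAX_PRECISION;
        }
        if (scale < 0) {
            scale = 0;
        } else if (scale > MAX_SCALE) {
            scale = MAX_SCALE;
        }
        return "NUMERIC(" + precision + "," + scale + ")";
    }

    public static void main(String[] args) {
        System.out.println(clamp(50, 20)); // precision overflow: scale shrinks by the excess -> NUMERIC(38,8)
        System.out.println(clamp(10, -2)); // negative scale clamps to 0 -> NUMERIC(10,0)
    }
}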
@Override public PackageRevision responseMessageForLatestRevisionSince(String responseBody) { return toPackageRevision(responseBody); }
@Test public void shouldBuildPackageRevisionFromLatestRevisionSinceResponse() throws Exception { String responseBody = "{\"revision\":\"abc.rpm\",\"timestamp\":\"2011-07-14T19:43:37.100Z\",\"user\":\"some-user\",\"revisionComment\":\"comment\"," + "\"trackbackUrl\":\"http:\\\\localhost:9999\",\"data\":{\"dataKeyOne\":\"data-value-one\",\"dataKeyTwo\":\"data-value-two\"}}"; PackageRevision packageRevision = messageHandler.responseMessageForLatestRevisionSince(responseBody); assertPackageRevision(packageRevision, "abc.rpm", "some-user", "2011-07-14T19:43:37.100Z", "comment", "http:\\localhost:9999"); }
public static String buildWhereConditionByPKs(List<String> pkNameList, int rowSize, String dbType) throws SQLException { return buildWhereConditionByPKs(pkNameList, rowSize, dbType, MAX_IN_SIZE); }
@Test void testBuildWhereConditionByPKs() throws SQLException { List<String> pkNameList=new ArrayList<>(); pkNameList.add("id"); pkNameList.add("name"); String result = SqlGenerateUtils.buildWhereConditionByPKs(pkNameList,4,"mysql",2); Assertions.assertEquals("(id,name) in ( (?,?),(?,?) ) or (id,name) in ( (?,?),(?,?) )", result); result = SqlGenerateUtils.buildWhereConditionByPKs(pkNameList,5,"mysql",2); Assertions.assertEquals("(id,name) in ( (?,?),(?,?) ) or (id,name) in ( (?,?),(?,?) ) or (id,name) in ( (?,?)" + " )", result); }
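A self-contained sketch reproducing the chunking the test exercises: rows are grouped into IN lists of at most maxInSize tuples and joined with "or". This is plain JDK code; the real method also varies the SQL by dbType.

import java.util.Collections;
import java.util.List;

public class InClauseDemo {
    static String buildWhereConditionByPKs(List<String> pkNames, int rowSize, int maxInSize) {
        String columns = "(" + String.join(",", pkNames) + ")";
        String tuple = "(" + String.join(",", Collections.nCopies(pkNames.size(), "?")) + ")";
        StringBuilder sql = new StringBuilder();
        for (int start = 0; start < rowSize; start += maxInSize) {
            int count = Math.min(maxInSize, rowSize - start); // last chunk may be smaller
            if (start > 0) {
                sql.append(" or ");
            }
            sql.append(columns).append(" in ( ")
               .append(String.join(",", Collections.nCopies(count, tuple)))
               .append(" )");
        }
        return sql.toString();
    }

    public static void main(String[] args) {
        System.out.println(buildWhereConditionByPKs(List.of("id", "name"), 5, 2));
        // (id,name) in ( (?,?),(?,?) ) or (id,name) in ( (?,?),(?,?) ) or (id,name) in ( (?,?) )
    }
}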
public static String substVars(String val, PropertyContainer pc1) { return substVars(val, pc1, null); }
@Test public void testSubstVarsVariableNotClosed() { String noSubst = "testing if ${v1 works"; try { @SuppressWarnings("unused") String result = OptionHelper.substVars(noSubst, context); fail(); } catch (IllegalArgumentException e) { //ok } }
public static QueryDataType toHazelcastType(RelDataType relDataType) { if (relDataType.getSqlTypeName() != OTHER) { return toHazelcastTypeFromSqlTypeName(relDataType.getSqlTypeName()); } final RelDataTypeFamily typeFamily = relDataType.getFamily(); if (typeFamily instanceof HazelcastJsonType) { return QueryDataType.JSON; } if (typeFamily instanceof HazelcastObjectType) { return convertHazelcastObjectType(relDataType); } throw new IllegalArgumentException("Unexpected SQL type: " + relDataType); }
@Test public void testCalciteToHazelcast() { assertSame(QueryDataType.JSON, HazelcastTypeUtils.toHazelcastType(HazelcastJsonType.TYPE)); assertSame(QueryDataType.JSON, HazelcastTypeUtils.toHazelcastType(HazelcastJsonType.TYPE_NULLABLE)); assertSame(QueryDataType.VARCHAR, HazelcastTypeUtils.toHazelcastType(type(SqlTypeName.VARCHAR))); assertSame(QueryDataType.BOOLEAN, HazelcastTypeUtils.toHazelcastType(type(SqlTypeName.BOOLEAN))); assertSame(QueryDataType.TINYINT, HazelcastTypeUtils.toHazelcastType(type(SqlTypeName.TINYINT))); assertSame(QueryDataType.SMALLINT, HazelcastTypeUtils.toHazelcastType(type(SqlTypeName.SMALLINT))); assertSame(QueryDataType.INT, HazelcastTypeUtils.toHazelcastType(type(SqlTypeName.INTEGER))); assertSame(QueryDataType.BIGINT, HazelcastTypeUtils.toHazelcastType(type(SqlTypeName.BIGINT))); assertSame(QueryDataType.DECIMAL, HazelcastTypeUtils.toHazelcastType(type(SqlTypeName.DECIMAL))); assertSame(QueryDataType.REAL, HazelcastTypeUtils.toHazelcastType(type(SqlTypeName.REAL))); assertSame(QueryDataType.DOUBLE, HazelcastTypeUtils.toHazelcastType(type(SqlTypeName.DOUBLE))); assertSame(QueryDataType.DATE, HazelcastTypeUtils.toHazelcastType(type(SqlTypeName.DATE))); assertSame(QueryDataType.TIME, HazelcastTypeUtils.toHazelcastType(type(SqlTypeName.TIME))); assertSame(QueryDataType.TIMESTAMP, HazelcastTypeUtils.toHazelcastType(type(SqlTypeName.TIMESTAMP))); assertSame(QueryDataType.TIMESTAMP_WITH_TZ_OFFSET_DATE_TIME, HazelcastTypeUtils.toHazelcastType(type(SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE))); }
@Override public void rotate(IndexSet indexSet) { indexRotator.rotate(indexSet, this::shouldRotate); }
@Test public void testDontRotate() { when(indices.getStoreSizeInBytes("name")).thenReturn(Optional.of(1000L)); when(indexSet.getNewestIndex()).thenReturn("name"); when(indexSet.getConfig()).thenReturn(indexSetConfig); when(indexSetConfig.rotationStrategyConfig()).thenReturn(SizeBasedRotationStrategyConfig.create(100000L)); final SizeBasedRotationStrategy strategy = createStrategy(); strategy.rotate(indexSet); verify(indexSet, never()).cycle(); reset(indexSet); }
public void flushToServer() { if (buffer.isEmpty()) { return; } List sent = new ArrayList(); try { synchronized (buffer) { while (!buffer.isEmpty()) { sent.add(buffer.remove()); } } StringBuilder result = new StringBuilder(); for (Object string : sent) { result.append(string); result.append("\n"); } consoleAppender.append(result.toString()); } catch (IOException e) { LOGGER.warn("Could not send console output to server", e); synchronized (buffer) { sent.addAll(buffer); buffer.clear(); buffer.addAll(sent); } } }
@Test public void shouldNotFlushToServerWhenBufferIsEmpty() throws Exception { transmitter.flushToServer(); verify(consoleAppender, never()).append(any(String.class)); }
@Override public InputStream read(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException { try { if(0L == status.getLength()) { return new NullInputStream(0L); } final Storage.Objects.Get request = session.getClient().objects().get( containerService.getContainer(file).getName(), containerService.getKey(file)); if(containerService.getContainer(file).attributes().getCustom().containsKey(GoogleStorageAttributesFinderFeature.KEY_REQUESTER_PAYS)) { request.setUserProject(session.getHost().getCredentials().getUsername()); } final VersioningConfiguration versioning = null != session.getFeature(Versioning.class) ? session.getFeature(Versioning.class).getConfiguration( containerService.getContainer(file) ) : VersioningConfiguration.empty(); if(versioning.isEnabled()) { if(StringUtils.isNotBlank(file.attributes().getVersionId())) { request.setGeneration(Long.parseLong(file.attributes().getVersionId())); } } if(status.isAppend()) { final HttpRange range = HttpRange.withStatus(status); final String header; if(TransferStatus.UNKNOWN_LENGTH == range.getEnd()) { header = String.format("bytes=%d-", range.getStart()); } else { header = String.format("bytes=%d-%d", range.getStart(), range.getEnd()); } if(log.isDebugEnabled()) { log.debug(String.format("Add range header %s for file %s", header, file)); } final HttpHeaders headers = request.getRequestHeaders(); headers.setRange(header); // Disable compression headers.setAcceptEncoding("identity"); } return request.executeMediaAsInputStream(); } catch(IOException e) { throw new GoogleStorageExceptionMappingService().map("Download {0} failed", e, file); } }
@Test @Ignore public void testReadRangeUnknownLength() throws Exception { final Path container = new Path("cyberduck-test-eu", EnumSet.of(Path.Type.directory, Path.Type.volume)); final Path test = new Path(container, String.format("%s %s", new AlphanumericRandomStringService().random(), new AlphanumericRandomStringService().random()), EnumSet.of(Path.Type.file)); new GoogleStorageTouchFeature(session).touch(test, new TransferStatus()); final byte[] content = RandomUtils.nextBytes(1023); final TransferStatus status = new TransferStatus().withLength(content.length); status.setChecksum(new SHA256ChecksumCompute().compute(new ByteArrayInputStream(content), status)); final HttpResponseOutputStream<StorageObject> out = new GoogleStorageWriteFeature(session).write(test, status, new DisabledConnectionCallback()); assertNotNull(out); new StreamCopier(new TransferStatus(), new TransferStatus()).transfer(new ByteArrayInputStream(content), out); test.attributes().setVersionId(String.valueOf(out.getStatus().getGeneration())); status.setAppend(true); status.setOffset(100L); status.setLength(-1L); final InputStream in = new GoogleStorageReadFeature(session).read(test, status, new DisabledConnectionCallback()); assertNotNull(in); final ByteArrayOutputStream buffer = new ByteArrayOutputStream(content.length - 100); new StreamCopier(status, status).transfer(in, buffer); final byte[] reference = new byte[content.length - 100]; System.arraycopy(content, 100, reference, 0, content.length - 100); assertArrayEquals(reference, buffer.toByteArray()); in.close(); new GoogleStorageDeleteFeature(session).delete(Collections.singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
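For reference, a standalone sketch of the Range header the read feature builds for resumed downloads: a known remaining length yields "bytes=start-end", while an unknown length yields the open-ended "bytes=start-" form the test exercises. This is simplified; the real code derives start and end via HttpRange.withStatus.

public class RangeHeaderDemo {
    static final long UNKNOWN_LENGTH = -1L;

    static String rangeHeader(long offset, long length) {
        if (length == UNKNOWN_LENGTH) {
            return String.format("bytes=%d-", offset); // open-ended range, read to end of object
        }
        return String.format("bytes=%d-%d", offset, offset + length - 1); // inclusive end
    }

    public static void main(String[] args) {
        System.out.println(rangeHeader(100, UNKNOWN_LENGTH)); // bytes=100-
        System.out.println(rangeHeader(0, 1024));             // bytes=0-1023
    }
}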
void apply(Metrics.MetricsBuilder metricsBuilder) { metricsBuilder.topicBytesInPerSec(bytesInFifteenMinuteRate); metricsBuilder.topicBytesOutPerSec(bytesOutFifteenMinuteRate); metricsBuilder.brokerBytesInPerSec(brokerBytesInFifteenMinuteRate); metricsBuilder.brokerBytesOutPerSec(brokerBytesOutFifteenMinuteRate); }
@Test void appliesInnerStateToMetricsBuilder() { //filling per topic io rates wellKnownMetrics.bytesInFifteenMinuteRate.put("topic", new BigDecimal(1)); wellKnownMetrics.bytesOutFifteenMinuteRate.put("topic", new BigDecimal(2)); //filling per broker io rates wellKnownMetrics.brokerBytesInFifteenMinuteRate.put(1, new BigDecimal(1)); wellKnownMetrics.brokerBytesOutFifteenMinuteRate.put(1, new BigDecimal(2)); wellKnownMetrics.brokerBytesInFifteenMinuteRate.put(2, new BigDecimal(10)); wellKnownMetrics.brokerBytesOutFifteenMinuteRate.put(2, new BigDecimal(20)); Metrics.MetricsBuilder builder = Metrics.builder(); wellKnownMetrics.apply(builder); var metrics = builder.build(); // checking per topic io rates assertThat(metrics.getTopicBytesInPerSec()).containsExactlyEntriesOf(wellKnownMetrics.bytesInFifteenMinuteRate); assertThat(metrics.getTopicBytesOutPerSec()).containsExactlyEntriesOf(wellKnownMetrics.bytesOutFifteenMinuteRate); // checking per broker io rates assertThat(metrics.getBrokerBytesInPerSec()).containsExactlyInAnyOrderEntriesOf( Map.of(1, new BigDecimal(1), 2, new BigDecimal(10))); assertThat(metrics.getBrokerBytesOutPerSec()).containsExactlyInAnyOrderEntriesOf( Map.of(1, new BigDecimal(2), 2, new BigDecimal(20))); }
@Override public void post(Event event) { if (!getDispatcher(event).add(event)) { log.error("Unable to post event {}", event); } }
@Test public void postEventWithBadSink() throws Exception { gooSink.latch = new CountDownLatch(1); dispatcher.post(new Goo("boom")); gooSink.latch.await(100, TimeUnit.MILLISECONDS); validate(gooSink, "boom"); validate(prickleSink); }
@Udf(description = "Returns the inverse (arc) tangent of an INT value") public Double atan( @UdfParameter( value = "value", description = "The value to get the inverse tangent of." ) final Integer value ) { return atan(value == null ? null : value.doubleValue()); }
@Test public void shouldHandleNegative() { assertThat(udf.atan(-0.43), closeTo(-0.40609805831761564, 0.000000000000001)); assertThat(udf.atan(-0.5), closeTo(-0.4636476090008061, 0.000000000000001)); assertThat(udf.atan(-1.0), closeTo(-0.7853981633974483, 0.000000000000001)); assertThat(udf.atan(-1), closeTo(-0.7853981633974483, 0.000000000000001)); assertThat(udf.atan(-1L), closeTo(-0.7853981633974483, 0.000000000000001)); }
public static <InputT, OutputT> MapElements<InputT, OutputT> via( final InferableFunction<InputT, OutputT> fn) { return new MapElements<>(fn, fn.getInputTypeDescriptor(), fn.getOutputTypeDescriptor()); }
@Test public void testNestedPolymorphicInferableFunction() throws Exception { pipeline.enableAbandonedNodeEnforcement(false); pipeline .apply(Create.of(1, 2, 3)) .apply("Polymorphic Identity", MapElements.via(new NestedPolymorphicInferableFunction<>())) .apply( "Test Consumer", MapElements.via( new InferableFunction<KV<Integer, String>, Integer>() { @Override public Integer apply(KV<Integer, String> input) throws Exception { return 42; } })); }
@Override public final boolean cancel(String errMsg) { isCancelling.set(true); try { // If waitingCreatingReplica == false, we will assume that // cancel thread will get the object lock very quickly. if (waitingCreatingReplica.get()) { Preconditions.checkState(createReplicaLatch != null); createReplicaLatch.countDownToZero(new Status(TStatusCode.OK, "")); } synchronized (this) { return cancelImpl(errMsg); } } finally { isCancelling.set(false); } }
@Test public void testCancelPendingJob() throws IOException { TabletInvertedIndex invertedIndex = GlobalStateMgr.getCurrentState().getTabletInvertedIndex(); schemaChangeJob.cancel("test"); Assert.assertEquals(AlterJobV2.JobState.CANCELLED, schemaChangeJob.getJobState()); // test cancel again schemaChangeJob.cancel("test"); Assert.assertEquals(AlterJobV2.JobState.CANCELLED, schemaChangeJob.getJobState()); }
public static Object get(Object object, int index) { if (index < 0) { throw new IndexOutOfBoundsException("Index cannot be negative: " + index); } if (object instanceof Map) { Map map = (Map) object; Iterator iterator = map.entrySet().iterator(); return get(iterator, index); } else if (object instanceof List) { return ((List) object).get(index); } else if (object instanceof Object[]) { return ((Object[]) object)[index]; } else if (object instanceof Iterator) { Iterator it = (Iterator) object; while (it.hasNext()) { index--; if (index == -1) { return it.next(); } else { it.next(); } } throw new IndexOutOfBoundsException("Entry does not exist: " + index); } else if (object instanceof Collection) { Iterator iterator = ((Collection) object).iterator(); return get(iterator, index); } else if (object instanceof Enumeration) { Enumeration it = (Enumeration) object; while (it.hasMoreElements()) { index--; if (index == -1) { return it.nextElement(); } else { it.nextElement(); } } throw new IndexOutOfBoundsException("Entry does not exist: " + index); } else if (object == null) { throw new IllegalArgumentException("Unsupported object type: null"); } else { try { return Array.get(object, index); } catch (IllegalArgumentException ex) { throw new IllegalArgumentException("Unsupported object type: " + object.getClass().getName()); } } }
@Test void testGetArray1() { assertThrows(IndexOutOfBoundsException.class, () -> { CollectionUtils.get(new Object[] {}, -1); }); }
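A short usage sketch of the positional semantics, assuming the CollectionUtils shown above is on the classpath: lists and object arrays index directly, maps return the index-th entry in iteration order, and primitive arrays fall through to the reflective Array.get path.

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class PositionalGetDemo {
    public static void main(String[] args) {
        List<String> list = Arrays.asList("a", "b", "c");
        System.out.println(CollectionUtils.get(list, 1)); // b

        Map<String, Integer> map = new LinkedHashMap<>();
        map.put("x", 1);
        map.put("y", 2);
        // For maps, get(n) returns the n-th Map.Entry in iteration order.
        System.out.println(CollectionUtils.get(map, 1)); // y=2

        System.out.println(CollectionUtils.get(new int[] {7, 8, 9}, 2)); // 9 (via Array.get)
    }
}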
void notifyPendingReceivedCallback(final Message<T> message, Exception exception) { if (pendingReceives.isEmpty()) { return; } // fetch receivedCallback from queue final CompletableFuture<Message<T>> receivedFuture = nextPendingReceive(); if (receivedFuture == null) { return; } if (exception != null) { internalPinnedExecutor.execute(() -> receivedFuture.completeExceptionally(exception)); return; } if (message == null) { IllegalStateException e = new IllegalStateException("received message can't be null"); internalPinnedExecutor.execute(() -> receivedFuture.completeExceptionally(e)); return; } if (getCurrentReceiverQueueSize() == 0) { // call interceptor and complete received callback trackMessage(message); interceptAndComplete(message, receivedFuture); return; } // increase permits for available message-queue messageProcessed(message); // call interceptor and complete received callback interceptAndComplete(message, receivedFuture); }
@Test(invocationTimeOut = 1000) public void testNotifyPendingReceivedCallback_CompleteWithException() { CompletableFuture<Message<byte[]>> receiveFuture = new CompletableFuture<>(); consumer.pendingReceives.add(receiveFuture); Exception exception = new PulsarClientException.InvalidMessageException("some random exception"); consumer.notifyPendingReceivedCallback(null, exception); try { receiveFuture.join(); } catch (CompletionException e) { // Completion exception must be the same we provided at calling time Assert.assertEquals(e.getCause(), exception); } Assert.assertTrue(receiveFuture.isCompletedExceptionally()); }
public String encode(String name, String value) { return encode(new DefaultCookie(name, value)); }
@Test public void testEncodingMultipleClientCookies() { String c1 = "myCookie=myValue"; String c2 = "myCookie2=myValue2"; String c3 = "myCookie3=myValue3"; Cookie cookie1 = new DefaultCookie("myCookie", "myValue"); cookie1.setDomain(".adomainsomewhere"); cookie1.setMaxAge(50); cookie1.setPath("/apathsomewhere"); cookie1.setSecure(true); Cookie cookie2 = new DefaultCookie("myCookie2", "myValue2"); cookie2.setDomain(".anotherdomainsomewhere"); cookie2.setPath("/anotherpathsomewhere"); cookie2.setSecure(false); Cookie cookie3 = new DefaultCookie("myCookie3", "myValue3"); String encodedCookie = ClientCookieEncoder.STRICT.encode(cookie1, cookie2, cookie3); // Cookies should be sorted into decreasing order of path length, as per RFC6265. // When no path is provided, we assume maximum path length (so cookie3 comes first). assertEquals(c3 + "; " + c2 + "; " + c1, encodedCookie); }
@Override public void populateContainer(TaskContainer container) { ComputationSteps steps = new ReportComputationSteps(container); container.add(SettingsLoader.class); container.add(task); container.add(steps); container.add(componentClasses()); for (ReportAnalysisComponentProvider componentProvider : componentProviders) { container.add(componentProvider.getComponents()); } container.add(steps.orderedStepClasses()); }
@Test public void item_is_added_to_the_container() { ListTaskContainer container = new ListTaskContainer(); underTest.populateContainer(container); assertThat(container.getAddedComponents()).contains(task); }
public boolean isOnos() { return Arrays.equals(this.oui(), ONOS.oui()); }
@Test public void testIsOnos() throws Exception { assertFalse(MAC_NORMAL.isOnos()); assertFalse(MAC_BCAST.isOnos()); assertFalse(MAC_MCAST.isOnos()); assertFalse(MAC_MCAST_2.isOnos()); assertFalse(MAC_LLDP.isOnos()); assertFalse(MAC_LLDP_2.isOnos()); assertFalse(MAC_LLDP_3.isOnos()); assertTrue(MAC_ONOS.isOnos()); }