Columns: focal_method (string, lengths 13 to 60.9k characters) and test_case (string, lengths 25 to 109k characters).
public static CoordinatorRecord newConsumerGroupTargetAssignmentTombstoneRecord( String groupId, String memberId ) { return new CoordinatorRecord( new ApiMessageAndVersion( new ConsumerGroupTargetAssignmentMemberKey() .setGroupId(groupId) .setMemberId(memberId), (short) 7 ), null // Tombstone. ); }
@Test public void testNewConsumerGroupTargetAssignmentTombstoneRecord() { CoordinatorRecord expectedRecord = new CoordinatorRecord( new ApiMessageAndVersion( new ConsumerGroupTargetAssignmentMemberKey() .setGroupId("group-id") .setMemberId("member-id"), (short) 7), null); assertEquals(expectedRecord, newConsumerGroupTargetAssignmentTombstoneRecord( "group-id", "member-id" )); }
@Override public void forward(DeviceId deviceId, ForwardingObjective forwardingObjective) { if (forwardingObjective.nextId() == null || forwardingObjective.op() == Objective.Operation.REMOVE || flowObjectiveStore.getNextGroup(forwardingObjective.nextId()) != null || !queueFwdObjective(deviceId, forwardingObjective)) { // fast path executorService.execute(new ObjectiveInstaller(deviceId, forwardingObjective)); } }
@Test public void forwardingObjective() { TrafficSelector selector = DefaultTrafficSelector.emptySelector(); TrafficTreatment treatment = DefaultTrafficTreatment.emptyTreatment(); ForwardingObjective forward = DefaultForwardingObjective.builder() .fromApp(NetTestTools.APP_ID) .withFlag(ForwardingObjective.Flag.SPECIFIC) .withSelector(selector) .withTreatment(treatment) .makePermanent() .add(new ObjectiveContext() { @Override public void onSuccess(Objective objective) { assertEquals("1 flowrule entry expected", 1, flowRuleStore.getFlowRuleCount(vnet1.id())); assertEquals("0 flowrule entry expected", 0, flowRuleStore.getFlowRuleCount(vnet2.id())); } }); service1.forward(VDID1, forward); }
public Marker canonicalize(boolean removeConstants) { if (valueBlock.isPresent() && removeConstants) { // For REMOVE_CONSTANTS, we replace this with null return new Marker(type, Optional.of(Utils.nativeValueToBlock(type, null)), bound); } return this; }
@Test public void testCanonicalize() throws Exception { assertSameMarker(Marker.above(BIGINT, 0L), Marker.above(BIGINT, 0L), false); assertSameMarker(Marker.above(VARCHAR, utf8Slice("abc")), Marker.above(VARCHAR, utf8Slice("abc")), false); assertSameMarker(Marker.upperUnbounded(BIGINT), Marker.upperUnbounded(BIGINT), false); assertDifferentMarker(Marker.above(BIGINT, 0L), Marker.above(BIGINT, 5L), false); assertDifferentMarker(Marker.above(VARCHAR, utf8Slice("abc")), Marker.above(VARCHAR, utf8Slice("abcd")), false); assertDifferentMarker(Marker.upperUnbounded(BIGINT), Marker.upperUnbounded(VARCHAR), false); assertDifferentMarker(Marker.above(BIGINT, 0L), Marker.below(BIGINT, 0L), false); assertDifferentMarker(Marker.below(BIGINT, 0L), Marker.exactly(BIGINT, 0L), false); assertDifferentMarker(Marker.upperUnbounded(BIGINT), Marker.lowerUnbounded(BIGINT), false); assertSameMarker(Marker.above(BIGINT, 0L), Marker.above(BIGINT, 5L), true); assertSameMarker(Marker.below(BIGINT, 0L), Marker.below(BIGINT, 5L), true); assertSameMarker(Marker.exactly(BIGINT, 0L), Marker.exactly(BIGINT, 5L), true); assertSameMarker(Marker.above(VARCHAR, utf8Slice("abc")), Marker.above(VARCHAR, utf8Slice("abcd")), true); assertDifferentMarker(Marker.above(BIGINT, 0L), Marker.below(BIGINT, 0L), true); assertDifferentMarker(Marker.below(BIGINT, 0L), Marker.exactly(BIGINT, 0L), true); assertDifferentMarker(Marker.upperUnbounded(BIGINT), Marker.lowerUnbounded(BIGINT), true); }
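The Marker test above calls two comparison helpers that this row does not include. A minimal sketch of what they presumably do, assuming JUnit's assertEquals/assertNotEquals are statically imported and both markers are compared through their canonical forms:

private static void assertSameMarker(Marker first, Marker second, boolean removeConstants)
{
    // Assumed behavior: the canonicalized markers compare equal.
    assertEquals(first.canonicalize(removeConstants), second.canonicalize(removeConstants));
}

private static void assertDifferentMarker(Marker first, Marker second, boolean removeConstants)
{
    // Assumed behavior: the canonicalized markers compare unequal.
    assertNotEquals(first.canonicalize(removeConstants), second.canonicalize(removeConstants));
}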
public static boolean parse(final String str, ResTable_config out) { return parse(str, out, true); }
@Test public void parse_screenSize_normal() { ResTable_config config = new ResTable_config(); ConfigDescription.parse("normal", config); assertThat(config.screenLayout).isEqualTo(SCREENSIZE_NORMAL); }
@Override public Path move(final Path file, final Path target, final TransferStatus status, final Delete.Callback delete, final ConnectionCallback callback) throws BackgroundException { try { final BrickApiClient client = new BrickApiClient(session); if(status.isExists()) { if(!new CaseInsensitivePathPredicate(file).test(target)) { if(log.isWarnEnabled()) { log.warn(String.format("Delete file %s to be replaced with %s", target, file)); } new BrickDeleteFeature(session).delete(Collections.singletonList(target), callback, delete); } } final FileActionEntity entity = new FileActionsApi(client) .move(new MovePathBody().destination(StringUtils.removeStart(target.getAbsolute(), String.valueOf(Path.DELIMITER))), StringUtils.removeStart(file.getAbsolute(), String.valueOf(Path.DELIMITER))); if(entity.getFileMigrationId() != null) { this.poll(client, entity); } return target.withAttributes(file.attributes()); } catch(ApiException e) { throw new BrickExceptionMappingService().map("Cannot rename {0}", e, file); } }
@Test public void testMove() throws Exception { final Path test = new BrickTouchFeature(session).touch(new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus()); assertEquals(0L, test.attributes().getSize()); final Path target = new BrickMoveFeature(session).move(test, new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus(), new Delete.DisabledCallback(), new DisabledConnectionCallback()); assertFalse(new BrickFindFeature(session).find(test)); assertTrue(new BrickFindFeature(session).find(target)); assertEquals(test.attributes(), target.attributes()); final PathAttributes targetAttr = new BrickAttributesFinderFeature(session).find(target); assertEquals(test.attributes().getModificationDate(), targetAttr.getModificationDate()); assertEquals(Comparison.equal, session.getHost().getProtocol().getFeature(ComparisonService.class).compare(Path.Type.file, test.attributes(), targetAttr)); assertEquals(Comparison.equal, session.getHost().getProtocol().getFeature(ComparisonService.class).compare(Path.Type.file, target.attributes(), targetAttr)); new BrickDeleteFeature(session).delete(Collections.singletonList(target), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
@Benchmark @Threads(16) // Use several threads since we expect contention during bundle processing. public void testStateWithCaching(StatefulTransform statefulTransform) throws Exception { testState(statefulTransform, statefulTransform.cachingStateRequestHandler); }
@Test public void testStateWithCaching() throws Exception { StatefulTransform transform = new StatefulTransform(); transform.elementsEmbedding = elementsEmbedding; new ProcessBundleBenchmark().testStateWithCaching(transform); transform.tearDown(); }
public static SerializableFunction<Row, Mutation> beamRowToMutationFn( Mutation.Op operation, String table) { return (row -> { switch (operation) { case INSERT: return MutationUtils.createMutationFromBeamRows(Mutation.newInsertBuilder(table), row); case DELETE: return Mutation.delete(table, MutationUtils.createKeyFromBeamRow(row)); case UPDATE: return MutationUtils.createMutationFromBeamRows(Mutation.newUpdateBuilder(table), row); case REPLACE: return MutationUtils.createMutationFromBeamRows(Mutation.newReplaceBuilder(table), row); case INSERT_OR_UPDATE: return MutationUtils.createMutationFromBeamRows( Mutation.newInsertOrUpdateBuilder(table), row); default: throw new IllegalArgumentException( String.format("Unknown mutation operation type: %s", operation)); } }); }
@Test public void testCreateUpdateMutationFromRow() { Mutation expectedMutation = createMutation(Mutation.Op.UPDATE); Mutation mutation = beamRowToMutationFn(Mutation.Op.UPDATE, TABLE).apply(WRITE_ROW); assertEquals(expectedMutation, mutation); }
@Override public String getMessage() { return message; }
@Test final void requireAllWrappedLevelsShowUp() { final Throwable t0 = new Throwable("t0"); final Throwable t1 = new Throwable("t1", t0); final Throwable t2 = new Throwable("t2", t1); final ExceptionWrapper e = new ExceptionWrapper(t2); final String expected = "Throwable(\"t2\") at com.yahoo.jdisc.http.server.jetty.ExceptionWrapperTest(ExceptionWrapperTest.java:30):" + " Throwable(\"t1\") at com.yahoo.jdisc.http.server.jetty.ExceptionWrapperTest(ExceptionWrapperTest.java:29):" + " Throwable(\"t0\") at com.yahoo.jdisc.http.server.jetty.ExceptionWrapperTest(ExceptionWrapperTest.java:28)"; assertThat(e.getMessage(), equalTo(expected)); }
@Deprecated public boolean isMap() { return type == TaskType.MAP; }
@Test public void testIsMap() { JobID jobId = new JobID("1234", 0); for (TaskType type : TaskType.values()) { TaskID taskId = new TaskID(jobId, type, 0); if (type == TaskType.MAP) { assertTrue("TaskID for map task did not correctly identify itself " + "as a map task", taskId.isMap()); } else { assertFalse("TaskID for " + type + " task incorrectly identified " + "itself as a map task", taskId.isMap()); } } TaskID taskId = new TaskID(); assertFalse("TaskID of default type incorrectly identified itself as a " + "map task", taskId.isMap()); }
public static @Nullable CastRule<?, ?> resolve(LogicalType inputType, LogicalType targetType) { return INSTANCE.internalResolve(inputType, targetType); }
@Test void testResolveConstructedToString() { assertThat(CastRuleProvider.resolve(new ArrayType(INT), new VarCharType(10))) .isSameAs(ArrayToStringCastRule.INSTANCE); }
@Override public boolean isFirstAnalysis() { return getBaseAnalysis() == null; }
@Test public void isFirstAnalysis_throws_ISE_when_base_project_snapshot_is_not_set() { assertThatThrownBy(() -> new AnalysisMetadataHolderImpl(editionProvider).isFirstAnalysis()) .isInstanceOf(IllegalStateException.class) .hasMessage("Base project snapshot has not been set"); }
public static LayoutLocation fromCompactString(String s) { String[] tokens = s.split(COMMA); if (tokens.length != 4) { throw new IllegalArgumentException(E_BAD_COMPACT + s); } String id = tokens[0]; String type = tokens[1]; String latY = tokens[2]; String longX = tokens[3]; if (Strings.isNullOrEmpty(id)) { throw new IllegalArgumentException(E_BAD_COMPACT + E_EMPTY_ID); } double latOrY; double longOrX; try { latOrY = Double.parseDouble(latY); longOrX = Double.parseDouble(longX); } catch (NumberFormatException nfe) { throw new IllegalArgumentException(E_BAD_COMPACT + E_BAD_DOUBLE); } return LayoutLocation.layoutLocation(id, type, latOrY, longOrX); }
@Test(expected = IllegalArgumentException.class) public void badCompactTooShort() { fromCompactString("one,two,three"); }
public static RowRanges union(RowRanges left, RowRanges right) { RowRanges result = new RowRanges(); Iterator<Range> it1 = left.ranges.iterator(); Iterator<Range> it2 = right.ranges.iterator(); if (it2.hasNext()) { Range range2 = it2.next(); while (it1.hasNext()) { Range range1 = it1.next(); if (range1.isAfter(range2)) { result.add(range2); range2 = range1; Iterator<Range> tmp = it1; it1 = it2; it2 = tmp; } else { result.add(range1); } } result.add(range2); } else { it2 = it1; } while (it2.hasNext()) { result.add(it2.next()); } return result; }
@Test public void testUnion() { RowRanges ranges1 = buildRanges( 2, 5, 7, 9, 14, 14, 20, 24); RowRanges ranges2 = buildRanges( 1, 2, 4, 5, 11, 12, 14, 15, 21, 22); RowRanges empty = buildRanges(); assertAllRowsEqual( union(ranges1, ranges2).iterator(), 1, 2, 3, 4, 5, 7, 8, 9, 11, 12, 14, 15, 20, 21, 22, 23, 24); assertAllRowsEqual( union(ranges2, ranges1).iterator(), 1, 2, 3, 4, 5, 7, 8, 9, 11, 12, 14, 15, 20, 21, 22, 23, 24); assertAllRowsEqual(union(ranges1, ranges1).iterator(), 2, 3, 4, 5, 7, 8, 9, 14, 20, 21, 22, 23, 24); assertAllRowsEqual(union(ranges1, empty).iterator(), 2, 3, 4, 5, 7, 8, 9, 14, 20, 21, 22, 23, 24); assertAllRowsEqual(union(empty, ranges1).iterator(), 2, 3, 4, 5, 7, 8, 9, 14, 20, 21, 22, 23, 24); assertAllRowsEqual(union(ranges2, ranges2).iterator(), 1, 2, 4, 5, 11, 12, 14, 15, 21, 22); assertAllRowsEqual(union(ranges2, empty).iterator(), 1, 2, 4, 5, 11, 12, 14, 15, 21, 22); assertAllRowsEqual(union(empty, ranges2).iterator(), 1, 2, 4, 5, 11, 12, 14, 15, 21, 22); assertAllRowsEqual(union(empty, empty).iterator()); }
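The union test above depends on two fixture helpers that are not part of this row. A plausible sketch (using java.util.PrimitiveIterator), under the assumptions that Range has an inclusive (from, to) constructor and that RowRanges.iterator() yields the covered row indexes in ascending order:

// Hypothetical helpers; the names come from the test, the bodies are assumptions.
private static RowRanges buildRanges(long... bounds) {
    RowRanges ranges = new RowRanges();
    for (int i = 0; i < bounds.length; i += 2) {
        // Each consecutive pair is interpreted as an inclusive [from, to] range.
        ranges.add(new Range(bounds[i], bounds[i + 1]));
    }
    return ranges;
}

private static void assertAllRowsEqual(PrimitiveIterator.OfLong actualRows, long... expectedRows) {
    for (long expected : expectedRows) {
        assertTrue(actualRows.hasNext());
        assertEquals(expected, actualRows.nextLong());
    }
    assertFalse(actualRows.hasNext());
}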
public void addDataFormatEnvVariables(Map<String, String> env, Properties properties, boolean custom) { Set<String> toRemove = new HashSet<>(); env.forEach((k, v) -> { if (custom) { toRemove.add(k); String ck = "camel.dataformat." + k.substring(17).toLowerCase(Locale.US).replace('_', '-'); ck = ck.replaceFirst("-", "."); properties.put(ck, v); } else { Optional<String> e = dataformatEnvNames.stream().filter(k::startsWith).findFirst(); if (e.isPresent()) { toRemove.add(k); String cname = "camel.dataformat." + e.get().substring(17).toLowerCase(Locale.US).replace('_', '-'); String option = k.substring(cname.length() + 1).toLowerCase(Locale.US).replace('_', '-'); properties.put(cname + "." + option, v); } } }); toRemove.forEach(env::remove); }
@Test public void testAddDataFormatEnvVariables() { Map<String, String> env = MainHelper.filterEnvVariables(new String[] { "CAMEL_DATAFORMAT_" }); env.put("CAMEL_DATAFORMAT_BASE64_LINE_LENGTH", "64"); env.put("CAMEL_DATAFORMAT_JACKSONXML_PRETTYPRINT", "true"); Properties prop = new OrderedProperties(); helper.addDataFormatEnvVariables(env, prop, false); Assertions.assertEquals(0, env.size()); Assertions.assertEquals(2, prop.size()); Assertions.assertEquals("64", prop.getProperty("camel.dataformat.base64.line-length")); Assertions.assertEquals("true", prop.getProperty("camel.dataformat.jacksonxml.prettyprint")); }
@Override public Collection<ServiceInstance> getInstances(String serviceId) throws QueryInstanceException { if (!getZkClient().isStateOk()) { throw new QueryInstanceException("zk state is not valid!"); } checkDiscoveryState(); final ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader(); try { Thread.currentThread().setContextClassLoader(ZkDiscoveryClient.class.getClassLoader()); return convert(serviceDiscovery.queryForInstances(serviceId)); } catch (Exception exception) { LOGGER.log(Level.WARNING, "Can not query service instances from registry center!", exception); Thread.currentThread().setContextClassLoader(contextClassLoader); throw new QueryInstanceException(exception.getMessage()); } finally { Thread.currentThread().setContextClassLoader(contextClassLoader); } }
@Test(expected = QueryInstanceException.class) public void getInstances() throws Exception { Mockito.when(serviceDiscovery.queryForInstances(serviceName)).thenReturn(Collections.emptyList()); Assert.assertEquals(Collections.emptyList(), zkDiscoveryClient.getInstances(serviceName)); Mockito.when(serviceDiscovery.queryForInstances(serviceName)).thenReturn(Collections.singletonList(instance)); Assert.assertEquals(zkDiscoveryClient.getInstances(serviceName).size(), 1); // The simulation throws an exception Mockito.when(serviceDiscovery.queryForInstances(serviceName)).thenThrow(new IllegalStateException("wrong")); Assert.assertEquals(Collections.emptyList(), zkDiscoveryClient.getInstances(serviceName)); }
public static Builder newBuilder() { return new Builder(); }
@Test public void testBuilderThrowsExceptionWhenStartTimestampMissing() { assertThrows( "startTimestamp", IllegalStateException.class, () -> PartitionMetadata.newBuilder() .setPartitionToken(PARTITION_TOKEN) .setParentTokens(Sets.newHashSet(PARENT_TOKEN)) .setEndTimestamp(END_TIMESTAMP) .setHeartbeatMillis(10) .setState(State.CREATED) .setWatermark(WATERMARK) .setCreatedAt(CREATED_AT) .build()); }
public static boolean checkIfUseGeneric(ConsumerConfig consumerConfig) { Class proxyClass = consumerConfig.getProxyClass(); Class enclosingClass = proxyClass.getEnclosingClass(); if (enclosingClass != null) { try { enclosingClass.getDeclaredMethod("getSofaStub", Channel.class, CallOptions.class, int.class); return false; } catch (NoSuchMethodException e) { //ignore return true; } } return true; }
@Test public void testCheckIfUseGeneric() { ConsumerConfig asTrue = new ConsumerConfig(); asTrue.setInterfaceId(NeedGeneric.NeedGenericInterface.class.getName()); ConsumerConfig asFalse = new ConsumerConfig(); asFalse.setInterfaceId(DoNotNeedGeneric.NoNotNeedGenericInterface.class.getName()); Assert.assertTrue(SofaProtoUtils.checkIfUseGeneric(asTrue)); Assert.assertFalse(SofaProtoUtils.checkIfUseGeneric(asFalse)); }
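The two fixture classes referenced by this test are not shown. Given that checkIfUseGeneric only returns false when the proxy interface's enclosing class declares getSofaStub(Channel, CallOptions, int), they likely look roughly like this (a sketch; the io.grpc parameter types and the stub body are assumptions):

import io.grpc.CallOptions;
import io.grpc.Channel;

class NeedGeneric {
    // No getSofaStub on the enclosing class, so checkIfUseGeneric returns true.
    interface NeedGenericInterface {
    }
}

class DoNotNeedGeneric {
    interface NoNotNeedGenericInterface {
    }

    // Declaring this exact signature is what makes checkIfUseGeneric return false.
    public static Object getSofaStub(Channel channel, CallOptions callOptions, int timeout) {
        return null;
    }
}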
public static Caffeine<Object, Object> from(CaffeineSpec spec) { Caffeine<Object, Object> builder = spec.toBuilder(); builder.strictParsing = false; return builder; }
@Test public void fromSpec_null() { assertThrows(NullPointerException.class, () -> Caffeine.from((CaffeineSpec) null)); }
public static ClusterDataSetListResponseBody from( Map<IntermediateDataSetID, DataSetMetaInfo> dataSets) { final List<ClusterDataSetEntry> convertedInfo = dataSets.entrySet().stream() .map( entry -> { DataSetMetaInfo metaInfo = entry.getValue(); int numRegisteredPartitions = metaInfo.getNumRegisteredPartitions().orElse(0); int numTotalPartition = metaInfo.getNumTotalPartitions(); return new ClusterDataSetEntry( entry.getKey(), numRegisteredPartitions == numTotalPartition); }) .collect(Collectors.toList()); return new ClusterDataSetListResponseBody(convertedInfo); }
@Test void testFrom() { final Map<IntermediateDataSetID, DataSetMetaInfo> originalDataSets = new HashMap<>(); originalDataSets.put( new IntermediateDataSetID(), DataSetMetaInfo.withNumRegisteredPartitions(1, 2)); originalDataSets.put( new IntermediateDataSetID(), DataSetMetaInfo.withNumRegisteredPartitions(2, 2)); List<ClusterDataSetEntry> convertedDataSets = ClusterDataSetListResponseBody.from(originalDataSets).getDataSets(); assertThat(convertedDataSets).hasSize(2); for (ClusterDataSetEntry convertedDataSet : convertedDataSets) { IntermediateDataSetID id = new IntermediateDataSetID( new AbstractID( StringUtils.hexStringToByte(convertedDataSet.getDataSetId()))); DataSetMetaInfo dataSetMetaInfo = originalDataSets.get(id); assertThat(convertedDataSet.isComplete()) .isEqualTo( dataSetMetaInfo.getNumRegisteredPartitions().orElse(0) == dataSetMetaInfo.getNumTotalPartitions()); } }
@Override public void asyncRequest(Request request, final RequestCallBack requestCallBack) throws NacosException { Payload grpcRequest = GrpcUtils.convert(request); ListenableFuture<Payload> requestFuture = grpcFutureServiceStub.request(grpcRequest); //set callback . Futures.addCallback(requestFuture, new FutureCallback<Payload>() { @Override public void onSuccess(@Nullable Payload grpcResponse) { Response response = (Response) GrpcUtils.parse(grpcResponse); if (response != null) { if (response instanceof ErrorResponse) { requestCallBack.onException(new NacosException(response.getErrorCode(), response.getMessage())); } else { requestCallBack.onResponse(response); } } else { requestCallBack.onException(new NacosException(ResponseCode.FAIL.getCode(), "response is null")); } } @Override public void onFailure(Throwable throwable) { if (throwable instanceof CancellationException) { requestCallBack.onException( new TimeoutException("Timeout after " + requestCallBack.getTimeout() + " milliseconds.")); } else { requestCallBack.onException(throwable); } } }, requestCallBack.getExecutor() != null ? requestCallBack.getExecutor() : this.executor); // set timeout future. ListenableFuture<Payload> payloadListenableFuture = Futures.withTimeout(requestFuture, requestCallBack.getTimeout(), TimeUnit.MILLISECONDS, RpcScheduledExecutor.TIMEOUT_SCHEDULER); }
@Test void testAsyncRequestWithOtherException() throws NacosException, ExecutionException, InterruptedException { when(future.get()).thenThrow(new RuntimeException("test")); doAnswer(invocationOnMock -> { ((Runnable) invocationOnMock.getArgument(0)).run(); return null; }).when(future).addListener(any(Runnable.class), eq(executor)); RequestCallBack requestCallBack = mock(RequestCallBack.class); connection.asyncRequest(new HealthCheckRequest(), requestCallBack); verify(requestCallBack).onException(any(RuntimeException.class)); }
public boolean isValid(String value) { if (value == null) { return false; } URI uri; // ensure value is a valid URI try { uri = new URI(value); } catch (URISyntaxException e) { return false; } // OK, perform additional validation String scheme = uri.getScheme(); if (!isValidScheme(scheme)) { return false; } String authority = uri.getRawAuthority(); if ("file".equals(scheme) && (authority == null || "".equals(authority))) { // Special case - file: allows an empty authority return true; // this is a local file - nothing more to do here } else if ("file".equals(scheme) && authority != null && authority.contains(":")) { return false; } else { // Validate the authority if (!isValidAuthority(authority)) { return false; } } if (!isValidPath(uri.getRawPath())) { return false; } if (!isValidQuery(uri.getRawQuery())) { return false; } if (!isValidFragment(uri.getRawFragment())) { return false; } return true; }
@Test public void testIsValid() { testIsValid(testUrlParts, UrlValidator.ALLOW_ALL_SCHEMES); setUp(); long options = UrlValidator.ALLOW_2_SLASHES + UrlValidator.ALLOW_ALL_SCHEMES + UrlValidator.NO_FRAGMENTS; testIsValid(testUrlPartsOptions, options); }
public void validate(final Metric metric) { if (metric == null) { throw new ValidationException("Metric cannot be null"); } if (!isValidFunction(metric.functionName())) { throw new ValidationException("Unrecognized metric : " + metric.functionName() + ", valid metrics : " + availableMetricTypes); } if (!hasFieldIfFunctionNeedsIt(metric)) { throw new ValidationException(metric.functionName() + " metric requires field name to be provided after a colon, i.e. " + metric.functionName() + ":http_status_code"); } if (metric.sort() != null && UNSORTABLE_METRICS.contains(metric.functionName())) { throw new ValidationException(metric.functionName() + " metric cannot be used to sort aggregations"); } }
@Test void throwsExceptionOnMetricWithNoFunctionName() { assertThrows(ValidationException.class, () -> toTest.validate(new Metric(null, "field", SortSpec.Direction.Ascending, null))); }
public static ShenyuInstanceRegisterRepository newInstance(final String registerType) { return REPOSITORY_MAP.computeIfAbsent(registerType, ExtensionLoader.getExtensionLoader(ShenyuInstanceRegisterRepository.class)::getJoin); }
@Test public void testNewInstance() { assertNotNull(ShenyuInstanceRegisterRepositoryFactory.newInstance("zookeeper")); try (MockedStatic<ExtensionLoader> extensionLoaderMockedStatic = mockStatic(ExtensionLoader.class)) { ExtensionLoader extensionLoader = mock(ExtensionLoader.class); extensionLoaderMockedStatic.when(() -> ExtensionLoader.getExtensionLoader(ShenyuInstanceRegisterRepository.class)).thenReturn(extensionLoader); when(extensionLoader.getJoin("zs")).thenReturn(mock(ShenyuInstanceRegisterRepository.class)); assertNotNull(ShenyuInstanceRegisterRepositoryFactory.newInstance("zs")); } }
protected boolean isSecure(String key, ClusterProfile clusterProfile) { ElasticAgentPluginInfo pluginInfo = this.metadataStore().getPluginInfo(clusterProfile.getPluginId()); if (pluginInfo == null || pluginInfo.getElasticAgentProfileSettings() == null || pluginInfo.getElasticAgentProfileSettings().getConfiguration(key) == null) { return false; } return pluginInfo.getElasticAgentProfileSettings().getConfiguration(key).isSecure(); }
@Test public void postConstruct_shouldIgnoreEncryptionIfPluginInfoIsNotDefined() { ElasticProfile profile = new ElasticProfile("id", "prod-cluster", new ConfigurationProperty(new ConfigurationKey("password"), new ConfigurationValue("pass"))); profile.encryptSecureConfigurations(); assertThat(profile.size(), is(1)); assertFalse(profile.first().isSecure()); }
@Override public void enableAutoTrackFragment(Class<?> fragment) { }
@Test public void enableAutoTrackFragment() { mSensorsAPI.enableAutoTrackFragment(Fragment.class); Assert.assertFalse(mSensorsAPI.isFragmentAutoTrackAppViewScreen(Fragment.class)); }
@Override public int findColumn(final String columnLabel) throws SQLException { checkClosed(); if (!columnLabelIndexMap.containsKey(columnLabel)) { throw new ColumnLabelNotFoundException(columnLabel).toSQLException(); } return columnLabelIndexMap.get(columnLabel); }
@Test void assertFindColumn() throws SQLException { assertThat(databaseMetaDataResultSet.findColumn(TABLE_NAME_COLUMN_LABEL), is(1)); assertThat(databaseMetaDataResultSet.findColumn(NON_TABLE_NAME_COLUMN_LABEL), is(2)); assertThat(databaseMetaDataResultSet.findColumn(NUMBER_COLUMN_LABEL), is(3)); }
@Override public void start() { // We request a split only if we did not get splits during the checkpoint restore. // Otherwise, reader restarts will keep requesting more and more splits. if (getNumberOfCurrentlyAssignedSplits() == 0) { requestSplit(Collections.emptyList()); } }
@Test public void testReaderMetrics() throws Exception { TestingReaderOutput<RowData> readerOutput = new TestingReaderOutput<>(); TestingMetricGroup metricGroup = new TestingMetricGroup(); TestingReaderContext readerContext = new TestingReaderContext(new Configuration(), metricGroup); IcebergSourceReader reader = createReader(metricGroup, readerContext, null); reader.start(); testOneSplitFetcher(reader, readerOutput, metricGroup, 1); testOneSplitFetcher(reader, readerOutput, metricGroup, 2); }
public ConsumeStatsList fetchConsumeStatsInBroker(String brokerAddr, boolean isOrder, long timeoutMillis) throws MQClientException, RemotingConnectException, RemotingSendRequestException, RemotingTimeoutException, InterruptedException { GetConsumeStatsInBrokerHeader requestHeader = new GetConsumeStatsInBrokerHeader(); requestHeader.setIsOrder(isOrder); RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.GET_BROKER_CONSUME_STATS, requestHeader); RemotingCommand response = this.remotingClient .invokeSync(MixAll.brokerVIPChannel(this.clientConfig.isVipChannelEnabled(), brokerAddr), request, timeoutMillis); assert response != null; switch (response.getCode()) { case ResponseCode.SUCCESS: { byte[] body = response.getBody(); if (body != null) { return ConsumeStatsList.decode(body, ConsumeStatsList.class); } } default: break; } throw new MQClientException(response.getCode(), response.getRemark()); }
@Test public void assertFetchConsumeStatsInBroker() throws RemotingException, InterruptedException, MQClientException { mockInvokeSync(); ConsumeStatsList responseBody = new ConsumeStatsList(); responseBody.setBrokerAddr(defaultBrokerAddr); responseBody.getConsumeStatsList().add(new HashMap<>()); setResponseBody(responseBody); ConsumeStatsList actual = mqClientAPI.fetchConsumeStatsInBroker(defaultBrokerAddr, false, defaultTimeout); assertNotNull(actual); assertEquals(1, actual.getConsumeStatsList().size()); assertEquals(defaultBrokerAddr, actual.getBrokerAddr()); }
public static boolean match(WorkerInfo workerInfo, String specifyInfo) { String workerTag = workerInfo.getTag(); // tagIn syntax: a worker may report multiple tags, e.g. WorkerInfo#tag=tag1,tag2,tag3; configuring tagIn=tag1 is enough to match if (specifyInfo.startsWith(TAG_IN)) { String targetTag = specifyInfo.replace(TAG_IN, StringUtils.EMPTY); return Optional.ofNullable(workerTag).orElse(StringUtils.EMPTY).contains(targetTag); } // tagEquals syntax: exact string match; the worker may report only one tag, e.g. WorkerInfo#tag=tag1; configuring tagEquals=tag1 matches if (specifyInfo.startsWith(TAG_EQUALS)) { String targetTag = specifyInfo.replace(TAG_EQUALS, StringUtils.EMPTY); return Optional.ofNullable(workerTag).orElse(StringUtils.EMPTY).equals(targetTag); } // Default case: split the spec by comma; any exact match against the worker's IP or tag counts as a hit (keeps the pre-4.3.8 behavior) Set<String> designatedWorkersSet = Sets.newHashSet(SJ.COMMA_SPLITTER.splitToList(specifyInfo)); for (String tagOrAddress : designatedWorkersSet) { if (tagOrAddress.equals(workerInfo.getTag()) || tagOrAddress.equals(workerInfo.getAddress())) { return true; } } return false; }
@Test void match() { WorkerInfo workerInfo = new WorkerInfo(); workerInfo.setAddress("192.168.1.1"); workerInfo.setTag("tag1"); assert SpecifyUtils.match(workerInfo, "192.168.1.1"); assert SpecifyUtils.match(workerInfo, "192.168.1.1,192.168.1.2,192.168.1.3,192.168.1.4"); assert !SpecifyUtils.match(workerInfo, "172.168.1.1"); assert !SpecifyUtils.match(workerInfo, "172.168.1.1,172.168.1.2,172.168.1.3"); assert SpecifyUtils.match(workerInfo, "tag1"); assert SpecifyUtils.match(workerInfo, "tag1,tag2"); assert !SpecifyUtils.match(workerInfo, "t1"); assert !SpecifyUtils.match(workerInfo, "t1,t2"); assert SpecifyUtils.match(workerInfo, "tagIn:tag1"); assert !SpecifyUtils.match(workerInfo, "tagIn:tag2"); assert SpecifyUtils.match(workerInfo, "tagEquals:tag1"); assert !SpecifyUtils.match(workerInfo, "tagEquals:tag2"); workerInfo.setTag("tag1,tag2,tag3"); assert SpecifyUtils.match(workerInfo, "tagIn:tag1"); assert SpecifyUtils.match(workerInfo, "tagIn:tag3"); assert !SpecifyUtils.match(workerInfo, "tagIn:tag99"); }
@VisibleForTesting boolean isUsefulCheckRequired(int dictionaryMemoryBytes) { if (dictionaryMemoryBytes < dictionaryUsefulCheckColumnSizeBytes) { return false; } dictionaryUsefulCheckCounter++; if (dictionaryUsefulCheckCounter == dictionaryUsefulCheckPerChunkFrequency) { dictionaryUsefulCheckCounter = 0; return true; } return false; }
@Test public void testIsDictionaryUsefulCheckRequired() { TestDictionaryColumn directColumn = directColumn(1024, 1); int dictionaryColumnSizeCheckBytes = megabytes(1); int dictionaryUsefulCheckPerChunkFrequency = 3; DataSimulator simulator = new DataSimulator(megabytes(100), megabytes(200), 10_000_000, megabytes(100), 0, DICTIONARY_ALMOST_FULL_MEMORY_RANGE, dictionaryUsefulCheckPerChunkFrequency, dictionaryColumnSizeCheckBytes, directColumn); for (int loop = 0; loop < 3; loop++) { assertFalse(simulator.isUsefulCheckRequired(dictionaryColumnSizeCheckBytes + 1)); assertFalse(simulator.isUsefulCheckRequired(dictionaryColumnSizeCheckBytes)); // Calling with 1 byte less should not increment the counter assertFalse(simulator.isUsefulCheckRequired(dictionaryColumnSizeCheckBytes - 1)); // 3rd time, it should return true as dictionaryUsefulCheckPerChunkFrequency is set to 3. assertTrue(simulator.isUsefulCheckRequired(dictionaryColumnSizeCheckBytes)); } }
public static long nextLong(final long startInclusive, final long endExclusive) { checkParameters(startInclusive, endExclusive); long diff = endExclusive - startInclusive; if (diff == 0) { return startInclusive; } return (long) (startInclusive + (diff * RANDOM.nextDouble())); }
@Test void testNextLongWithIllegalArgumentException2() { assertThrows(IllegalArgumentException.class, () -> { RandomUtils.nextLong(-10L, 199L); }); }
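checkParameters is not included in this row. A guard consistent with both the focal method and the test above would reject reversed bounds and negative values; this is an assumption, not the project's verbatim code:

private static void checkParameters(final long startInclusive, final long endExclusive) {
    if (startInclusive > endExclusive) {
        throw new IllegalArgumentException("Start value must be smaller or equal to end value.");
    }
    if (startInclusive < 0) {
        // This branch is why nextLong(-10L, 199L) in the test above throws.
        throw new IllegalArgumentException("Both range values must be non-negative.");
    }
}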
@Override public String getConfig(final String dataId) { try { return configService.getConfig(dataId, NacosPathConstants.GROUP, NacosPathConstants.DEFAULT_TIME_OUT); } catch (NacosException e) { LOG.error("Get data from nacos error.", e); throw new ShenyuException(e.getMessage()); } }
@Test public void testOnAppAuthChanged() throws NacosException { when(configService.getConfig(anyString(), anyString(), anyLong())).thenReturn(null); AppAuthData appAuthData = AppAuthData.builder().appKey(MOCK_APP_KEY).appSecret(MOCK_APP_SECRET).build(); nacosDataChangedListener.onAppAuthChanged(ImmutableList.of(appAuthData), DataEventTypeEnum.DELETE); nacosDataChangedListener.onAppAuthChanged(ImmutableList.of(appAuthData), DataEventTypeEnum.REFRESH); nacosDataChangedListener.onAppAuthChanged(ImmutableList.of(appAuthData), DataEventTypeEnum.MYSELF); nacosDataChangedListener.onAppAuthChanged(ImmutableList.of(appAuthData), DataEventTypeEnum.CREATE); verify(configService, times(7)).publishConfig(any(String.class), any(String.class), any(String.class), any(String.class)); }
@Override public IColumnType processTypeConvert(GlobalConfig config, String fieldType) { return TypeConverts.use(fieldType) .test(containsAny("char", "xml", "text").then(STRING)) .test(contains("bigint").then(LONG)) .test(contains("int").then(INTEGER)) .test(containsAny("date", "time").then(t -> toDateType(config, t))) .test(contains("bit").then(BOOLEAN)) .test(containsAny("decimal", "numeric").then(DOUBLE)) .test(contains("money").then(BIG_DECIMAL)) .test(containsAny("binary", "image").then(BYTE_ARRAY)) .test(containsAny("float", "real").then(FLOAT)) .or(STRING); }
@Test void processTypeConvertTest() { // Common formats GlobalConfig globalConfig = GeneratorBuilder.globalConfig(); SqlServerTypeConvert convert = SqlServerTypeConvert.INSTANCE; Assertions.assertEquals(STRING, convert.processTypeConvert(globalConfig, "char")); Assertions.assertEquals(STRING, convert.processTypeConvert(globalConfig, "xml")); Assertions.assertEquals(STRING, convert.processTypeConvert(globalConfig, "text")); Assertions.assertEquals(LONG, convert.processTypeConvert(globalConfig, "bigint")); Assertions.assertEquals(INTEGER, convert.processTypeConvert(globalConfig, "int")); Assertions.assertEquals(BOOLEAN, convert.processTypeConvert(globalConfig, "bit")); Assertions.assertEquals(DOUBLE, convert.processTypeConvert(globalConfig, "decimal")); Assertions.assertEquals(DOUBLE, convert.processTypeConvert(globalConfig, "numeric")); Assertions.assertEquals(BIG_DECIMAL, convert.processTypeConvert(globalConfig, "money")); Assertions.assertEquals(BYTE_ARRAY, convert.processTypeConvert(globalConfig, "binary")); Assertions.assertEquals(BYTE_ARRAY, convert.processTypeConvert(globalConfig, "image")); Assertions.assertEquals(FLOAT, convert.processTypeConvert(globalConfig, "float")); Assertions.assertEquals(FLOAT, convert.processTypeConvert(globalConfig, "real")); // Date formats globalConfig = GeneratorBuilder.globalConfigBuilder().dateType(DateType.SQL_PACK).build(); Assertions.assertEquals(DATE_SQL, convert.processTypeConvert(globalConfig, "date")); Assertions.assertEquals(TIME, convert.processTypeConvert(globalConfig, "time")); Assertions.assertEquals(TIMESTAMP, convert.processTypeConvert(globalConfig, "timestamp")); Assertions.assertEquals(TIMESTAMP, convert.processTypeConvert(globalConfig, "datetime")); globalConfig = GeneratorBuilder.globalConfigBuilder().dateType(DateType.TIME_PACK).build(); Assertions.assertEquals(LOCAL_DATE, convert.processTypeConvert(globalConfig, "date")); Assertions.assertEquals(LOCAL_TIME, convert.processTypeConvert(globalConfig, "time")); Assertions.assertEquals(LOCAL_DATE_TIME, convert.processTypeConvert(globalConfig, "timestamp")); Assertions.assertEquals(LOCAL_DATE_TIME, convert.processTypeConvert(globalConfig, "datetime")); globalConfig = GeneratorBuilder.globalConfigBuilder().dateType(DateType.ONLY_DATE).build(); Assertions.assertEquals(DATE, convert.processTypeConvert(globalConfig, "date")); Assertions.assertEquals(DATE, convert.processTypeConvert(globalConfig, "time")); Assertions.assertEquals(DATE, convert.processTypeConvert(globalConfig, "timestamp")); Assertions.assertEquals(DATE, convert.processTypeConvert(globalConfig, "datetime")); }
void cleanCurrentEntryInLocal() { if (context instanceof NullContext) { return; } Context originalContext = context; if (originalContext != null) { Entry curEntry = originalContext.getCurEntry(); if (curEntry == this) { Entry parent = this.parent; originalContext.setCurEntry(parent); if (parent != null) { ((CtEntry)parent).child = null; } } else { String curEntryName = curEntry == null ? "none" : curEntry.resourceWrapper.getName() + "@" + curEntry.hashCode(); String msg = String.format("Bad async context state, expected entry: %s, but actual: %s", getResourceWrapper().getName() + "@" + hashCode(), curEntryName); throw new IllegalStateException(msg); } } }
@Test public void testCleanCurrentEntryInLocal() { final String contextName = "abc"; try { ContextUtil.enter(contextName); Context curContext = ContextUtil.getContext(); Entry previousEntry = new CtEntry(new StringResourceWrapper("entry-sync", EntryType.IN), null, curContext); AsyncEntry entry = new AsyncEntry(new StringResourceWrapper("testCleanCurrentEntryInLocal", EntryType.OUT), null, curContext); assertSame(entry, curContext.getCurEntry()); entry.cleanCurrentEntryInLocal(); assertNotSame(entry, curContext.getCurEntry()); assertSame(previousEntry, curContext.getCurEntry()); } finally { ContextTestUtil.cleanUpContext(); } }
@Description("current timestamp with time zone") @ScalarFunction(value = "current_timestamp", alias = "now") @SqlType(StandardTypes.TIMESTAMP_WITH_TIME_ZONE) public static long currentTimestamp(SqlFunctionProperties properties) { try { return packDateTimeWithZone(properties.getSessionStartTime(), properties.getTimeZoneKey()); } catch (NotSupportedException | TimeZoneNotSupportedException e) { throw new PrestoException(NOT_SUPPORTED, e.getMessage(), e); } catch (IllegalArgumentException e) { throw new PrestoException(INVALID_FUNCTION_ARGUMENT, e.getMessage(), e); } }
@Test public void testCurrentTimestamp() { Session localSession = Session.builder(session) .setStartTime(new DateTime(2017, 3, 1, 14, 30, 0, 0, DATE_TIME_ZONE).getMillis()) .build(); try (FunctionAssertions localAssertion = new FunctionAssertions(localSession)) { localAssertion.assertFunctionString("CURRENT_TIMESTAMP", TIMESTAMP_WITH_TIME_ZONE, "2017-03-01 14:30:00.000 " + DATE_TIME_ZONE.getID()); localAssertion.assertFunctionString("NOW()", TIMESTAMP_WITH_TIME_ZONE, "2017-03-01 14:30:00.000 " + DATE_TIME_ZONE.getID()); } }
public static String file2String(MultipartFile file) { try (InputStream inputStream = file.getInputStream()) { return IOUtils.toString(inputStream, StandardCharsets.UTF_8); } catch (IOException e) { log.error("file convert to string failed: {}", file.getName()); } return ""; }
@Test public void testFile2String() throws IOException { String content = "123"; org.apache.commons.io.FileUtils.writeStringToFile(new File("/tmp/task.json"), content, Charset.defaultCharset()); File file = new File("/tmp/task.json"); FileInputStream fileInputStream = new FileInputStream("/tmp/task.json"); MultipartFile multipartFile = new MockMultipartFile(file.getName(), file.getName(), ContentType.APPLICATION_OCTET_STREAM.toString(), fileInputStream); String resultStr = FileUtils.file2String(multipartFile); Assertions.assertEquals(content, resultStr); boolean delete = file.delete(); Assertions.assertTrue(delete); }
public static ResourceModel processResource(final Class<?> resourceClass) { return processResource(resourceClass, null); }
@Test(expectedExceptions = ResourceConfigException.class) public void failsOnInvalidBatchFinderMethodReturnType() { @RestLiCollection(name = "batchFinderWithInvalidReturnType") class LocalClass extends CollectionResourceTemplate<Long, EmptyRecord> { @BatchFinder(value = "batchFinderWithInvalidReturnType", batchParam = "criteria") public List<EmptyRecord> batchFinderWithInvalidReturnType(@QueryParam("criteria") EmptyRecord[] criteria) { return Collections.emptyList(); } } RestLiAnnotationReader.processResource(LocalClass.class); Assert.fail("#validateBatchFinderMethod should fail throwing a ResourceConfigException"); }
public boolean isUserMemberOfRole(final CaseInsensitiveString userName, final CaseInsensitiveString roleName) { Role role = findByName(roleName); bombIfNull(role, () -> String.format("Role \"%s\" does not exist!", roleName)); return role.hasMember(userName); }
@Test public void shouldReturnTrueIfUserIsMemberOfRole() { RolesConfig rolesConfig = new RolesConfig(new RoleConfig(new CaseInsensitiveString("role1"), new RoleUser(new CaseInsensitiveString("user1")))); assertThat("shouldReturnTrueIfUserIsMemberOfRole", rolesConfig.isUserMemberOfRole(new CaseInsensitiveString("user1"), new CaseInsensitiveString("role1")), is(true)); }
@Override public void checkExec(final String cmd) { if (inUdfExecution()) { throw new SecurityException("A UDF attempted to execute the following cmd: " + cmd); } super.checkExec(cmd); }
@Test public void shouldAllowExec() { ExtensionSecurityManager.INSTANCE.checkExec("cmd"); }
public T addFromMandatoryProperty(Props props, String propertyName) { String value = props.nonNullValue(propertyName); if (!value.isEmpty()) { String splitRegex = " (?=-)"; List<String> jvmOptions = Arrays.stream(value.split(splitRegex)).map(String::trim).toList(); checkOptionFormat(propertyName, jvmOptions); checkMandatoryOptionOverwrite(propertyName, jvmOptions); options.addAll(jvmOptions); } return castThis(); }
@Test public void addFromMandatoryProperty_throws_IAE_if_option_starts_with_prefix_of_mandatory_option_but_has_different_value() { String[] optionOverrides = { randomPrefix, randomPrefix + randomValue.substring(1), randomPrefix + randomValue.substring(1), randomPrefix + randomValue.substring(2), randomPrefix + randomValue.substring(3), randomPrefix + randomValue.substring(3) + randomAlphanumeric(1), randomPrefix + randomValue.substring(3) + randomAlphanumeric(2), randomPrefix + randomValue.substring(3) + randomAlphanumeric(3), randomPrefix + randomValue + randomAlphanumeric(1) }; JvmOptions underTest = new JvmOptions(ImmutableMap.of(randomPrefix, randomValue)); for (String optionOverride : optionOverrides) { try { properties.put(randomPropertyName, optionOverride); underTest.addFromMandatoryProperty(new Props(properties), randomPropertyName); fail("a MessageException should have been thrown"); } catch (MessageException e) { assertThat(e.getMessage()) .isEqualTo("a JVM option can't overwrite mandatory JVM options. " + "The following JVM options defined by property '" + randomPropertyName + "' are invalid: " + optionOverride + " overwrites " + randomPrefix + randomValue); } } }
@Override public DoubleMinimum clone() { DoubleMinimum clone = new DoubleMinimum(); clone.min = this.min; return clone; }
@Test void testClone() { DoubleMinimum min = new DoubleMinimum(); double value = 3.14159265359; min.add(value); DoubleMinimum clone = min.clone(); assertThat(clone.getLocalValue()).isCloseTo(value, within(0.0)); }
public static Write write() { return Write.create(); }
@Test public void testWritingDisplayData() { BigtableIO.Write write = BigtableIO.write().withTableId("fooTable").withBigtableOptions(BIGTABLE_OPTIONS); DisplayData displayData = DisplayData.from(write); assertThat(displayData, hasDisplayItem("tableId", "fooTable")); }
@Override public void run() { JobConfig jobConfig = null; Serializable taskArgs = null; try { jobConfig = (JobConfig) SerializationUtils.deserialize( mRunTaskCommand.getJobConfig().toByteArray()); if (mRunTaskCommand.hasTaskArgs()) { taskArgs = SerializationUtils.deserialize(mRunTaskCommand.getTaskArgs().toByteArray()); } } catch (IOException | ClassNotFoundException e) { fail(e, jobConfig, null); } PlanDefinition<JobConfig, Serializable, Serializable> definition; try { definition = PlanDefinitionRegistry.INSTANCE.getJobDefinition(jobConfig); } catch (JobDoesNotExistException e) { LOG.error("The job definition for config {} does not exist.", jobConfig.getName()); fail(e, jobConfig, taskArgs); return; } mTaskExecutorManager.notifyTaskRunning(mJobId, mTaskId); Serializable result; try { result = definition.runTask(jobConfig, taskArgs, mContext); } catch (InterruptedException | CancelledException e) { // Cleanup around the interruption should already have been handled by a different thread Thread.currentThread().interrupt(); return; } catch (Throwable t) { fail(t, jobConfig, taskArgs); return; } mTaskExecutorManager.notifyTaskCompletion(mJobId, mTaskId, result); }
@Test public void runCompletion() throws Exception { long jobId = 1; long taskId = 2; JobConfig jobConfig = mock(JobConfig.class); Serializable taskArgs = Lists.newArrayList(1); RunTaskContext context = mock(RunTaskContext.class); Integer taskResult = 1; @SuppressWarnings("unchecked") PlanDefinition<JobConfig, Serializable, Serializable> planDefinition = mock(PlanDefinition.class); when(mRegistry.getJobDefinition(any(JobConfig.class))).thenReturn(planDefinition); when(planDefinition.runTask(any(JobConfig.class), eq(taskArgs), any(RunTaskContext.class))) .thenReturn(taskResult); RunTaskCommand command = RunTaskCommand.newBuilder() .setJobConfig(ByteString.copyFrom(SerializationUtils.serialize(jobConfig))) .setTaskArgs(ByteString.copyFrom(SerializationUtils.serialize(taskArgs))).build(); TaskExecutor executor = new TaskExecutor(jobId, taskId, command, context, mTaskExecutorManager); executor.run(); verify(planDefinition).runTask(any(JobConfig.class), eq(taskArgs), eq(context)); verify(mTaskExecutorManager).notifyTaskCompletion(jobId, taskId, taskResult); }
public static JavaToSqlTypeConverter javaToSqlConverter() { return JAVA_TO_SQL_CONVERTER; }
@Test public void shouldConvertJavaStringToSqlTimestamp() { assertThat(javaToSqlConverter().toSqlType(Timestamp.class), is(SqlBaseType.TIMESTAMP)); }
public void setPrioritizedRule(DefaultIssue issue, boolean prioritizedRule, IssueChangeContext context) { if (!Objects.equals(prioritizedRule, issue.isPrioritizedRule())) { issue.setPrioritizedRule(prioritizedRule); if (!issue.isNew()){ issue.setUpdateDate(context.date()); issue.setChanged(true); } } }
@Test void setPrioritizedRule_whenNotChanged_shouldNotUpdateIssue() { issue.setPrioritizedRule(true); underTest.setPrioritizedRule(issue, true, context); assertThat(issue.isChanged()).isFalse(); assertThat(issue.isPrioritizedRule()).isTrue(); }
@Override public String getConnectionSpec() { return hostId; }
@Test void requireThatBlockingSendTimeOutInSendQ() throws InterruptedException { final LocalWire wire = new LocalWire(); final Server serverA = new Server(wire); final SourceSession source = serverA.newSourceSession(new StaticThrottlePolicy().setMaxPendingCount(1)); final Server serverB = new Server(wire); final IntermediateSession intermediate = serverB.newIntermediateSession(); final Server serverC = new Server(wire); final DestinationSession destination = serverC.newDestinationSession(); Message msg = new SimpleMessage("foo"); msg.setRoute(new Route().addHop(Hop.parse(intermediate.getConnectionSpec())) .addHop(Hop.parse(destination.getConnectionSpec()))); assertTrue(source.sendBlocking(msg).isAccepted()); long start = SystemTimer.INSTANCE.milliTime(); Message msg2 = new SimpleMessage("foo2"); msg2.setRoute(new Route().addHop(Hop.parse(intermediate.getConnectionSpec())) .addHop(Hop.parse(destination.getConnectionSpec()))); long TIMEOUT = 1000; msg2.setTimeRemaining(TIMEOUT); Result res = source.sendBlocking(msg2); assertFalse(res.isAccepted()); assertEquals(ErrorCode.TIMEOUT, res.getError().getCode()); assertTrue(res.getError().getMessage().endsWith("Timed out in sendQ")); long end = SystemTimer.INSTANCE.milliTime(); assertTrue(end >= start + TIMEOUT); assertTrue(end < start + 5 * TIMEOUT); msg = serverB.messages.poll(60, TimeUnit.SECONDS); assertTrue(msg instanceof SimpleMessage); assertEquals("foo", ((SimpleMessage) msg).getValue()); intermediate.forward(msg); msg = serverC.messages.poll(60, TimeUnit.SECONDS); assertTrue(msg instanceof SimpleMessage); assertEquals("foo", ((SimpleMessage) msg).getValue()); Reply reply = new SimpleReply("bar"); reply.swapState(msg); destination.reply(reply); reply = serverB.replies.poll(60, TimeUnit.SECONDS); assertTrue(reply instanceof SimpleReply); assertEquals("bar", ((SimpleReply) reply).getValue()); intermediate.forward(reply); reply = serverA.replies.poll(60, TimeUnit.SECONDS); assertEquals(ErrorCode.TIMEOUT, reply.getError(0).getCode()); assertTrue(reply.getError(0).getMessage().endsWith("Timed out in sendQ")); reply = serverA.replies.poll(60, TimeUnit.SECONDS); assertTrue(reply instanceof SimpleReply); assertEquals("bar", ((SimpleReply) reply).getValue()); serverA.mbus.destroy(); serverB.mbus.destroy(); serverC.mbus.destroy(); }
public static ClassLoader getClassLoader(Class<?> clazz) { ClassLoader cl = null; if (!clazz.getName().startsWith("org.apache.dubbo")) { cl = clazz.getClassLoader(); } if (cl == null) { try { cl = Thread.currentThread().getContextClassLoader(); } catch (Exception ignored) { // Cannot access thread context ClassLoader - falling back to system class loader... } if (cl == null) { // No thread context class loader -> use class loader of this class. cl = clazz.getClassLoader(); if (cl == null) { // getClassLoader() returning null indicates the bootstrap ClassLoader try { cl = ClassLoader.getSystemClassLoader(); } catch (Exception ignored) { // Cannot access system ClassLoader - oh well, maybe the caller can live with null... } } } } return cl; }
@Test void testGetClassLoader1() { ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader(); try { assertThat(ClassUtils.getClassLoader(ClassUtilsTest.class), sameInstance(oldClassLoader)); Thread.currentThread().setContextClassLoader(null); assertThat( ClassUtils.getClassLoader(ClassUtilsTest.class), sameInstance(ClassUtilsTest.class.getClassLoader())); } finally { Thread.currentThread().setContextClassLoader(oldClassLoader); } }
@Override @CacheEvict(cacheNames = RedisKeyConstants.SMS_TEMPLATE, allEntries = true) // allEntries evicts the whole cache, because the id is not the cache key (the template code is), so targeted eviction is impractical public void deleteSmsTemplate(Long id) { // Verify that it exists validateSmsTemplateExists(id); // Delete smsTemplateMapper.deleteById(id); }
@Test public void testDeleteSmsTemplate_success() { // mock data SmsTemplateDO dbSmsTemplate = randomSmsTemplateDO(); smsTemplateMapper.insert(dbSmsTemplate); // @Sql: insert an existing record first // prepare parameters Long id = dbSmsTemplate.getId(); // invoke smsTemplateService.deleteSmsTemplate(id); // verify the record no longer exists assertNull(smsTemplateMapper.selectById(id)); }
public static SortOrder buildSortOrder(Table table) { return buildSortOrder(table.schema(), table.spec(), table.sortOrder()); }
@Test public void testSortOrderClusteringAllPartitionFieldsReordered() { PartitionSpec spec = PartitionSpec.builderFor(SCHEMA).identity("category").day("ts").build(); SortOrder order = SortOrder.builderFor(SCHEMA) .withOrderId(1) .asc(Expressions.day("ts")) .asc("category") .desc("id") .build(); assertThat(SortOrderUtil.buildSortOrder(SCHEMA, spec, order)) .as("Should leave the order unchanged") .isEqualTo(order); }
public static SchemaAndValue parseString(String value) { if (value == null) { return NULL_SCHEMA_AND_VALUE; } if (value.isEmpty()) { return new SchemaAndValue(Schema.STRING_SCHEMA, value); } ValueParser parser = new ValueParser(new Parser(value)); return parser.parse(false); }
@Test public void shouldParseNestedMap() { SchemaAndValue schemaAndValue = Values.parseString("{\"a\":{}}"); assertEquals(Type.MAP, schemaAndValue.schema().type()); assertEquals(Type.MAP, schemaAndValue.schema().valueSchema().type()); }
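Beyond the schema assertions, the test could also pin down the parsed value itself. A sketch, assuming parseString materializes the JSON-like input as java.util.Map instances with String keys:

// Continuing from the test above (hypothetical extra assertions).
Map<?, ?> parsed = (Map<?, ?>) schemaAndValue.value();
assertEquals(Collections.singletonMap("a", Collections.emptyMap()), parsed);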
public static String randomAlphabetic(int length) { int leftLimit = 97; // letter 'a' int rightLimit = 122; // letter 'z' Random random = new Random(); return random.ints(leftLimit, rightLimit + 1) .limit(length) .collect(StringBuilder::new, StringBuilder::appendCodePoint, StringBuilder::append) .toString(); }
@Test public void testAlphabet() { System.out.println(ByteUtil.randomAlphabetic(100)); }
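The test above only prints a sample, so it can never fail. A stricter variant, sketched here against the same ByteUtil entry point and assuming JUnit 4's org.junit.Assert, would pin down the length and the alphabet:

@Test
public void testAlphabeticProperties() {
    String s = ByteUtil.randomAlphabetic(100);
    Assert.assertEquals(100, s.length());
    // Every generated code point must fall in ['a', 'z'], matching the limits in the focal method.
    Assert.assertTrue(s.chars().allMatch(c -> c >= 'a' && c <= 'z'));
}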
@Override public void processRestApiCallToRuleEngine(TenantId tenantId, UUID requestId, TbMsg request, boolean useQueueFromTbMsg, Consumer<TbMsg> responseConsumer) { log.trace("[{}] Processing REST API call to rule engine: [{}] for entity: [{}]", tenantId, requestId, request.getOriginator()); requests.put(requestId, responseConsumer); sendRequestToRuleEngine(tenantId, request, useQueueFromTbMsg); scheduleTimeout(request, requestId, requests); }
@Test void givenRequest_whenProcessRestApiCallToRuleEngine_thenPushMsgToRuleEngineAndCheckRemovedDueTimeout() { long timeout = 1L; long expTime = System.currentTimeMillis() + timeout; HashMap<String, String> metaData = new HashMap<>(); UUID requestId = UUID.randomUUID(); metaData.put("serviceId", "core"); metaData.put("requestUUID", requestId.toString()); metaData.put("expirationTime", Long.toString(expTime)); TbMsg msg = TbMsg.newMsg(DataConstants.MAIN_QUEUE_NAME, TbMsgType.REST_API_REQUEST, TENANT_ID, new TbMsgMetaData(metaData), "{\"key\":\"value\"}"); Consumer<TbMsg> anyConsumer = TbMsg::getData; doAnswer(invocation -> { //check the presence of request in the map after pushMsgToRuleEngine() assertThat(requests.size()).isEqualTo(1); assertThat(requests.get(requestId)).isEqualTo(anyConsumer); return null; }).when(tbClusterServiceMock).pushMsgToRuleEngine(any(), any(), any(), anyBoolean(), any()); ruleEngineCallService.processRestApiCallToRuleEngine(TENANT_ID, requestId, msg, true, anyConsumer); verify(tbClusterServiceMock).pushMsgToRuleEngine(TENANT_ID, TENANT_ID, msg, true, null); //check map is empty after scheduleTimeout() Awaitility.await("Await until request was deleted from map due to timeout") .pollDelay(25, TimeUnit.MILLISECONDS) .atMost(10, TimeUnit.SECONDS) .until(requests::isEmpty); }
public static Class<?> getReturnType(Invocation invocation) { try { if (invocation != null && invocation.getInvoker() != null && invocation.getInvoker().getUrl() != null && invocation.getInvoker().getInterface() != GenericService.class && !invocation.getMethodName().startsWith("$")) { String service = invocation.getInvoker().getUrl().getServiceInterface(); if (StringUtils.isNotEmpty(service)) { Method method = getMethodByService(invocation, service); return method == null ? null : method.getReturnType(); } } } catch (Throwable t) { logger.warn(COMMON_REFLECTIVE_OPERATION_FAILED, "", "", t.getMessage(), t); } return null; }
@Test void testGetReturnType() { Class<?> demoServiceClass = DemoService.class; String serviceName = demoServiceClass.getName(); Invoker invoker = createMockInvoker( URL.valueOf( "test://127.0.0.1:1/org.apache.dubbo.rpc.support.DemoService?interface=org.apache.dubbo.rpc.support.DemoService")); // void sayHello(String name); RpcInvocation inv = new RpcInvocation( "sayHello", serviceName, "", new Class<?>[] {String.class}, null, null, invoker, null); Class<?> returnType = RpcUtils.getReturnType(inv); Assertions.assertNull(returnType); // String echo(String text); RpcInvocation inv1 = new RpcInvocation("echo", serviceName, "", new Class<?>[] {String.class}, null, null, invoker, null); Class<?> returnType1 = RpcUtils.getReturnType(inv1); Assertions.assertNotNull(returnType1); Assertions.assertEquals(String.class, returnType1); // int getSize(String[] strs); RpcInvocation inv2 = new RpcInvocation( "getSize", serviceName, "", new Class<?>[] {String[].class}, null, null, invoker, null); Class<?> returnType2 = RpcUtils.getReturnType(inv2); Assertions.assertNotNull(returnType2); Assertions.assertEquals(int.class, returnType2); // Person getPerson(Person person); RpcInvocation inv3 = new RpcInvocation( "getPerson", serviceName, "", new Class<?>[] {Person.class}, null, null, invoker, null); Class<?> returnType3 = RpcUtils.getReturnType(inv3); Assertions.assertNotNull(returnType3); Assertions.assertEquals(Person.class, returnType3); // List<String> testReturnType1(String str); RpcInvocation inv4 = new RpcInvocation( "testReturnType1", serviceName, "", new Class<?>[] {String.class}, null, null, invoker, null); Class<?> returnType4 = RpcUtils.getReturnType(inv4); Assertions.assertNotNull(returnType4); Assertions.assertEquals(List.class, returnType4); }
@Override public void set(int fieldNum, Object value) { throw new UnsupportedOperationException("not allowed to run set() on LazyHCatRecord"); }
@Test public void testSetWithName() throws Exception { HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector()); boolean sawException = false; try { r.set("fred", null, "bob"); } catch (UnsupportedOperationException uoe) { sawException = true; } Assert.assertTrue(sawException); }
public static Long jsToInteger( Object value, Class<?> clazz ) { if ( Number.class.isAssignableFrom( clazz ) ) { return ( (Number) value ).longValue(); } else { String classType = clazz.getName(); if ( classType.equalsIgnoreCase( "java.lang.String" ) ) { return ( new Long( (String) value ) ); } else if ( classType.equalsIgnoreCase( JS_UNDEFINED ) ) { return null; } else if ( classType.equalsIgnoreCase( JS_NATIVE_NUM ) ) { Number nb = Context.toNumber( value ); return nb.longValue(); } else if ( classType.equalsIgnoreCase( JS_NATIVE_JAVA_OBJ ) ) { // Is it a Value? // try { Value v = (Value) Context.jsToJava( value, Value.class ); return v.getInteger(); } catch ( Exception e2 ) { String string = Context.toString( value ); return Long.parseLong( Const.trim( string ) ); } } else { return Long.parseLong( value.toString() ); } } }
@Test public void jsToInteger_NaturalNumbers() throws Exception { Number[] naturalNumbers = new Number[] { (byte) 1, (short) 1, 1, (long) 1 }; for ( Number number : naturalNumbers ) { assertEquals( LONG_ONE, JavaScriptUtils.jsToInteger( number, number.getClass() ) ); } }
public boolean isSubscribedToTopic(String topic) { return subscribedTopics.map(topics -> topics.contains(topic)) .orElse(usesConsumerGroupProtocol()); }
@Test public void testIsSubscribedToTopic() { ClassicGroup group = new ClassicGroup(new LogContext(), "groupId", EMPTY, Time.SYSTEM, mock(GroupCoordinatorMetricsShard.class)); // 1. group has no protocol type => not subscribed assertFalse(group.isSubscribedToTopic("topic")); // 2. group does not use consumer group protocol type => not subscribed JoinGroupRequestProtocolCollection protocols = new JoinGroupRequestProtocolCollection(); protocols.add(new JoinGroupRequestProtocol() .setName("range") .setMetadata(ConsumerProtocol.serializeSubscription( new ConsumerPartitionAssignor.Subscription(Collections.singletonList("topic"))).array())); ClassicGroupMember memberWithNonConsumerProtocol = new ClassicGroupMember( "memberWithNonConsumerProtocol", Optional.empty(), clientId, clientHost, rebalanceTimeoutMs, sessionTimeoutMs, "My Protocol", protocols ); group.add(memberWithNonConsumerProtocol); group.transitionTo(PREPARING_REBALANCE); group.initNextGeneration(); assertTrue(group.isInState(COMPLETING_REBALANCE)); assertEquals(Optional.empty(), group.computeSubscribedTopics()); assertFalse(group.isSubscribedToTopic("topic")); // 3. group uses consumer group protocol type but empty members => not subscribed group.remove("memberWithNonConsumerProtocol"); ClassicGroupMember memberWithConsumerProtocol = new ClassicGroupMember( "memberWithConsumerProtocol", Optional.empty(), clientId, clientHost, rebalanceTimeoutMs, sessionTimeoutMs, "consumer", protocols ); group.add(memberWithConsumerProtocol); group.remove("memberWithConsumerProtocol"); group.transitionTo(PREPARING_REBALANCE); group.initNextGeneration(); assertTrue(group.isInState(EMPTY)); assertEquals(Optional.of(Collections.emptySet()), group.computeSubscribedTopics()); assertTrue(group.usesConsumerGroupProtocol()); assertFalse(group.isSubscribedToTopic("topic")); // 4. group uses consumer group protocol type with member subscription => subscribed group.add(memberWithConsumerProtocol); group.transitionTo(PREPARING_REBALANCE); group.initNextGeneration(); assertTrue(group.isInState(COMPLETING_REBALANCE)); assertEquals(Optional.of(Collections.singleton("topic")), group.computeSubscribedTopics()); assertTrue(group.usesConsumerGroupProtocol()); assertTrue(group.isSubscribedToTopic("topic")); }
public static <K, V> Map<K, V> emptyOnNull(Map<K, V> map) {
    return map == null ? new HashMap<>() : map;
}
@Test
void emptyOnNull() {
    var map = MapUtils.emptyOnNull(null);
    assertThat(map, notNullValue());
    assertThat(map, anEmptyMap());

    map = MapUtils.emptyOnNull(Map.of("key", "value"));
    assertThat(map, notNullValue());
    assertThat(map.size(), is(1));
}
@Override
public Object convert(String[] segments, int size, Class<?> targetType, Class<?> elementType) {
    Class<?> componentType = targetType.getComponentType();
    Converter converter = converterUtil.getConverter(String.class, componentType);
    Object array = newInstance(componentType, size);
    for (int i = 0; i < size; i++) {
        Array.set(array, i, converter.convert(segments[i]));
    }
    return array;
}
@Test
void testConvert() {
    assertTrue(deepEquals(new Integer[] {123}, converter.convert("123", Integer[].class, Integer.class)));
    assertTrue(deepEquals(new Integer[] {1, 2, 3}, converter.convert("1,2,3", Integer[].class, null)));
    assertNull(converter.convert("", Integer[].class, null));
    assertNull(converter.convert(null, Integer[].class, null));
}
public static void validate(BugPattern pattern) throws ValidationException {
    if (pattern == null) {
        throw new ValidationException("No @BugPattern provided");
    }

    // name must not contain spaces
    if (CharMatcher.whitespace().matchesAnyOf(pattern.name())) {
        throw new ValidationException("Name must not contain whitespace: " + pattern.name());
    }

    // linkType must be consistent with link element.
    switch (pattern.linkType()) {
        case CUSTOM:
            if (pattern.link().isEmpty()) {
                throw new ValidationException("Expected a custom link but none was provided");
            }
            break;
        case AUTOGENERATED:
        case NONE:
            if (!pattern.link().isEmpty()) {
                throw new ValidationException("Expected no custom link but found: " + pattern.link());
            }
            break;
    }
}
@Test
public void linkTypeNoneButIncludesLink() {
    @BugPattern(
        name = "LinkTypeNoneButIncludesLink",
        summary = "linkType none but includes link",
        explanation = "linkType none but includes link",
        severity = SeverityLevel.ERROR,
        linkType = LinkType.NONE,
        link = "http://foo")
    final class BugPatternTestClass {}

    BugPattern annotation = BugPatternTestClass.class.getAnnotation(BugPattern.class);
    assertThrows(ValidationException.class, () -> BugPatternValidator.validate(annotation));
}
@Override
public void syncHoodieTable() {
    switch (bqSyncClient.getTableType()) {
        case COPY_ON_WRITE:
        case MERGE_ON_READ:
            syncTable(bqSyncClient);
            break;
        default:
            throw new UnsupportedOperationException(bqSyncClient.getTableType() + " table type is not supported yet.");
    }
}
@Test void useBQManifestFile_newTablePartitioned() { properties.setProperty(BigQuerySyncConfig.BIGQUERY_SYNC_USE_BQ_MANIFEST_FILE.key(), "true"); String prefix = "file:///local/prefix"; properties.setProperty(BigQuerySyncConfig.BIGQUERY_SYNC_SOURCE_URI_PREFIX.key(), prefix); properties.setProperty(BigQuerySyncConfig.BIGQUERY_SYNC_PARTITION_FIELDS.key(), "datestr,type"); when(mockBqSyncClient.getTableType()).thenReturn(HoodieTableType.COPY_ON_WRITE); when(mockBqSyncClient.getBasePath()).thenReturn(TEST_TABLE_BASE_PATH); when(mockBqSyncClient.datasetExists()).thenReturn(true); when(mockBqSyncClient.tableNotExistsOrDoesNotMatchSpecification(TEST_TABLE)).thenReturn(true); Path manifestPath = new Path("file:///local/path"); when(mockManifestFileWriter.getManifestSourceUri(true)).thenReturn(manifestPath.toUri().getPath()); when(mockBqSchemaResolver.getTableSchema(any(), eq(Arrays.asList("datestr", "type")))).thenReturn(schema); BigQuerySyncTool tool = new BigQuerySyncTool(properties, mockManifestFileWriter, mockBqSyncClient, mockMetaClient, mockBqSchemaResolver); tool.syncHoodieTable(); verify(mockBqSyncClient).createOrUpdateTableUsingBqManifestFile(TEST_TABLE, manifestPath.toUri().getPath(), prefix, schema); verify(mockManifestFileWriter).writeManifestFile(true); }
public static HollowChecksum forStateEngineWithCommonSchemas(HollowReadStateEngine stateEngine,
                                                             HollowReadStateEngine commonSchemasWithState) {
    final Vector<TypeChecksum> typeChecksums = new Vector<TypeChecksum>();
    SimultaneousExecutor executor = new SimultaneousExecutor(HollowChecksum.class, "checksum-common-schemas");

    for (final HollowTypeReadState typeState : stateEngine.getTypeStates()) {
        HollowTypeReadState commonSchemasWithType = commonSchemasWithState.getTypeState(typeState.getSchema().getName());
        if (commonSchemasWithType != null) {
            final HollowSchema commonSchemasWith = commonSchemasWithType.getSchema();
            executor.execute(new Runnable() {
                public void run() {
                    HollowChecksum cksum = typeState.getChecksum(commonSchemasWith);
                    typeChecksums.addElement(new TypeChecksum(typeState.getSchema().getName(), cksum));
                }
            });
        }
    }

    try {
        executor.awaitSuccessfulCompletion();
    } catch (Exception e) {
        throw new RuntimeException(e);
    }

    Collections.sort(typeChecksums);

    HollowChecksum totalChecksum = new HollowChecksum();
    for (TypeChecksum cksum : typeChecksums) {
        totalChecksum.applyInt(cksum.getChecksum());
    }

    return totalChecksum;
}
@Test
public void checksumsCanBeEvaluatedAcrossObjectTypesWithDifferentSchemas() {
    HollowChecksum cksum1 = HollowChecksum.forStateEngineWithCommonSchemas(readEngine1, readEngine2);
    HollowChecksum cksum2 = HollowChecksum.forStateEngineWithCommonSchemas(readEngine2, readEngine1);

    Assert.assertEquals(cksum1, cksum2);
}
@Override
public void updateUserProfile(Long id, UserProfileUpdateReqVO reqVO) {
    // Validate correctness
    validateUserExists(id);
    validateEmailUnique(id, reqVO.getEmail());
    validateMobileUnique(id, reqVO.getMobile());
    // Perform the update
    userMapper.updateById(BeanUtils.toBean(reqVO, AdminUserDO.class).setId(id));
}
@Test
public void testUpdateUserProfile_success() {
    // Mock data
    AdminUserDO dbUser = randomAdminUserDO();
    userMapper.insert(dbUser);
    // Prepare parameters
    Long userId = dbUser.getId();
    UserProfileUpdateReqVO reqVO = randomPojo(UserProfileUpdateReqVO.class, o -> {
        o.setMobile(randomString());
        o.setSex(RandomUtil.randomEle(SexEnum.values()).getSex());
    });
    // Call
    userService.updateUserProfile(userId, reqVO);
    // Assert
    AdminUserDO user = userMapper.selectById(userId);
    assertPojoEquals(reqVO, user);
}
void handleLine(final String line) {
    final String trimmedLine = Optional.ofNullable(line).orElse("").trim();
    if (trimmedLine.isEmpty()) {
        return;
    }
    handleStatements(trimmedLine);
}
@Test
public void shouldExplainQueryId() {
    // Given:
    localCli.handleLine("CREATE STREAM " + streamName + " "
        + "AS SELECT * FROM " + ORDER_DATA_PROVIDER.sourceName() + ";");
    final String queryId = extractQueryId(terminal.getOutputString());
    final String explain = "EXPLAIN " + queryId + ";";

    // When:
    localCli.handleLine(explain);

    // Then:
    assertThat(terminal.getOutputString(), containsString(queryId));
    assertThat(terminal.getOutputString(), containsString("Status"));
    assertThat(terminal.getOutputString(),
        either(containsString("REBALANCING"))
            .or(containsString("RUNNING")));

    dropStream(streamName);
}
public static synchronized X509Certificate createX509V3Certificate(KeyPair kp, int days, String issuerCommonName,
                                                                   String subjectCommonName, String domain,
                                                                   String signAlgoritm) throws GeneralSecurityException, IOException {
    return createX509V3Certificate( kp, days, issuerCommonName, subjectCommonName, domain, signAlgoritm, null );
}
@Test public void testGenerateCertificateSubject() throws Exception { // Setup fixture. final KeyPair keyPair = subjectKeyPair; final int days = 2; final String issuerCommonName = "issuer common name"; final String subjectCommonName = "subject common name"; final String domain = "domain.example.org"; final Set<String> sanDnsNames = Stream.of( "alternative-a.example.org", "alternative-b.example.org" ).collect( Collectors.toSet() ); // Execute system under test. final X509Certificate result = CertificateManager.createX509V3Certificate( keyPair, days, issuerCommonName, subjectCommonName, domain, SIGNATURE_ALGORITHM, sanDnsNames ); // Verify results. assertNotNull( result ); final Set<String> foundSubjectCNs = parse( result.getSubjectX500Principal().getName(), "CN" ); assertEquals( 1, foundSubjectCNs.size() ); assertEquals( subjectCommonName, foundSubjectCNs.iterator().next() ); }
public void correctUsage() {
    correctGroupUsage();
    correctTenantUsage();
}
@Test void testCorrectUsage() { List<GroupCapacity> groupCapacityList = new ArrayList<>(); GroupCapacity groupCapacity = new GroupCapacity(); groupCapacity.setId(1L); groupCapacity.setGroup("testGroup"); groupCapacityList.add(groupCapacity); when(groupCapacityPersistService.getCapacityList4CorrectUsage(0L, 100)).thenReturn(groupCapacityList); when(groupCapacityPersistService.getCapacityList4CorrectUsage(1L, 100)).thenReturn(new ArrayList<>()); when(groupCapacityPersistService.correctUsage(eq("testGroup"), any())).thenReturn(true); List<TenantCapacity> tenantCapacityList = new ArrayList<>(); TenantCapacity tenantCapacity = new TenantCapacity(); tenantCapacity.setId(1L); tenantCapacity.setTenant("testTenant"); tenantCapacityList.add(tenantCapacity); when(tenantCapacityPersistService.getCapacityList4CorrectUsage(0L, 100)).thenReturn(tenantCapacityList); when(tenantCapacityPersistService.getCapacityList4CorrectUsage(1L, 100)).thenReturn(new ArrayList<>()); when(tenantCapacityPersistService.correctUsage(eq("testTenant"), any())).thenReturn(true); service.correctUsage(); Mockito.verify(groupCapacityPersistService, times(1)).getCapacityList4CorrectUsage(0L, 100); Mockito.verify(groupCapacityPersistService, times(1)).getCapacityList4CorrectUsage(1L, 100); Mockito.verify(groupCapacityPersistService, times(1)).correctUsage(eq("testGroup"), any()); Mockito.verify(tenantCapacityPersistService, times(1)).getCapacityList4CorrectUsage(0L, 100); Mockito.verify(tenantCapacityPersistService, times(1)).getCapacityList4CorrectUsage(1L, 100); Mockito.verify(tenantCapacityPersistService, times(1)).correctUsage(eq("testTenant"), any()); }
@Override
@NotNull
public BTreeMutable getMutableCopy() {
    final BTreeMutable result = new BTreeMutable(this);
    result.addExpiredLoggable(rootLoggable);
    return result;
}
@Test
public void testSplitLeft() {
    int s = 50;
    tm = new BTreeEmpty(log, createTestSplittingPolicy(), true, 1).getMutableCopy();

    for (int i = s - 1; i >= 0; i--) {
        getTreeMutable().put(kv(i, "v" + i));
    }

    checkTree(getTreeMutable(), s).run();

    long rootAddress = saveTree();
    checkTree(getTreeMutable(), s).run();

    reopen();

    t = new BTree(log, rootAddress, true, 1);
    checkTree(getTree(), s).run();
}
@Override
protected Future<KafkaMirrorMakerStatus> createOrUpdate(Reconciliation reconciliation, KafkaMirrorMaker assemblyResource) {
    String namespace = reconciliation.namespace();
    KafkaMirrorMakerCluster mirror;
    KafkaMirrorMakerStatus kafkaMirrorMakerStatus = new KafkaMirrorMakerStatus();

    try {
        mirror = KafkaMirrorMakerCluster.fromCrd(reconciliation, assemblyResource, versions, sharedEnvironmentProvider);
    } catch (Exception e) {
        LOGGER.warnCr(reconciliation, e);
        StatusUtils.setStatusConditionAndObservedGeneration(assemblyResource, kafkaMirrorMakerStatus, e);
        return Future.failedFuture(new ReconciliationException(kafkaMirrorMakerStatus, e));
    }

    Map<String, String> annotations = new HashMap<>(1);

    KafkaClientAuthentication authConsumer = assemblyResource.getSpec().getConsumer().getAuthentication();
    List<CertSecretSource> trustedCertificatesConsumer = assemblyResource.getSpec().getConsumer().getTls() == null ? Collections.emptyList() : assemblyResource.getSpec().getConsumer().getTls().getTrustedCertificates();
    KafkaClientAuthentication authProducer = assemblyResource.getSpec().getProducer().getAuthentication();
    List<CertSecretSource> trustedCertificatesProducer = assemblyResource.getSpec().getProducer().getTls() == null ? Collections.emptyList() : assemblyResource.getSpec().getProducer().getTls().getTrustedCertificates();

    Promise<KafkaMirrorMakerStatus> createOrUpdatePromise = Promise.promise();

    boolean mirrorHasZeroReplicas = mirror.getReplicas() == 0;

    LOGGER.debugCr(reconciliation, "Updating Kafka Mirror Maker cluster");
    mirrorMakerServiceAccount(reconciliation, namespace, mirror)
            .compose(i -> deploymentOperations.scaleDown(reconciliation, namespace, mirror.getComponentName(), mirror.getReplicas(), operationTimeoutMs))
            .compose(i -> MetricsAndLoggingUtils.metricsAndLogging(reconciliation, configMapOperations, mirror.logging(), mirror.metrics()))
            .compose(metricsAndLoggingCm -> {
                ConfigMap logAndMetricsConfigMap = mirror.generateMetricsAndLogConfigMap(metricsAndLoggingCm);
                annotations.put(Annotations.ANNO_STRIMZI_LOGGING_HASH, Util.hashStub(logAndMetricsConfigMap.getData().get(mirror.logging().configMapKey())));
                return configMapOperations.reconcile(reconciliation, namespace, KafkaMirrorMakerResources.metricsAndLogConfigMapName(reconciliation.name()), logAndMetricsConfigMap);
            })
            .compose(i -> podDisruptionBudgetOperator.reconcile(reconciliation, namespace, mirror.getComponentName(), mirror.generatePodDisruptionBudget()))
            .compose(i -> Future.join(VertxUtil.authTlsHash(secretOperations, namespace, authConsumer, trustedCertificatesConsumer), VertxUtil.authTlsHash(secretOperations, namespace, authProducer, trustedCertificatesProducer)))
            .compose(hashFut -> {
                if (hashFut != null) {
                    annotations.put(Annotations.ANNO_STRIMZI_AUTH_HASH, Integer.toString((int) hashFut.resultAt(0) + (int) hashFut.resultAt(1)));
                }
                return Future.succeededFuture();
            })
            .compose(i -> deploymentOperations.reconcile(reconciliation, namespace, mirror.getComponentName(), mirror.generateDeployment(annotations, pfa.isOpenshift(), imagePullPolicy, imagePullSecrets)))
            .compose(i -> deploymentOperations.scaleUp(reconciliation, namespace, mirror.getComponentName(), mirror.getReplicas(), operationTimeoutMs))
            .compose(i -> deploymentOperations.waitForObserved(reconciliation, namespace, mirror.getComponentName(), 1_000, operationTimeoutMs))
            .compose(i -> mirrorHasZeroReplicas ? Future.succeededFuture() : deploymentOperations.readiness(reconciliation, namespace, mirror.getComponentName(), 1_000, operationTimeoutMs))
            .onComplete(reconciliationResult -> {
                StatusUtils.setStatusConditionAndObservedGeneration(assemblyResource, kafkaMirrorMakerStatus, reconciliationResult.cause());

                // Add warning about Mirror Maker 1 being deprecated and removed soon
                LOGGER.warnCr(reconciliation, "Mirror Maker 1 is deprecated and will be removed in Apache Kafka 4.0.0. Please migrate to Mirror Maker 2.");
                StatusUtils.addConditionsToStatus(kafkaMirrorMakerStatus, Set.of(StatusUtils.buildWarningCondition("MirrorMaker1Deprecation", "Mirror Maker 1 is deprecated and will be removed in Apache Kafka 4.0.0. Please migrate to Mirror Maker 2.")));

                kafkaMirrorMakerStatus.setReplicas(mirror.getReplicas());
                kafkaMirrorMakerStatus.setLabelSelector(mirror.getSelectorLabels().toSelectorString());

                if (reconciliationResult.succeeded()) {
                    createOrUpdatePromise.complete(kafkaMirrorMakerStatus);
                } else {
                    createOrUpdatePromise.fail(new ReconciliationException(kafkaMirrorMakerStatus, reconciliationResult.cause()));
                }
            });

    return createOrUpdatePromise.future();
}
@Test
public void testUpdateClusterFailure(VertxTestContext context) {
    ResourceOperatorSupplier supplier = ResourceUtils.supplierWithMocks(true);
    CrdOperator mockMirrorOps = supplier.mirrorMakerOperator;
    DeploymentOperator mockDcOps = supplier.deploymentOperations;
    PodDisruptionBudgetOperator mockPdbOps = supplier.podDisruptionBudgetOperator;
    ConfigMapOperator mockCmOps = supplier.configMapOperations;

    String kmmName = "foo";
    String kmmNamespace = "test";

    KafkaMirrorMakerConsumerSpec consumer = new KafkaMirrorMakerConsumerSpecBuilder()
            .withBootstrapServers(consumerBootstrapServers)
            .withGroupId(groupId)
            .withNumStreams(numStreams)
            .build();
    KafkaMirrorMakerProducerSpec producer = new KafkaMirrorMakerProducerSpecBuilder()
            .withBootstrapServers(producerBootstrapServers)
            .build();

    KafkaMirrorMaker kmm = ResourceUtils.createKafkaMirrorMaker(kmmNamespace, kmmName, image, producer, consumer, include);
    KafkaMirrorMakerCluster mirror = KafkaMirrorMakerCluster.fromCrd(Reconciliation.DUMMY_RECONCILIATION, kmm, VERSIONS, SHARED_ENV_PROVIDER);
    kmm.getSpec().setImage("some/different:image"); // Change the image to generate some diff

    when(mockMirrorOps.get(kmmNamespace, kmmName)).thenReturn(kmm);
    when(mockDcOps.get(kmmNamespace, mirror.getComponentName())).thenReturn(mirror.generateDeployment(new HashMap<>(), true, null, null));
    when(mockDcOps.readiness(any(), eq(kmmNamespace), eq(mirror.getComponentName()), anyLong(), anyLong())).thenReturn(Future.succeededFuture());
    when(mockDcOps.waitForObserved(any(), anyString(), anyString(), anyLong(), anyLong())).thenReturn(Future.succeededFuture());

    ArgumentCaptor<String> dcNamespaceCaptor = ArgumentCaptor.forClass(String.class);
    ArgumentCaptor<String> dcNameCaptor = ArgumentCaptor.forClass(String.class);
    ArgumentCaptor<Deployment> dcCaptor = ArgumentCaptor.forClass(Deployment.class);
    when(mockDcOps.reconcile(any(), dcNamespaceCaptor.capture(), dcNameCaptor.capture(), dcCaptor.capture())).thenReturn(Future.failedFuture("Failed"));

    ArgumentCaptor<String> dcScaleUpNamespaceCaptor = ArgumentCaptor.forClass(String.class);
    ArgumentCaptor<String> dcScaleUpNameCaptor = ArgumentCaptor.forClass(String.class);
    ArgumentCaptor<Integer> dcScaleUpReplicasCaptor = ArgumentCaptor.forClass(Integer.class);
    when(mockDcOps.scaleUp(any(), dcScaleUpNamespaceCaptor.capture(), dcScaleUpNameCaptor.capture(), dcScaleUpReplicasCaptor.capture(), anyLong())).thenReturn(Future.succeededFuture());

    ArgumentCaptor<String> dcScaleDownNamespaceCaptor = ArgumentCaptor.forClass(String.class);
    ArgumentCaptor<String> dcScaleDownNameCaptor = ArgumentCaptor.forClass(String.class);
    ArgumentCaptor<Integer> dcScaleDownReplicasCaptor = ArgumentCaptor.forClass(Integer.class);
    when(mockDcOps.scaleDown(any(), dcScaleDownNamespaceCaptor.capture(), dcScaleDownNameCaptor.capture(), dcScaleDownReplicasCaptor.capture(), anyLong())).thenReturn(Future.succeededFuture());

    when(mockPdbOps.reconcile(any(), anyString(), any(), any())).thenReturn(Future.succeededFuture());

    when(mockMirrorOps.reconcile(any(), anyString(), any(), any())).thenReturn(Future.succeededFuture(ReconcileResult.created(new KafkaMirrorMaker())));
    when(mockMirrorOps.getAsync(anyString(), anyString())).thenReturn(Future.succeededFuture(kmm));
    when(mockMirrorOps.updateStatusAsync(any(), any(KafkaMirrorMaker.class))).thenReturn(Future.succeededFuture());
    when(mockCmOps.reconcile(any(), anyString(), any(), any())).thenReturn(Future.succeededFuture(ReconcileResult.created(new ConfigMap())));

    KafkaMirrorMakerAssemblyOperator ops = new KafkaMirrorMakerAssemblyOperator(vertx,
            new PlatformFeaturesAvailability(true, kubernetesVersion),
            new MockCertManager(), new PasswordGenerator(10, "a", "a"),
            supplier,
            ResourceUtils.dummyClusterOperatorConfig(VERSIONS));

    Checkpoint async = context.checkpoint();
    ops.createOrUpdate(new Reconciliation("test-trigger", KafkaMirrorMaker.RESOURCE_KIND, kmmNamespace, kmmName), kmm)
            .onComplete(context.failing(v -> context.verify(() -> async.flag())));
}
public TimeRange parseTimeRange(final String timerangeKeyword) {
    try {
        if (StringUtils.isBlank(timerangeKeyword)) {
            return null;
        }
        final Optional<TimeRange> shortTimeRange = shortTimerangeFormatParser.parse(timerangeKeyword);
        return shortTimeRange.orElseGet(() -> KeywordRange.create(timerangeKeyword, "UTC"));
    } catch (Exception e) {
        throw new IllegalArgumentException("Could not parse timerange " + timerangeKeyword
                + ". It should have a short format (i.e. '2h') or natural date format (i.e. 'last 2 hours')");
    }
}
@Test
void returnsNullOnBlankTimerange() {
    assertNull(toTest.parseTimeRange(null));
    assertNull(toTest.parseTimeRange(""));
    assertNull(toTest.parseTimeRange(" "));
}
@Nullable
public byte[] getValue() {
    return mValue;
}
@Test
public void setValue_SINT32() {
    final MutableData data = new MutableData(new byte[4]);
    data.setValue(0xfefdfd00, Data.FORMAT_UINT32_LE, 0);
    assertArrayEquals(new byte[] { (byte) 0x00, (byte) 0xFD, (byte) 0xFD, (byte) 0xFE }, data.getValue());
}
@Override
public void createService(String serviceName) throws NacosException {
    createService(serviceName, Constants.DEFAULT_GROUP);
}
@Test
void testCreateService2() throws NacosException {
    //given
    String serviceName = "service1";
    String groupName = "groupName";
    //when
    nacosNamingMaintainService.createService(serviceName, groupName);
    //then
    verify(serverProxy, times(1)).createService(argThat(new ArgumentMatcher<Service>() {
        @Override
        public boolean matches(Service service) {
            return service.getName().equals(serviceName) && service.getGroupName().equals(groupName)
                    && Math.abs(service.getProtectThreshold() - Constants.DEFAULT_PROTECT_THRESHOLD) < 0.1f
                    && service.getMetadata().size() == 0;
        }
    }), argThat(o -> o instanceof NoneSelector));
}
@Override
public String getStringFunctions() {
    return null;
}
@Test
void assertGetStringFunctions() {
    assertNull(metaData.getStringFunctions());
}
public Channel getAvailableChannel(String groupId) {
    if (groupId == null) {
        return null;
    }
    List<Channel> channelList;
    ConcurrentHashMap<Channel, ClientChannelInfo> channelClientChannelInfoHashMap = groupChannelTable.get(groupId);
    if (channelClientChannelInfoHashMap != null) {
        channelList = new ArrayList<>(channelClientChannelInfoHashMap.keySet());
    } else {
        log.warn("Check transaction failed, channel table is empty. groupId={}", groupId);
        return null;
    }

    int size = channelList.size();
    if (0 == size) {
        log.warn("Channel list is empty. groupId={}", groupId);
        return null;
    }

    Channel lastActiveChannel = null;

    int index = positiveAtomicCounter.incrementAndGet() % size;
    Channel channel = channelList.get(index);
    int count = 0;
    boolean isOk = channel.isActive() && channel.isWritable();

    while (count++ < GET_AVAILABLE_CHANNEL_RETRY_COUNT) {
        if (isOk) {
            return channel;
        }
        if (channel.isActive()) {
            lastActiveChannel = channel;
        }
        index = (++index) % size;
        channel = channelList.get(index);
        isOk = channel.isActive() && channel.isWritable();
    }

    return lastActiveChannel;
}
@Test
public void testGetAvailableChannel() {
    producerManager.registerProducer(group, clientInfo);

    when(channel.isActive()).thenReturn(true);
    when(channel.isWritable()).thenReturn(true);
    Channel c = producerManager.getAvailableChannel(group);
    assertThat(c).isSameAs(channel);

    when(channel.isWritable()).thenReturn(false);
    c = producerManager.getAvailableChannel(group);
    assertThat(c).isSameAs(channel);

    when(channel.isActive()).thenReturn(false);
    c = producerManager.getAvailableChannel(group);
    assertThat(c).isNull();
}
@Override
public String getTopic() {
    StringBuilder sb = new StringBuilder();
    if (StringUtils.isNotBlank(config.getMm2SourceAlias())) {
        sb.append(config.getMm2SourceAlias()).append(config.getMm2SourceSeparator());
    }
    if (StringUtil.isNotBlank(config.getNamespace())) {
        sb.append(config.getNamespace()).append("-");
    }
    sb.append(getPlainTopic());
    return sb.toString();
}
@Test
public void testGetTopic() {
    KafkaFetcherConfig config = new KafkaFetcherConfig();
    MockModuleManager manager = new MockModuleManager() {
        @Override
        protected void init() {
        }
    };
    String plainTopic = config.getTopicNameOfTracingSegments();
    MockKafkaHandler kafkaHandler = new MockKafkaHandler(plainTopic, manager, config);

    // unset namespace and mm2
    assertEquals(kafkaHandler.getTopic(), plainTopic);

    // set namespace only
    String namespace = "product";
    config.setNamespace(namespace);
    assertEquals(namespace + "-" + plainTopic, kafkaHandler.getTopic());

    // set mm2 only
    config.setNamespace("");
    String mm2SourceAlias = "DC1";
    config.setMm2SourceAlias(mm2SourceAlias);
    String mm2SourceSeparator = ".";
    config.setMm2SourceSeparator(mm2SourceSeparator);
    assertEquals(mm2SourceAlias + mm2SourceSeparator + plainTopic, kafkaHandler.getTopic());

    // set namespace and mm2
    config.setNamespace(namespace);
    config.setMm2SourceAlias(mm2SourceAlias);
    config.setMm2SourceSeparator(mm2SourceSeparator);
    assertEquals(mm2SourceAlias + mm2SourceSeparator + namespace + "-" + plainTopic, kafkaHandler.getTopic());
}
@Override public void showPreviewForKey(Keyboard.Key key, CharSequence label, Point previewPosition) { mPreviewIcon.setVisibility(View.GONE); mPreviewText.setVisibility(View.VISIBLE); mPreviewIcon.setImageDrawable(null); mPreviewText.setTextColor(mPreviewPopupTheme.getPreviewKeyTextColor()); mPreviewText.setText(label); if (label.length() > 1 && key.getCodesCount() < 2) { mPreviewText.setTextSize( TypedValue.COMPLEX_UNIT_PX, mPreviewPopupTheme.getPreviewLabelTextSize()); } else { mPreviewText.setTextSize( TypedValue.COMPLEX_UNIT_PX, mPreviewPopupTheme.getPreviewKeyTextSize()); } mPreviewText.measure( View.MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED), View.MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED)); showPopup( key, mPreviewText.getMeasuredWidth(), mPreviewText.getMeasuredHeight(), previewPosition); }
@Test public void testPreviewLayoutCorrectlyForLabel() { PreviewPopupTheme theme = new PreviewPopupTheme(); theme.setPreviewKeyBackground( ContextCompat.getDrawable(getApplicationContext(), blacktheme_preview_background)); theme.setPreviewKeyTextSize(1); final KeyPreviewPopupWindow underTest = new KeyPreviewPopupWindow( getApplicationContext(), new View(getApplicationContext()), theme); PopupWindow createdPopupWindow = Shadows.shadowOf((Application) ApplicationProvider.getApplicationContext()) .getLatestPopupWindow(); Assert.assertNull(createdPopupWindow); Keyboard.Key key = Mockito.mock(Keyboard.Key.class); Mockito.doReturn((int) 'y').when(key).getPrimaryCode(); Mockito.doReturn(1).when(key).getCodesCount(); key.width = 10; key.height = 20; underTest.showPreviewForKey(key, "yy", new Point(1, 1)); createdPopupWindow = Shadows.shadowOf((Application) ApplicationProvider.getApplicationContext()) .getLatestPopupWindow(); Assert.assertNotNull(createdPopupWindow); }
public static Predicate<MetricDto> isOptimizedForBestValue() {
    return m -> m != null && m.isOptimizedBestValue() && m.getBestValue() != null;
}
@Test
void isOptimizedForBestValue_at_true() {
    metric = new MetricDto()
        .setBestValue(42.0d)
        .setOptimizedBestValue(true);

    boolean result = MetricDtoFunctions.isOptimizedForBestValue().test(metric);

    assertThat(result).isTrue();
}
public static KafkaRebalanceState rebalanceState(KafkaRebalanceStatus kafkaRebalanceStatus) {
    if (kafkaRebalanceStatus != null) {
        Condition rebalanceStateCondition = rebalanceStateCondition(kafkaRebalanceStatus);
        String statusString = rebalanceStateCondition != null ? rebalanceStateCondition.getType() : null;
        if (statusString != null) {
            return KafkaRebalanceState.valueOf(statusString);
        }
    }
    return null;
}
@Test
public void testNullStatus() {
    KafkaRebalanceState state = KafkaRebalanceUtils.rebalanceState(null);
    assertThat(state, is(nullValue()));
}
@Override
public void store(K key, V value) {
    long startNanos = Timer.nanos();
    try {
        delegate.store(key, value);
    } finally {
        storeProbe.recordValue(Timer.nanosElapsed(startNanos));
    }
}
@Test
public void store() {
    String key = "somekey";
    String value = "somevalue";
    cacheStore.store(key, value);

    verify(delegate).store(key, value);
    assertProbeCalledOnce("store");
}
public Node parse() throws ScanException {
    if (tokenList == null || tokenList.isEmpty())
        return null;
    return E();
}
@Test
public void variable() throws ScanException {
    Tokenizer tokenizer = new Tokenizer("${abc}");
    Parser parser = new Parser(tokenizer.tokenize());
    Node node = parser.parse();
    Node witness = new Node(Node.Type.VARIABLE, new Node(Node.Type.LITERAL, "abc"));
    assertEquals(witness, node);
}
@Override
public SubmitApplicationResponse submitApplication(SubmitApplicationRequest request)
    throws YarnException, IOException {
    if (request == null || request.getApplicationSubmissionContext() == null
        || request.getApplicationSubmissionContext().getApplicationId() == null) {
        RouterMetrics.getMetrics().incrAppsFailedSubmitted();
        String errMsg = "Missing submitApplication request or applicationSubmissionContext information.";
        RouterAuditLogger.logFailure(user.getShortUserName(), SUBMIT_NEW_APP, UNKNOWN,
            TARGET_CLIENT_RM_SERVICE, errMsg);
        RouterServerUtil.logAndThrowException(errMsg, null);
    }

    ApplicationSubmissionContext appContext = request.getApplicationSubmissionContext();
    ApplicationSubmissionContextPBImpl asc = (ApplicationSubmissionContextPBImpl) appContext;

    // Check for excessively large fields, throw exception if found
    RouterServerUtil.checkAppSubmissionContext(asc, getConf());

    // Check succeeded - app submit will be passed on to the next interceptor
    return getNextInterceptor().submitApplication(request);
}
@Test
public void testSubmitApplicationEmptyRequest() throws Exception {
    MockRouterClientRMService rmService = getRouterClientRMService();
    LambdaTestUtils.intercept(YarnException.class,
        "Missing submitApplication request or applicationSubmissionContext information.",
        () -> rmService.submitApplication(null));

    // A request whose submission context carries no application id must be rejected as well.
    ApplicationSubmissionContext context = ApplicationSubmissionContext.newInstance(
        null, "", "", null, null, false, false, -1, null, null);
    SubmitApplicationRequest request = SubmitApplicationRequest.newInstance(context);
    LambdaTestUtils.intercept(YarnException.class,
        "Missing submitApplication request or applicationSubmissionContext information.",
        () -> rmService.submitApplication(request));
}
public static SimpleFunction<String, Row> getJsonStringToRowFunction(Schema beamSchema) {
    return new JsonToRowFn<String>(beamSchema) {
        @Override
        public Row apply(String jsonString) {
            return RowJsonUtils.jsonToRow(objectMapper, jsonString);
        }
    };
}
@Test
public void testGetJsonStringToRowFunction() {
    for (TestCase<? extends RowEncodable> caze : testCases) {
        Row expected = caze.row;
        Row actual = JsonUtils.getJsonStringToRowFunction(expected.getSchema()).apply(caze.jsonString);
        assertEquals(caze.userT.toString(), expected, actual);
    }
}
public List<CoordinatorRecord> onPartitionsDeleted( List<TopicPartition> topicPartitions ) { List<CoordinatorRecord> records = new ArrayList<>(); Map<String, List<Integer>> partitionsByTopic = new HashMap<>(); topicPartitions.forEach(tp -> partitionsByTopic .computeIfAbsent(tp.topic(), __ -> new ArrayList<>()) .add(tp.partition()) ); Consumer<Offsets> delete = offsetsToClean -> { offsetsToClean.offsetsByGroup.forEach((groupId, topicOffsets) -> { topicOffsets.forEach((topic, partitionOffsets) -> { if (partitionsByTopic.containsKey(topic)) { partitionsByTopic.get(topic).forEach(partition -> { if (partitionOffsets.containsKey(partition)) { appendOffsetCommitTombstone(groupId, topic, partition, records); } }); } }); }); }; // Delete the partitions from the main storage. delete.accept(offsets); // Delete the partitions from the pending transactional offsets. pendingTransactionalOffsets.forEach((__, offsets) -> delete.accept(offsets)); return records; }
@Test public void testOnPartitionsDeleted() { OffsetMetadataManagerTestContext context = new OffsetMetadataManagerTestContext.Builder().build(); // Commit offsets. context.commitOffset("grp-0", "foo", 1, 100, 1, context.time.milliseconds()); context.commitOffset("grp-0", "foo", 2, 200, 1, context.time.milliseconds()); context.commitOffset("grp-0", "foo", 3, 300, 1, context.time.milliseconds()); context.commitOffset("grp-1", "bar", 1, 100, 1, context.time.milliseconds()); context.commitOffset("grp-1", "bar", 2, 200, 1, context.time.milliseconds()); context.commitOffset("grp-1", "bar", 3, 300, 1, context.time.milliseconds()); context.commitOffset(100L, "grp-2", "foo", 1, 100, 1, context.time.milliseconds()); context.commitOffset(100L, "grp-2", "foo", 2, 200, 1, context.time.milliseconds()); context.commitOffset(100L, "grp-2", "foo", 3, 300, 1, context.time.milliseconds()); // Delete partitions. List<CoordinatorRecord> records = context.deletePartitions(Arrays.asList( new TopicPartition("foo", 1), new TopicPartition("foo", 2), new TopicPartition("foo", 3), new TopicPartition("bar", 1) )); // Verify. List<CoordinatorRecord> expectedRecords = Arrays.asList( GroupCoordinatorRecordHelpers.newOffsetCommitTombstoneRecord("grp-0", "foo", 1), GroupCoordinatorRecordHelpers.newOffsetCommitTombstoneRecord("grp-0", "foo", 2), GroupCoordinatorRecordHelpers.newOffsetCommitTombstoneRecord("grp-0", "foo", 3), GroupCoordinatorRecordHelpers.newOffsetCommitTombstoneRecord("grp-1", "bar", 1), GroupCoordinatorRecordHelpers.newOffsetCommitTombstoneRecord("grp-2", "foo", 1), GroupCoordinatorRecordHelpers.newOffsetCommitTombstoneRecord("grp-2", "foo", 2), GroupCoordinatorRecordHelpers.newOffsetCommitTombstoneRecord("grp-2", "foo", 3) ); assertEquals(new HashSet<>(expectedRecords), new HashSet<>(records)); assertFalse(context.hasOffset("grp-0", "foo", 1)); assertFalse(context.hasOffset("grp-0", "foo", 2)); assertFalse(context.hasOffset("grp-0", "foo", 3)); assertFalse(context.hasOffset("grp-1", "bar", 1)); assertFalse(context.hasOffset("grp-2", "foo", 1)); assertFalse(context.hasOffset("grp-2", "foo", 2)); assertFalse(context.hasOffset("grp-2", "foo", 3)); }
@Override
public void updateConfig(ConfigSaveReqVO updateReqVO) {
    // Validate that the record exists
    validateConfigExists(updateReqVO.getId());
    // Validate the uniqueness of the config key
    validateConfigKeyUnique(updateReqVO.getId(), updateReqVO.getKey());
    // Update the config
    ConfigDO updateObj = ConfigConvert.INSTANCE.convert(updateReqVO);
    configMapper.updateById(updateObj);
}
@Test
public void testUpdateConfig_success() {
    // Mock data
    ConfigDO dbConfig = randomConfigDO();
    configMapper.insert(dbConfig); // @Sql: insert an existing record first
    // Prepare parameters
    ConfigSaveReqVO reqVO = randomPojo(ConfigSaveReqVO.class, o -> {
        o.setId(dbConfig.getId()); // set the ID to update
    });
    // Call
    configService.updateConfig(reqVO);
    // Verify the update is correct
    ConfigDO config = configMapper.selectById(reqVO.getId()); // fetch the latest record
    assertPojoEquals(reqVO, config);
}
@Override
public int hashCode() {
    return Objects.hash(taskId, topicPartitions);
}
@Test
public void shouldNotBeEqualsIfDifferInTaskID() {
    final TaskMetadataImpl differTaskId = new TaskMetadataImpl(
        new TaskId(1, 10000),
        TOPIC_PARTITIONS,
        COMMITTED_OFFSETS,
        END_OFFSETS,
        TIME_CURRENT_IDLING_STARTED);
    assertThat(taskMetadata, not(equalTo(differTaskId)));
    assertThat(taskMetadata.hashCode(), not(equalTo(differTaskId.hashCode())));
}
@ScalarFunction(nullableParameters = true)
public static byte[] toThetaSketch(@Nullable Object input) {
    return toThetaSketch(input, CommonConstants.Helix.DEFAULT_THETA_SKETCH_NOMINAL_ENTRIES);
}
@Test
public void testThetaSketchCreation() {
    for (Object i : _inputs) {
        Assert.assertEquals(thetaEstimate(SketchFunctions.toThetaSketch(i)), 1.0);
        Assert.assertEquals(thetaEstimate(SketchFunctions.toThetaSketch(i, 1024)), 1.0);
    }
    Assert.assertEquals(thetaEstimate(SketchFunctions.toThetaSketch(null)), 0.0);
    Assert.assertEquals(thetaEstimate(SketchFunctions.toThetaSketch(null, 1024)), 0.0);
    Assert.assertThrows(IllegalArgumentException.class, () -> SketchFunctions.toThetaSketch(new Object()));
    Assert.assertThrows(IllegalArgumentException.class, () -> SketchFunctions.toThetaSketch(new Object(), 1024));
}
@Override
public OAuth2CodeDO consumeAuthorizationCode(String code) {
    OAuth2CodeDO codeDO = oauth2CodeMapper.selectByCode(code);
    if (codeDO == null) {
        throw exception(OAUTH2_CODE_NOT_EXISTS);
    }
    if (DateUtils.isExpired(codeDO.getExpiresTime())) {
        throw exception(OAUTH2_CODE_EXPIRE);
    }
    oauth2CodeMapper.deleteById(codeDO.getId());
    return codeDO;
}
@Test
public void testConsumeAuthorizationCode_expired() {
    // Prepare parameters
    String code = "test_code";
    // Mock data
    OAuth2CodeDO codeDO = randomPojo(OAuth2CodeDO.class).setCode(code)
        .setExpiresTime(LocalDateTime.now().minusDays(1));
    oauth2CodeMapper.insert(codeDO);
    // Call and assert
    assertServiceException(() -> oauth2CodeService.consumeAuthorizationCode(code),
        OAUTH2_CODE_EXPIRE);
}
protected Map<String, String[]> generateParameterMap(MultiValuedTreeMap<String, String> qs, ContainerConfig config) { Map<String, String[]> output; Map<String, List<String>> formEncodedParams = getFormUrlEncodedParametersMap(); if (qs == null) { // Just transform the List<String> values to String[] output = formEncodedParams.entrySet().stream() .collect(Collectors.toMap(Map.Entry::getKey, (e) -> e.getValue().toArray(new String[0]))); } else { Map<String, List<String>> queryStringParams; if (config.isQueryStringCaseSensitive()) { queryStringParams = qs; } else { // If it's case insensitive, we check the entire map on every parameter queryStringParams = qs.entrySet().stream().parallel().collect( Collectors.toMap( Map.Entry::getKey, e -> getQueryParamValuesAsList(qs, e.getKey(), false) )); } // Merge formEncodedParams and queryStringParams Maps output = Stream.of(formEncodedParams, queryStringParams).flatMap(m -> m.entrySet().stream()) .collect( Collectors.toMap( Map.Entry::getKey, e -> e.getValue().toArray(new String[0]), // If a parameter is in both Maps, we merge the list of values (and ultimately transform to String[]) (formParam, queryParam) -> Stream.of(formParam, queryParam).flatMap(Stream::of).toArray(String[]::new) )); } return output; }
@Test void parameterMap_generateParameterMap_nullParameter() { AwsProxyHttpServletRequest request = new AwsProxyHttpServletRequest(queryStringNullValue, mockContext, null, config); Map<String, String[]> paramMap = null; try { paramMap = request.generateParameterMap(request.getAwsProxyRequest().getMultiValueQueryStringParameters(), config); } catch (Exception e) { e.printStackTrace(); fail("Could not generate parameter map"); } assertArrayEquals(new String[]{"two"}, paramMap.get("one")); assertArrayEquals(new String[]{null}, paramMap.get("three")); assertTrue(paramMap.size() == 2); }
@Override public PollResult poll(long currentTimeMs) { if (memberId == null) { return PollResult.EMPTY; } // Send any pending acknowledgements before fetching more records. PollResult pollResult = processAcknowledgements(currentTimeMs); if (pollResult != null) { return pollResult; } if (!fetchMoreRecords || closing) { return PollResult.EMPTY; } Map<Node, ShareSessionHandler> handlerMap = new HashMap<>(); Map<String, Uuid> topicIds = metadata.topicIds(); for (TopicPartition partition : partitionsToFetch()) { Optional<Node> leaderOpt = metadata.currentLeader(partition).leader; if (!leaderOpt.isPresent()) { log.debug("Requesting metadata update for partition {} since current leader node is missing", partition); metadata.requestUpdate(false); continue; } Uuid topicId = topicIds.get(partition.topic()); if (topicId == null) { log.debug("Requesting metadata update for partition {} since topic ID is missing", partition); metadata.requestUpdate(false); continue; } Node node = leaderOpt.get(); if (nodesWithPendingRequests.contains(node.id())) { log.trace("Skipping fetch for partition {} because previous fetch request to {} has not been processed", partition, node.id()); } else { // if there is a leader and no in-flight requests, issue a new fetch ShareSessionHandler handler = handlerMap.computeIfAbsent(node, k -> sessionHandlers.computeIfAbsent(node.id(), n -> new ShareSessionHandler(logContext, n, memberId))); TopicIdPartition tip = new TopicIdPartition(topicId, partition); Acknowledgements acknowledgementsToSend = fetchAcknowledgementsMap.get(tip); if (acknowledgementsToSend != null) { metricsManager.recordAcknowledgementSent(acknowledgementsToSend.size()); } handler.addPartitionToFetch(tip, acknowledgementsToSend); log.debug("Added fetch request for partition {} to node {}", partition, node.id()); } } Map<Node, ShareFetchRequest.Builder> builderMap = new LinkedHashMap<>(); for (Map.Entry<Node, ShareSessionHandler> entry : handlerMap.entrySet()) { builderMap.put(entry.getKey(), entry.getValue().newShareFetchBuilder(groupId, fetchConfig)); } List<UnsentRequest> requests = builderMap.entrySet().stream().map(entry -> { Node target = entry.getKey(); log.trace("Building ShareFetch request to send to node {}", target.id()); ShareFetchRequest.Builder requestBuilder = entry.getValue(); nodesWithPendingRequests.add(target.id()); BiConsumer<ClientResponse, Throwable> responseHandler = (clientResponse, error) -> { if (error != null) { handleShareFetchFailure(target, requestBuilder.data(), error); } else { handleShareFetchSuccess(target, requestBuilder.data(), clientResponse); } }; return new UnsentRequest(requestBuilder, Optional.of(target)).whenComplete(responseHandler); }).collect(Collectors.toList()); return new PollResult(requests); }
@Test public void testUnauthorizedTopic() { buildRequestManager(); assignFromSubscribed(singleton(tp0)); assertEquals(1, sendFetches()); client.prepareResponse(fullFetchResponse(tip0, records, emptyAcquiredRecords, Errors.TOPIC_AUTHORIZATION_FAILED)); networkClientDelegate.poll(time.timer(0)); try { collectFetch(); fail("collectFetch should have thrown a TopicAuthorizationException"); } catch (TopicAuthorizationException e) { assertEquals(singleton(topicName), e.unauthorizedTopics()); } }
@ApiOperation(value = "Create Or update Tenant (saveTenant)", notes = "Create or update the Tenant. When creating tenant, platform generates Tenant Id as " + UUID_WIKI_LINK + "Default Rule Chain and Device profile are also generated for the new tenants automatically. " + "The newly created Tenant Id will be present in the response. " + "Specify existing Tenant Id id to update the Tenant. " + "Referencing non-existing Tenant Id will cause 'Not Found' error." + "Remove 'id', 'tenantId' from the request body example (below) to create new Tenant entity." + SYSTEM_AUTHORITY_PARAGRAPH) @PreAuthorize("hasAuthority('SYS_ADMIN')") @RequestMapping(value = "/tenant", method = RequestMethod.POST) @ResponseBody public Tenant saveTenant(@Parameter(description = "A JSON value representing the tenant.") @RequestBody Tenant tenant) throws Exception { checkEntity(tenant.getId(), tenant, Resource.TENANT); return tbTenantService.save(tenant); }
@Test public void testFindTenantInfos() throws Exception { loginSysAdmin(); List<TenantInfo> tenants = new ArrayList<>(); PageLink pageLink = new PageLink(17); PageData<TenantInfo> pageData = doGetTypedWithPageLink("/api/tenantInfos?", PAGE_DATA_TENANT_INFO_TYPE_REF, pageLink); Assert.assertFalse(pageData.hasNext()); Assert.assertEquals(1, pageData.getData().size()); tenants.addAll(pageData.getData()); List<ListenableFuture<TenantInfo>> createFutures = new ArrayList<>(56); for (int i = 0; i < 56; i++) { Tenant tenant = new Tenant(); tenant.setTitle("Tenant" + i); createFutures.add(executor.submit(() -> new TenantInfo(saveTenant(tenant), "Default"))); } tenants.addAll(Futures.allAsList(createFutures).get(TIMEOUT, TimeUnit.SECONDS)); List<TenantInfo> loadedTenants = new ArrayList<>(); pageLink = new PageLink(17); do { pageData = doGetTypedWithPageLink("/api/tenantInfos?", PAGE_DATA_TENANT_INFO_TYPE_REF, pageLink); loadedTenants.addAll(pageData.getData()); if (pageData.hasNext()) { pageLink = pageLink.nextPageLink(); } } while (pageData.hasNext()); assertThat(tenants).containsExactlyInAnyOrderElementsOf(loadedTenants); deleteEntitiesAsync("/api/tenant/", loadedTenants.stream() .filter((t) -> !TEST_TENANT_NAME.equals(t.getTitle())) .collect(Collectors.toList()), executor).get(TIMEOUT, TimeUnit.SECONDS); pageLink = new PageLink(17); pageData = doGetTypedWithPageLink("/api/tenantInfos?", PAGE_DATA_TENANT_INFO_TYPE_REF, pageLink); Assert.assertFalse(pageData.hasNext()); Assert.assertEquals(1, pageData.getData().size()); }
public static String[] splitString( String string, String separator ) {
  /*
   * 0123456 Example a;b;c;d --> new String[] { a, b, c, d }
   */
  List<String> list = new ArrayList<>();

  if ( string == null || string.length() == 0 ) {
    return new String[] {};
  }

  int sepLen = separator.length();
  int from = 0;
  int end = string.length() - sepLen + 1;

  for ( int i = from; i < end; i += sepLen ) {
    if ( string.substring( i, i + sepLen ).equalsIgnoreCase( separator ) ) {
      // OK, we found a separator, the string to add to the list is [from, i[
      list.add( nullToEmpty( string.substring( from, i ) ) );
      from = i + sepLen;
    }
  }

  // Wait, if the string didn't end with a separator, we still have information at the end of the string...
  // In our example that would be "d"...
  if ( from + sepLen <= string.length() ) {
    list.add( nullToEmpty( string.substring( from, string.length() ) ) );
  }

  return list.toArray( new String[list.size()] );
}
@Test
public void testSplitStringWithDelimiterAndEmptyEnclosureMultiChar() {
  String mask = "Hello%s world";
  String[] chunks = { "Hello", " world" };

  String stringToSplit = String.format( mask, DELIMITER2 );
  String[] result = Const.splitString( stringToSplit, DELIMITER2, "" );
  assertSplit( result, chunks );
}
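A couple of hand-traced calls make the splitter's boundary handling concrete. This is a minimal usage sketch, assuming the Const class shown above is on the classpath (the demo class name is illustrative). One subtlety worth noting: the scan advances by the separator's length, so multi-character separators are only detected at offsets that are multiples of that length.

import java.util.Arrays;

// Illustrative driver for Const.splitString (class name is hypothetical).
public class SplitStringDemo {
    public static void main(String[] args) {
        // Separators between tokens: ["a", "b", "c", "d"]
        System.out.println(Arrays.toString(Const.splitString("a;b;c;d", ";")));
        // A leading separator produces an empty first token: ["", "a"]
        System.out.println(Arrays.toString(Const.splitString(";a", ";")));
        // A trailing separator produces no trailing empty token, because the
        // tail is only appended when characters remain after the last match: ["a", "b"]
        System.out.println(Arrays.toString(Const.splitString("a;b;", ";")));
        // Null or empty input returns an empty array: []
        System.out.println(Arrays.toString(Const.splitString(null, ";")));
    }
}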
static final String addFunctionParameter(ParameterDescriptor descriptor, RuleBuilderStep step) {
    final String parameterName = descriptor.name(); // parameter name needed by function
    final Map<String, Object> parameters = step.parameters();
    if (Objects.isNull(parameters)) {
        return null;
    }
    final Object value = parameters.get(parameterName); // parameter value set by rule definition

    // size -- Size in octets; modify -- Last modification time; etc. do not apply here:
    // the value is rendered either as a variable reference or as an escaped string literal.
    String syntax = " " + parameterName + " : ";
    if (value == null) {
        return null;
    } else if (value instanceof String valueString) {
        if (StringUtils.isEmpty(valueString)) {
            return null;
        } else if (valueString.startsWith("$")) { // value set as variable
            syntax += valueString.substring(1);
        } else {
            syntax += "\"" + StringEscapeUtils.escapeJava(valueString) + "\""; // value set as string
        }
    } else {
        syntax += value;
    }
    return syntax;
}
@Test
public void addFunctionParameterSyntaxOk_WhenVariableParameterValueIsSet() {
    String parameterName = "foo";
    String parameterValue = "$bar";
    RuleBuilderStep step = mock(RuleBuilderStep.class);
    Map<String, Object> params = Map.of(parameterName, parameterValue);
    when(step.parameters()).thenReturn(params);
    ParameterDescriptor descriptor = mock(ParameterDescriptor.class);
    when(descriptor.name()).thenReturn(parameterName);

    assertThat(ParserUtil.addFunctionParameter(descriptor, step))
        .isEqualTo(" foo : bar");
}
public void expectLogMessage(int level, String tag, Matcher<String> messageMatcher) {
    expectLog(level, tag, messageMatcher, null);
}
@Test
public void testNoExpectedMessageFailsTest() {
    expectedException.expect(AssertionError.class);
    rule.expectLogMessage(Log.ERROR, "Mytag", "What's up");
}
public static RestartBackoffTimeStrategy.Factory createRestartBackoffTimeStrategyFactory(
        final RestartStrategies.RestartStrategyConfiguration jobRestartStrategyConfiguration,
        final Configuration jobConfiguration,
        final Configuration clusterConfiguration,
        final boolean isCheckpointingEnabled) {
    checkNotNull(jobRestartStrategyConfiguration);
    checkNotNull(jobConfiguration);
    checkNotNull(clusterConfiguration);

    return getJobRestartStrategyFactory(jobRestartStrategyConfiguration)
            .orElse(getRestartStrategyFactoryFromConfig(jobConfiguration)
                    .orElse(getRestartStrategyFactoryFromConfig(clusterConfiguration)
                            .orElse(getDefaultRestartStrategyFactory(isCheckpointingEnabled))));
}
@Test
void testFailureRateStrategySpecifiedInJobConfig() {
    final Configuration jobConf = new Configuration();
    jobConf.set(RestartStrategyOptions.RESTART_STRATEGY, FAILURE_RATE.getMainValue());

    final Configuration clusterConf = new Configuration();
    clusterConf.set(RestartStrategyOptions.RESTART_STRATEGY, FIXED_DELAY.getMainValue());

    final RestartBackoffTimeStrategy.Factory factory =
            RestartBackoffTimeStrategyFactoryLoader.createRestartBackoffTimeStrategyFactory(
                    DEFAULT_JOB_LEVEL_RESTART_CONFIGURATION, jobConf, clusterConf, false);

    assertThat(factory)
            .isInstanceOf(
                    FailureRateRestartBackoffTimeStrategy
                            .FailureRateRestartBackoffTimeStrategyFactory.class);
}
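The factory loader above resolves a precedence chain with nested Optional.orElse calls: an explicit job-level strategy wins over the job Configuration, which wins over the cluster Configuration, which falls back to a checkpointing-dependent default. A minimal generic sketch of the same pattern (all names here are illustrative, not Flink API):

import java.util.Optional;

// Hypothetical stand-ins for the job-level, job-config and cluster-config lookups.
public class PrecedenceDemo {
    static Optional<String> fromJobLevel() { return Optional.empty(); }
    static Optional<String> fromJobConfig() { return Optional.of("failure-rate"); }
    static Optional<String> fromClusterConfig() { return Optional.of("fixed-delay"); }

    public static void main(String[] args) {
        // Mirrors the nested orElse chain: the first non-empty lookup wins.
        String strategy = fromJobLevel()
                .orElse(fromJobConfig()
                        .orElse(fromClusterConfig()
                                .orElse("default")));
        System.out.println(strategy); // job config is set, so this prints "failure-rate"
    }
}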
@SneakyThrows(ReflectiveOperationException.class)
public static <T extends YamlConfiguration> T unmarshal(final File yamlFile, final Class<T> classType) throws IOException {
    try (BufferedReader inputStreamReader = Files.newBufferedReader(Paths.get(yamlFile.toURI()))) {
        T result = new Yaml(new ShardingSphereYamlConstructor(classType)).loadAs(inputStreamReader, classType);
        return null == result ? classType.getConstructor().newInstance() : result;
    }
}
@Test
void assertUnmarshalWithEmptyProperties() {
    Properties actual = YamlEngine.unmarshal("", Properties.class);
    assertNotNull(actual);
    assertTrue(actual.isEmpty());
}
static long compressTimeBucket(long timeBucket, int dayStep) {
    if (dayStep > 1) {
        DateTime time = TIME_BUCKET_FORMATTER.parseDateTime("" + timeBucket);
        int days = Days.daysBetween(DAY_ONE, time).getDays();
        int groupBucketOffset = days % dayStep;
        return Long.parseLong(time.minusDays(groupBucketOffset).toString(TIME_BUCKET_FORMATTER));
    } else {
        /*
         * No calculation required. dayStep is meant for lower-traffic deployments; with a normal
         * configuration there is nothing to compute.
         */
        return timeBucket;
    }
}
@Test
public void testCompressTimeBucket() {
    Assertions.assertEquals(20000101L, compressTimeBucket(20000105, 11));
    Assertions.assertEquals(20000101L, compressTimeBucket(20000111, 11));
    Assertions.assertEquals(20000112L, compressTimeBucket(20000112, 11));
    Assertions.assertEquals(20000112L, compressTimeBucket(20000122, 11));
    Assertions.assertEquals(20000123L, compressTimeBucket(20000123, 11));
    Assertions.assertEquals(20000123L, compressTimeBucket(20000125, 11));
}
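The compression groups day buckets into fixed windows anchored at an epoch day: it takes the day offset since the anchor modulo dayStep and subtracts the remainder. Below is a minimal re-derivation of the arithmetic with java.time instead of Joda-Time; DAY_ONE = 2000-01-01 is an assumption inferred from the expected values in the test, and the class name is illustrative.

import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoUnit;

public class BucketDemo {
    private static final DateTimeFormatter F = DateTimeFormatter.ofPattern("yyyyMMdd");
    private static final LocalDate DAY_ONE = LocalDate.of(2000, 1, 1); // assumed anchor

    static long compress(long timeBucket, int dayStep) {
        LocalDate time = LocalDate.parse(Long.toString(timeBucket), F);
        long days = ChronoUnit.DAYS.between(DAY_ONE, time); // days since the anchor
        long offset = days % dayStep;                       // position inside the window
        return Long.parseLong(time.minusDays(offset).format(F)); // window start
    }

    public static void main(String[] args) {
        System.out.println(compress(20000105L, 11)); // 4 days in -> 20000101
        System.out.println(compress(20000112L, 11)); // day 11 starts a new window -> 20000112
    }
}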
public static void clean(
        Object func, ExecutionConfig.ClosureCleanerLevel level, boolean checkSerializable) {
    clean(func, level, checkSerializable, Collections.newSetFromMap(new IdentityHashMap<>()));
}
@Test
void testCleanedNonSerializable() throws Exception {
    MapCreator creator = new NonSerializableMapCreator();
    MapFunction<Integer, Integer> map = creator.getMap();

    ClosureCleaner.clean(map, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true);

    int result = map.map(3);
    assertThat(result).isEqualTo(4);
}
public static InstrumentedScheduledExecutorService newScheduledThreadPool(
        int corePoolSize, MetricRegistry registry, String name) {
    return new InstrumentedScheduledExecutorService(
            Executors.newScheduledThreadPool(corePoolSize), registry, name);
}
@Test
public void testNewScheduledThreadPoolWithThreadFactory() throws Exception {
    final ScheduledExecutorService executorService =
            InstrumentedExecutors.newScheduledThreadPool(2, defaultThreadFactory, registry);
    executorService.schedule(new NoopRunnable(), 0, TimeUnit.SECONDS);

    final Field delegateField = InstrumentedScheduledExecutorService.class.getDeclaredField("delegate");
    delegateField.setAccessible(true);
    final ScheduledThreadPoolExecutor delegate =
            (ScheduledThreadPoolExecutor) delegateField.get(executorService);
    assertThat(delegate.getCorePoolSize()).isEqualTo(2);
    assertThat(delegate.getThreadFactory()).isSameAs(defaultThreadFactory);
    executorService.shutdown();
}
public Map<FeatureOption, MergingStrategy> computeMergingStrategies(
        List<SqlTableLikeOption> mergingOptions) {
    Map<FeatureOption, MergingStrategy> result = new HashMap<>(defaultMergingStrategies);

    Optional<SqlTableLikeOption> maybeAllOption =
            mergingOptions.stream()
                    .filter(option -> option.getFeatureOption() == FeatureOption.ALL)
                    .findFirst();

    maybeAllOption.ifPresent(
            (allOption) -> {
                MergingStrategy strategy = allOption.getMergingStrategy();
                for (FeatureOption featureOption : FeatureOption.values()) {
                    if (featureOption != FeatureOption.ALL) {
                        result.put(featureOption, strategy);
                    }
                }
            });

    for (SqlTableLikeOption mergingOption : mergingOptions) {
        result.put(mergingOption.getFeatureOption(), mergingOption.getMergingStrategy());
    }

    return result;
}
@Test
void excludingAllMergeStrategyExpansion() {
    List<SqlTableLikeOption> inputOptions =
            Collections.singletonList(
                    new SqlTableLikeOption(MergingStrategy.EXCLUDING, FeatureOption.ALL));

    Map<FeatureOption, MergingStrategy> mergingStrategies =
            util.computeMergingStrategies(inputOptions);

    assertThat(mergingStrategies.get(FeatureOption.OPTIONS)).isEqualTo(MergingStrategy.EXCLUDING);
    assertThat(mergingStrategies.get(FeatureOption.PARTITIONS)).isEqualTo(MergingStrategy.EXCLUDING);
    assertThat(mergingStrategies.get(FeatureOption.CONSTRAINTS)).isEqualTo(MergingStrategy.EXCLUDING);
    assertThat(mergingStrategies.get(FeatureOption.GENERATED)).isEqualTo(MergingStrategy.EXCLUDING);
    assertThat(mergingStrategies.get(FeatureOption.WATERMARKS)).isEqualTo(MergingStrategy.EXCLUDING);
}
public List<ResContainer> makeResourcesXml(JadxArgs args) { Map<String, ICodeWriter> contMap = new HashMap<>(); for (ResourceEntry ri : resStorage.getResources()) { if (SKIP_RES_TYPES.contains(ri.getTypeName())) { continue; } String fn = getFileName(ri); ICodeWriter cw = contMap.get(fn); if (cw == null) { cw = new SimpleCodeWriter(args); cw.add("<?xml version=\"1.0\" encoding=\"utf-8\"?>"); cw.startLine("<resources>"); cw.incIndent(); contMap.put(fn, cw); } addValue(cw, ri); } List<ResContainer> files = new ArrayList<>(contMap.size()); for (Map.Entry<String, ICodeWriter> entry : contMap.entrySet()) { String fileName = entry.getKey(); ICodeWriter content = entry.getValue(); content.decIndent(); content.startLine("</resources>"); ICodeInfo codeInfo = content.finish(); files.add(ResContainer.textResource(fileName, codeInfo)); } Collections.sort(files); return files; }
@Test void testAttrMin() { ResourceStorage resStorage = new ResourceStorage(); ResourceEntry re = new ResourceEntry(2130903103, "jadx.gui.app", "attr", "size", ""); re.setNamedValues( Lists.list(new RawNamedValue(16777216, new RawValue(16, 4)), new RawNamedValue(16777217, new RawValue(16, 1)))); resStorage.add(re); ValuesParser vp = new ValuesParser(null, resStorage.getResourcesNames()); ResXmlGen resXmlGen = new ResXmlGen(resStorage, vp); List<ResContainer> files = resXmlGen.makeResourcesXml(args); assertThat(files).hasSize(1); assertThat(files.get(0).getName()).isEqualTo("res/values/attrs.xml"); String input = files.get(0).getText().toString(); assertThat(input).isEqualTo("<?xml version=\"1.0\" encoding=\"utf-8\"?>\n" + "<resources>\n" + " <attr name=\"size\" format=\"integer\" min=\"1\">\n" + " </attr>\n" + "</resources>"); }
public void doIt( String[] args ) throws IOException { if( args.length != 3 ) { usage(); } else { try (PDDocument document = Loader.loadPDF(new File(args[0]))) { if( document.isEncrypted() ) { throw new IOException( "Encrypted documents are not supported for this example" ); } for (int i = 0; i < document.getNumberOfPages(); i++) { PDPage page = document.getPage(i); List<PDAnnotation> annotations = page.getAnnotations(); PDAnnotationRubberStamp rubberStamp = new PDAnnotationRubberStamp(); rubberStamp.setName(PDAnnotationRubberStamp.NAME_TOP_SECRET); rubberStamp.setRectangle(new PDRectangle(200,100)); rubberStamp.setContents("A top secret note"); // create a PDXObjectImage with the given image file // if you already have the image in a BufferedImage, // call LosslessFactory.createFromImage() instead PDImageXObject ximage = PDImageXObject.createFromFile(args[2], document); // define and set the target rectangle float lowerLeftX = 250; float lowerLeftY = 550; float formWidth = 150; float formHeight = 25; float imgWidth = 50; float imgHeight = 25; PDRectangle rect = new PDRectangle(); rect.setLowerLeftX(lowerLeftX); rect.setLowerLeftY(lowerLeftY); rect.setUpperRightX(lowerLeftX + formWidth); rect.setUpperRightY(lowerLeftY + formHeight); // Create a PDFormXObject PDFormXObject form = new PDFormXObject(document); form.setResources(new PDResources()); form.setBBox(rect); form.setFormType(1); // adjust the image to the target rectangle and add it to the stream try (OutputStream os = form.getStream().createOutputStream()) { drawXObject(ximage, form.getResources(), os, lowerLeftX, lowerLeftY, imgWidth, imgHeight); } PDAppearanceStream myDic = new PDAppearanceStream(form.getCOSObject()); PDAppearanceDictionary appearance = new PDAppearanceDictionary(new COSDictionary()); appearance.setNormalAppearance(myDic); rubberStamp.setAppearance(appearance); rubberStamp.setRectangle(rect); // add the new RubberStamp to the document annotations.add(rubberStamp); } document.save( args[1] ); } } }
@Test void test() throws IOException { String documentFile = "src/test/resources/org/apache/pdfbox/examples/pdmodel/document.pdf"; String stampFile = "src/test/resources/org/apache/pdfbox/examples/pdmodel/stamp.jpg"; String outFile = "target/test-output/TestRubberStampWithImage.pdf"; new File("target/test-output").mkdirs(); BufferedImage bim1; try (PDDocument doc1 = Loader.loadPDF(new File(documentFile))) { bim1 = new PDFRenderer(doc1).renderImage(0); } String[] args = { documentFile, outFile, stampFile }; RubberStampWithImage rubberStamp = new RubberStampWithImage(); rubberStamp.doIt(args); try (PDDocument doc2 = Loader.loadPDF(new File(outFile))) { BufferedImage bim2 = new PDFRenderer(doc2).renderImage(0); assertFalse(compareImages(bim1, bim2)); PDAnnotationRubberStamp rubberStampAnnotation = (PDAnnotationRubberStamp) doc2.getPage(0).getAnnotations().get(0); PDAppearanceDictionary appearance = rubberStampAnnotation.getAppearance(); PDAppearanceEntry normalAppearance = appearance.getNormalAppearance(); PDAppearanceStream appearanceStream = normalAppearance.getAppearanceStream(); PDImageXObject ximage = (PDImageXObject) appearanceStream.getResources().getXObject(COSName.getPDFName("Im1")); BufferedImage actualStampImage = ximage.getImage(); BufferedImage expectedStampImage = ImageIO.read(new File(stampFile)); assertTrue(compareImages(expectedStampImage, actualStampImage)); } }
@Override
public AttributedList<Path> read(final Path directory, final List<String> replies) throws FTPInvalidListException {
    final AttributedList<Path> children = new AttributedList<>();
    if(replies.isEmpty()) {
        return children;
    }
    // At least one entry successfully parsed
    boolean success = false;
    for(String line : replies) {
        final Map<String, Map<String, String>> file = this.parseFacts(line);
        if(null == file) {
            log.error(String.format("Error parsing line %s", line));
            continue;
        }
        for(Map.Entry<String, Map<String, String>> f : file.entrySet()) {
            final String name = f.getKey();
            // size -- Size in octets
            // modify -- Last modification time
            // create -- Creation time
            // type -- Entry type
            // unique -- Unique id of file/directory
            // perm -- File permissions, whether read, write, execute is allowed for the login id.
            // lang -- Language of the file name per IANA [11] registry.
            // media-type -- MIME media-type of file contents per IANA registry.
            // charset -- Character set per IANA registry (if not UTF-8)
            final Map<String, String> facts = f.getValue();
            if(!facts.containsKey("type")) {
                log.error(String.format("No type fact in line %s", line));
                continue;
            }
            final Path parsed;
            if("dir".equals(facts.get("type").toLowerCase(Locale.ROOT))) {
                parsed = new Path(directory, PathNormalizer.name(f.getKey()), EnumSet.of(Path.Type.directory));
            }
            else if("file".equals(facts.get("type").toLowerCase(Locale.ROOT))) {
                parsed = new Path(directory, PathNormalizer.name(f.getKey()), EnumSet.of(Path.Type.file));
            }
            else if(facts.get("type").toLowerCase(Locale.ROOT).matches("os\\.unix=slink:.*")) {
                parsed = new Path(directory, PathNormalizer.name(f.getKey()), EnumSet.of(Path.Type.file, Path.Type.symboliclink));
                // Parse symbolic link target in Type=OS.unix=slink:/foobar;Perm=;Unique=keVO1+4G4; foobar
                final String[] type = facts.get("type").split(":");
                if(type.length == 2) {
                    final String target = type[1];
                    if(target.startsWith(String.valueOf(Path.DELIMITER))) {
                        parsed.setSymlinkTarget(new Path(PathNormalizer.normalize(target), EnumSet.of(Path.Type.file)));
                    }
                    else {
                        parsed.setSymlinkTarget(new Path(PathNormalizer.normalize(String.format("%s/%s", directory.getAbsolute(), target)), EnumSet.of(Path.Type.file)));
                    }
                }
                else {
                    log.warn(String.format("Missing symbolic link target for type %s in line %s", facts.get("type"), line));
                    continue;
                }
            }
            else {
                log.warn(String.format("Ignored type %s in line %s", facts.get("type"), line));
                continue;
            }
            if(!success) {
                if(parsed.isDirectory() && directory.getName().equals(name)) {
                    log.warn(String.format("Possibly bogus response line %s", line));
                }
                else {
                    success = true;
                }
            }
            if(name.equals(".") || name.equals("..")) {
                if(log.isDebugEnabled()) {
                    log.debug(String.format("Skip %s", name));
                }
                continue;
            }
            if(facts.containsKey("size")) {
                parsed.attributes().setSize(Long.parseLong(facts.get("size")));
            }
            if(facts.containsKey("unix.uid")) {
                parsed.attributes().setOwner(facts.get("unix.uid"));
            }
            if(facts.containsKey("unix.owner")) {
                parsed.attributes().setOwner(facts.get("unix.owner"));
            }
            if(facts.containsKey("unix.gid")) {
                parsed.attributes().setGroup(facts.get("unix.gid"));
            }
            if(facts.containsKey("unix.group")) {
                parsed.attributes().setGroup(facts.get("unix.group"));
            }
            if(facts.containsKey("unix.mode")) {
                parsed.attributes().setPermission(new Permission(facts.get("unix.mode")));
            }
            else if(facts.containsKey("perm")) {
                if(PreferencesFactory.get().getBoolean("ftp.parser.mlsd.perm.enable")) {
                    Permission.Action user = Permission.Action.none;
                    final String flags = facts.get("perm");
                    if(StringUtils.contains(flags, 'r') || StringUtils.contains(flags, 'l')) {
                        // RETR command may be applied to that object
                        // Listing commands, LIST, NLST, and MLSD may be applied
                        user = user.or(Permission.Action.read);
                    }
                    if(StringUtils.contains(flags, 'w') || StringUtils.contains(flags, 'm') || StringUtils.contains(flags, 'c')) {
                        user = user.or(Permission.Action.write);
                    }
                    if(StringUtils.contains(flags, 'e')) {
                        // CWD command naming the object should succeed
                        user = user.or(Permission.Action.execute);
                        if(parsed.isDirectory()) {
                            user = user.or(Permission.Action.read);
                        }
                    }
                    final Permission permission = new Permission(user, Permission.Action.none, Permission.Action.none);
                    parsed.attributes().setPermission(permission);
                }
            }
            if(facts.containsKey("modify")) {
                // Time values are always represented in UTC
                parsed.attributes().setModificationDate(this.parseTimestamp(facts.get("modify")));
            }
            if(facts.containsKey("create")) {
                // Time values are always represented in UTC
                parsed.attributes().setCreationDate(this.parseTimestamp(facts.get("create")));
            }
            children.add(parsed);
        }
    }
    if(!success) {
        throw new FTPInvalidListException(children);
    }
    return children;
}
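// parseFacts is invoked above but is not part of this excerpt. Per RFC 3659 an
// MLSD entry is "fact;fact;...; pathname", with a single space separating the
// facts from the file name. A minimal sketch under that assumption (the real
// implementation may differ, e.g. in error handling); fact keys are lowercased
// to match the lookups above, and a fact value may itself contain '='
// (as in Type=OS.unix=slink:/foobar):
protected Map<String, Map<String, String>> parseFacts(final String line) {
    final int delimiter = line.indexOf(' ');
    if(delimiter < 0) {
        // No space separating facts from the file name
        return null;
    }
    final Map<String, String> facts = new HashMap<>();
    for(String fact : line.substring(0, delimiter).split(";")) {
        final int pivot = fact.indexOf('=');
        if(pivot > 0) {
            facts.put(fact.substring(0, pivot).toLowerCase(Locale.ROOT), fact.substring(pivot + 1));
        }
    }
    return Collections.singletonMap(line.substring(delimiter + 1), facts);
}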
@Test
@Ignore
// Disabled: presumably because a '/' inside an MLSD file name cannot currently be
// distinguished from a path delimiter by the reader
public void testParseSlashInFilename() throws Exception {
    Path path = new Path("/www", EnumSet.of(Path.Type.directory));
    String[] replies = new String[]{
            "type=dir;modify=20140315210350; Gozo 2013/2014",
            "type=dir;modify=20140315210350; Tigger & Friends"
    };
    final AttributedList<Path> children = new FTPMlsdListResponseReader()
            .read(path, Arrays.asList(replies));
    assertEquals(2, children.size());
    assertEquals("/www/Gozo 2013/2014", children.get(0).getAbsolute());
    assertEquals("Gozo 2013/2014", children.get(0).getName());
}