focal_method | test_case |
---|---|
private synchronized RemotingCommand removeColdDataFlowCtrGroupConfig(ChannelHandlerContext ctx,
RemotingCommand request) {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
LOGGER.info("removeColdDataFlowCtrGroupConfig called by {}", RemotingHelper.parseChannelRemoteAddr(ctx.channel()));
byte[] body = request.getBody();
if (body != null) {
try {
String consumerGroup = new String(body, MixAll.DEFAULT_CHARSET);
if (consumerGroup != null) {
LOGGER.info("removeColdDataFlowCtrGroupConfig, consumerGroup: {} client: {}", consumerGroup, ctx.channel().remoteAddress());
this.brokerController.getColdDataCgCtrService().removeGroupConfig(consumerGroup);
} else {
LOGGER.error("removeColdDataFlowCtrGroupConfig string parse error");
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark("string parse error");
return response;
}
} catch (UnsupportedEncodingException e) {
LOGGER.error("removeColdDataFlowCtrGroupConfig UnsupportedEncodingException", e);
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark("UnsupportedEncodingException " + e);
return response;
}
}
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
} | @Test
public void testRemoveColdDataFlowCtrGroupConfig() throws RemotingCommandException {
RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.REMOVE_COLD_DATA_FLOW_CTR_CONFIG, null);
RemotingCommand response = adminBrokerProcessor.processRequest(handlerContext, request);
assertThat(response.getCode()).isEqualTo(ResponseCode.SUCCESS);
request.setBody("consumerGroup1".getBytes());
response = adminBrokerProcessor.processRequest(handlerContext, request);
assertThat(response.getCode()).isEqualTo(ResponseCode.SUCCESS);
} |
@Override @Nullable public Throwable error() {
return status.getCause();
} | @Test void error_null() {
assertThat(response.error()).isNull();
} |
public static boolean isBean(Type type) {
return isBean(TypeRef.of(type));
} | @Test
public void isBean() {
Assert.assertTrue(TypeUtils.isBean(BeanA.class));
Assert.assertTrue(TypeUtils.isBean(Bean.class));
Assert.assertFalse(TypeUtils.isBean(ArrayList.class));
} |
@Deprecated
public String withoutNamespace(String resource) {
return NamespaceUtil.withoutNamespace(resource, this.getNamespace());
} | @Test
public void testWithoutNamespace() {
String actual = clientConfig.withoutNamespace(resource);
assertEquals(resource, actual);
Set<String> resources = clientConfig.withoutNamespace(Collections.singleton(resource));
assertTrue(resources.contains(resource));
} |
public static boolean equalTo(Inspector a, Inspector b) {
if (a.type() != b.type()) return false;
switch (a.type()) {
case NIX: return a.valid() == b.valid();
case BOOL: return a.asBool() == b.asBool();
case LONG: return a.asLong() == b.asLong();
case DOUBLE: return Double.compare(a.asDouble(), b.asDouble()) == 0;
case STRING: return a.asString().equals(b.asString());
case DATA: return Arrays.equals(a.asData(), b.asData());
case ARRAY: {
if (a.entries() != b.entries()) return false;
for (int i = 0; i < a.entries(); i++) {
if (!equalTo(a.entry(i), b.entry(i))) return false;
}
return true;
}
case OBJECT: {
if (a.fields() != b.fields()) return false;
boolean[] equal = new boolean[]{ true };
a.traverse((String key, Inspector value) -> {
if (equal[0] && !equalTo(value, b.field(key))) equal[0] = false;
});
return equal[0];
}
default: throw new IllegalStateException("Unexpected type: " + a.type());
}
} | @Test
public void verifyArrayEquality() {
Slime slimeLeft = new Slime();
Cursor left = slimeLeft.setArray();
left.addArray().addString("a");
left.addArray().addString("b");
Slime slimeRight = new Slime();
Cursor right = slimeRight.setArray();
right.addArray().addString("a");
right.addArray().addString("b");
assertTrue(left.equalTo(right));
assertTrue(right.equalTo(left));
assertTrue(left.equalTo(left));
right.addArray().addString("c");
assertFalse(left.equalTo(right));
assertFalse(right.equalTo(left));
// Order matters
Slime slimeRight2 = new Slime();
Cursor right2 = slimeRight2.setArray();
right2.addArray().addString("b");
right2.addArray().addString("a");
assertFalse(left.equalTo(right2));
assertFalse(right2.equalTo(left));
} |
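Worth noting from the method above: ARRAY comparison is positional, while OBJECT comparison looks fields up by key, so insertion order is irrelevant for objects. A small sketch using the same Slime API as the test (assuming `Cursor.setString(name, value)` as in the wider Slime API):

```java
Slime a = new Slime();
Cursor objA = a.setObject();
objA.setString("x", "1");
objA.setString("y", "2");

Slime b = new Slime();
Cursor objB = b.setObject();
objB.setString("y", "2"); // same fields, different insertion order
objB.setString("x", "1");

// Field counts match and each field of A equals B.field(key),
// so object equality holds regardless of insertion order.
assertTrue(objA.equalTo(objB));
```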
@Override
public PollResult poll(long currentTimeMs) {
return pollInternal(
prepareFetchRequests(),
this::handleFetchSuccess,
this::handleFetchFailure
);
} | @Test
public void testHeaders() {
buildFetcher();
MemoryRecordsBuilder builder = MemoryRecords.builder(ByteBuffer.allocate(1024), Compression.NONE, TimestampType.CREATE_TIME, 1L);
builder.append(0L, "key".getBytes(), "value-1".getBytes());
Header[] headersArray = new Header[1];
headersArray[0] = new RecordHeader("headerKey", "headerValue".getBytes(StandardCharsets.UTF_8));
builder.append(0L, "key".getBytes(), "value-2".getBytes(), headersArray);
Header[] headersArray2 = new Header[2];
headersArray2[0] = new RecordHeader("headerKey", "headerValue".getBytes(StandardCharsets.UTF_8));
headersArray2[1] = new RecordHeader("headerKey", "headerValue2".getBytes(StandardCharsets.UTF_8));
builder.append(0L, "key".getBytes(), "value-3".getBytes(), headersArray2);
MemoryRecords memoryRecords = builder.build();
List<ConsumerRecord<byte[], byte[]>> records;
assignFromUser(singleton(tp0));
subscriptions.seek(tp0, 1);
client.prepareResponse(matchesOffset(tidp0, 1), fullFetchResponse(tidp0, memoryRecords, Errors.NONE, 100L, 0));
assertEquals(1, sendFetches());
networkClientDelegate.poll(time.timer(0));
Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> recordsByPartition = fetchRecords();
records = recordsByPartition.get(tp0);
assertEquals(3, records.size());
Iterator<ConsumerRecord<byte[], byte[]>> recordIterator = records.iterator();
ConsumerRecord<byte[], byte[]> record = recordIterator.next();
assertNull(record.headers().lastHeader("headerKey"));
record = recordIterator.next();
assertEquals("headerValue", new String(record.headers().lastHeader("headerKey").value(), StandardCharsets.UTF_8));
assertEquals("headerKey", record.headers().lastHeader("headerKey").key());
record = recordIterator.next();
assertEquals("headerValue2", new String(record.headers().lastHeader("headerKey").value(), StandardCharsets.UTF_8));
assertEquals("headerKey", record.headers().lastHeader("headerKey").key());
} |
public static Build withPropertyValue(String propertyValue) {
return new Builder(propertyValue);
} | @Test
void it_should_return_transport_as_default_value_when_property_is_empty() {
//GIVEN
String empty = "";
//WHEN
ElasticsearchClientType clientType =
ElasticsearchClientTypeBuilder.withPropertyValue(empty).build();
//THEN
assertEquals(TRANSPORT, clientType);
} |
@Override
public List<String> batchDeleteMetadata(String namespaceId, InstanceOperationInfo instanceOperationInfo,
Map<String, String> metadata) throws NacosException {
boolean isEphemeral = !UtilsAndCommons.PERSIST.equals(instanceOperationInfo.getConsistencyType());
String serviceName = instanceOperationInfo.getServiceName();
Service service = getService(namespaceId, serviceName, isEphemeral);
List<String> result = new LinkedList<>();
List<Instance> needUpdateInstance = findBatchUpdateInstance(instanceOperationInfo, service);
for (Instance each : needUpdateInstance) {
String metadataId = InstancePublishInfo.genMetadataId(each.getIp(), each.getPort(), each.getClusterName());
Optional<InstanceMetadata> instanceMetadata = metadataManager.getInstanceMetadata(service, metadataId);
InstanceMetadata newMetadata = instanceMetadata.map(this::cloneMetadata).orElseGet(InstanceMetadata::new);
metadata.keySet().forEach(key -> newMetadata.getExtendData().remove(key));
metadataOperateService.updateInstanceMetadata(service, metadataId, newMetadata);
result.add(each.toInetAddr() + ":" + UtilsAndCommons.LOCALHOST_SITE + ":" + each.getClusterName() + ":" + (
each.isEphemeral() ? UtilsAndCommons.EPHEMERAL : UtilsAndCommons.PERSIST));
}
return result;
} | @Test
void testBatchDeleteMetadata() throws NacosException {
Instance instance = new Instance();
instance.setServiceName("C");
instance.setIp("1.1.1.1");
instance.setPort(8848);
ServiceInfo serviceInfo = new ServiceInfo();
serviceInfo.setHosts(Collections.singletonList(instance));
when(serviceStorage.getData(Mockito.any())).thenReturn(serviceInfo);
List<String> res = instanceOperatorClient.batchDeleteMetadata("A", new InstanceOperationInfo(), new HashMap<>());
assertEquals(1, res.size());
} |
public void abortIncompleteBatches() {
// We need to keep aborting the incomplete batch until no thread is trying to append to
// 1. Avoid losing batches.
// 2. Free up memory in case appending threads are blocked on buffer full.
// This is a tight loop but should be able to get through very quickly.
do {
abortBatches();
} while (appendsInProgress());
// After this point, no thread will append any messages because they will see the close
// flag set. We need to do the last abort after no thread was appending in case there was a new
// batch appended by the last appending thread.
abortBatches();
this.topicInfoMap.clear();
} | @Test
public void testAbortIncompleteBatches() throws Exception {
int lingerMs = Integer.MAX_VALUE;
int numRecords = 100;
final AtomicInteger numExceptionReceivedInCallback = new AtomicInteger(0);
final RecordAccumulator accum = createTestRecordAccumulator(
128 + DefaultRecordBatch.RECORD_BATCH_OVERHEAD, 64 * 1024, Compression.NONE, lingerMs);
class TestCallback implements RecordAccumulator.AppendCallbacks {
@Override
public void onCompletion(RecordMetadata metadata, Exception exception) {
assertEquals("Producer is closed forcefully.", exception.getMessage());
numExceptionReceivedInCallback.incrementAndGet();
}
@Override
public void setPartition(int partition) {
}
}
for (int i = 0; i < numRecords; i++)
accum.append(topic, i % 3, 0L, key, value, null, new TestCallback(), maxBlockTimeMs, false, time.milliseconds(), cluster);
RecordAccumulator.ReadyCheckResult result = accum.ready(metadataCache, time.milliseconds());
assertFalse(result.readyNodes.isEmpty());
Map<Integer, List<ProducerBatch>> drained = accum.drain(metadataCache, result.readyNodes, Integer.MAX_VALUE, time.milliseconds());
assertTrue(accum.hasUndrained());
assertTrue(accum.hasIncomplete());
int numDrainedRecords = 0;
for (Map.Entry<Integer, List<ProducerBatch>> drainedEntry : drained.entrySet()) {
for (ProducerBatch batch : drainedEntry.getValue()) {
assertTrue(batch.isClosed());
assertFalse(batch.produceFuture.completed());
numDrainedRecords += batch.recordCount;
}
}
assertTrue(numDrainedRecords > 0 && numDrainedRecords < numRecords);
accum.abortIncompleteBatches();
assertEquals(numRecords, numExceptionReceivedInCallback.get());
assertFalse(accum.hasUndrained());
assertFalse(accum.hasIncomplete());
} |
public List<String> searchTags(@Nullable String textQuery, int page, int size) {
int maxPageSize = 100;
int maxPage = 20;
checkArgument(size <= maxPageSize, "Page size must be lower than or equal to " + maxPageSize);
checkArgument(page > 0 && page <= maxPage, "Page must be between 1 and " + maxPage);
if (size <= 0) {
return emptyList();
}
TermsAggregationBuilder tagFacet = AggregationBuilders.terms(FIELD_TAGS)
.field(FIELD_TAGS)
.size(size * page)
.minDocCount(1)
.order(BucketOrder.key(true));
if (textQuery != null) {
tagFacet.includeExclude(new IncludeExclude(".*" + escapeSpecialRegexChars(textQuery) + ".*", null));
}
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder()
.query(authorizationTypeSupport.createQueryFilter())
.fetchSource(false)
.aggregation(tagFacet);
SearchResponse response = client.search(EsClient.prepareSearch(TYPE_PROJECT_MEASURES.getMainType())
.source(searchSourceBuilder));
Terms aggregation = response.getAggregations().get(FIELD_TAGS);
return aggregation.getBuckets().stream()
.skip((page - 1) * size)
.map(Bucket::getKeyAsString)
.toList();
} | @Test
public void search_tags_with_no_tags() {
List<String> result = underTest.searchTags("whatever", 1, 10);
assertThat(result).isEmpty();
} |
private void definePackageInternal(final String packageName, final Manifest manifest) {
if (null != getPackage(packageName)) {
return;
}
Attributes attributes = manifest.getMainAttributes();
String specTitle = attributes.getValue(Attributes.Name.SPECIFICATION_TITLE);
String specVersion = attributes.getValue(Attributes.Name.SPECIFICATION_VERSION);
String specVendor = attributes.getValue(Attributes.Name.SPECIFICATION_VENDOR);
String implTitle = attributes.getValue(Attributes.Name.IMPLEMENTATION_TITLE);
String implVersion = attributes.getValue(Attributes.Name.IMPLEMENTATION_VERSION);
String implVendor = attributes.getValue(Attributes.Name.IMPLEMENTATION_VENDOR);
definePackage(packageName, specTitle, specVersion, specVendor, implTitle, implVersion, implVendor, null);
} | @Test
public void testDefinePackageInternal() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException, IOException {
Method definePackageInternal = ShenyuPluginLoader.class.getDeclaredMethod("definePackageInternal", String.class, Manifest.class);
definePackageInternal.setAccessible(true);
Manifest manifest = mock(Manifest.class);
when(manifest.getMainAttributes()).thenReturn(mock(Attributes.class));
definePackageInternal.invoke(ShenyuPluginLoader.getInstance(), "org.apache.shenyu.plugin.DividePlugin", manifest);
} |
@Override
public Path move(final Path file, final Path renamed, final TransferStatus status, final Delete.Callback callback, final ConnectionCallback connectionCallback) throws BackgroundException {
if(!new LocalFindFeature(session).find(file)) {
throw new NotfoundException(file.getAbsolute());
}
if(status.isExists()) {
new LocalDeleteFeature(session).delete(Collections.singletonMap(renamed, status), new DisabledPasswordCallback(), callback);
}
if(!session.toPath(file).toFile().renameTo(session.toPath(renamed).toFile())) {
throw new LocalExceptionMappingService().map("Cannot rename {0}", new NoSuchFileException(file.getName()), file);
}
return renamed;
} | @Test
public void testRenameCaseOnly() throws Exception {
final LocalSession session = new LocalSession(new Host(new LocalProtocol(), new LocalProtocol().getDefaultHostname()));
session.open(new DisabledProxyFinder(), new DisabledHostKeyCallback(), new DisabledLoginCallback(), new DisabledCancelCallback());
session.login(new DisabledLoginCallback(), new DisabledCancelCallback());
final Path workdir = new LocalHomeFinderFeature().find();
final Path test = new Path(workdir, StringUtils.lowerCase(new AsciiRandomStringService().random()), EnumSet.of(Path.Type.file));
new LocalTouchFeature(session).touch(test, new TransferStatus());
final Path target = new Path(workdir, StringUtils.capitalize(test.getName()), EnumSet.of(Path.Type.file));
new LocalMoveFeature(session).move(test, target, new TransferStatus(), new Delete.DisabledCallback(), new DisabledConnectionCallback());
assertFalse(new LocalFindFeature(session).find(test));
assertTrue(new LocalFindFeature(session).find(target));
new LocalDeleteFeature(session).delete(Collections.<Path>singletonList(target), new DisabledLoginCallback(), new Delete.DisabledCallback());
} |
public static String formatAddress(String host, Integer port) {
return host.contains(":")
? "[" + host + "]:" + port // IPv6
: host + ":" + port;
} | @Test
public void testFormatAddress() {
assertEquals("127.0.0.1:8000", formatAddress("127.0.0.1", 8000));
assertEquals("mydomain.com:8080", formatAddress("mydomain.com", 8080));
assertEquals("[::1]:1234", formatAddress("::1", 1234));
assertEquals("[2001:db8:85a3:8d3:1319:8a2e:370:7348]:5678", formatAddress("2001:db8:85a3:8d3:1319:8a2e:370:7348", 5678));
} |
public static int binarySearch(Sortable data) {
int value = data.size();
int lower = 0;
int upper = data.size();
while (lower < upper) {
int mid = (lower + upper) >> 1;
if (data.isLess(mid, value)) {
lower = mid + 1;
} else {
upper = mid;
}
}
return lower;
} | @Test
public void testSearch() {
int[] a = new int[] { 1, 2, 4, 4, 4, 5, 0 };
SimpleSortable sortable = new SimpleSortable(a, a.length - 1);
// search 4
a[a.length - 1] = 4;
assertThat(DataUtils.binarySearch(sortable), is(2));
// search 5
a[a.length - 1] = 5;
assertThat(DataUtils.binarySearch(sortable), is(5));
// search -5
a[a.length - 1] = -5;
assertThat(DataUtils.binarySearch(sortable), is(0));
// search 10
a[a.length - 1] = 10;
assertThat(DataUtils.binarySearch(sortable), is(6));
// search 3
a[a.length - 1] = 3;
assertThat(DataUtils.binarySearch(sortable), is(2));
} |
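The sentinel convention this relies on is visible in the test: the probe value is stored in the slot just past `size()`, so `value = data.size()` is itself a valid index and `isLess(mid, value)` compares each element against the probe. A minimal `Sortable` over an int array, modeled on the test's `SimpleSortable` (hypothetical, assuming the interface exposes just `size()` and `isLess`):

```java
// Hypothetical implementation: logical elements live in data[0..size),
// and data[size] holds the value being searched for.
final class IntSortable implements Sortable {
    private final int[] data; // length must be at least size + 1
    private final int size;

    IntSortable(int[] data, int size) {
        this.data = data;
        this.size = size;
    }

    @Override public int size() { return size; }

    @Override public boolean isLess(int left, int right) {
        return data[left] < data[right];
    }
}
```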
public static String serialize(AbstractHealthChecker healthChecker) {
try {
return MAPPER.writeValueAsString(healthChecker);
} catch (JsonProcessingException e) {
throw new NacosSerializationException(healthChecker.getClass(), e);
}
} | @Test
void testSerializeFailure() {
assertThrows(NacosSerializationException.class, () -> {
SelfDependHealthChecker selfDependHealthChecker = new SelfDependHealthChecker();
System.out.println(HealthCheckerFactory.serialize(selfDependHealthChecker));
});
} |
@SuppressWarnings("checkstyle:CyclomaticComplexity")
@Override
public void renameTable(TableIdentifier from, TableIdentifier to) {
if (from.equals(to)) {
return;
}
if (!tableExists(from)) {
throw new NoSuchTableException("Table does not exist: %s", from);
}
if (!namespaceExists(to.namespace())) {
throw new NoSuchNamespaceException("Namespace does not exist: %s", to.namespace());
}
if (schemaVersion == JdbcUtil.SchemaVersion.V1 && viewExists(to)) {
throw new AlreadyExistsException("Cannot rename %s to %s. View already exists", from, to);
}
if (tableExists(to)) {
throw new AlreadyExistsException("Table already exists: %s", to);
}
int updatedRecords =
execute(
err -> {
// SQLite doesn't set SQLState or throw SQLIntegrityConstraintViolationException
if (err instanceof SQLIntegrityConstraintViolationException
|| (err.getMessage() != null && err.getMessage().contains("constraint failed"))) {
throw new AlreadyExistsException("Table already exists: %s", to);
}
},
(schemaVersion == JdbcUtil.SchemaVersion.V1)
? JdbcUtil.V1_RENAME_TABLE_SQL
: JdbcUtil.V0_RENAME_TABLE_SQL,
JdbcUtil.namespaceToString(to.namespace()),
to.name(),
catalogName,
JdbcUtil.namespaceToString(from.namespace()),
from.name());
if (updatedRecords == 1) {
LOG.info("Renamed table from {}, to {}", from, to);
} else if (updatedRecords == 0) {
throw new NoSuchTableException("Table does not exist: %s", from);
} else {
LOG.warn(
"Rename operation affected {} rows: the catalog table's primary key assumption has been violated",
updatedRecords);
}
} | @Test
public void testRenameTable() {
TableIdentifier from = TableIdentifier.of("db", "tbl1");
TableIdentifier to = TableIdentifier.of("db", "tbl2-newtable");
catalog.createTable(from, SCHEMA, PartitionSpec.unpartitioned());
catalog.renameTable(from, to);
assertThat(catalog.listTables(to.namespace())).contains(to).doesNotContain(from);
assertThat(catalog.loadTable(to).name()).endsWith(to.name());
assertThatThrownBy(() -> catalog.renameTable(TableIdentifier.of("db", "tbl-not-exists"), to))
.isInstanceOf(NoSuchTableException.class)
.hasMessage("Table does not exist: db.tbl-not-exists");
// rename table to existing table name!
TableIdentifier from2 = TableIdentifier.of("db", "tbl2");
catalog.createTable(from2, SCHEMA, PartitionSpec.unpartitioned());
assertThatThrownBy(() -> catalog.renameTable(from2, to))
.isInstanceOf(AlreadyExistsException.class)
.hasMessage("Table already exists: db.tbl2-newtable");
} |
public Optional<Details> sync(
@NotNull StepInstance instance,
@NotNull WorkflowSummary workflowSummary,
@NotNull StepRuntimeSummary stepSummary) {
try {
switch (stepSummary.getDbOperation()) {
case INSERT:
case UPSERT:
instanceDao.insertOrUpsertStepInstance(
instance, stepSummary.getDbOperation() == DbOperation.UPSERT);
break;
case UPDATE:
instanceDao.updateStepInstance(workflowSummary, stepSummary);
break;
default:
throw new MaestroInternalError(
"Invalid DB operation: %s for step instance [%s][%s]",
stepSummary.getDbOperation(),
stepSummary.getStepId(),
stepSummary.getStepAttemptId());
}
if (!stepSummary.getPendingRecords().isEmpty()) {
return jobEventPublisher.publish(
StepInstanceUpdateJobEvent.create(instance, stepSummary.getPendingRecords()));
}
return Optional.empty();
} catch (RuntimeException e) {
return Optional.of(Details.create(e, true, "Failed to sync a Maestro step state change"));
}
} | @Test
public void testPublishFailure() {
when(publisher.publish(any())).thenReturn(Optional.of(Details.create("test error")));
StepRuntimeSummary stepRuntimeSummary =
StepRuntimeSummary.builder()
.stepId("test-summary")
.stepAttemptId(2)
.stepInstanceId(1)
.dbOperation(DbOperation.UPDATE)
.pendingRecords(
Collections.singletonList(
mock(StepInstanceUpdateJobEvent.StepInstancePendingRecord.class)))
.build();
Optional<Details> details = syncManager.sync(instance, workflowSummary, stepRuntimeSummary);
assertTrue(details.isPresent());
assertEquals("test error", details.get().getMessage());
} |
@Override
public GroupVersion groupVersion() {
return PublicApiUtils.groupVersion(new Menu());
} | @Test
void groupVersion() {
GroupVersion groupVersion = endpoint.groupVersion();
assertThat(groupVersion.toString()).isEqualTo("api.halo.run/v1alpha1");
} |
@Override
public ResourceSet update(ResourceSet oldRs, ResourceSet newRs) {
if (oldRs.getId() == null || newRs.getId() == null
|| !oldRs.getId().equals(newRs.getId())) {
throw new IllegalArgumentException("Resource set IDs mismatched");
}
if (!checkScopeConsistency(newRs)) {
throw new IllegalArgumentException("Can't save a resource set with inconsistent claims.");
}
newRs.setOwner(oldRs.getOwner()); // preserve the owner tag across updates
newRs.setClientId(oldRs.getClientId()); // preserve the client id across updates
ResourceSet saved = repository.save(newRs);
return saved;
} | @Test(expected = IllegalArgumentException.class)
public void testUpdate_mismatchedIds() {
ResourceSet rs = new ResourceSet();
rs.setId(1L);
ResourceSet rs2 = new ResourceSet();
rs2.setId(2L);
resourceSetService.update(rs, rs2);
} |
@Override
public Result reconcile(Request request) {
client.fetch(Comment.class, request.name())
.ifPresent(comment -> {
if (isDeleted(comment)) {
if (removeFinalizers(comment.getMetadata(), Set.of(FINALIZER_NAME))) {
cleanUpResources(comment);
client.update(comment);
}
return;
}
if (addFinalizers(comment.getMetadata(), Set.of(FINALIZER_NAME))) {
replyNotificationSubscriptionHelper.subscribeNewReplyReasonForComment(comment);
client.update(comment);
eventPublisher.publishEvent(new CommentCreatedEvent(this, comment));
}
compatibleCreationTime(comment);
Comment.CommentStatus status = comment.getStatusOrDefault();
status.setHasNewReply(defaultIfNull(status.getUnreadReplyCount(), 0) > 0);
updateUnReplyCountIfNecessary(comment);
updateSameSubjectRefCommentCounter(comment);
// set observedVersion to version + 1 because the update below
// will increment the stored version by one
comment.getStatusOrDefault()
.setObservedVersion(comment.getMetadata().getVersion() + 1);
client.update(comment);
});
return new Result(false, null);
} | @Test
void reconcileDelete() {
Comment comment = new Comment();
comment.setMetadata(new Metadata());
comment.getMetadata().setName("test");
comment.getMetadata().setDeletionTimestamp(Instant.now());
Set<String> finalizers = new HashSet<>();
finalizers.add(CommentReconciler.FINALIZER_NAME);
comment.getMetadata().setFinalizers(finalizers);
comment.setSpec(new Comment.CommentSpec());
comment.getSpec().setSubjectRef(getRef());
comment.getSpec().setLastReadTime(now.plusSeconds(5));
comment.setStatus(new Comment.CommentStatus());
when(client.fetch(eq(Comment.class), eq("test")))
.thenReturn(Optional.of(comment));
when(replyService.removeAllByComment(eq(comment.getMetadata().getName())))
.thenReturn(Mono.empty());
when(client.listBy(eq(Comment.class), any(ListOptions.class), isA(PageRequest.class)))
.thenReturn(ListResult.emptyResult());
Reconciler.Result reconcile = commentReconciler.reconcile(new Reconciler.Request("test"));
assertThat(reconcile.reEnqueue()).isFalse();
assertThat(reconcile.retryAfter()).isNull();
verify(replyService).removeAllByComment(eq(comment.getMetadata().getName()));
ArgumentCaptor<Comment> captor = ArgumentCaptor.forClass(Comment.class);
verify(client, times(1)).update(captor.capture());
Comment value = captor.getValue();
assertThat(value.getMetadata().getFinalizers()
.contains(CommentReconciler.FINALIZER_NAME)).isFalse();
} |
@Override
public List<DeptDO> getDeptList(Collection<Long> ids) {
if (CollUtil.isEmpty(ids)) {
return Collections.emptyList();
}
return deptMapper.selectBatchIds(ids);
} | @Test
public void testGetDeptList_ids() {
// mock data
DeptDO deptDO01 = randomPojo(DeptDO.class);
deptMapper.insert(deptDO01);
DeptDO deptDO02 = randomPojo(DeptDO.class);
deptMapper.insert(deptDO02);
// prepare parameters
List<Long> ids = Arrays.asList(deptDO01.getId(), deptDO02.getId());
// invoke
List<DeptDO> deptDOList = deptService.getDeptList(ids);
// assert
assertEquals(2, deptDOList.size());
assertEquals(deptDO01, deptDOList.get(0));
assertEquals(deptDO02, deptDOList.get(1));
} |
@Override
public boolean find(final Path file, final ListProgressListener listener) throws BackgroundException {
final boolean exists = Files.exists(session.toPath(file), LinkOption.NOFOLLOW_LINKS);
if(exists) {
if(Files.isSymbolicLink(session.toPath(file))) {
return true;
}
if(!file.isRoot()) {
try {
if(!StringUtils.equals(session.toPath(file).toFile().getCanonicalFile().getName(), file.getName())) {
return false;
}
}
catch(IOException e) {
log.warn(String.format("Failure obtaining canonical file reference for %s", file));
}
}
}
return exists;
} | @Test
public void testFindCaseSensitive() throws Exception {
final LocalSession session = new LocalSession(new Host(new LocalProtocol(), new LocalProtocol().getDefaultHostname()));
session.open(new DisabledProxyFinder(), new DisabledHostKeyCallback(), new DisabledLoginCallback(), new DisabledCancelCallback());
session.login(new DisabledLoginCallback(), new DisabledCancelCallback());
final Path home = new LocalHomeFinderFeature().find();
final Path file = new LocalTouchFeature(session).touch(new Path(home, StringUtils.lowerCase(new AsciiRandomStringService().random()), EnumSet.of(Path.Type.file)), new TransferStatus());
assertTrue(new LocalFindFeature(session).find(file));
assertFalse(new LocalFindFeature(session).find(new Path(home, StringUtils.capitalize(file.getName()), EnumSet.of(Path.Type.file))));
session.close();
} |
static URI determineClasspathResourceUri(Path baseDir, String basePackagePath, Path resource) {
String subPackageName = determineSubpackagePath(baseDir, resource);
String resourceName = resource.getFileName().toString();
String classpathResourcePath = of(basePackagePath, subPackageName, resourceName)
.filter(value -> !value.isEmpty()) // default package .
.collect(joining(RESOURCE_SEPARATOR_STRING));
return classpathResourceUri(classpathResourcePath);
} | @Test
void determineFullyQualifiedResourceNameFromRootPackage() {
Path baseDir = Paths.get("path", "to");
String basePackageName = "";
Path resourceFile = Paths.get("path", "to", "com", "example", "app", "app.feature");
URI fqn = ClasspathSupport.determineClasspathResourceUri(baseDir, basePackageName, resourceFile);
assertEquals(URI.create("classpath:com/example/app/app.feature"), fqn);
} |
@Override
public ApiResult<TopicPartition, DeletedRecords> handleResponse(
Node broker,
Set<TopicPartition> keys,
AbstractResponse abstractResponse
) {
DeleteRecordsResponse response = (DeleteRecordsResponse) abstractResponse;
Map<TopicPartition, DeletedRecords> completed = new HashMap<>();
Map<TopicPartition, Throwable> failed = new HashMap<>();
List<TopicPartition> unmapped = new ArrayList<>();
Set<TopicPartition> retriable = new HashSet<>();
for (DeleteRecordsResponseData.DeleteRecordsTopicResult topicResult: response.data().topics()) {
for (DeleteRecordsResponseData.DeleteRecordsPartitionResult partitionResult : topicResult.partitions()) {
Errors error = Errors.forCode(partitionResult.errorCode());
TopicPartition topicPartition = new TopicPartition(topicResult.name(), partitionResult.partitionIndex());
if (error == Errors.NONE) {
completed.put(topicPartition, new DeletedRecords(partitionResult.lowWatermark()));
} else {
handlePartitionError(topicPartition, error, failed, unmapped, retriable);
}
}
}
// Sanity-check if the current leader for these partitions returned results for all of them
for (TopicPartition topicPartition : keys) {
if (unmapped.isEmpty()
&& !completed.containsKey(topicPartition)
&& !failed.containsKey(topicPartition)
&& !retriable.contains(topicPartition)
) {
ApiException sanityCheckException = new ApiException(
"The response from broker " + broker.id() +
" did not contain a result for topic partition " + topicPartition);
log.error(
"DeleteRecords request for topic partition {} failed sanity check",
topicPartition,
sanityCheckException);
failed.put(topicPartition, sanityCheckException);
}
}
return new ApiResult<>(completed, failed, unmapped);
} | @Test
public void testMixedResponse() {
Map<TopicPartition, Short> errorsByPartition = new HashMap<>();
TopicPartition errorPartition = t0p0;
Errors error = Errors.UNKNOWN_SERVER_ERROR;
errorsByPartition.put(errorPartition, error.code());
TopicPartition retriableErrorPartition = t0p1;
Errors retriableError = Errors.NOT_LEADER_OR_FOLLOWER;
errorsByPartition.put(retriableErrorPartition, retriableError.code());
TopicPartition retriableErrorPartition2 = t0p2;
Errors retriableError2 = Errors.REQUEST_TIMED_OUT;
errorsByPartition.put(retriableErrorPartition2, retriableError2.code());
AdminApiHandler.ApiResult<TopicPartition, DeletedRecords> result =
handleResponse(createResponse(errorsByPartition));
Set<TopicPartition> completed = new HashSet<>(recordsToDelete.keySet());
Map<TopicPartition, Throwable> failed = new HashMap<>();
failed.put(errorPartition, error.exception());
completed.removeAll(failed.keySet());
List<TopicPartition> unmapped = new ArrayList<>();
unmapped.add(retriableErrorPartition);
completed.removeAll(unmapped);
Set<TopicPartition> retriable = singleton(retriableErrorPartition2);
completed.removeAll(retriable);
assertResult(result, completed, failed, unmapped, retriable);
} |
public static <T> T[] replaceFirst(T[] src, T oldValue, T[] newValues) {
int index = indexOf(src, oldValue);
if (index == -1) {
return src;
}
T[] dst = (T[]) Array.newInstance(src.getClass().getComponentType(), src.length - 1 + newValues.length);
// copy the first part till the match
System.arraycopy(src, 0, dst, 0, index);
// copy the second part from the match
System.arraycopy(src, index + 1, dst, index + newValues.length, src.length - index - 1);
// copy the newValues into the dst
System.arraycopy(newValues, 0, dst, index, newValues.length);
return dst;
} | @Test
public void replace_whenInMiddle() {
Integer[] result = replaceFirst(new Integer[]{1, 6, 4}, 6, new Integer[]{2, 3});
System.out.println(Arrays.toString(result));
assertArrayEquals(new Integer[]{1, 2, 3, 4}, result);
} |
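The three `System.arraycopy` calls carve the result into head, tail, and replacement segments; tracing the test's input makes the offset arithmetic concrete:

```java
// src = {1, 6, 4}, oldValue = 6 -> index = 1, newValues = {2, 3}
// dst.length = src.length - 1 + newValues.length = 3 - 1 + 2 = 4
// head: src[0..1)  -> dst[0..1)   => {1, _, _, _}
// tail: src[2..3)  -> dst[3..4)   => {1, _, _, 4}
// new : newValues  -> dst[1..3)   => {1, 2, 3, 4}
```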
public synchronized <K> KeyQueryMetadata getKeyQueryMetadataForKey(final String storeName,
final K key,
final Serializer<K> keySerializer) {
Objects.requireNonNull(keySerializer, "keySerializer can't be null");
if (topologyMetadata.hasNamedTopologies()) {
throw new IllegalArgumentException("Cannot invoke the getKeyQueryMetadataForKey(storeName, key, keySerializer)"
+ "method when using named topologies, please use the overload that"
+ "accepts a topologyName parameter to identify the correct store");
}
return getKeyQueryMetadataForKey(storeName,
key,
new DefaultStreamPartitioner<>(keySerializer));
} | @Test
public void shouldGetQueryMetadataForGlobalStoreWithKey() {
final KeyQueryMetadata metadata = metadataState.getKeyQueryMetadataForKey(globalTable, "key", Serdes.String().serializer());
assertEquals(hostOne, metadata.activeHost());
assertTrue(metadata.standbyHosts().isEmpty());
} |
@Override
public Map<K, V> getAllWithTTLOnly(Set<K> keys) {
return get(getAllWithTTLOnlyAsync(keys));
} | @Test
public void testGetAllWithTTLOnly() throws InterruptedException {
RMapCache<Integer, Integer> cache = redisson.getMapCache("testGetAllWithTTLOnly");
cache.put(1, 2, 3, TimeUnit.SECONDS);
cache.put(3, 4, 1, TimeUnit.SECONDS);
cache.put(5, 6, 1, TimeUnit.SECONDS);
Map<Integer, Integer> map = cache.getAllWithTTLOnly(new HashSet<>(Arrays.asList(1, 3, 5)));
assertThat(map).containsOnlyKeys(1, 3, 5);
assertThat(map).containsValues(2, 4, 6);
Thread.sleep(1500);
map = cache.getAllWithTTLOnly(new HashSet<>(Arrays.asList(1, 3, 5)));
assertThat(map).containsOnlyKeys(1);
assertThat(map).containsValues(2);
} |
public static void checkTopic(String topic) throws MQClientException {
if (UtilAll.isBlank(topic)) {
throw new MQClientException("The specified topic is blank", null);
}
if (topic.length() > TOPIC_MAX_LENGTH) {
throw new MQClientException(
String.format("The specified topic is longer than topic max length %d.", TOPIC_MAX_LENGTH), null);
}
if (isTopicOrGroupIllegal(topic)) {
throw new MQClientException(String.format(
"The specified topic[%s] contains illegal characters, allowing only %s", topic,
"^[%|a-zA-Z0-9_-]+$"), null);
}
} | @Test
public void testCheckTopic_BlankTopic() {
String blankTopic = "";
try {
Validators.checkTopic(blankTopic);
failBecauseExceptionWasNotThrown(MQClientException.class);
} catch (MQClientException e) {
assertThat(e).hasMessageStartingWith("The specified topic is blank");
}
} |
@SuppressWarnings("unchecked")
@Override
public <T extends Statement> ConfiguredStatement<T> inject(
final ConfiguredStatement<T> statement
) {
try {
if (statement.getStatement() instanceof CreateAsSelect) {
registerForCreateAs((ConfiguredStatement<? extends CreateAsSelect>) statement);
} else if (statement.getStatement() instanceof CreateSource) {
registerForCreateSource((ConfiguredStatement<? extends CreateSource>) statement);
}
} catch (final KsqlStatementException e) {
throw e;
} catch (final KsqlException e) {
throw new KsqlStatementException(
ErrorMessageUtil.buildErrorMessage(e),
statement.getMaskedStatementText(),
e.getCause());
}
// Remove schema id from SessionConfig
return stripSchemaIdConfig(statement);
} | @Test
public void shouldThrowAuthorizationException() throws Exception {
// Given:
givenStatement("CREATE STREAM sink WITH(value_format='AVRO') AS SELECT * FROM SOURCE;");
when(schemaRegistryClient.register(anyString(), any(ParsedSchema.class)))
.thenThrow(new RestClientException(
"User is denied operation Write on Subject", 403, 40301));
// When:
final Exception e = assertThrows(
KsqlSchemaAuthorizationException.class,
() -> injector.inject(statement)
);
// Then:
assertThat(e.getMessage(), equalTo(
"Authorization denied to Write on Schema Registry subject: [SINK-key]"));
} |
public IssueQuery create(SearchRequest request) {
try (DbSession dbSession = dbClient.openSession(false)) {
final ZoneId timeZone = parseTimeZone(request.getTimeZone()).orElse(clock.getZone());
Collection<RuleDto> ruleDtos = ruleKeysToRuleId(dbSession, request.getRules());
Collection<String> ruleUuids = ruleDtos.stream().map(RuleDto::getUuid).collect(Collectors.toSet());
Collection<String> issueKeys = collectIssueKeys(dbSession, request);
if (request.getRules() != null && request.getRules().stream().collect(Collectors.toSet()).size() != ruleDtos.size()) {
ruleUuids.add("non-existing-uuid");
}
IssueQuery.Builder builder = IssueQuery.builder()
.issueKeys(issueKeys)
.severities(request.getSeverities())
.cleanCodeAttributesCategories(request.getCleanCodeAttributesCategories())
.impactSoftwareQualities(request.getImpactSoftwareQualities())
.impactSeverities(request.getImpactSeverities())
.statuses(request.getStatuses())
.resolutions(request.getResolutions())
.issueStatuses(request.getIssueStatuses())
.resolved(request.getResolved())
.prioritizedRule(request.getPrioritizedRule())
.rules(ruleDtos)
.ruleUuids(ruleUuids)
.assigneeUuids(request.getAssigneeUuids())
.authors(request.getAuthors())
.scopes(request.getScopes())
.languages(request.getLanguages())
.tags(request.getTags())
.types(request.getTypes())
.pciDss32(request.getPciDss32())
.pciDss40(request.getPciDss40())
.owaspAsvs40(request.getOwaspAsvs40())
.owaspAsvsLevel(request.getOwaspAsvsLevel())
.owaspTop10(request.getOwaspTop10())
.owaspTop10For2021(request.getOwaspTop10For2021())
.stigAsdR5V3(request.getStigAsdV5R3())
.casa(request.getCasa())
.sansTop25(request.getSansTop25())
.cwe(request.getCwe())
.sonarsourceSecurity(request.getSonarsourceSecurity())
.assigned(request.getAssigned())
.createdAt(parseStartingDateOrDateTime(request.getCreatedAt(), timeZone))
.createdBefore(parseEndingDateOrDateTime(request.getCreatedBefore(), timeZone))
.facetMode(request.getFacetMode())
.timeZone(timeZone)
.codeVariants(request.getCodeVariants());
List<ComponentDto> allComponents = new ArrayList<>();
boolean effectiveOnComponentOnly = mergeDeprecatedComponentParameters(dbSession, request, allComponents);
addComponentParameters(builder, dbSession, effectiveOnComponentOnly, allComponents, request);
setCreatedAfterFromRequest(dbSession, builder, request, allComponents, timeZone);
String sort = request.getSort();
if (!isNullOrEmpty(sort)) {
builder.sort(sort);
builder.asc(request.getAsc());
}
return builder.build();
}
} | @Test
public void dates_are_inclusive() {
when(clock.getZone()).thenReturn(ZoneId.of("Europe/Paris"));
SearchRequest request = new SearchRequest()
.setCreatedAfter("2013-04-16")
.setCreatedBefore("2013-04-17");
IssueQuery query = underTest.create(request);
assertThat(query.createdAfter().date()).isEqualTo(parseDateTime("2013-04-16T00:00:00+0200"));
assertThat(query.createdAfter().inclusive()).isTrue();
assertThat(query.createdBefore()).isEqualTo(parseDateTime("2013-04-18T00:00:00+0200"));
} |
@VisibleForTesting
void persistQueue(final Account account, final Device device) throws MessagePersistenceException {
final UUID accountUuid = account.getUuid();
final byte deviceId = device.getId();
final Timer.Sample sample = Timer.start();
messagesCache.lockQueueForPersistence(accountUuid, deviceId);
try {
int messageCount = 0;
List<MessageProtos.Envelope> messages;
int consecutiveEmptyCacheRemovals = 0;
do {
messages = messagesCache.getMessagesToPersist(accountUuid, deviceId, MESSAGE_BATCH_LIMIT);
int messagesRemovedFromCache = messagesManager.persistMessages(accountUuid, device, messages);
messageCount += messages.size();
if (messagesRemovedFromCache == 0) {
consecutiveEmptyCacheRemovals += 1;
} else {
consecutiveEmptyCacheRemovals = 0;
}
if (consecutiveEmptyCacheRemovals > CONSECUTIVE_EMPTY_CACHE_REMOVAL_LIMIT) {
throw new MessagePersistenceException("persistence failure loop detected");
}
} while (!messages.isEmpty());
queueSizeDistributionSummery.record(messageCount);
} catch (ItemCollectionSizeLimitExceededException e) {
oversizedQueueCounter.increment();
maybeUnlink(account, deviceId); // may throw, in which case we'll retry later by the usual mechanism
} finally {
messagesCache.unlockQueueForPersistence(accountUuid, deviceId);
sample.stop(persistQueueTimer);
}
} | @Test
void testPersistQueueRetryLoop() {
final String queueName = new String(
MessagesCache.getMessageQueueKey(DESTINATION_ACCOUNT_UUID, DESTINATION_DEVICE_ID), StandardCharsets.UTF_8);
final int messageCount = (MessagePersister.MESSAGE_BATCH_LIMIT * 3) + 7;
final Instant now = Instant.now();
insertMessages(DESTINATION_ACCOUNT_UUID, DESTINATION_DEVICE_ID, messageCount, now);
setNextSlotToPersist(SlotHash.getSlot(queueName));
// returning `0` indicates something not working correctly
when(messagesManager.persistMessages(any(UUID.class), any(), anyList())).thenReturn(0);
assertTimeoutPreemptively(Duration.ofSeconds(1), () ->
assertThrows(MessagePersistenceException.class,
() -> messagePersister.persistQueue(destinationAccount, DESTINATION_DEVICE)));
} |
public NumericIndicator max(Indicator<Num> other) {
return NumericIndicator.of(BinaryOperation.max(this, other));
} | @Test
public void max() {
final NumericIndicator numericIndicator = NumericIndicator.of(cp1);
final NumericIndicator staticOp = numericIndicator.max(5);
assertNumEquals(5, staticOp.getValue(0));
assertNumEquals(9, staticOp.getValue(8));
final NumericIndicator dynamicOp = numericIndicator.max(ema);
assertNumEquals(cp1.getValue(0).max(ema.getValue(0)), dynamicOp.getValue(0));
assertNumEquals(cp1.getValue(8).max(ema.getValue(8)), dynamicOp.getValue(8));
} |
public Optional<Projection> createProjection(final ProjectionSegment projectionSegment) {
if (projectionSegment instanceof ShorthandProjectionSegment) {
return Optional.of(createProjection((ShorthandProjectionSegment) projectionSegment));
}
if (projectionSegment instanceof ColumnProjectionSegment) {
return Optional.of(createProjection((ColumnProjectionSegment) projectionSegment));
}
if (projectionSegment instanceof ExpressionProjectionSegment) {
return Optional.of(createProjection((ExpressionProjectionSegment) projectionSegment));
}
if (projectionSegment instanceof AggregationDistinctProjectionSegment) {
return Optional.of(createProjection((AggregationDistinctProjectionSegment) projectionSegment));
}
if (projectionSegment instanceof AggregationProjectionSegment) {
return Optional.of(createProjection((AggregationProjectionSegment) projectionSegment));
}
if (projectionSegment instanceof SubqueryProjectionSegment) {
return Optional.of(createProjection((SubqueryProjectionSegment) projectionSegment));
}
if (projectionSegment instanceof ParameterMarkerExpressionSegment) {
return Optional.of(createProjection((ParameterMarkerExpressionSegment) projectionSegment));
}
return Optional.empty();
} | @Test
void assertCreateProjectionWhenProjectionSegmentInstanceOfParameterMarkerExpressionSegment() {
ParameterMarkerExpressionSegment parameterMarkerExpressionSegment = new ParameterMarkerExpressionSegment(7, 7, 0);
parameterMarkerExpressionSegment.setAlias(new AliasSegment(0, 0, new IdentifierValue("alias")));
Optional<Projection> actual = new ProjectionEngine(databaseType).createProjection(parameterMarkerExpressionSegment);
assertTrue(actual.isPresent());
assertThat(actual.get(), instanceOf(ParameterMarkerProjection.class));
assertThat(actual.get().getAlias().map(IdentifierValue::getValue).orElse(null), is("alias"));
} |
public synchronized int sendFetches() {
final Map<Node, FetchSessionHandler.FetchRequestData> fetchRequests = prepareFetchRequests();
sendFetchesInternal(
fetchRequests,
(fetchTarget, data, clientResponse) -> {
synchronized (Fetcher.this) {
handleFetchSuccess(fetchTarget, data, clientResponse);
}
},
(fetchTarget, data, error) -> {
synchronized (Fetcher.this) {
handleFetchFailure(fetchTarget, data, error);
}
});
return fetchRequests.size();
} | @Test
public void testFetchAfterPartitionWithFetchedRecordsIsUnassigned() {
buildFetcher(2);
assignFromUser(singleton(tp0));
subscriptions.seek(tp0, 1);
// Returns 3 records while `max.poll.records` is configured to 2
client.prepareResponse(matchesOffset(tidp0, 1), fullFetchResponse(tidp0, records, Errors.NONE, 100L, 0));
assertEquals(1, sendFetches());
consumerClient.poll(time.timer(0));
Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> recordsByPartition = fetchRecords();
List<ConsumerRecord<byte[], byte[]>> recordsToTest = recordsByPartition.get(tp0);
assertEquals(2, recordsToTest.size());
assertEquals(3L, subscriptions.position(tp0).offset);
assertEquals(1, recordsToTest.get(0).offset());
assertEquals(2, recordsToTest.get(1).offset());
assignFromUser(singleton(tp1));
client.prepareResponse(matchesOffset(tidp1, 4), fullFetchResponse(tidp1, nextRecords, Errors.NONE, 100L, 0));
subscriptions.seek(tp1, 4);
assertEquals(1, sendFetches());
consumerClient.poll(time.timer(0));
Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> fetchedRecords = fetchRecords();
assertNull(fetchedRecords.get(tp0));
recordsToTest = fetchedRecords.get(tp1);
assertEquals(2, recordsToTest.size());
assertEquals(6L, subscriptions.position(tp1).offset);
assertEquals(4, recordsToTest.get(0).offset());
assertEquals(5, recordsToTest.get(1).offset());
} |
@Override
public <T extends State> T state(StateNamespace namespace, StateTag<T> address) {
return workItemState.get(namespace, address, StateContexts.nullContext());
} | @Test
public void testMapAddBeforeGet() throws Exception {
StateTag<MapState<String, Integer>> addr =
StateTags.map("map", StringUtf8Coder.of(), VarIntCoder.of());
MapState<String, Integer> mapState = underTest.state(NAMESPACE, addr);
final String tag = "tag";
SettableFuture<Integer> future = SettableFuture.create();
when(mockReader.valueFuture(
protoKeyFromUserKey(tag, StringUtf8Coder.of()), STATE_FAMILY, VarIntCoder.of()))
.thenReturn(future);
ReadableState<Integer> result = mapState.get("tag");
result = result.readLater();
waitAndSet(future, 1, 200);
assertEquals(1, (int) result.read());
mapState.put("tag", 2);
assertEquals(2, (int) result.read());
} |
public void updateServiceMetadata(Service service, ServiceMetadata serviceMetadata) {
MetadataOperation<ServiceMetadata> operation = buildMetadataOperation(service);
operation.setMetadata(serviceMetadata);
WriteRequest operationLog = WriteRequest.newBuilder().setGroup(Constants.SERVICE_METADATA)
.setOperation(DataOperation.CHANGE.name()).setData(ByteString.copyFrom(serializer.serialize(operation)))
.build();
submitMetadataOperation(operationLog);
} | @Test
void testUpdateServiceMetadata() {
assertThrows(NacosRuntimeException.class, () -> {
ServiceMetadata serviceMetadata = new ServiceMetadata();
namingMetadataOperateService.updateServiceMetadata(service, serviceMetadata);
Mockito.verify(service).getNamespace();
Mockito.verify(service).getGroup();
Mockito.verify(service).getName();
});
} |
public static JsonElement parseReader(Reader reader) throws JsonIOException, JsonSyntaxException {
try {
JsonReader jsonReader = new JsonReader(reader);
JsonElement element = parseReader(jsonReader);
if (!element.isJsonNull() && jsonReader.peek() != JsonToken.END_DOCUMENT) {
throw new JsonSyntaxException("Did not consume the entire document.");
}
return element;
} catch (MalformedJsonException e) {
throw new JsonSyntaxException(e);
} catch (IOException e) {
throw new JsonIOException(e);
} catch (NumberFormatException e) {
throw new JsonSyntaxException(e);
}
} | @Test
public void testParseReader() {
StringReader reader = new StringReader("{a:10,b:'c'}");
JsonElement e = JsonParser.parseReader(reader);
assertThat(e.isJsonObject()).isTrue();
assertThat(e.getAsJsonObject().get("a").getAsInt()).isEqualTo(10);
assertThat(e.getAsJsonObject().get("b").getAsString()).isEqualTo("c");
} |
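The test input `{a:10,b:'c'}` is not strict JSON (unquoted names, single quotes); it parses because JsonParser's parse methods read in lenient mode. A small sketch of what a strict `JsonReader` does with the same input:

```java
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.MalformedJsonException;
import java.io.IOException;
import java.io.StringReader;

public class StrictParseDemo {
    public static void main(String[] args) throws IOException {
        // JsonReader is strict by default; JsonParser.parseReader switches
        // to lenient mode internally, which is why the test above succeeds.
        JsonReader strict = new JsonReader(new StringReader("{a:10,b:'c'}"));
        try {
            strict.beginObject();
            strict.nextName(); // unquoted name is rejected in strict mode
        } catch (MalformedJsonException expected) {
            System.out.println("strict mode rejects unquoted names: " + expected.getMessage());
        }
    }
}
```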
public static Set<Integer> toSet(int[] replicas) {
Set<Integer> result = new HashSet<>();
for (int replica : replicas) {
result.add(replica);
}
return result;
} | @Test
public void testToSet() {
assertEquals(Collections.emptySet(), Replicas.toSet(new int[] {}));
assertEquals(new HashSet<>(Arrays.asList(3, 1, 5)),
Replicas.toSet(new int[] {1, 3, 5}));
assertEquals(new HashSet<>(Arrays.asList(1, 2, 10)),
Replicas.toSet(new int[] {1, 1, 2, 10, 10}));
} |
@Nullable
public byte[] getValue() {
return mValue;
} | @Test
public void setValue_SINT16() {
final MutableData data = new MutableData(new byte[2]);
data.setValue(-6192, Data.FORMAT_SINT16_LE, 0);
assertArrayEquals(new byte[] { (byte) 0xD0, (byte) 0xE7 } , data.getValue());
} |
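The expected byte pair follows from two's-complement, little-endian encoding: -6192 = 0x10000 - 0x1830 = 0xE7D0, so the low byte 0xD0 is written first. A quick standalone check, independent of the Data API:

```java
short v = (short) -6192;
int bits = v & 0xFFFF;                   // two's-complement pattern: 0xE7D0
byte lo = (byte) (bits & 0xFF);          // 0xD0, first byte (little-endian)
byte hi = (byte) ((bits >>> 8) & 0xFF);  // 0xE7, second byte
assert bits == 0xE7D0 && lo == (byte) 0xD0 && hi == (byte) 0xE7;
```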
@Override
public Stream<HoodieInstant> getCandidateInstants(HoodieTableMetaClient metaClient, HoodieInstant currentInstant,
Option<HoodieInstant> lastSuccessfulInstant) {
HoodieActiveTimeline activeTimeline = metaClient.reloadActiveTimeline();
if (ClusteringUtils.isClusteringInstant(activeTimeline, currentInstant)
|| COMPACTION_ACTION.equals(currentInstant.getAction())) {
return getCandidateInstantsForTableServicesCommits(activeTimeline, currentInstant);
} else {
return getCandidateInstantsForNonTableServicesCommits(activeTimeline, currentInstant);
}
} | @Test
public void testConcurrentWritesWithInterleavingSuccessfulCluster() throws Exception {
createCommit(metaClient.createNewInstantTime(), metaClient);
HoodieActiveTimeline timeline = metaClient.getActiveTimeline();
// consider commits before this are all successful
Option<HoodieInstant> lastSuccessfulInstant = timeline.getCommitsTimeline().filterCompletedInstants().lastInstant();
// writer 1 starts
String currentWriterInstant = metaClient.createNewInstantTime();
createInflightCommit(currentWriterInstant, metaClient);
// TODO: Remove sleep stmt once the modified times issue is fixed.
// Sleep thread for at least 1sec for consecutive commits that way they do not have two commits modified times falls on the same millisecond.
Thread.sleep(1000);
// clustering writer starts and complete before ingestion commit.
String replaceWriterInstant = metaClient.createNewInstantTime();
createCluster(replaceWriterInstant, WriteOperationType.CLUSTER, metaClient);
Option<HoodieInstant> currentInstant = Option.of(new HoodieInstant(HoodieInstant.State.INFLIGHT, HoodieTimeline.COMMIT_ACTION, currentWriterInstant));
PreferWriterConflictResolutionStrategy strategy = new PreferWriterConflictResolutionStrategy();
metaClient.reloadActiveTimeline();
List<HoodieInstant> candidateInstants = strategy
.getCandidateInstants(metaClient, currentInstant.get(), lastSuccessfulInstant)
.collect(Collectors.toList());
Assertions.assertEquals(1, candidateInstants.size());
Assertions.assertEquals(replaceWriterInstant, candidateInstants.get(0).getTimestamp());
HoodieCommitMetadata currentMetadata = createCommitMetadata(currentWriterInstant);
ConcurrentOperation thatCommitOperation = new ConcurrentOperation(candidateInstants.get(0), metaClient);
ConcurrentOperation thisCommitOperation = new ConcurrentOperation(currentInstant.get(), currentMetadata);
Assertions.assertTrue(strategy.hasConflict(thisCommitOperation, thatCommitOperation));
try {
strategy.resolveConflict(null, thisCommitOperation, thatCommitOperation);
Assertions.fail("Cannot reach here, should have thrown a conflict");
} catch (HoodieWriteConflictException e) {
// expected
}
} |
@Override
public ConfigAdvanceInfo findConfigAdvanceInfo(final String dataId, final String group, final String tenant) {
final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant;
List<String> configTagList = this.selectTagByConfig(dataId, group, tenant);
ConfigInfoMapper configInfoMapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
TableConstant.CONFIG_INFO);
ConfigAdvanceInfo configAdvance = databaseOperate.queryOne(configInfoMapper.select(
Arrays.asList("gmt_create", "gmt_modified", "src_user", "src_ip", "c_desc", "c_use", "effect", "type",
"c_schema"), Arrays.asList("data_id", "group_id", "tenant_id")),
new Object[] {dataId, group, tenantTmp}, CONFIG_ADVANCE_INFO_ROW_MAPPER);
if (CollectionUtils.isNotEmpty(configTagList)) {
StringBuilder configTagsTmp = new StringBuilder();
for (String configTag : configTagList) {
if (configTagsTmp.length() == 0) {
configTagsTmp.append(configTag);
} else {
configTagsTmp.append(',').append(configTag);
}
}
configAdvance.setConfigTags(configTagsTmp.toString());
}
return configAdvance;
} | @Test
void testFindConfigAdvanceInfo() {
String dataId = "dataId1324";
String group = "group23546";
String tenant = "tenant13245";
//mock select tags
List<String> mockTags = Arrays.asList("tag1", "tag2", "tag3");
when(databaseOperate.queryMany(anyString(), eq(new Object[] {dataId, group, tenant}), eq(String.class))).thenReturn(mockTags);
String schema = "schema12345654";
//mock select config advance
ConfigAdvanceInfo mockedAdvance = new ConfigAdvanceInfo();
mockedAdvance.setSchema(schema);
when(databaseOperate.queryOne(anyString(), eq(new Object[] {dataId, group, tenant}),
eq(CONFIG_ADVANCE_INFO_ROW_MAPPER))).thenReturn(mockedAdvance);
//execute return mock obj
ConfigAdvanceInfo configAdvanceInfo = embeddedConfigInfoPersistService.findConfigAdvanceInfo(dataId, group, tenant);
//expect check schema & tags.
assertEquals(mockedAdvance.getSchema(), configAdvanceInfo.getSchema());
assertEquals(String.join(",", mockTags), configAdvanceInfo.getConfigTags());
} |
public static String validateIndexNameIgnoreCase(@Nullable String indexName) {
checkDbIdentifier(indexName, "Index name", INDEX_NAME_MAX_SIZE, true);
return indexName;
} | @Test
public void accept_allowed_identifier_for_index_name_that_is_SQL_reserved_keyword_ignoring_case() {
assertThatCode(() -> validateIndexNameIgnoreCase("value"))
.doesNotThrowAnyException();
assertThatCode(() -> validateIndexNameIgnoreCase("VALUE"))
.doesNotThrowAnyException();
} |
public static String elasticsearchEvent(String indexName, String eventId) {
checkArgument("indexName", indexName);
checkArgument("eventId", eventId);
return String.join(":", ES_EVENT, indexName, eventId);
} | @Test
public void elasticsearchEvent() {
assertThat(EventOriginContext.elasticsearchEvent("gl-events_0", "01DF13GB094MT6390TYQB2Q73Q"))
.isEqualTo("urn:graylog:event:es:gl-events_0:01DF13GB094MT6390TYQB2Q73Q");
assertThatCode(() -> EventOriginContext.elasticsearchEvent("", "01DF13GB094MT6390TYQB2Q73Q"))
.hasMessageContaining("indexName")
.isInstanceOf(IllegalArgumentException.class);
assertThatCode(() -> EventOriginContext.elasticsearchEvent(null, "01DF13GB094MT6390TYQB2Q73Q"))
.hasMessageContaining("indexName")
.isInstanceOf(IllegalArgumentException.class);
assertThatCode(() -> EventOriginContext.elasticsearchEvent("gl-events_0", ""))
.hasMessageContaining("eventId")
.isInstanceOf(IllegalArgumentException.class);
assertThatCode(() -> EventOriginContext.elasticsearchEvent("gl-events_0", null))
.hasMessageContaining("eventId")
.isInstanceOf(IllegalArgumentException.class);
} |
public static boolean setIfEqualOrGreaterThan(AtomicLong oldValue, long newValue) {
while (true) {
long local = oldValue.get();
if (newValue < local) {
return false;
}
if (oldValue.compareAndSet(local, newValue)) {
return true;
}
}
} | @Test
public void testSetIfEqualOrGreaterThan() {
assertTrue(ConcurrencyUtil.setIfEqualOrGreaterThan(new AtomicLong(1), 1));
assertTrue(ConcurrencyUtil.setIfEqualOrGreaterThan(new AtomicLong(1), 2));
assertFalse(ConcurrencyUtil.setIfEqualOrGreaterThan(new AtomicLong(2), 1));
} |
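A typical use of this helper is a monotonic high-water mark updated from several threads; a hedged sketch built on the method above (the `HighWaterMark` class is illustrative, not from the source):

```java
import java.util.concurrent.atomic.AtomicLong;

// Illustrative tracker: keeps the largest value observed so far.
final class HighWaterMark {
    private final AtomicLong max = new AtomicLong(Long.MIN_VALUE);

    void record(long observed) {
        // CAS races are retried inside the utility; observations below
        // the current max return false and are simply dropped.
        ConcurrencyUtil.setIfEqualOrGreaterThan(max, observed);
    }

    long current() {
        return max.get();
    }
}
```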
private void cleanupRemovedIndices()
throws IOException {
File tmpIdxFile = new File(_segmentDirectory, V1Constants.INDEX_FILE_NAME + ".tmp");
// Sort indices by column name and index type while copying, so that the
// new index_map file is easy to inspect for troubleshooting.
List<IndexEntry> retained = copyIndices(_indexFile, tmpIdxFile, _columnEntries);
FileUtils.deleteQuietly(_indexFile);
Preconditions
.checkState(tmpIdxFile.renameTo(_indexFile), "Failed to rename temp index file: %s to original index file: %s",
tmpIdxFile, _indexFile);
File mapFile = new File(_segmentDirectory, V1Constants.INDEX_MAP_FILE_NAME);
FileUtils.deleteQuietly(mapFile);
try (PrintWriter writer = new PrintWriter(new BufferedWriter(new FileWriter(mapFile)))) {
persistIndexMaps(retained, writer);
}
} | @Test
public void testCleanupRemovedIndices()
throws IOException, ConfigurationException {
try (SingleFileIndexDirectory sfd = new SingleFileIndexDirectory(TEMP_DIR, _segmentMetadata, ReadMode.mmap)) {
PinotDataBuffer buf = sfd.newBuffer("col1", StandardIndexes.forward(), 1024);
buf.putInt(0, 1); // from begin position.
buf = sfd.newBuffer("col1", StandardIndexes.dictionary(), 1024);
buf.putChar(111, 'h');
buf = sfd.newBuffer("col2", StandardIndexes.forward(), 1024);
buf.putChar(222, 'w');
buf = sfd.newBuffer("col1", StandardIndexes.json(), 1024);
buf.putLong(333, 111111L);
buf = sfd.newBuffer("col2", StandardIndexes.h3(), 1024);
buf.putDouble(1016, 222.222); // touch end position.
}
// Remove the JSON index to trigger cleanup, but keep H3 index.
try (SingleFileIndexDirectory sfd = new SingleFileIndexDirectory(TEMP_DIR, _segmentMetadata, ReadMode.mmap)) {
assertTrue(sfd.hasIndexFor("col1", StandardIndexes.json()));
sfd.removeIndex("col1", StandardIndexes.json());
}
// Read indices back and check the content.
try (SingleFileIndexDirectory sfd = new SingleFileIndexDirectory(TEMP_DIR, _segmentMetadata, ReadMode.mmap)) {
assertFalse(sfd.hasIndexFor("col1", StandardIndexes.json()));
assertTrue(sfd.hasIndexFor("col1", StandardIndexes.forward()));
PinotDataBuffer buf = sfd.getBuffer("col1", StandardIndexes.forward());
assertEquals(buf.getInt(0), 1);
assertTrue(sfd.hasIndexFor("col1", StandardIndexes.dictionary()));
buf = sfd.getBuffer("col1", StandardIndexes.dictionary());
assertEquals(buf.getChar(111), 'h');
assertTrue(sfd.hasIndexFor("col2", StandardIndexes.forward()));
buf = sfd.getBuffer("col2", StandardIndexes.forward());
assertEquals(buf.getChar(222), 'w');
assertTrue(sfd.hasIndexFor("col2", StandardIndexes.h3()));
buf = sfd.getBuffer("col2", StandardIndexes.h3());
assertEquals(buf.getDouble(1016), 222.222);
}
} |
@Override
public Set<OAuth2RefreshTokenEntity> getAllRefreshTokensForUser(String userName) {
return tokenRepository.getRefreshTokensByUserName(userName);
} | @Test
	public void getAllRefreshTokensForUser() {
when(tokenRepository.getRefreshTokensByUserName(userName)).thenReturn(newHashSet(refreshToken));
Set<OAuth2RefreshTokenEntity> tokens = service.getAllRefreshTokensForUser(userName);
assertEquals(1, tokens.size());
assertTrue(tokens.contains(refreshToken));
} |
@Secured(action = ActionTypes.READ)
@GetMapping("/service")
public Object serviceDetail(@RequestParam(defaultValue = Constants.DEFAULT_NAMESPACE_ID) String namespaceId,
String serviceName) throws NacosException {
String serviceNameWithoutGroup = NamingUtils.getServiceName(serviceName);
String groupName = NamingUtils.getGroupName(serviceName);
return judgeCatalogService().getServiceDetail(namespaceId, groupName, serviceNameWithoutGroup);
} | @Test
void testServiceDetail() throws Exception {
Object expected = new Object();
when(catalogServiceV2.getServiceDetail(Constants.DEFAULT_NAMESPACE_ID, TEST_GROUP_NAME, TEST_SERVICE_NAME)).thenReturn(expected);
Object actual = catalogController.serviceDetail(Constants.DEFAULT_NAMESPACE_ID,
TEST_GROUP_NAME + Constants.SERVICE_INFO_SPLITER + TEST_SERVICE_NAME);
assertEquals(expected, actual);
} |
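// A minimal sketch of the grouped-name split the controller performs before
// delegating to the catalog service. The "@@" separator stands in for
// Constants.SERVICE_INFO_SPLITER and the fallback behaviour is simplified for
// illustration; class and method names are hypothetical.
final class GroupedServiceNameSketch {
    static final String SPLITER = "@@"; // assumed value of SERVICE_INFO_SPLITER

    // Returns {groupName, serviceName}; when no separator is present the whole
    // string is treated as the service name (the real helper applies a default group).
    static String[] split(String grouped) {
        int idx = grouped.indexOf(SPLITER);
        if (idx < 0) {
            return new String[] {"", grouped};
        }
        return new String[] {grouped.substring(0, idx), grouped.substring(idx + SPLITER.length())};
    }
}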
public static KafkaUserModel fromCrd(KafkaUser kafkaUser,
String secretPrefix,
boolean aclsAdminApiSupported) {
KafkaUserModel result = new KafkaUserModel(kafkaUser.getMetadata().getNamespace(),
kafkaUser.getMetadata().getName(),
Labels.fromResource(kafkaUser).withStrimziKind(kafkaUser.getKind()),
secretPrefix);
validateTlsUsername(kafkaUser);
validateDesiredPassword(kafkaUser);
result.setOwnerReference(kafkaUser);
result.setAuthentication(kafkaUser.getSpec().getAuthentication());
if (kafkaUser.getSpec().getAuthorization() != null && kafkaUser.getSpec().getAuthorization().getType().equals(KafkaUserAuthorizationSimple.TYPE_SIMPLE)) {
if (aclsAdminApiSupported) {
KafkaUserAuthorizationSimple simple = (KafkaUserAuthorizationSimple) kafkaUser.getSpec().getAuthorization();
result.setSimpleAclRules(simple.getAcls());
} else {
throw new InvalidResourceException("Simple authorization ACL rules are configured but not supported in the Kafka cluster configuration.");
}
}
result.setQuotas(kafkaUser.getSpec().getQuotas());
if (kafkaUser.getSpec().getTemplate() != null
&& kafkaUser.getSpec().getTemplate().getSecret() != null
&& kafkaUser.getSpec().getTemplate().getSecret().getMetadata() != null) {
result.templateSecretLabels = kafkaUser.getSpec().getTemplate().getSecret().getMetadata().getLabels();
result.templateSecretAnnotations = kafkaUser.getSpec().getTemplate().getSecret().getMetadata().getAnnotations();
}
return result;
} | @Test
public void testFromCrdScramShaUserWithEmptyPasswordThrows() {
KafkaUser emptyPassword = new KafkaUserBuilder(scramShaUser)
.editSpec()
.withNewKafkaUserScramSha512ClientAuthentication()
.withNewPassword()
.endPassword()
.endKafkaUserScramSha512ClientAuthentication()
.endSpec()
.build();
InvalidResourceException e = assertThrows(InvalidResourceException.class, () -> {
KafkaUserModel.fromCrd(emptyPassword, UserOperatorConfig.SECRET_PREFIX.defaultValue(), Boolean.parseBoolean(UserOperatorConfig.ACLS_ADMIN_API_SUPPORTED.defaultValue()));
});
assertThat(e.getMessage(), is("Resource requests custom SCRAM-SHA-512 password but doesn't specify the secret name and/or key"));
} |
public Node parse() throws ScanException {
return E();
} | @Test
	public void keywordGluedToLiteral() throws Exception {
		Parser<Object> p = new Parser<>("%x{}a");
Node t = p.parse();
SimpleKeywordNode witness = new SimpleKeywordNode("x");
witness.setOptions(new ArrayList<String>());
witness.next = new Node(Node.LITERAL, "a");
assertEquals(witness, t);
} |
public OpenAPI read(Class<?> cls) {
return read(cls, resolveApplicationPath(), null, false, null, null, new LinkedHashSet<String>(), new ArrayList<Parameter>(), new HashSet<Class<?>>());
} | @Test(description = "RequestBody with ref")
public void testRequestBodyWithRef() {
Components components = new Components();
components.addRequestBodies("User", new RequestBody().description("Test RequestBody"));
OpenAPI oas = new OpenAPI()
.info(new Info().description("info"))
.components(components);
Reader reader = new Reader(oas);
OpenAPI openAPI = reader.read(RefRequestBodyResource.class);
String yaml = "openapi: 3.0.1\n" +
"info:\n" +
" description: info\n" +
"paths:\n" +
" /:\n" +
" get:\n" +
" summary: Simple get operation\n" +
" description: Defines a simple get operation with a payload complex input object\n" +
" operationId: sendPayload\n" +
" requestBody:\n" +
" $ref: '#/components/requestBodies/User'\n" +
" responses:\n" +
" default:\n" +
" description: default response\n" +
" content:\n" +
" '*/*': {}\n" +
" deprecated: true\n" +
"components:\n" +
" schemas:\n" +
" User:\n" +
" type: object\n" +
" properties:\n" +
" id:\n" +
" type: integer\n" +
" format: int64\n" +
" username:\n" +
" type: string\n" +
" firstName:\n" +
" type: string\n" +
" lastName:\n" +
" type: string\n" +
" email:\n" +
" type: string\n" +
" password:\n" +
" type: string\n" +
" phone:\n" +
" type: string\n" +
" userStatus:\n" +
" type: integer\n" +
" description: User Status\n" +
" format: int32\n" +
" xml:\n" +
" name: User\n" +
" requestBodies:\n" +
" User:\n" +
" description: Test RequestBody\n";
SerializationMatchers.assertEqualsToYaml(openAPI, yaml);
} |
@Override
public KsMaterializedQueryResult<WindowedRow> get(
final GenericKey key,
final int partition,
final Range<Instant> windowStartBounds,
final Range<Instant> windowEndBounds,
final Optional<Position> position
) {
try {
final ReadOnlyWindowStore<GenericKey, ValueAndTimestamp<GenericRow>> store = stateStore
.store(QueryableStoreTypes.timestampedWindowStore(), partition);
final Instant lower = calculateLowerBound(windowStartBounds, windowEndBounds);
final Instant upper = calculateUpperBound(windowStartBounds, windowEndBounds);
try (WindowStoreIterator<ValueAndTimestamp<GenericRow>> it
= cacheBypassFetcher.fetch(store, key, lower, upper)) {
final Builder<WindowedRow> builder = ImmutableList.builder();
while (it.hasNext()) {
final KeyValue<Long, ValueAndTimestamp<GenericRow>> next = it.next();
final Instant windowStart = Instant.ofEpochMilli(next.key);
if (!windowStartBounds.contains(windowStart)) {
continue;
}
final Instant windowEnd = windowStart.plus(windowSize);
if (!windowEndBounds.contains(windowEnd)) {
continue;
}
final TimeWindow window =
new TimeWindow(windowStart.toEpochMilli(), windowEnd.toEpochMilli());
final WindowedRow row = WindowedRow.of(
stateStore.schema(),
new Windowed<>(key, window),
next.value.value(),
next.value.timestamp()
);
builder.add(row);
}
return KsMaterializedQueryResult.rowIterator(builder.build().iterator());
}
} catch (final Exception e) {
throw new MaterializationException("Failed to get value from materialized table", e);
}
} | @Test
public void shouldReturnValuesForOpenEndBounds_fetchAll() {
// Given:
final Range<Instant> end = Range.open(
NOW,
NOW.plusSeconds(10)
);
final Range<Instant> startEquiv = Range.open(
end.lowerEndpoint().minus(WINDOW_SIZE),
end.upperEndpoint().minus(WINDOW_SIZE)
);
when(keyValueIterator.hasNext())
.thenReturn(true, true, true, false);
when(keyValueIterator.next())
.thenReturn(new KeyValue<>(new Windowed<>(A_KEY,
new TimeWindow(startEquiv.lowerEndpoint().toEpochMilli(),
startEquiv.lowerEndpoint().toEpochMilli() + WINDOW_SIZE.toMillis())), VALUE_1))
.thenReturn(new KeyValue<>(new Windowed<>(A_KEY2,
new TimeWindow(startEquiv.lowerEndpoint().plusMillis(1).toEpochMilli(),
startEquiv.lowerEndpoint().toEpochMilli() + WINDOW_SIZE.toMillis() + 1)), VALUE_2))
.thenReturn(new KeyValue<>(new Windowed<>(A_KEY3,
new TimeWindow(startEquiv.upperEndpoint().toEpochMilli(),
startEquiv.upperEndpoint().toEpochMilli() + WINDOW_SIZE.toMillis())), VALUE_3))
.thenThrow(new AssertionError());
// When:
final Iterator<WindowedRow> rowIterator =
table.get(PARTITION, Range.all(), end).rowIterator;
// Then:
assertThat(rowIterator.hasNext(), is(true));
assertThat(rowIterator.next(),
        is(WindowedRow.of(
SCHEMA,
windowedKey(A_KEY2, startEquiv.lowerEndpoint().plusMillis(1)),
VALUE_2.value(),
VALUE_2.timestamp())));
assertThat(rowIterator.hasNext(), is(false));
} |
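// A minimal sketch, with hypothetical values, of the bound translation the
// test performs: because every window satisfies windowEnd = windowStart +
// WINDOW_SIZE, an end-time range maps to an equivalent start-time range
// shifted back by the fixed window size.
import java.time.Duration;
import java.time.Instant;

final class WindowBoundsSketch {
    public static void main(String[] args) {
        Duration windowSize = Duration.ofMinutes(5);
        Instant endLower = Instant.parse("2024-01-01T00:10:00Z");
        Instant endUpper = Instant.parse("2024-01-01T00:20:00Z");
        // Windows ending in (00:10, 00:20) must start in (00:05, 00:15).
        System.out.println(endLower.minus(windowSize) + " .. " + endUpper.minus(windowSize));
    }
}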
@Override
public GenericRow transform(GenericRow record) {
try {
GenericRow originalRow = _fieldsToUnnest.isEmpty() ? null : record.copy(_fieldsToUnnest);
flattenMap(record, new ArrayList<>(record.getFieldToValueMap().keySet()));
for (String field : _fieldsToUnnest) {
unnestCollection(record, field);
}
Object unnestedRows = record.getValue(GenericRow.MULTIPLE_RECORDS_KEY);
if (originalRow != null && unnestedRows instanceof Collection) {
for (GenericRow unnestedRow : (Collection<GenericRow>) unnestedRows) {
for (String field : _fieldsToUnnest) {
unnestedRow.putValue(field, originalRow.getValue(field));
}
}
}
renamePrefixes(record);
} catch (Exception e) {
if (!_continueOnError) {
throw new RuntimeException("Caught exception while transforming complex types", e);
} else {
        LOGGER.debug("Caught exception while transforming complex types for record: {}", record, e);
record.putValue(GenericRow.INCOMPLETE_RECORD_KEY, true);
}
}
return record;
} | @Test
public void testUnnestMultiLevelArray() {
// {
// "level1" : [ {
// "level2" : {
// "level3" : [ {
// "level4" : "foo_bar"
// }, {
// "level4" : "foo_bar"
// } ]
// }
// }, {
// "level2" : {
// "level3" : [ {
// "level4" : "foo_bar"
// }, {
// "level4" : "foo_bar"
// } ]
// }
// } ]
// }
GenericRow genericRow = new GenericRow();
Map<String, String> level3 = new HashMap<>();
level3.put("level4", "foo_bar");
Map<String, Object> level2 = new HashMap<>();
Object[] level3Arr = new Object[]{level3, level3};
level2.put("level3", level3Arr);
Map<String, Object> level1 = new HashMap<>();
level1.put("level2", level2);
Object[] level1Arr = new Object[]{level1, level1};
genericRow.putValue("level1", level1Arr);
List<String> fieldsToUnnest = new ArrayList<>();
fieldsToUnnest.add("level1");
fieldsToUnnest.add("level1.level2.level3");
ComplexTypeTransformer complexTypeTransformer = new ComplexTypeTransformer(fieldsToUnnest, ".");
GenericRow result = complexTypeTransformer.transform(genericRow);
Assert.assertNotNull(result.getValue(GenericRow.MULTIPLE_RECORDS_KEY));
Collection<GenericRow> rows = (Collection<GenericRow>) result.getValue(GenericRow.MULTIPLE_RECORDS_KEY);
Assert.assertEquals(rows.size(), 4);
for (GenericRow row : rows) {
Assert.assertEquals(row.getValue("level1.level2.level3.level4"), "foo_bar");
}
} |
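// A minimal sketch, using plain collections and hypothetical data, of why the
// test expects exactly 4 rows: unnesting a 2-element outer array whose
// elements each carry a 2-element inner array is a cartesian expansion,
// 2 * 2 = 4 flattened records.
import java.util.ArrayList;
import java.util.List;

final class UnnestCountSketch {
    public static void main(String[] args) {
        List<String> outer = List.of("o1", "o2");
        List<String> inner = List.of("i1", "i2");
        List<String> rows = new ArrayList<>();
        for (String o : outer) {
            for (String i : inner) {
                rows.add(o + "/" + i);
            }
        }
        System.out.println(rows.size()); // 4
    }
}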
@Override
public int compareVersions(String v1, String v2) {
return Version.parse(v1).compareTo(Version.parse(v2));
} | @Test
void compareSnapshotVersion() {
assertTrue(versionManager.compareVersions("1.1.0", "1.0.0-SNAPSHOT") > 0);
assertTrue(versionManager.compareVersions("1.1.0", "1.2.0-SNAPSHOT") < 0);
assertTrue(versionManager.compareVersions("1.0.0-SNAPSHOT", "1.1.0-SNAPSHOT") < 0);
assertEquals(0, versionManager.compareVersions("1.0.0-SNAPSHOT", "1.0.0-SNAPSHOT"));
} |
public Map<String, Object> getKsqlFunctionsConfigProps(final String functionName) {
final Map<String, Object> udfProps = originalsWithPrefix(
KSQL_FUNCTIONS_PROPERTY_PREFIX + functionName.toLowerCase(), false);
final Map<String, Object> globals = originalsWithPrefix(
KSQ_FUNCTIONS_GLOBAL_PROPERTY_PREFIX, false);
udfProps.putAll(globals);
return udfProps;
} | @Test
public void shouldReturnUdfConfig() {
// Given:
final String functionName = "bob";
final String udfConfigName =
KsqlConfig.KSQL_FUNCTIONS_PROPERTY_PREFIX + functionName + ".some-setting";
final KsqlConfig config = new KsqlConfig(ImmutableMap.of(
udfConfigName, "should-be-visible"
));
// When:
final Map<String, ?> udfProps = config.getKsqlFunctionsConfigProps(functionName);
// Then:
assertThat(udfProps.get(udfConfigName), is("should-be-visible"));
} |
@Override
public String toString() {
return StringUtil.simpleClassName(this) + "(default: " + defaultValue + ", map: " + map + ')';
} | @Test
public void testToString() {
DomainNameMapping<String> mapping = new DomainNameMappingBuilder<String>("NotFound")
.add("*.netty.io", "Netty")
.add("downloads.netty.io", "Netty-Download")
.build();
assertEquals(
"ImmutableDomainNameMapping(default: NotFound, map: {*.netty.io=Netty, downloads.netty.io=Netty-Download})",
mapping.toString());
} |
@Override
public boolean validate(Path path, ResourceContext context) {
// explicitly call a method not depending on LinkResourceService
return validate(path);
} | @Test
public void testSatisfyWaypoints() {
sut = new WaypointConstraint(DID1, DID2, DID3);
assertThat(sut.validate(path, resourceContext), is(true));
} |
@Override
public List<T> select(List<T> context) {
return context;
} | @Test
void testSelect() {
NoneSelector<Instance> noneSelector = new NoneSelector<>();
List<Instance> providers = Collections.emptyList();
assertEquals(providers, noneSelector.select(providers));
} |
@Override
public void removeNetwork(String netId) {
checkArgument(!Strings.isNullOrEmpty(netId), ERR_NULL_NETWORK_ID);
synchronized (this) {
if (isNetworkInUse(netId)) {
final String error = String.format(MSG_NETWORK, netId, ERR_IN_USE);
throw new IllegalStateException(error);
}
Network osNet = osNetworkStore.removeNetwork(netId);
if (osNet != null) {
log.info(String.format(MSG_NETWORK, deriveResourceName(osNet), MSG_REMOVED));
}
Versioned<OpenstackNetwork> augmentedNetwork = augmentedNetworkMap.remove(netId);
if (augmentedNetwork != null) {
log.info(String.format(MSG_NETWORK_TYPE,
augmentedNetwork.value().type(), MSG_REMOVED));
}
}
} | @Test(expected = IllegalArgumentException.class)
public void testRemoveNetworkWithNull() {
target.removeNetwork(null);
} |
static String getConfigValueAsString(ServiceConfiguration conf,
String configProp) throws IllegalArgumentException {
String value = getConfigValueAsStringImpl(conf, configProp);
log.info("Configuration for [{}] is [{}]", configProp, value);
return value;
} | @Test
public void testGetConfigValueAsStringWorks() {
Properties props = new Properties();
props.setProperty("prop1", "audience");
ServiceConfiguration config = new ServiceConfiguration();
config.setProperties(props);
String actual = ConfigUtils.getConfigValueAsString(config, "prop1");
assertEquals("audience", actual);
} |
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp)
throws ServletException, IOException {
try {
URL indexResource = getServletContext().getResource("/index.html");
String content = IOUtils.toString(indexResource, StandardCharsets.UTF_8);
      // process body addon
if (bodyAddon != null) {
if (content.contains(TAG_BODY_CLOSING)) {
content = content.replace(TAG_BODY_CLOSING, bodyAddon + TAG_BODY_CLOSING);
} else if (content.contains(TAG_HTML_CLOSING)) {
content = content.replace(TAG_HTML_CLOSING, bodyAddon + TAG_HTML_CLOSING);
} else {
content = content + bodyAddon;
}
}
// process head addon
if (headAddon != null) {
if (content.contains(TAG_HEAD_CLOSING)) {
content = content.replace(TAG_HEAD_CLOSING, headAddon + TAG_HEAD_CLOSING);
} else if (content.contains(TAG_BODY_OPENING)) {
content = content.replace(TAG_BODY_OPENING, headAddon + TAG_BODY_OPENING);
} else {
LOGGER.error(
"Unable to process Head html addon. Could not find proper anchor in index.html.");
}
}
resp.setContentType("text/html");
resp.setStatus(HttpServletResponse.SC_OK);
resp.getWriter().append(content);
} catch (IOException e) {
LOGGER.error("Error rendering index.html.", e);
}
} | @Test
void testZeppelinWebHtmlAddon() throws IOException, ServletException {
ZeppelinConfiguration zConf = mock(ZeppelinConfiguration.class);
when(zConf.getHtmlBodyAddon()).thenReturn(TEST_BODY_ADDON);
when(zConf.getHtmlHeadAddon()).thenReturn(TEST_HEAD_ADDON);
ServletConfig sc = mock(ServletConfig.class);
ServletContext ctx = mock(ServletContext.class);
when(ctx.getResource("/index.html"))
.thenReturn(new URL("file:" + FILE_PATH_INDEX_HTML_ZEPPELIN_WEB));
when(sc.getServletContext()).thenReturn(ctx);
IndexHtmlServlet servlet = new IndexHtmlServlet(zConf);
servlet.init(sc);
HttpServletResponse mockResponse = mock(HttpServletResponse.class);
HttpServletRequest mockRequest = mock(HttpServletRequest.class);
// Catch content in ByteArrayOutputStream
ByteArrayOutputStream out = new ByteArrayOutputStream();
PrintWriter writer = new PrintWriter(out);
when(mockResponse.getWriter()).thenReturn(writer);
servlet.doGet(mockRequest, mockResponse);
writer.flush();
// Get Content
    String content = out.toString();
assertThat(content, containsString(TEST_BODY_ADDON));
assertThat(content, containsString(TEST_HEAD_ADDON));
} |
@Override
protected Component<? super Component<?, ?>, ?> doBuild(DeployState deployState, TreeConfigProducer<AnyConfigProducer> ancestor, Element spec) {
var component = buildComponent(spec, deployState, ancestor);
addChildren(deployState, ancestor, spec, component);
return component;
} | @Test
void ensureCorrectModel() {
Component<?, ?> handler = new DomComponentBuilder().doBuild(root.getDeployState(), root, parse(
"<handler id='theId' class='theClass' bundle='theBundle' />"));
BundleInstantiationSpecification instantiationSpecification = handler.model.bundleInstantiationSpec;
assertEquals("theId", instantiationSpecification.id.stringValue());
assertEquals("theClass", instantiationSpecification.classId.stringValue());
assertEquals("theBundle", instantiationSpecification.bundle.stringValue());
} |
public static HeaderTemplate create(String name, Iterable<String> values) {
if (name == null || name.isEmpty()) {
throw new IllegalArgumentException("name is required.");
}
if (values == null) {
throw new IllegalArgumentException("values are required");
}
return new HeaderTemplate(name, values, Util.UTF_8);
} | @Test
void it_should_throw_exception_when_value_is_null() {
IllegalArgumentException exception = assertThrows(IllegalArgumentException.class,
() -> HeaderTemplate.create("test", null));
assertThat(exception.getMessage()).isEqualTo("values are required");
} |
public static <T> T toBean(Object source, Class<T> clazz) {
return toBean(source, clazz, null);
} | @Test
public void valueProviderToBeanTest() {
// https://gitee.com/dromara/hutool/issues/I5B4R7
final CopyOptions copyOptions = CopyOptions.create();
final Map<String, String> filedMap = new HashMap<>();
filedMap.put("name", "sourceId");
copyOptions.setFieldMapping(filedMap);
final TestPojo pojo = BeanUtil.toBean(TestPojo.class, new ValueProvider<String>() {
final HashMap<String, Object> map = new HashMap<>();
{
map.put("sourceId", "123");
}
@Override
public Object value(final String key, final Type valueType) {
return map.get(key);
}
@Override
public boolean containsKey(final String key) {
return map.containsKey(key);
}
}, copyOptions);
assertEquals("123", pojo.getName());
} |
public static byte setBooleanToByte(byte modifiers, int i, boolean bool) {
boolean old = getBooleanFromByte(modifiers, i);
if (old && !bool) { // true-->false
return (byte) (modifiers - (1 << i));
} else if (!old && bool) { // false-->true
return (byte) (modifiers + (1 << i));
}
return modifiers;
} | @Test
public void setBooleanToByte() {
byte b = 0x35; // 0011 0101
byte b1 = CodecUtils.setBooleanToByte(b, 0, true);
Assert.assertEquals(b, b1);
byte b2 = CodecUtils.setBooleanToByte(b, 1, false);
Assert.assertEquals(b, b2);
byte b3 = CodecUtils.setBooleanToByte(b, 3, true);
Assert.assertFalse(b == b3);
Assert.assertTrue(CodecUtils.getBooleanFromByte(b3, 3));
byte b4 = CodecUtils.setBooleanToByte(b, 4, false);
Assert.assertFalse(b == b4);
Assert.assertFalse(CodecUtils.getBooleanFromByte(b4, 4));
} |
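// A worked sketch, with hypothetical names, spelling out the bit arithmetic
// the assertions above rely on: 0x35 is 0011 0101, so bits 0, 2, 4 and 5 are
// set while bits 1, 3, 6 and 7 are clear.
final class BitFlagSketch {
    public static void main(String[] args) {
        byte flags = 0x35;                         // 0011 0101
        // Setting the clear bit 3 adds 1 << 3 = 8: 0x35 + 0x08 = 0x3D.
        byte withBit3 = (byte) (flags + (1 << 3));
        System.out.println(Integer.toBinaryString(withBit3 & 0xFF));    // 111101
        // Clearing the set bit 4 subtracts 1 << 4 = 16: 0x35 - 0x10 = 0x25.
        byte withoutBit4 = (byte) (flags - (1 << 4));
        System.out.println(Integer.toBinaryString(withoutBit4 & 0xFF)); // 100101
    }
}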
@Override
public NetworkClientDelegate.PollResult poll(long currentTimeMs) {
if (!coordinatorRequestManager.coordinator().isPresent() ||
membershipManager.shouldSkipHeartbeat()) {
membershipManager.onHeartbeatRequestSkipped();
return NetworkClientDelegate.PollResult.EMPTY;
}
pollTimer.update(currentTimeMs);
if (pollTimer.isExpired() && !membershipManager.isLeavingGroup()) {
logger.warn("Consumer poll timeout has expired. This means the time between " +
"subsequent calls to poll() was longer than the configured max.poll.interval.ms, " +
"which typically implies that the poll loop is spending too much time processing " +
"messages. You can address this either by increasing max.poll.interval.ms or by " +
"reducing the maximum size of batches returned in poll() with max.poll.records.");
membershipManager.transitionToSendingLeaveGroup(true);
NetworkClientDelegate.UnsentRequest leaveHeartbeat = makeHeartbeatRequest(currentTimeMs, true);
// We can ignore the leave response because we can join before or after receiving the response.
heartbeatRequestState.reset();
heartbeatState.reset();
return new NetworkClientDelegate.PollResult(heartbeatRequestState.heartbeatIntervalMs, Collections.singletonList(leaveHeartbeat));
}
// Case 1: The member is leaving
boolean heartbeatNow = membershipManager.state() == MemberState.LEAVING ||
// Case 2: The member state indicates it should send a heartbeat without waiting for the interval, and there is no heartbeat request currently in-flight
(membershipManager.shouldHeartbeatNow() && !heartbeatRequestState.requestInFlight());
if (!heartbeatRequestState.canSendRequest(currentTimeMs) && !heartbeatNow) {
return new NetworkClientDelegate.PollResult(heartbeatRequestState.timeToNextHeartbeatMs(currentTimeMs));
}
NetworkClientDelegate.UnsentRequest request = makeHeartbeatRequest(currentTimeMs, false);
return new NetworkClientDelegate.PollResult(heartbeatRequestState.heartbeatIntervalMs, Collections.singletonList(request));
} | @Test
public void testPollTimerExpirationShouldNotMarkMemberStaleIfMemberAlreadyLeaving() {
when(membershipManager.shouldSkipHeartbeat()).thenReturn(false);
when(membershipManager.isLeavingGroup()).thenReturn(true);
time.sleep(DEFAULT_MAX_POLL_INTERVAL_MS);
NetworkClientDelegate.PollResult result = heartbeatRequestManager.poll(time.milliseconds());
// No transition to leave due to stale member should be triggered, because the member is
// already leaving the group
verify(membershipManager, never()).transitionToSendingLeaveGroup(anyBoolean());
assertEquals(1, result.unsentRequests.size(), "A heartbeat request should be generated to" +
" complete the ongoing leaving operation that was triggered before the poll timer expired.");
} |
@Override
public Double getValue() {
return getRatio().getValue();
} | @Test
public void handlesInfiniteDenominators() throws Exception {
final RatioGauge infinite = new RatioGauge() {
@Override
protected Ratio getRatio() {
return Ratio.of(10, Double.POSITIVE_INFINITY);
}
};
assertThat(infinite.getValue())
.isNaN();
} |
public String generateInvalidPayloadExceptionMessage(final byte[] hl7Bytes) {
if (hl7Bytes == null) {
return "HL7 payload is null";
}
return generateInvalidPayloadExceptionMessage(hl7Bytes, hl7Bytes.length);
} | @Test
public void testGenerateInvalidPayloadExceptionMessageWithEmbeddedStartOfBlock() {
byte[] basePayload = TEST_MESSAGE.getBytes();
ByteArrayOutputStream payloadStream = new ByteArrayOutputStream(basePayload.length + 1);
int embeddedStartOfBlockIndex = basePayload.length / 2;
payloadStream.write(basePayload, 0, embeddedStartOfBlockIndex);
payloadStream.write(MllpProtocolConstants.START_OF_BLOCK);
payloadStream.write(basePayload, embeddedStartOfBlockIndex, basePayload.length - embeddedStartOfBlockIndex);
String expected
= "HL7 payload contains an embedded START_OF_BLOCK {0xb, ASCII <VT>} at index " + embeddedStartOfBlockIndex;
assertEquals(expected, hl7util.generateInvalidPayloadExceptionMessage(payloadStream.toByteArray()));
} |
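// A minimal sketch, with a hypothetical class name, of the scan behind the
// expected message: locating an embedded START_OF_BLOCK byte (0x0b, ASCII
// <VT>) inside an HL7 payload and reporting its index.
final class MllpScanSketch {
    static final byte START_OF_BLOCK = 0x0b;

    static int indexOfStartOfBlock(byte[] payload) {
        for (int i = 0; i < payload.length; i++) {
            if (payload[i] == START_OF_BLOCK) {
                return i; // index of the first embedded start-of-block byte
            }
        }
        return -1; // payload is clean
    }
}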
@Override
public PluginRuntime getPluginRuntime() {
return new PluginRuntime(getId())
.addInfo("awaitTerminationMillis", awaitTerminationMillis + "ms");
} | @Test
public void testGetRuntime() {
Assert.assertNotNull(new ThreadPoolExecutorShutdownPlugin(1000L).getPluginRuntime());
} |
public void executeInInteractiveMode() {
executeInInteractiveMode(null);
} | @Test
void testHistoryFile() throws Exception {
final MockExecutor mockExecutor = new MockExecutor();
InputStream inputStream = new ByteArrayInputStream("help;\nuse catalog cat;\n".getBytes());
Path historyFilePath = historyTempFile();
try (Terminal terminal =
new DumbTerminal(inputStream, new TerminalUtils.MockOutputStream());
CliClient client =
new CliClient(() -> terminal, mockExecutor, historyFilePath, null)) {
client.executeInInteractiveMode();
List<String> content = Files.readAllLines(historyFilePath);
assertThat(content).hasSize(2);
assertThat(content.get(0)).contains("help");
assertThat(content.get(1)).contains("use catalog cat");
}
} |
public void add(final Portal portal) throws SQLException {
boolean isNamedPortal = !portal.getName().isEmpty();
Preconditions.checkState(!isNamedPortal || !portals.containsKey(portal.getName()), "Named portal `%s` must be explicitly closed", portal.getName());
Portal previousPortal = portals.put(portal.getName(), portal);
if (null != previousPortal) {
previousPortal.close();
}
} | @Test
void assertAddDuplicateNamedPortal() throws SQLException {
Portal portal = mock(Portal.class);
when(portal.getName()).thenReturn("P_1");
portalContext.add(portal);
assertThrows(IllegalStateException.class, () -> portalContext.add(portal));
} |
public String getQuery() throws Exception {
return getQuery(weatherConfiguration.getLocation());
} | @Test
public void testCurrentLocationQuery2() throws Exception {
WeatherConfiguration weatherConfiguration = new WeatherConfiguration();
weatherConfiguration.setMode(WeatherMode.XML);
weatherConfiguration.setLocation("current");
weatherConfiguration.setPeriod("3");
weatherConfiguration.setLanguage(WeatherLanguage.nl);
weatherConfiguration.setUnits(WeatherUnits.IMPERIAL);
weatherConfiguration.setAppid(APPID);
WeatherQuery weatherQuery = new WeatherQuery(weatherConfiguration);
weatherConfiguration.setGeoLocationProvider(geoLocationProvider);
String query = weatherQuery.getQuery();
assertThat(query, is(
"http://api.openweathermap.org/data/2.5/forecast/daily?lat=51.98&lon=4.13&lang=nl&cnt=3&units=imperial&mode=xml&APPID=9162755b2efa555823cfe0451d7fff38"));
} |
@Deprecated
public static <T> T mapToBean(Map<?, ?> map, Class<T> beanClass, boolean isIgnoreError) {
return fillBeanWithMap(map, ReflectUtil.newInstanceIfPossible(beanClass), isIgnoreError);
} | @Test
public void mapToBeanTest() {
final HashMap<String, Object> map = MapUtil.newHashMap();
map.put("a_name", "Joe");
map.put("b_age", 12);
		// alias mapping, used to map the map keys to the bean's field names
final HashMap<String, String> mapping = MapUtil.newHashMap();
mapping.put("a_name", "name");
mapping.put("b_age", "age");
final Person person = BeanUtil.toBean(map, Person.class, CopyOptions.create().setFieldMapping(mapping));
assertEquals("Joe", person.getName());
assertEquals(12, person.getAge());
} |
@Deprecated
@Override
public void init(final ProcessorContext context,
final StateStore root) {
this.context = context instanceof InternalProcessorContext ? (InternalProcessorContext<?, ?>) context : null;
taskId = context.taskId();
initStoreSerde(context);
streamsMetrics = (StreamsMetricsImpl) context.metrics();
registerMetrics();
final Sensor restoreSensor =
StateStoreMetrics.restoreSensor(taskId.toString(), metricsScope, name(), streamsMetrics);
// register and possibly restore the state from the logs
maybeMeasureLatency(() -> super.init(context, root), time, restoreSensor);
} | @SuppressWarnings("deprecation")
@Test
public void shouldDelegateDeprecatedInit() {
setUp();
final MeteredSessionStore<String, String> outer = new MeteredSessionStore<>(
innerStore,
STORE_TYPE,
Serdes.String(),
Serdes.String(),
new MockTime()
);
doNothing().when(innerStore).init((ProcessorContext) context, outer);
outer.init((ProcessorContext) context, outer);
} |
public boolean appliesTo(String pipelineName, String stageName) {
boolean pipelineMatches = this.pipelineName.equals(pipelineName) ||
this.pipelineName.equals(GoConstants.ANY_PIPELINE);
boolean stageMatches = this.stageName.equals(stageName) ||
this.stageName.equals(GoConstants.ANY_STAGE);
return pipelineMatches && stageMatches;
} | @Test
void anyStageShouldAlwaysApply() {
NotificationFilter filter = new NotificationFilter("cruise2", GoConstants.ANY_STAGE, StageEvent.Breaks, false);
assertThat(filter.appliesTo("cruise2", "dev")).isTrue();
} |
@Override
public Page<ConfigInfo> findConfigInfo4Page(final int pageNo, final int pageSize, final String dataId,
final String group, final String tenant, final Map<String, Object> configAdvanceInfo) {
String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant;
PaginationHelper<ConfigInfo> helper = createPaginationHelper();
final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName");
final String content = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("content");
final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags");
MapperResult sql;
MapperResult sqlCount;
final MapperContext context = new MapperContext();
context.putWhereParameter(FieldConstant.TENANT_ID, tenantTmp);
if (StringUtils.isNotBlank(dataId)) {
context.putWhereParameter(FieldConstant.DATA_ID, dataId);
}
if (StringUtils.isNotBlank(group)) {
context.putWhereParameter(FieldConstant.GROUP_ID, group);
}
if (StringUtils.isNotBlank(appName)) {
context.putWhereParameter(FieldConstant.APP_NAME, appName);
}
if (!StringUtils.isBlank(content)) {
context.putWhereParameter(FieldConstant.CONTENT, content);
}
context.setStartRow((pageNo - 1) * pageSize);
context.setPageSize(pageSize);
if (StringUtils.isNotBlank(configTags)) {
String[] tagArr = configTags.split(",");
context.putWhereParameter(FieldConstant.TAG_ARR, tagArr);
ConfigTagsRelationMapper configTagsRelationMapper = mapperManager.findMapper(
dataSourceService.getDataSourceType(), TableConstant.CONFIG_TAGS_RELATION);
sqlCount = configTagsRelationMapper.findConfigInfo4PageCountRows(context);
sql = configTagsRelationMapper.findConfigInfo4PageFetchRows(context);
} else {
ConfigInfoMapper configInfoMapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
TableConstant.CONFIG_INFO);
sqlCount = configInfoMapper.findConfigInfo4PageCountRows(context);
sql = configInfoMapper.findConfigInfo4PageFetchRows(context);
}
try {
Page<ConfigInfo> page = helper.fetchPageLimit(sqlCount, sql, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER);
for (ConfigInfo configInfo : page.getPageItems()) {
Pair<String, String> pair = EncryptionHandler.decryptHandler(configInfo.getDataId(),
configInfo.getEncryptedDataKey(), configInfo.getContent());
configInfo.setContent(pair.getSecond());
}
return page;
} catch (CannotGetJdbcConnectionException e) {
LogUtil.FATAL_LOG.error("[db-error] ", e);
throw e;
}
} | @Test
void testFindConfigInfo4PageWithTags() {
String dataId = "dataId4567222";
String group = "group3456789";
String tenant = "tenant4567890";
Map<String, Object> configAdvanceInfo = new HashMap<>();
configAdvanceInfo.put("config_tags", "tags1,tags3");
//mock total count
        when(jdbcTemplate.queryForObject(anyString(), eq(new Object[] {tenant, dataId, group, "tags1", "tags3"}),
                eq(Integer.class))).thenReturn(9);
//mock page list
List<ConfigInfo> result = new ArrayList<>();
result.add(createMockConfigInfo(0));
result.add(createMockConfigInfo(1));
result.add(createMockConfigInfo(2));
when(jdbcTemplate.query(anyString(), eq(new Object[] {tenant, dataId, group, "tags1", "tags3"}),
eq(CONFIG_INFO_ROW_MAPPER))).thenReturn(result);
Page<ConfigInfo> configInfo4Page = externalConfigInfoPersistService.findConfigInfo4Page(1, 3, dataId, group, tenant,
configAdvanceInfo);
assertEquals(result.size(), configInfo4Page.getPageItems().size());
assertEquals(9, configInfo4Page.getTotalCount());
} |
public static void main(String[] args) {
// populate the in-memory database
initData();
// query the data using the service
queryData();
} | @Test
void shouldExecuteWithoutException() {
assertDoesNotThrow(() -> App.main(new String[]{}));
} |
protected S state() {
return state;
} | @Test
public void testChannelInputShutdownEvent() {
final AtomicReference<Error> error = new AtomicReference<Error>();
EmbeddedChannel channel = new EmbeddedChannel(new ReplayingDecoder<Integer>(0) {
private boolean decoded;
@Override
protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception {
if (!(in instanceof ReplayingDecoderByteBuf)) {
error.set(new AssertionError("in must be of type " + ReplayingDecoderByteBuf.class
+ " but was " + in.getClass()));
return;
}
if (!decoded) {
decoded = true;
in.readByte();
state(1);
} else {
// This will throw an ReplayingError
in.skipBytes(Integer.MAX_VALUE);
}
}
});
assertFalse(channel.writeInbound(Unpooled.wrappedBuffer(new byte[] {0, 1})));
channel.pipeline().fireUserEventTriggered(ChannelInputShutdownEvent.INSTANCE);
assertFalse(channel.finishAndReleaseAll());
Error err = error.get();
if (err != null) {
throw err;
}
} |
public SubsetItem getClientsSubset(String serviceName,
int minClusterSubsetSize,
int partitionId,
Map<URI, Double> possibleUris,
long version,
SimpleLoadBalancerState state)
{
SubsettingStrategy<URI> subsettingStrategy = _subsettingStrategyFactory.get(serviceName, minClusterSubsetSize, partitionId);
if (subsettingStrategy == null)
{
return new SubsetItem(false, false, possibleUris, Collections.emptySet());
}
DeterministicSubsettingMetadata metadata = _subsettingMetadataProvider.getSubsettingMetadata(state);
if (metadata == null)
{
return new SubsetItem(false, false, possibleUris, Collections.emptySet());
}
synchronized (_lockMap.computeIfAbsent(serviceName, name -> new Object()))
{
SubsetCache subsetCache = _subsetCache.get(serviceName);
if (isCacheValid(version, metadata.getPeerClusterVersion(), minClusterSubsetSize, subsetCache))
{
if (subsetCache.getWeightedSubsets().containsKey(partitionId))
{
return new SubsetItem(true, false, subsetCache.getWeightedSubsets().get(partitionId), Collections.emptySet());
}
}
Map<URI, Double> subsetMap = subsettingStrategy.getWeightedSubset(possibleUris, metadata);
if (subsetMap == null)
{
return new SubsetItem(false, false, possibleUris, Collections.emptySet());
}
else
{
LOG.debug("Force updating subset cache for service " + serviceName);
Set<URI> doNotSlowStartUris = new HashSet<>();
if (subsetCache != null)
{
Set<URI> oldPossibleUris = subsetCache.getPossibleUris().getOrDefault(partitionId, Collections.emptySet());
for (URI uri : subsetMap.keySet())
{
if (oldPossibleUris.contains(uri))
{
doNotSlowStartUris.add(uri);
}
}
subsetCache.setVersion(version);
subsetCache.setPeerClusterVersion(metadata.getPeerClusterVersion());
subsetCache.setMinClusterSubsetSize(minClusterSubsetSize);
subsetCache.getPossibleUris().put(partitionId, possibleUris.keySet());
subsetCache.getWeightedSubsets().put(partitionId, subsetMap);
}
else
{
LOG.info("Cluster subsetting enabled for service: " + serviceName);
Map<Integer, Set<URI>> servicePossibleUris = new HashMap<>();
Map<Integer, Map<URI, Double>> serviceWeightedSubset = new HashMap<>();
servicePossibleUris.put(partitionId, possibleUris.keySet());
serviceWeightedSubset.put(partitionId, subsetMap);
subsetCache = new SubsetCache(version, metadata.getPeerClusterVersion(),
minClusterSubsetSize, servicePossibleUris, serviceWeightedSubset);
_subsetCache.put(serviceName, subsetCache);
}
LOG.debug("Subset cache updated for service " + serviceName + ": " + subsetCache);
return new SubsetItem(true, true, subsetMap, doNotSlowStartUris);
}
}
} | @Test
public void testDoNotSlowStart()
{
Mockito.when(_subsettingMetadataProvider.getSubsettingMetadata(_state))
.thenReturn(new DeterministicSubsettingMetadata(0, 5, 0));
Map<URI, Double> weightedUris = createUris(20);
SubsettingState.SubsetItem subsetItem = _subsettingState.getClientsSubset(SERVICE_NAME, 4, 0,
weightedUris, 0, _state);
Map<URI, Double> weightedUris1 = createUris(40);
SubsettingState.SubsetItem subsetItem1 = _subsettingState.getClientsSubset(SERVICE_NAME, 4, 0,
weightedUris1, 1, _state);
verifyDoNotSlowStart(subsetItem1.getWeightedUriSubset(), subsetItem1.getDoNotSlowStartUris(), weightedUris);
} |
@SuppressWarnings("MethodLength")
public void onFragment(final DirectBuffer buffer, final int offset, final int length, final Header header)
{
final MessageHeaderDecoder headerDecoder = decoders.header;
headerDecoder.wrap(buffer, offset);
final int schemaId = headerDecoder.schemaId();
if (schemaId != MessageHeaderDecoder.SCHEMA_ID)
{
throw new ArchiveException("expected schemaId=" + MessageHeaderDecoder.SCHEMA_ID + ", actual=" + schemaId);
}
final int templateId = headerDecoder.templateId();
switch (templateId)
{
case ConnectRequestDecoder.TEMPLATE_ID:
{
final ConnectRequestDecoder decoder = decoders.connectRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final ControlSession session = conductor.newControlSession(
image.correlationId(),
decoder.correlationId(),
decoder.responseStreamId(),
decoder.version(),
decoder.responseChannel(),
ArrayUtil.EMPTY_BYTE_ARRAY,
this);
controlSessionByIdMap.put(session.sessionId(), session);
break;
}
case CloseSessionRequestDecoder.TEMPLATE_ID:
{
final CloseSessionRequestDecoder decoder = decoders.closeSessionRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final ControlSession session = controlSessionByIdMap.get(controlSessionId);
if (null != session)
{
session.abort();
}
break;
}
case StartRecordingRequestDecoder.TEMPLATE_ID:
{
final StartRecordingRequestDecoder decoder = decoders.startRecordingRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
if (null != controlSession)
{
controlSession.onStartRecording(
correlationId,
decoder.streamId(),
decoder.sourceLocation(),
false,
decoder.channel());
}
break;
}
case StopRecordingRequestDecoder.TEMPLATE_ID:
{
final StopRecordingRequestDecoder decoder = decoders.stopRecordingRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
if (null != controlSession)
{
controlSession.onStopRecording(correlationId, decoder.streamId(), decoder.channel());
}
break;
}
case ReplayRequestDecoder.TEMPLATE_ID:
{
final ReplayRequestDecoder decoder = decoders.replayRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final int fileIoMaxLength = FILE_IO_MAX_LENGTH_VERSION <= headerDecoder.version() ?
decoder.fileIoMaxLength() : Aeron.NULL_VALUE;
final long recordingId = decoder.recordingId();
final long position = decoder.position();
final long replayLength = decoder.length();
final int replayStreamId = decoder.replayStreamId();
final long replayToken = REPLAY_TOKEN_VERSION <= headerDecoder.version() ?
decoder.replayToken() : Aeron.NULL_VALUE;
final String replayChannel = decoder.replayChannel();
final ChannelUri channelUri = ChannelUri.parse(replayChannel);
final ControlSession controlSession = setupSessionAndChannelForReplay(
channelUri, replayToken, recordingId, correlationId, controlSessionId, templateId);
if (null != controlSession)
{
controlSession.onStartReplay(
correlationId,
recordingId,
position,
replayLength,
fileIoMaxLength,
replayStreamId,
channelUri.toString());
}
break;
}
case StopReplayRequestDecoder.TEMPLATE_ID:
{
final StopReplayRequestDecoder decoder = decoders.stopReplayRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
if (null != controlSession)
{
controlSession.onStopReplay(correlationId, decoder.replaySessionId());
}
break;
}
case ListRecordingsRequestDecoder.TEMPLATE_ID:
{
final ListRecordingsRequestDecoder decoder = decoders.listRecordingsRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
if (null != controlSession)
{
controlSession.onListRecordings(correlationId, decoder.fromRecordingId(), decoder.recordCount());
}
break;
}
case ListRecordingsForUriRequestDecoder.TEMPLATE_ID:
{
final ListRecordingsForUriRequestDecoder decoder = decoders.listRecordingsForUriRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
if (null != controlSession)
{
final int channelLength = decoder.channelLength();
final byte[] bytes = 0 == channelLength ? ArrayUtil.EMPTY_BYTE_ARRAY : new byte[channelLength];
decoder.getChannel(bytes, 0, channelLength);
controlSession.onListRecordingsForUri(
correlationId,
decoder.fromRecordingId(),
decoder.recordCount(),
decoder.streamId(),
bytes);
}
break;
}
case ListRecordingRequestDecoder.TEMPLATE_ID:
{
final ListRecordingRequestDecoder decoder = decoders.listRecordingRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
if (null != controlSession)
{
controlSession.onListRecording(correlationId, decoder.recordingId());
}
break;
}
case ExtendRecordingRequestDecoder.TEMPLATE_ID:
{
final ExtendRecordingRequestDecoder decoder = decoders.extendRecordingRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
if (null != controlSession)
{
controlSession.onExtendRecording(
correlationId,
decoder.recordingId(),
decoder.streamId(),
decoder.sourceLocation(),
false,
decoder.channel());
}
break;
}
case RecordingPositionRequestDecoder.TEMPLATE_ID:
{
final RecordingPositionRequestDecoder decoder = decoders.recordingPositionRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
if (null != controlSession)
{
controlSession.onGetRecordingPosition(correlationId, decoder.recordingId());
}
break;
}
case TruncateRecordingRequestDecoder.TEMPLATE_ID:
{
final TruncateRecordingRequestDecoder decoder = decoders.truncateRecordingRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
if (null != controlSession)
{
controlSession.onTruncateRecording(correlationId, decoder.recordingId(), decoder.position());
}
break;
}
case StopRecordingSubscriptionRequestDecoder.TEMPLATE_ID:
{
final StopRecordingSubscriptionRequestDecoder decoder = decoders.stopRecordingSubscriptionRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
if (null != controlSession)
{
controlSession.onStopRecordingSubscription(correlationId, decoder.subscriptionId());
}
break;
}
case StopPositionRequestDecoder.TEMPLATE_ID:
{
final StopPositionRequestDecoder decoder = decoders.stopPositionRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
if (null != controlSession)
{
controlSession.onGetStopPosition(correlationId, decoder.recordingId());
}
break;
}
case FindLastMatchingRecordingRequestDecoder.TEMPLATE_ID:
{
final FindLastMatchingRecordingRequestDecoder decoder = decoders.findLastMatchingRecordingRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
if (null != controlSession)
{
final int channelLength = decoder.channelLength();
final byte[] bytes = 0 == channelLength ? ArrayUtil.EMPTY_BYTE_ARRAY : new byte[channelLength];
decoder.getChannel(bytes, 0, channelLength);
controlSession.onFindLastMatchingRecording(
correlationId,
decoder.minRecordingId(),
decoder.sessionId(),
decoder.streamId(),
bytes);
}
break;
}
case ListRecordingSubscriptionsRequestDecoder.TEMPLATE_ID:
{
final ListRecordingSubscriptionsRequestDecoder decoder = decoders.listRecordingSubscriptionsRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
if (null != controlSession)
{
controlSession.onListRecordingSubscriptions(
correlationId,
decoder.pseudoIndex(),
decoder.subscriptionCount(),
decoder.applyStreamId() == BooleanType.TRUE,
decoder.streamId(),
decoder.channel());
}
break;
}
case BoundedReplayRequestDecoder.TEMPLATE_ID:
{
final BoundedReplayRequestDecoder decoder = decoders.boundedReplayRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final long position = decoder.position();
final long replayLength = decoder.length();
final long recordingId = decoder.recordingId();
final int limitCounterId = decoder.limitCounterId();
final int replayStreamId = decoder.replayStreamId();
final int fileIoMaxLength = FILE_IO_MAX_LENGTH_VERSION <= headerDecoder.version() ?
decoder.fileIoMaxLength() : Aeron.NULL_VALUE;
final long replayToken = REPLAY_TOKEN_VERSION <= headerDecoder.version() ?
decoder.replayToken() : Aeron.NULL_VALUE;
final String replayChannel = decoder.replayChannel();
final ChannelUri channelUri = ChannelUri.parse(replayChannel);
final ControlSession controlSession = setupSessionAndChannelForReplay(
channelUri, replayToken, recordingId, correlationId, controlSessionId, templateId);
if (null != controlSession)
{
controlSession.onStartBoundedReplay(
correlationId,
recordingId,
position,
replayLength,
limitCounterId,
fileIoMaxLength,
replayStreamId,
channelUri.toString());
}
break;
}
case StopAllReplaysRequestDecoder.TEMPLATE_ID:
{
final StopAllReplaysRequestDecoder decoder = decoders.stopAllReplaysRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
if (null != controlSession)
{
controlSession.onStopAllReplays(correlationId, decoder.recordingId());
}
break;
}
case ReplicateRequestDecoder.TEMPLATE_ID:
{
final ReplicateRequestDecoder decoder = decoders.replicateRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
if (null != controlSession)
{
controlSession.onReplicate(
correlationId,
decoder.srcRecordingId(),
decoder.dstRecordingId(),
AeronArchive.NULL_POSITION,
Aeron.NULL_VALUE,
Aeron.NULL_VALUE,
decoder.srcControlStreamId(),
Aeron.NULL_VALUE,
Aeron.NULL_VALUE,
decoder.srcControlChannel(),
decoder.liveDestination(),
"",
NullCredentialsSupplier.NULL_CREDENTIAL,
"");
}
break;
}
case StopReplicationRequestDecoder.TEMPLATE_ID:
{
final StopReplicationRequestDecoder decoder = decoders.stopReplicationRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
if (null != controlSession)
{
controlSession.onStopReplication(correlationId, decoder.replicationId());
}
break;
}
case StartPositionRequestDecoder.TEMPLATE_ID:
{
final StartPositionRequestDecoder decoder = decoders.startPositionRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
if (null != controlSession)
{
controlSession.onGetStartPosition(correlationId, decoder.recordingId());
}
break;
}
case DetachSegmentsRequestDecoder.TEMPLATE_ID:
{
final DetachSegmentsRequestDecoder decoder = decoders.detachSegmentsRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
if (null != controlSession)
{
controlSession.onDetachSegments(correlationId, decoder.recordingId(), decoder.newStartPosition());
}
break;
}
case DeleteDetachedSegmentsRequestDecoder.TEMPLATE_ID:
{
final DeleteDetachedSegmentsRequestDecoder decoder = decoders.deleteDetachedSegmentsRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
if (null != controlSession)
{
controlSession.onDeleteDetachedSegments(correlationId, decoder.recordingId());
}
break;
}
case PurgeSegmentsRequestDecoder.TEMPLATE_ID:
{
final PurgeSegmentsRequestDecoder decoder = decoders.purgeSegmentsRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
if (null != controlSession)
{
controlSession.onPurgeSegments(correlationId, decoder.recordingId(), decoder.newStartPosition());
}
break;
}
case AttachSegmentsRequestDecoder.TEMPLATE_ID:
{
final AttachSegmentsRequestDecoder decoder = decoders.attachSegmentsRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
if (null != controlSession)
{
controlSession.onAttachSegments(correlationId, decoder.recordingId());
}
break;
}
case MigrateSegmentsRequestDecoder.TEMPLATE_ID:
{
final MigrateSegmentsRequestDecoder decoder = decoders.migrateSegmentsRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
if (null != controlSession)
{
controlSession.onMigrateSegments(correlationId, decoder.srcRecordingId(), decoder.dstRecordingId());
}
break;
}
case AuthConnectRequestDecoder.TEMPLATE_ID:
{
final AuthConnectRequestDecoder decoder = decoders.authConnectRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final String responseChannel = decoder.responseChannel();
final int credentialsLength = decoder.encodedCredentialsLength();
final byte[] credentials;
if (credentialsLength > 0)
{
credentials = new byte[credentialsLength];
decoder.getEncodedCredentials(credentials, 0, credentialsLength);
}
else
{
credentials = ArrayUtil.EMPTY_BYTE_ARRAY;
}
final ControlSession session = conductor.newControlSession(
image.correlationId(),
decoder.correlationId(),
decoder.responseStreamId(),
decoder.version(),
responseChannel,
credentials,
this);
controlSessionByIdMap.put(session.sessionId(), session);
break;
}
case ChallengeResponseDecoder.TEMPLATE_ID:
{
final ChallengeResponseDecoder decoder = decoders.challengeResponse;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final ControlSession session = controlSessionByIdMap.get(controlSessionId);
if (null != session)
{
final int credentialsLength = decoder.encodedCredentialsLength();
final byte[] credentials;
if (credentialsLength > 0)
{
credentials = new byte[credentialsLength];
decoder.getEncodedCredentials(credentials, 0, credentialsLength);
}
else
{
credentials = ArrayUtil.EMPTY_BYTE_ARRAY;
}
session.onChallengeResponse(decoder.correlationId(), credentials);
}
break;
}
case KeepAliveRequestDecoder.TEMPLATE_ID:
{
final KeepAliveRequestDecoder decoder = decoders.keepAliveRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
if (null != controlSession)
{
controlSession.onKeepAlive(correlationId);
}
break;
}
case TaggedReplicateRequestDecoder.TEMPLATE_ID:
{
final TaggedReplicateRequestDecoder decoder = decoders.taggedReplicateRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
if (null != controlSession)
{
controlSession.onReplicate(
correlationId,
decoder.srcRecordingId(),
decoder.dstRecordingId(),
AeronArchive.NULL_POSITION,
decoder.channelTagId(),
decoder.subscriptionTagId(),
decoder.srcControlStreamId(),
Aeron.NULL_VALUE,
Aeron.NULL_VALUE,
decoder.srcControlChannel(),
decoder.liveDestination(),
"",
NullCredentialsSupplier.NULL_CREDENTIAL,
"");
}
break;
}
case StartRecordingRequest2Decoder.TEMPLATE_ID:
{
final StartRecordingRequest2Decoder decoder = decoders.startRecordingRequest2;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
if (null != controlSession)
{
controlSession.onStartRecording(
correlationId,
decoder.streamId(),
decoder.sourceLocation(),
decoder.autoStop() == BooleanType.TRUE,
decoder.channel());
}
break;
}
case ExtendRecordingRequest2Decoder.TEMPLATE_ID:
{
final ExtendRecordingRequest2Decoder decoder = decoders.extendRecordingRequest2;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
if (null != controlSession)
{
controlSession.onExtendRecording(
correlationId,
decoder.recordingId(),
decoder.streamId(),
decoder.sourceLocation(),
decoder.autoStop() == BooleanType.TRUE,
decoder.channel());
}
break;
}
case StopRecordingByIdentityRequestDecoder.TEMPLATE_ID:
{
final StopRecordingByIdentityRequestDecoder decoder = decoders.stopRecordingByIdentityRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
if (null != controlSession)
{
controlSession.onStopRecordingByIdentity(correlationId, decoder.recordingId());
}
break;
}
case ReplicateRequest2Decoder.TEMPLATE_ID:
{
final ReplicateRequest2Decoder decoder = decoders.replicateRequest2;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
final int fileIoMaxLength = FILE_IO_MAX_LENGTH_VERSION <= headerDecoder.version() ?
decoder.fileIoMaxLength() : Aeron.NULL_VALUE;
final int sessionId = SESSION_ID_VERSION <= headerDecoder.version() ?
decoder.replicationSessionId() : Aeron.NULL_VALUE;
final String srcControlChannel = decoder.srcControlChannel();
final String liveDestination = decoder.liveDestination();
final String replicationChannel = decoder.replicationChannel();
final byte[] encodedCredentials;
if (ENCODED_CREDENTIALS_VERSION <= headerDecoder.version())
{
encodedCredentials = new byte[decoder.encodedCredentialsLength()];
decoder.getEncodedCredentials(encodedCredentials, 0, decoder.encodedCredentialsLength());
}
else
{
encodedCredentials = NullCredentialsSupplier.NULL_CREDENTIAL;
}
final String srcResponseChannel = decoder.srcResponseChannel();
if (null != controlSession)
{
controlSession.onReplicate(
correlationId,
decoder.srcRecordingId(),
decoder.dstRecordingId(),
decoder.stopPosition(),
decoder.channelTagId(),
decoder.subscriptionTagId(),
decoder.srcControlStreamId(),
fileIoMaxLength,
sessionId,
srcControlChannel,
liveDestination,
replicationChannel,
encodedCredentials,
srcResponseChannel
);
}
break;
}
case PurgeRecordingRequestDecoder.TEMPLATE_ID:
{
final PurgeRecordingRequestDecoder decoder = decoders.purgeRecordingRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
if (null != controlSession)
{
controlSession.onPurgeRecording(correlationId, decoder.recordingId());
}
break;
}
case MaxRecordedPositionRequestDecoder.TEMPLATE_ID:
{
final MaxRecordedPositionRequestDecoder decoder = decoders.maxRecordedPositionRequest;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
if (null != controlSession)
{
controlSession.onGetMaxRecordedPosition(correlationId, decoder.recordingId());
}
break;
}
case ArchiveIdRequestDecoder.TEMPLATE_ID:
{
final ArchiveIdRequestDecoder decoder = decoders.archiveIdRequestDecoder;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
if (null != controlSession)
{
controlSession.onArchiveId(correlationId);
}
break;
}
case ReplayTokenRequestDecoder.TEMPLATE_ID:
{
final ReplayTokenRequestDecoder decoder = decoders.replayTokenRequestDecoder;
decoder.wrap(
buffer,
offset + MessageHeaderDecoder.ENCODED_LENGTH,
headerDecoder.blockLength(),
headerDecoder.version());
final long controlSessionId = decoder.controlSessionId();
final long correlationId = decoder.correlationId();
final long recordingId = decoder.recordingId();
final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
if (null != controlSession)
{
final long replayToken = conductor.generateReplayToken(controlSession, recordingId);
controlSession.sendResponse(
correlationId, replayToken, ControlResponseCode.OK, "", conductor.controlResponseProxy());
}
}
}
} | @Test
void shouldHandleReplicationRequest2()
{
final ControlSessionDemuxer controlSessionDemuxer = new ControlSessionDemuxer(
new ControlRequestDecoders(), mockImage, mockConductor, mockAuthorisationService);
setupControlSession(controlSessionDemuxer, CONTROL_SESSION_ID);
final ExpandableArrayBuffer buffer = new ExpandableArrayBuffer();
final MessageHeaderEncoder headerEncoder = new MessageHeaderEncoder();
final ReplicateRequest2Encoder replicateRequest2Encoder = new ReplicateRequest2Encoder();
replicateRequest2Encoder.wrapAndApplyHeader(buffer, 0, headerEncoder);
final byte[] encodedCredentials = "some password".getBytes(StandardCharsets.US_ASCII);
replicateRequest2Encoder
.controlSessionId(928374L)
.correlationId(9382475L)
.srcRecordingId(1234234L)
.dstRecordingId(2532453245L)
.stopPosition(2315345L)
.channelTagId(234L)
.subscriptionTagId(235L)
.srcControlStreamId(982374)
.fileIoMaxLength(4096)
.srcControlChannel("src")
.liveDestination("live")
.replicationChannel("replication")
.putEncodedCredentials(encodedCredentials, 0, encodedCredentials.length)
.srcResponseChannel("response");
final int replicateRequestLength = replicateRequest2Encoder.encodedLength();
controlSessionDemuxer.onFragment(buffer, 0, replicateRequestLength, mockHeader);
final ReplicateRequest2Decoder expected = new ReplicateRequest2Decoder()
.wrapAndApplyHeader(buffer, 0, new MessageHeaderDecoder());
verify(mockSession).onReplicate(
expected.correlationId(),
expected.srcRecordingId(),
expected.dstRecordingId(),
expected.stopPosition(),
expected.channelTagId(),
expected.subscriptionTagId(),
expected.srcControlStreamId(),
expected.fileIoMaxLength(),
expected.replicationSessionId(),
expected.srcControlChannel(),
expected.liveDestination(),
expected.replicationChannel(),
encodedCredentials(expected),
expected.srcResponseChannel());
} |
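The encoded-credentials handling above (read the length, copy into a fresh array, else fall back to a shared empty array) repeats across several branches. A minimal standalone sketch of that pattern, using a hypothetical CredentialsDecoder interface rather than Aeron's generated SBE decoders:

// Hypothetical stand-in for the generated SBE decoders; not Aeron's actual API.
interface CredentialsDecoder
{
    int encodedCredentialsLength();

    void getEncodedCredentials(byte[] dst, int dstOffset, int length);
}

final class CredentialsReader
{
    static final byte[] EMPTY_BYTE_ARRAY = new byte[0];

    // Copy credentials out of the decoder, or return a shared empty array when none were sent.
    static byte[] readCredentials(final CredentialsDecoder decoder)
    {
        final int length = decoder.encodedCredentialsLength();
        if (length <= 0)
        {
            return EMPTY_BYTE_ARRAY;
        }
        final byte[] credentials = new byte[length];
        decoder.getEncodedCredentials(credentials, 0, length);
        return credentials;
    }
}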
public static DataType getDataType(final List<Field<?>> fields,
final String fieldName) {
return fields.stream()
            .filter(fld -> Objects.equals(fieldName, fld.getName()))
.map(Field::getDataType)
.findFirst()
.orElseThrow(() -> new KiePMMLInternalException(String.format("Failed to find DataType for " +
"field %s",
fieldName)));
} | @Test
void getDataTypeFromDerivedFieldsAndDataDictionary() {
final DataDictionary dataDictionary = new DataDictionary();
IntStream.range(0, 3).forEach(i -> {
final DataField dataField = getRandomDataField();
dataDictionary.addDataFields(dataField);
});
final List<DerivedField> derivedFields = dataDictionary.getDataFields()
.stream()
.map(dataField -> {
DerivedField toReturn = new DerivedField();
                    toReturn.setName("DER_" + dataField.getName());
DataType dataType = getRandomDataType();
while (dataType.equals(dataField.getDataType())) {
dataType = getRandomDataType();
}
toReturn.setDataType(dataType);
return toReturn;
})
.collect(Collectors.toList());
final List<Field<?>> fields = new ArrayList<>();
dataDictionary.getDataFields().stream()
.map(Field.class::cast)
.forEach(fields::add);
derivedFields.stream()
.map(Field.class::cast)
.forEach(fields::add);
dataDictionary.getDataFields().forEach(dataField -> {
            String fieldName = dataField.getName();
DataType retrieved = org.kie.pmml.compiler.api.utils.ModelUtils.getDataType(fields, fieldName);
assertThat(retrieved).isNotNull();
DataType expected = dataField.getDataType();
assertThat(retrieved).isEqualTo(expected);
});
derivedFields.forEach(derivedField -> {
            String fieldName = derivedField.getName();
DataType retrieved = org.kie.pmml.compiler.api.utils.ModelUtils.getDataType(fields, fieldName);
assertThat(retrieved).isNotNull();
DataType expected = derivedField.getDataType();
assertThat(retrieved).isEqualTo(expected);
});
} |
@Override
public int compare(String version1, String version2) {
        if (ObjectUtil.equal(version1, version2)) {
            return 0;
        }
        if (version1 == null && version2 == null) {
            return 0;
        } else if (version1 == null) { // null or "" is treated as the smallest version and sorted first
return -1;
} else if (version2 == null) {
return 1;
}
return CompareUtil.compare(Version.of(version1), Version.of(version2));
} | @Test
public void startWithNoneNumberTest() {
final int compare = VersionComparator.INSTANCE.compare("V1", "A1");
assertTrue(compare > 0);
} |
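A short usage sketch for the comparator above, assuming Hutool's cn.hutool.core.comparator.VersionComparator on the classpath; numeric segments compare as numbers rather than strings:

import java.util.Arrays;
import java.util.List;

import cn.hutool.core.comparator.VersionComparator;

public class VersionSortExample {
    public static void main(String[] args) {
        List<String> versions = Arrays.asList("1.10.0", "1.2.0", null, "1.2.1");
        versions.sort(VersionComparator.INSTANCE);
        // null sorts first; expected roughly: [null, 1.2.0, 1.2.1, 1.10.0]
        System.out.println(versions);
    }
}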
public long getNumConsumingSegmentsQueried() {
    return _brokerResponse.has(NUM_CONSUMING_SEGMENTS_QUERIED)
        ? _brokerResponse.get(NUM_CONSUMING_SEGMENTS_QUERIED).asLong() : -1L;
} | @Test
public void testGetNumConsumingSegmentsQueried() {
// Run the test
final long result = _executionStatsUnderTest.getNumConsumingSegmentsQueried();
// Verify the results
assertEquals(10L, result);
} |
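The has-check-with-sentinel idiom above is plain Jackson; a minimal sketch (the field name mirrors the constant, the JSON itself is made up):

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JsonDefaultExample {
    public static void main(String[] args) throws Exception {
        JsonNode response = new ObjectMapper().readTree("{\"numConsumingSegmentsQueried\": 10}");
        // Read the field when present, otherwise fall back to a -1 sentinel.
        long queried = response.has("numConsumingSegmentsQueried")
            ? response.get("numConsumingSegmentsQueried").asLong() : -1L;
        System.out.println(queried); // 10
    }
}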
@Override
@SuppressWarnings({"rawtypes", "unchecked"})
public SimpleVersionedSerializer<CommitRecoverable> getCommitRecoverableSerializer() {
return (SimpleVersionedSerializer) GSCommitRecoverableSerializer.INSTANCE;
} | @Test
public void testGetCommitRecoverableSerializer() {
Object serializer = writer.getCommitRecoverableSerializer();
assertEquals(GSCommitRecoverableSerializer.class, serializer.getClass());
} |
Optional<PriorityAndResource> getPriorityAndResource(
final TaskExecutorProcessSpec taskExecutorProcessSpec) {
tryAdaptAndAddTaskExecutorResourceSpecIfNotExist(taskExecutorProcessSpec);
return Optional.ofNullable(
taskExecutorProcessSpecToPriorityAndResource.get(taskExecutorProcessSpec));
} | @Test
void testExternalResourceFailExceedMax() {
assumeThat(isExternalResourceSupported()).isTrue();
assertThatThrownBy(
() ->
getAdapterWithExternalResources(
SUPPORTED_EXTERNAL_RESOURCE_NAME,
SUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY)
.getPriorityAndResource(
TASK_EXECUTOR_PROCESS_SPEC_WITH_EXTERNAL_RESOURCE_EXCEED_MAX))
.isInstanceOf(IllegalStateException.class);
} |
@Override
public void parse(InputStream stream, ContentHandler handler, Metadata metadata,
ParseContext context) throws IOException, SAXException, TikaException {
SQLite3DBParser p = new SQLite3DBParser();
p.parse(stream, handler, metadata, context);
} | @Test
public void testSpacesInBodyContentHandler() throws Exception {
Metadata metadata = new Metadata();
metadata.set(TikaCoreProperties.RESOURCE_NAME_KEY, TEST_FILE_NAME);
ContentHandler handler = new BodyContentHandler(-1);
ParseContext ctx = new ParseContext();
try (InputStream stream = getResourceAsStream(TEST_FILE1)) {
TikaTest.AUTO_DETECT_PARSER.parse(stream, handler, metadata, ctx);
}
String s = handler.toString();
TikaTest.assertContains("0\t2.3\t2.4\tlorem", s);
TikaTest.assertContains("tempor\n", s);
assertEquals("0", metadata.get(SQLite3Parser.SQLITE_APPLICATION_ID));
assertEquals("0", metadata.get(SQLite3Parser.SQLITE_USER_VERSION));
} |
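A hedged sketch of driving the same parse through Tika's auto-detection outside a test harness; the file name is a placeholder, and the SQLite parser needs the org.xerial sqlite-jdbc driver on the classpath:

import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;

import org.apache.tika.metadata.Metadata;
import org.apache.tika.parser.AutoDetectParser;
import org.apache.tika.parser.ParseContext;
import org.apache.tika.sax.BodyContentHandler;

public class SqliteParseExample {
    public static void main(String[] args) throws Exception {
        Metadata metadata = new Metadata();
        BodyContentHandler handler = new BodyContentHandler(-1); // -1 disables the write limit
        try (InputStream stream = Files.newInputStream(Paths.get("test.db"))) {
            new AutoDetectParser().parse(stream, handler, metadata, new ParseContext());
        }
        System.out.println(handler.toString()); // table rows rendered as tab-separated text
    }
}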
public String validate(String password) {
return validators.stream()
.map(validator -> validator.validate(password))
.filter(Optional::isPresent).map(Optional::get)
.reduce("", (partialString, element) -> (partialString.isEmpty() ? "" : partialString + ", ") + element);
} | @Test
public void testPolicyCombinedOutput() {
String specialCharacterErrorMessage = "must contain at least one special character";
String upperCaseErrorMessage = "must contain at least one upper case";
String output = passwordValidator.validate("password123");
Assert.assertTrue(output.contains(specialCharacterErrorMessage) && output.contains(upperCaseErrorMessage));
} |
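The validators above presumably each return Optional<String> describing a failure; a self-contained sketch of that composition, with two assumed rules:

import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.function.Function;

public class PasswordValidatorExample {
    // Each rule yields an error message when the password fails it, empty otherwise.
    static final List<Function<String, Optional<String>>> RULES = Arrays.asList(
        pw -> pw.chars().anyMatch(Character::isUpperCase)
            ? Optional.<String>empty() : Optional.of("must contain at least one upper case"),
        pw -> pw.chars().anyMatch(c -> !Character.isLetterOrDigit(c))
            ? Optional.<String>empty() : Optional.of("must contain at least one special character"));

    static String validate(String password) {
        return RULES.stream()
            .map(rule -> rule.apply(password))
            .filter(Optional::isPresent).map(Optional::get)
            .reduce("", (partial, element) -> (partial.isEmpty() ? "" : partial + ", ") + element);
    }

    public static void main(String[] args) {
        // Prints both failure messages, comma separated.
        System.out.println(validate("password123"));
    }
}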
@Override
public String originalArgument() {
return value;
} | @Test
public void shouldReturnStringValueForCommandLine() {
assertThat(argument.originalArgument(), is("test"));
} |
public String getStringHeader(Message in, String header, String defaultValue) {
String headerValue = in.getHeader(header, String.class);
return ObjectHelper.isNotEmpty(headerValue) ? headerValue : defaultValue;
} | @Test
public void testGetStringHeaderWithWhiteSpaces() {
when(in.getHeader(HEADER_METRIC_NAME, String.class)).thenReturn(" ");
assertThat(okProducer.getStringHeader(in, HEADER_METRIC_NAME, "value"), is("value"));
inOrder.verify(in, times(1)).getHeader(HEADER_METRIC_NAME, String.class);
inOrder.verifyNoMoreInteractions();
} |
public E set(int index, E value) {
    // Negative indices count back from the end: the valid range is [-mSize, mSize).
    if (index >= mSize || index + mSize < 0) {
throw new IndexOutOfBoundsException("Index: " + index + ", Size: " + mSize);
}
return mElements.set(index < 0 ? index + mSize : index, value);
} | @Test
void testIllegalSetNegative() throws Exception {
Assertions.assertThrows(IndexOutOfBoundsException.class, () -> {
Stack<String> stack = new Stack<String>();
stack.set(-1, "illegal");
});
} |
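A standalone sketch of the Python-style negative-index bounds check used above, backed by an ArrayList; the field names mirror the snippet, the class itself is hypothetical:

import java.util.ArrayList;
import java.util.List;

public class NegativeIndexStack<E> {
    private final List<E> mElements = new ArrayList<>();
    private int mSize;

    public void push(E value) {
        mElements.add(value);
        mSize++;
    }

    // Accepts indices in [-mSize, mSize); -1 addresses the top element.
    public E set(int index, E value) {
        if (index >= mSize || index + mSize < 0) {
            throw new IndexOutOfBoundsException("Index: " + index + ", Size: " + mSize);
        }
        return mElements.set(index < 0 ? index + mSize : index, value);
    }

    public static void main(String[] args) {
        NegativeIndexStack<String> stack = new NegativeIndexStack<>();
        stack.push("a");
        stack.push("b");
        stack.set(-1, "B");    // replaces "b"
        stack.set(-3, "boom"); // throws: -3 + 2 < 0
    }
}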
@Override
public void execute() {
PutItemResponse result = ddbClient.putItem(PutItemRequest.builder().tableName(determineTableName())
.item(determineItem()).expected(determineUpdateCondition())
.returnValues(determineReturnValues()).build());
addAttributesToResult(result.attributes());
} | @Test
public void execute() {
Map<String, AttributeValue> attributeMap = new HashMap<>();
AttributeValue attributeValue = AttributeValue.builder().s("test value").build();
attributeMap.put("name", attributeValue);
exchange.getIn().setHeader(Ddb2Constants.ITEM, attributeMap);
Map<String, ExpectedAttributeValue> expectedAttributeValueMap = new HashMap<>();
expectedAttributeValueMap.put("name", ExpectedAttributeValue.builder().attributeValueList(attributeValue).build());
exchange.getIn().setHeader(Ddb2Constants.UPDATE_CONDITION, expectedAttributeValueMap);
command.execute();
assertEquals("DOMAIN1", ddbClient.putItemRequest.tableName());
assertEquals(attributeMap, ddbClient.putItemRequest.item());
assertEquals(expectedAttributeValueMap, ddbClient.putItemRequest.expected());
assertEquals(AttributeValue.builder().s("attrValue").build(),
exchange.getIn().getHeader(Ddb2Constants.ATTRIBUTES, Map.class).get("attrName"));
} |
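A hedged sketch of the same conditional put issued against the AWS SDK v2 directly; the table name and attribute names are placeholders:

import java.util.HashMap;
import java.util.Map;

import software.amazon.awssdk.services.dynamodb.DynamoDbClient;
import software.amazon.awssdk.services.dynamodb.model.AttributeValue;
import software.amazon.awssdk.services.dynamodb.model.ExpectedAttributeValue;
import software.amazon.awssdk.services.dynamodb.model.PutItemRequest;
import software.amazon.awssdk.services.dynamodb.model.PutItemResponse;
import software.amazon.awssdk.services.dynamodb.model.ReturnValue;

public class PutItemExample {
    public static void main(String[] args) {
        try (DynamoDbClient ddb = DynamoDbClient.create()) {
            Map<String, AttributeValue> item = new HashMap<>();
            item.put("name", AttributeValue.builder().s("test value").build());

            Map<String, ExpectedAttributeValue> condition = new HashMap<>();
            condition.put("name", ExpectedAttributeValue.builder()
                .attributeValueList(AttributeValue.builder().s("test value").build()).build());

            PutItemResponse response = ddb.putItem(PutItemRequest.builder()
                .tableName("DOMAIN1")
                .item(item)
                .expected(condition) // legacy conditional-write parameter, as in the command above
                .returnValues(ReturnValue.ALL_OLD) // return the previous item's attributes, if any
                .build());
            System.out.println(response.attributes());
        }
    }
}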
@Udf(schema = "ARRAY<STRUCT<K STRING, V BIGINT>>")
public List<Struct> entriesBigInt(
@UdfParameter(description = "The map to create entries from") final Map<String, Long> map,
@UdfParameter(description = "If true then the resulting entries are sorted by key")
final boolean sorted
) {
return entries(map, BIGINT_STRUCT_SCHEMA, sorted);
} | @Test
public void shouldReturnNullListForNullMapBigInt() {
assertNull(entriesUdf.entriesBigInt(null, false));
} |
@Override
public boolean deletePlugin(String pluginId) {
if (currentPluginId.equals(pluginId)) {
return original.deletePlugin(pluginId);
} else {
throw new IllegalAccessError(PLUGIN_PREFIX + currentPluginId + " tried to execute deletePlugin for foreign pluginId!");
}
} | @Test
public void deletePlugin() {
pluginManager.loadPlugins();
assertThrows(IllegalAccessError.class, () -> wrappedPluginManager.deletePlugin(OTHER_PLUGIN_ID));
assertTrue(wrappedPluginManager.deletePlugin(THIS_PLUGIN_ID));
} |
@SuppressWarnings("SameParameterValue")
protected final String formatSqlMaybeWithParam(String sqlStr, Object... params) {
if (StringUtils.isBlank(sqlStr)) {
return null;
}
if (ArrayUtils.isNotEmpty(params)) {
for (int i = 0; i < params.length; ++i) {
String target = Constants.LEFT_BRACE + i + Constants.RIGHT_BRACE;
if (sqlStr.contains(target)) {
sqlStr = sqlStr.replace(target, formatParam(null, params[i]));
} else {
Matcher matcher = Pattern.compile("[{]" + i + ",[a-zA-Z0-9.,=]+}").matcher(sqlStr);
if (!matcher.find()) {
                    throw ExceptionUtils.mpe("Please check the syntax correctness! sql does not contain: \"%s\"", target);
}
String group = matcher.group();
sqlStr = sqlStr.replace(group, formatParam(group.substring(target.length(), group.length() - 1), params[i]));
}
}
}
return sqlStr;
} | @Test
void formatSqlMaybeWithParam() {
QueryWrapper<Object> wrapper = new QueryWrapper<>();
String s = wrapper.formatSqlMaybeWithParam("c={0}", 1);
assertThat(s).isEqualTo("c=#{ew.paramNameValuePairs.MPGENVAL1}");
s = wrapper.formatSqlMaybeWithParam("c={0,javaType=int}", 1);
assertThat(s).isEqualTo("c=#{ew.paramNameValuePairs.MPGENVAL2,javaType=int}");
s = wrapper.formatSqlMaybeWithParam("c={0,javaType=int} and b={1,jdbcType=NUMERIC} pp", 1, 2);
assertThat(s).isEqualTo("c=#{ew.paramNameValuePairs.MPGENVAL3,javaType=int} " +
"and b=#{ew.paramNameValuePairs.MPGENVAL4,jdbcType=NUMERIC} pp");
s = wrapper.formatSqlMaybeWithParam("c={0,javaType=int,jdbcType=NUMERIC,typeHandler=xxx.xxx.MyTypeHandler} pp", 1);
assertThat(s).isEqualTo("c=#{ew.paramNameValuePairs.MPGENVAL5,javaType=int,jdbcType=NUMERIC,typeHandler=xxx.xxx.MyTypeHandler} pp");
Exception ex = null;
try {
wrapper.formatSqlMaybeWithParam("c={1} pp", 1);
} catch (Exception e) {
ex = e;
}
assertThat(ex).isNotNull();
try {
wrapper.formatSqlMaybeWithParam("c={1}", 1);
} catch (Exception e) {
ex = e;
}
assertThat(ex).isNotNull();
System.out.println(ex.getMessage());
} |
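The two placeholder shapes — bare {i} and {i,options} — can be exercised in isolation; a sketch of just the matching-and-replacement step (the generated #{param...} names are simplified stand-ins for the MPGENVAL pairs):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class PlaceholderExample {
    public static void main(String[] args) {
        String sql = "c={0,javaType=int} and b={1}";
        for (int i = 0; i < 2; i++) {
            String target = "{" + i + "}";
            if (sql.contains(target)) {
                sql = sql.replace(target, "#{param" + i + "}");
            } else {
                // Match "{i,<options>}" and carry the options into the generated reference.
                Matcher matcher = Pattern.compile("[{]" + i + ",[a-zA-Z0-9.,=]+}").matcher(sql);
                if (!matcher.find()) {
                    throw new IllegalArgumentException("sql does not contain: " + target);
                }
                String group = matcher.group();
                String options = group.substring(target.length(), group.length() - 1);
                sql = sql.replace(group, "#{param" + i + "," + options + "}");
            }
        }
        System.out.println(sql); // c=#{param0,javaType=int} and b=#{param1}
    }
}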
@PUT
@Path("/domain")
@Consumes({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */})
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8
/* , MediaType.APPLICATION_XML */})
public TimelinePutResponse putDomain(
@Context HttpServletRequest req,
@Context HttpServletResponse res,
TimelineDomain domain) {
init(res);
UserGroupInformation callerUGI = getUser(req);
if (callerUGI == null) {
String msg = "The owner of the posted timeline domain is not set";
LOG.error(msg);
throw new ForbiddenException(msg);
}
domain.setOwner(callerUGI.getShortUserName());
try {
timelineDataManager.putDomain(domain, callerUGI);
} catch (YarnException e) {
// The user doesn't have the access to override the existing domain.
LOG.error(e.getMessage(), e);
throw new ForbiddenException(e);
} catch (RuntimeException e) {
LOG.error("Error putting domain", e);
throw new WebApplicationException(e,
Response.Status.INTERNAL_SERVER_ERROR);
} catch (IOException e) {
LOG.error("Error putting domain", e);
throw new WebApplicationException(e,
Response.Status.INTERNAL_SERVER_ERROR);
}
return new TimelinePutResponse();
} | @Test
void testPutDomain() throws Exception {
TimelineDomain domain = new TimelineDomain();
domain.setId("test_domain_id");
WebResource r = resource();
// No owner, will be rejected
ClientResponse response = r.path("ws").path("v1")
.path("timeline").path("domain")
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.put(ClientResponse.class, domain);
assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
response.getType().toString());
assertResponseStatusCode(Status.FORBIDDEN, response.getStatusInfo());
response = r.path("ws").path("v1")
.path("timeline").path("domain")
.queryParam("user.name", "tester")
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.put(ClientResponse.class, domain);
assertResponseStatusCode(Status.OK, response.getStatusInfo());
// Verify the domain exists
response = r.path("ws").path("v1").path("timeline")
.path("domain").path("test_domain_id")
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
response.getType().toString());
domain = response.getEntity(TimelineDomain.class);
assertNotNull(domain);
assertEquals("test_domain_id", domain.getId());
assertEquals("tester", domain.getOwner());
assertNull(domain.getDescription());
// Update the domain
domain.setDescription("test_description");
response = r.path("ws").path("v1")
.path("timeline").path("domain")
.queryParam("user.name", "tester")
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.put(ClientResponse.class, domain);
assertResponseStatusCode(Status.OK, response.getStatusInfo());
// Verify the domain is updated
response = r.path("ws").path("v1").path("timeline")
.path("domain").path("test_domain_id")
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
response.getType().toString());
domain = response.getEntity(TimelineDomain.class);
assertNotNull(domain);
assertEquals("test_domain_id", domain.getId());
assertEquals("test_description", domain.getDescription());
} |
@VisibleForTesting
ExportResult<MusicContainerResource> exportPlaylists(
TokensAndUrlAuthData authData, Optional<PaginationData> paginationData, UUID jobId)
throws IOException, InvalidTokenException, PermissionDeniedException {
Optional<String> paginationToken = Optional.empty();
String pageTokenPrefix = "";
if (paginationData.isPresent()) {
String token = ((StringPaginationToken) paginationData.get()).getToken();
Preconditions.checkArgument(
token.startsWith(PLAYLIST_TOKEN_PREFIX), "Invalid pagination token %s", token);
pageTokenPrefix = token.substring(0, getTokenPrefixLength(token));
if (getTokenPrefixLength(token) < token.length()) {
paginationToken = Optional.of(token.substring(getTokenPrefixLength(token)));
}
}
PlaylistExportResponse playlistExportResponse =
getOrCreateMusicHttpApi(authData).exportPlaylists(paginationToken);
PaginationData nextPageData;
String token = playlistExportResponse.getNextPageToken();
List<MusicPlaylist> playlists = new ArrayList<>();
GooglePlaylist[] googlePlaylists = playlistExportResponse.getPlaylists();
ResultType resultType = ResultType.END;
if (Strings.isNullOrEmpty(token)) {
nextPageData =
new StringPaginationToken(pageTokenPrefix.substring(PLAYLIST_TOKEN_PREFIX.length()));
} else {
nextPageData = new StringPaginationToken(pageTokenPrefix + token);
resultType = ResultType.CONTINUE;
}
ContinuationData continuationData = new ContinuationData(nextPageData);
if (googlePlaylists != null && googlePlaylists.length > 0) {
for (GooglePlaylist googlePlaylist : googlePlaylists) {
Instant createTime = googlePlaylist.getCreateTime() == null ? null
: Instant.parse(googlePlaylist.getCreateTime());
Instant updateTime = googlePlaylist.getUpdateTime() == null ? null
: Instant.parse(googlePlaylist.getUpdateTime());
MusicPlaylist musicPlaylist =
new MusicPlaylist(
googlePlaylist.getName().substring(GOOGLE_PLAYLIST_NAME_PREFIX.length()),
googlePlaylist.getTitle(),
googlePlaylist.getDescription(),
createTime, updateTime);
playlists.add(musicPlaylist);
monitor.debug(
() ->
String.format(
"%s: Google Music exporting playlist: %s", jobId, musicPlaylist.getId()));
// Add playlist id to continuation data
continuationData.addContainerResource(new IdOnlyContainerResource(musicPlaylist.getId()));
}
}
MusicContainerResource containerResource =
new MusicContainerResource(playlists, null, null, null);
return new ExportResult<>(resultType, containerResource, continuationData);
} | @Test
public void exportPlaylistSubsequentSet()
throws IOException, InvalidTokenException, PermissionDeniedException {
setUpSinglePlaylist(GOOGLE_PLAYLIST_NAME_PREFIX + "p1_id");
when(playlistExportResponse.getNextPageToken()).thenReturn(null);
StringPaginationToken inputPaginationToken =
new StringPaginationToken(PLAYLIST_TOKEN_PREFIX + PLAYLIST_PAGE_TOKEN);
// Run test
ExportResult<MusicContainerResource> result =
googleMusicExporter.exportPlaylists(null, Optional.of(inputPaginationToken), uuid);
// Check results
// Verify correct methods were called
verify(musicHttpApi).exportPlaylists(Optional.of(PLAYLIST_PAGE_TOKEN));
verify(playlistExportResponse).getPlaylists();
// Check pagination token - should be absent
ContinuationData continuationData = result.getContinuationData();
StringPaginationToken paginationData =
(StringPaginationToken) continuationData.getPaginationData();
assertThat(paginationData.getToken()).isEmpty();
} |
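The prefix-plus-token scheme above (a resource-type prefix glued onto the provider's page token, stripped again on the next request) fits in a few lines; the prefix value here is illustrative:

import java.util.Optional;

public class PaginationTokenExample {
    static final String PLAYLIST_TOKEN_PREFIX = "playlist:";

    // Split "playlist:<providerToken>" back into the provider token, if one remains.
    static Optional<String> providerToken(String token) {
        if (!token.startsWith(PLAYLIST_TOKEN_PREFIX)) {
            throw new IllegalArgumentException("Invalid pagination token " + token);
        }
        String rest = token.substring(PLAYLIST_TOKEN_PREFIX.length());
        return rest.isEmpty() ? Optional.empty() : Optional.of(rest);
    }

    public static void main(String[] args) {
        System.out.println(providerToken("playlist:abc123")); // Optional[abc123]
        System.out.println(providerToken("playlist:"));       // Optional.empty
    }
}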
public static Containerizer from(
CommonCliOptions commonCliOptions, ConsoleLogger logger, CacheDirectories cacheDirectories)
throws InvalidImageReferenceException, FileNotFoundException {
Containerizer containerizer = create(commonCliOptions, logger);
applyHandlers(containerizer, logger);
applyConfiguration(containerizer, commonCliOptions, cacheDirectories);
return containerizer;
} | @Test
public void testFrom_dockerDaemonImage()
throws InvalidImageReferenceException, FileNotFoundException {
CommonCliOptions commonCliOptions =
CommandLine.populateCommand(
new CommonCliOptions(), "-t", "docker://gcr.io/test/test-image-ref");
ContainerizerTestProxy containerizer =
new ContainerizerTestProxy(
Containerizers.from(commonCliOptions, consoleLogger, cacheDirectories));
assertThat(containerizer.getDescription()).isEqualTo("Building image to Docker daemon");
ImageConfiguration config = containerizer.getImageConfiguration();
assertThat(config.getCredentialRetrievers()).isEmpty();
assertThat(config.getDockerClient()).isEmpty();
assertThat(config.getImage().toString()).isEqualTo("gcr.io/test/test-image-ref");
assertThat(config.getTarPath()).isEmpty();
} |
@Override
public void close() throws UnavailableException {
// JournalContext is closed before block deletion context so that file system master changes
// are written before block master changes. If a failure occurs between deleting an inode and
// remove its blocks, it's better to have an orphaned block than an inode with a missing block.
closeQuietly(mJournalContext);
closeQuietly(mBlockDeletionContext);
if (mThrown != null) {
Throwables.propagateIfPossible(mThrown, UnavailableException.class);
throw new RuntimeException(mThrown);
}
} | @Test
public void throwTwoRuntimeExceptions() throws Throwable {
Exception bdcException = new IllegalStateException("block deletion context exception");
Exception jcException = new IllegalArgumentException("journal context exception");
doThrow(bdcException).when(mMockBDC).close();
doThrow(jcException).when(mMockJC).close();
try {
mRpcContext.close();
fail("Expected an exception to be thrown");
} catch (RuntimeException e) {
assertEquals(jcException, e);
// journal context is closed first, so the block deletion context exception should be
// suppressed.
assertEquals(bdcException, e.getSuppressed()[0]);
}
} |
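A standalone sketch of the ordered-close pattern verified above: close each resource quietly, keep the first failure, and attach later failures as suppressed (the resource roles are placeholders):

public class OrderedCloseExample {
    private Throwable thrown;

    private void closeQuietly(AutoCloseable closeable) {
        try {
            closeable.close();
        } catch (Throwable t) {
            if (thrown == null) {
                thrown = t;              // the first failure wins
            } else {
                thrown.addSuppressed(t); // later failures ride along as suppressed
            }
        }
    }

    public void close(AutoCloseable journalContext, AutoCloseable blockDeletionContext) {
        // Journal first, so file system changes are durable before block changes.
        closeQuietly(journalContext);
        closeQuietly(blockDeletionContext);
        if (thrown != null) {
            throw new RuntimeException(thrown);
        }
    }

    public static void main(String[] args) {
        new OrderedCloseExample().close(
            () -> { throw new IllegalArgumentException("journal context exception"); },
            () -> { throw new IllegalStateException("block deletion context exception"); });
    }
}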
@Override
public boolean tryLock() {
return get(tryLockAsync());
} | @Test
public void testReentrancy() throws InterruptedException {
Lock lock = redisson.getLock("lock1");
Assertions.assertTrue(lock.tryLock());
Assertions.assertTrue(lock.tryLock());
lock.unlock();
        // The next line can be uncommented to exercise the lock expiration renewal task.
//Thread.currentThread().sleep(TimeUnit.SECONDS.toMillis(RedissonLock.LOCK_EXPIRATION_INTERVAL_SECONDS*2));
Thread thread1 = new Thread() {
@Override
public void run() {
RLock lock1 = redisson.getLock("lock1");
Assertions.assertFalse(lock1.tryLock());
}
};
thread1.start();
thread1.join();
lock.unlock();
} |
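A hedged usage sketch for the reentrant lock outside the test, assuming a Redis instance on the default local address:

import org.redisson.Redisson;
import org.redisson.api.RLock;
import org.redisson.api.RedissonClient;
import org.redisson.config.Config;

public class ReentrantLockExample {
    public static void main(String[] args) {
        Config config = new Config();
        config.useSingleServer().setAddress("redis://127.0.0.1:6379");
        RedissonClient redisson = Redisson.create(config);
        try {
            RLock lock = redisson.getLock("lock1");
            if (lock.tryLock()) { // first acquisition
                lock.tryLock();   // reentrant: the same thread may acquire again
                lock.unlock();    // each acquisition needs a matching unlock
                lock.unlock();
            }
        } finally {
            redisson.shutdown();
        }
    }
}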
@Override
public <R> List<R> queryMany(String sql, Object[] args, RowMapper<R> mapper) {
return queryMany(jdbcTemplate, sql, args, mapper);
} | @Test
void testQueryMany5() {
String sql = "SELECT data_id FROM config_info WHERE id >= ? AND id <= ?";
Object[] args = new Object[] {1, 2};
String dataId1 = "test1";
String dataId2 = "test2";
List<String> resultList = new ArrayList<>();
resultList.add(dataId1);
resultList.add(dataId2);
Class clazz = dataId1.getClass();
when(operate.queryMany(jdbcTemplate, sql, args, clazz)).thenReturn(resultList);
assertEquals(operate.queryMany(jdbcTemplate, sql, args, clazz), resultList);
} |
private static String approximateSimpleName(Class<?> clazz, boolean dropOuterClassNames) {
checkArgument(!clazz.isAnonymousClass(), "Attempted to get simple name of anonymous class");
return approximateSimpleName(clazz.getName(), dropOuterClassNames);
} | @Test
public void testApproximateSimpleNameOverride() {
NameOverride overriddenName = () -> "CUSTOM_NAME";
assertEquals("CUSTOM_NAME", NameUtils.approximateSimpleName(overriddenName));
} |
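A sketch of the override-plus-fallback naming pattern the test exercises: a marker interface supplies a custom display name, otherwise the class name is abbreviated (the interface and helper are reconstructions, not Beam's exact code):

public class NameExample {
    // Marker interface letting an object override its displayed name.
    interface NameOverride {
        String getNameOverride();
    }

    static String approximateSimpleName(Object object) {
        if (object instanceof NameOverride) {
            return ((NameOverride) object).getNameOverride();
        }
        String name = object.getClass().getName();
        return name.substring(name.lastIndexOf('.') + 1); // drop the package qualifier
    }

    public static void main(String[] args) {
        NameOverride overridden = () -> "CUSTOM_NAME";
        System.out.println(approximateSimpleName(overridden)); // CUSTOM_NAME
        System.out.println(approximateSimpleName("hello"));    // String
    }
}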