focal_method | test_case |
---|---|
@Override
public void move(String noteId, String notePath, String newNotePath, AuthenticationInfo subject) throws IOException {
Preconditions.checkArgument(StringUtils.isNotEmpty(noteId));
BlobId sourceBlobId = makeBlobId(noteId, notePath);
BlobId destinationBlobId = makeBlobId(noteId, newNotePath);
try {
storage.get(sourceBlobId).copyTo(destinationBlobId);
} catch (Exception se) {
throw new IOException("Could not copy from " + sourceBlobId.toString() + " to " + destinationBlobId.toString() + ": " + se.getMessage(), se);
}
remove(noteId, notePath, subject);
} | @Test
void testMove_nonexistent() throws IOException {
zConf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), DEFAULT_URL);
this.notebookRepo = new GCSNotebookRepo(zConf, noteParser, storage);
assertThrows(IOException.class, () -> {
notebookRepo.move("id", "/name", "/name_new", AUTH_INFO);
});
} |
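A hypothetical happy-path companion (a sketch, assuming Mockito and that remove() deletes the source via storage.delete; the real fixture may use an in-memory Storage instead):
@Test
void testMove_existing() throws IOException {
    // sketch: the mocked Storage below is hypothetical, not the fixture's storage field
    Storage storage = mock(Storage.class);
    Blob sourceBlob = mock(Blob.class);
    when(storage.get(any(BlobId.class))).thenReturn(sourceBlob);
    when(storage.delete(any(BlobId.class))).thenReturn(true); // assumption: remove() deletes via storage.delete
    GCSNotebookRepo repo = new GCSNotebookRepo(zConf, noteParser, storage);
    repo.move("id", "/name", "/name_new", AUTH_INFO);
    verify(sourceBlob).copyTo(any(BlobId.class)); // the copy happens before the source is removed
}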
public CompletableFuture<List<SendResult>> sendMessage(ProxyContext ctx, QueueSelector queueSelector,
String producerGroup, int sysFlag, List<Message> messageList, long timeoutMillis) {
CompletableFuture<List<SendResult>> future = new CompletableFuture<>();
long beginTimestampFirst = System.currentTimeMillis();
AddressableMessageQueue messageQueue = null;
try {
Message message = messageList.get(0);
String topic = message.getTopic();
if (ConfigurationManager.getProxyConfig().isEnableTopicMessageTypeCheck()) {
if (topicMessageTypeValidator != null) {
// Do not check retry or dlq topic
if (!NamespaceUtil.isRetryTopic(topic) && !NamespaceUtil.isDLQTopic(topic)) {
TopicMessageType topicMessageType = serviceManager.getMetadataService().getTopicMessageType(ctx, topic);
TopicMessageType messageType = TopicMessageType.parseFromMessageProperty(message.getProperties());
topicMessageTypeValidator.validate(topicMessageType, messageType);
}
}
}
messageQueue = queueSelector.select(ctx,
this.serviceManager.getTopicRouteService().getCurrentMessageQueueView(ctx, topic));
if (messageQueue == null) {
throw new ProxyException(ProxyExceptionCode.FORBIDDEN, "no writable queue");
}
for (Message msg : messageList) {
MessageClientIDSetter.setUniqID(msg);
}
SendMessageRequestHeader requestHeader = buildSendMessageRequestHeader(messageList, producerGroup, sysFlag, messageQueue.getQueueId());
AddressableMessageQueue finalMessageQueue = messageQueue;
future = this.serviceManager.getMessageService().sendMessage(
ctx,
messageQueue,
messageList,
requestHeader,
timeoutMillis)
.thenApplyAsync(sendResultList -> {
for (SendResult sendResult : sendResultList) {
int tranType = MessageSysFlag.getTransactionValue(requestHeader.getSysFlag());
if (SendStatus.SEND_OK.equals(sendResult.getSendStatus()) &&
tranType == MessageSysFlag.TRANSACTION_PREPARED_TYPE &&
StringUtils.isNotBlank(sendResult.getTransactionId())) {
fillTransactionData(ctx, producerGroup, finalMessageQueue, sendResult, messageList);
}
}
return sendResultList;
}, this.executor)
.whenComplete((result, exception) -> {
long endTimestamp = System.currentTimeMillis();
if (exception != null) {
this.serviceManager.getTopicRouteService().updateFaultItem(finalMessageQueue.getBrokerName(), endTimestamp - beginTimestampFirst, true, false);
} else {
this.serviceManager.getTopicRouteService().updateFaultItem(finalMessageQueue.getBrokerName(), endTimestamp - beginTimestampFirst, false, true);
}
});
} catch (Throwable t) {
future.completeExceptionally(t);
}
return FutureUtils.addExecutor(future, this.executor);
} | @Test
public void testSendMessage() throws Throwable {
when(metadataService.getTopicMessageType(any(), eq(TOPIC))).thenReturn(TopicMessageType.NORMAL);
String txId = MessageClientIDSetter.createUniqID();
String msgId = MessageClientIDSetter.createUniqID();
long commitLogOffset = 1000L;
long queueOffset = 100L;
SendResult sendResult = new SendResult();
sendResult.setSendStatus(SendStatus.SEND_OK);
sendResult.setTransactionId(txId);
sendResult.setMsgId(msgId);
sendResult.setOffsetMsgId(createOffsetMsgId(commitLogOffset));
sendResult.setQueueOffset(queueOffset);
ArgumentCaptor<SendMessageRequestHeader> requestHeaderArgumentCaptor = ArgumentCaptor.forClass(SendMessageRequestHeader.class);
when(this.messageService.sendMessage(any(), any(), any(), requestHeaderArgumentCaptor.capture(), anyLong()))
.thenReturn(CompletableFuture.completedFuture(Lists.newArrayList(sendResult)));
List<Message> messageList = new ArrayList<>();
Message messageExt = createMessageExt(TOPIC, "tag", 0, 0);
messageList.add(messageExt);
AddressableMessageQueue messageQueue = mock(AddressableMessageQueue.class);
when(messageQueue.getBrokerName()).thenReturn("mockBroker");
ArgumentCaptor<String> brokerNameCaptor = ArgumentCaptor.forClass(String.class);
ArgumentCaptor<Long> tranStateTableOffsetCaptor = ArgumentCaptor.forClass(Long.class);
ArgumentCaptor<Long> commitLogOffsetCaptor = ArgumentCaptor.forClass(Long.class);
when(transactionService.addTransactionDataByBrokerName(
any(),
brokerNameCaptor.capture(),
anyString(),
anyString(),
tranStateTableOffsetCaptor.capture(),
commitLogOffsetCaptor.capture(),
anyString(), any())).thenReturn(mock(TransactionData.class));
List<SendResult> sendResultList = this.producerProcessor.sendMessage(
createContext(),
(ctx, messageQueueView) -> messageQueue,
PRODUCER_GROUP,
MessageSysFlag.TRANSACTION_PREPARED_TYPE,
messageList,
3000
).get();
assertNotNull(sendResultList);
assertEquals("mockBroker", brokerNameCaptor.getValue());
assertEquals(queueOffset, tranStateTableOffsetCaptor.getValue().longValue());
assertEquals(commitLogOffset, commitLogOffsetCaptor.getValue().longValue());
SendMessageRequestHeader requestHeader = requestHeaderArgumentCaptor.getValue();
assertEquals(PRODUCER_GROUP, requestHeader.getProducerGroup());
assertEquals(TOPIC, requestHeader.getTopic());
} |
@VisibleForTesting
static StreamExecutionEnvironment createStreamExecutionEnvironment(FlinkPipelineOptions options) {
return createStreamExecutionEnvironment(
options,
MoreObjects.firstNonNull(options.getFilesToStage(), Collections.emptyList()),
options.getFlinkConfDir());
} | @Test
public void shouldFailOnUnknownStateBackend() {
FlinkPipelineOptions options = getDefaultPipelineOptions();
options.setStreaming(true);
options.setStateBackend("unknown");
options.setStateBackendStoragePath("/path");
assertThrows(
"State backend was set to 'unknown' but no storage path was provided.",
IllegalArgumentException.class,
() -> FlinkExecutionEnvironments.createStreamExecutionEnvironment(options));
} |
public static RuleDescriptionSectionContextDto of(String key, String displayName) {
return new RuleDescriptionSectionContextDto(key, displayName);
} | @Test
void equals_with_different_context_keys_should_return_false() {
RuleDescriptionSectionContextDto context1 = RuleDescriptionSectionContextDto.of(CONTEXT_KEY, CONTEXT_DISPLAY_NAME);
RuleDescriptionSectionContextDto context2 = RuleDescriptionSectionContextDto.of(CONTEXT_KEY + "2", CONTEXT_DISPLAY_NAME);
assertThat(context1).isNotEqualTo(context2);
} |
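The positive case is the natural companion; a minimal sketch assuming equality is value-based over (key, displayName):
@Test
void equals_with_same_key_and_display_name_should_return_true() {
    RuleDescriptionSectionContextDto context1 = RuleDescriptionSectionContextDto.of(CONTEXT_KEY, CONTEXT_DISPLAY_NAME);
    RuleDescriptionSectionContextDto context2 = RuleDescriptionSectionContextDto.of(CONTEXT_KEY, CONTEXT_DISPLAY_NAME);
    assertThat(context1).isEqualTo(context2);
}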
@Override
public ListenableFuture<BufferResult> get(OutputBufferId outputBufferId, long startingSequenceId, DataSize maxSize)
{
checkState(!Thread.holdsLock(this), "Can not get pages while holding a lock on this");
requireNonNull(outputBufferId, "outputBufferId is null");
checkArgument(maxSize.toBytes() > 0, "maxSize must be at least 1 byte");
return getBuffer(outputBufferId).getPages(startingSequenceId, maxSize);
} | @Test
public void testDuplicateRequests()
{
BroadcastOutputBuffer buffer = createBroadcastBuffer(
createInitialEmptyOutputBuffers(BROADCAST)
.withBuffer(FIRST, BROADCAST_PARTITION_ID)
.withNoMoreBufferIds(),
sizeOfPages(10));
// add three items
for (int i = 0; i < 3; i++) {
addPage(buffer, createPage(i));
}
// verify the queue state: three pages buffered, none acknowledged
assertQueueState(buffer, FIRST, 3, 0);
// get the three elements
assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT), bufferResult(0, createPage(0), createPage(1), createPage(2)));
// pages not acknowledged yet so state is the same
assertQueueState(buffer, FIRST, 3, 0);
// get the three elements again
assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT), bufferResult(0, createPage(0), createPage(1), createPage(2)));
// pages not acknowledged yet so state is the same
assertQueueState(buffer, FIRST, 3, 0);
// acknowledge the pages
buffer.get(FIRST, 3, sizeOfPages(10)).cancel(true);
// attempt to get the three elements again
assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 0, false));
// the pages are now acknowledged, so they leave the buffered queue
assertQueueState(buffer, FIRST, 0, 3);
} |
public void write(ImageWriter writer, ImageWriterOptions options) {
for (BrokerRegistration broker : brokers.values()) {
writer.write(broker.toRecord(options));
}
if (!controllers.isEmpty()) {
if (!options.metadataVersion().isControllerRegistrationSupported()) {
options.handleLoss("controller registration data");
} else {
for (ControllerRegistration controller : controllers.values()) {
writer.write(controller.toRecord(options));
}
}
}
} | @Test
public void testHandleLossOfControllerRegistrations() {
ClusterImage testImage = new ClusterImage(Collections.emptyMap(),
Collections.singletonMap(1000, new ControllerRegistration.Builder().
setId(1000).
setIncarnationId(Uuid.fromString("9ABu6HEgRuS-hjHLgC4cHw")).
setListeners(Collections.singletonMap("PLAINTEXT",
new Endpoint("PLAINTEXT", SecurityProtocol.PLAINTEXT, "localhost", 19092))).
setSupportedFeatures(Collections.emptyMap()).build()));
RecordListWriter writer = new RecordListWriter();
final AtomicReference<String> lossString = new AtomicReference<>("");
testImage.write(writer, new ImageWriterOptions.Builder().
setMetadataVersion(MetadataVersion.IBP_3_6_IV2).
setLossHandler(loss -> lossString.compareAndSet("", loss.loss())).
build());
assertEquals("controller registration data", lossString.get());
} |
private CoordinatorResult<ConsumerGroupHeartbeatResponseData, CoordinatorRecord> consumerGroupHeartbeat(
String groupId,
String memberId,
int memberEpoch,
String instanceId,
String rackId,
int rebalanceTimeoutMs,
String clientId,
String clientHost,
List<String> subscribedTopicNames,
String assignorName,
List<ConsumerGroupHeartbeatRequestData.TopicPartitions> ownedTopicPartitions
) throws ApiException {
final long currentTimeMs = time.milliseconds();
final List<CoordinatorRecord> records = new ArrayList<>();
// Get or create the consumer group.
boolean createIfNotExists = memberEpoch == 0;
final ConsumerGroup group = getOrMaybeCreateConsumerGroup(groupId, createIfNotExists, records);
throwIfConsumerGroupIsFull(group, memberId);
// Get or create the member.
if (memberId.isEmpty()) memberId = Uuid.randomUuid().toString();
final ConsumerGroupMember member;
if (instanceId == null) {
member = getOrMaybeSubscribeDynamicConsumerGroupMember(
group,
memberId,
memberEpoch,
ownedTopicPartitions,
createIfNotExists,
false
);
} else {
member = getOrMaybeSubscribeStaticConsumerGroupMember(
group,
memberId,
memberEpoch,
instanceId,
ownedTopicPartitions,
createIfNotExists,
false,
records
);
}
// 1. Create or update the member. If the member is new or has changed, a ConsumerGroupMemberMetadataValue
// record is written to the __consumer_offsets partition to persist the change. If the subscriptions have
// changed, the subscription metadata is updated and persisted by writing a ConsumerGroupPartitionMetadataValue
// record to the __consumer_offsets partition. Finally, the group epoch is bumped if the subscriptions have
// changed, and persisted by writing a ConsumerGroupMetadataValue record to the partition.
ConsumerGroupMember updatedMember = new ConsumerGroupMember.Builder(member)
.maybeUpdateInstanceId(Optional.ofNullable(instanceId))
.maybeUpdateRackId(Optional.ofNullable(rackId))
.maybeUpdateRebalanceTimeoutMs(ofSentinel(rebalanceTimeoutMs))
.maybeUpdateServerAssignorName(Optional.ofNullable(assignorName))
.maybeUpdateSubscribedTopicNames(Optional.ofNullable(subscribedTopicNames))
.setClientId(clientId)
.setClientHost(clientHost)
.setClassicMemberMetadata(null)
.build();
boolean bumpGroupEpoch = hasMemberSubscriptionChanged(
groupId,
member,
updatedMember,
records
);
int groupEpoch = group.groupEpoch();
Map<String, TopicMetadata> subscriptionMetadata = group.subscriptionMetadata();
Map<String, Integer> subscribedTopicNamesMap = group.subscribedTopicNames();
SubscriptionType subscriptionType = group.subscriptionType();
if (bumpGroupEpoch || group.hasMetadataExpired(currentTimeMs)) {
// The subscription metadata is updated in two cases:
// 1) The member has updated its subscriptions;
// 2) The refresh deadline has been reached.
subscribedTopicNamesMap = group.computeSubscribedTopicNames(member, updatedMember);
subscriptionMetadata = group.computeSubscriptionMetadata(
subscribedTopicNamesMap,
metadataImage.topics(),
metadataImage.cluster()
);
int numMembers = group.numMembers();
if (!group.hasMember(updatedMember.memberId()) && !group.hasStaticMember(updatedMember.instanceId())) {
numMembers++;
}
subscriptionType = ModernGroup.subscriptionType(
subscribedTopicNamesMap,
numMembers
);
if (!subscriptionMetadata.equals(group.subscriptionMetadata())) {
log.info("[GroupId {}] Computed new subscription metadata: {}.",
groupId, subscriptionMetadata);
bumpGroupEpoch = true;
records.add(newConsumerGroupSubscriptionMetadataRecord(groupId, subscriptionMetadata));
}
if (bumpGroupEpoch) {
groupEpoch += 1;
records.add(newConsumerGroupEpochRecord(groupId, groupEpoch));
log.info("[GroupId {}] Bumped group epoch to {}.", groupId, groupEpoch);
metrics.record(CONSUMER_GROUP_REBALANCES_SENSOR_NAME);
}
group.setMetadataRefreshDeadline(currentTimeMs + consumerGroupMetadataRefreshIntervalMs, groupEpoch);
}
// 2. Update the target assignment if the group epoch is larger than the target assignment epoch. The delta between
// the existing and the new target assignment is persisted to the partition.
final int targetAssignmentEpoch;
final Assignment targetAssignment;
if (groupEpoch > group.assignmentEpoch()) {
targetAssignment = updateTargetAssignment(
group,
groupEpoch,
member,
updatedMember,
subscriptionMetadata,
subscriptionType,
records
);
targetAssignmentEpoch = groupEpoch;
} else {
targetAssignmentEpoch = group.assignmentEpoch();
targetAssignment = group.targetAssignment(updatedMember.memberId(), updatedMember.instanceId());
}
// 3. Reconcile the member's assignment with the target assignment if the member is not
// fully reconciled yet.
updatedMember = maybeReconcile(
groupId,
updatedMember,
group::currentPartitionEpoch,
targetAssignmentEpoch,
targetAssignment,
ownedTopicPartitions,
records
);
scheduleConsumerGroupSessionTimeout(groupId, memberId);
// Prepare the response.
ConsumerGroupHeartbeatResponseData response = new ConsumerGroupHeartbeatResponseData()
.setMemberId(updatedMember.memberId())
.setMemberEpoch(updatedMember.memberEpoch())
.setHeartbeatIntervalMs(consumerGroupHeartbeatIntervalMs(groupId));
// The assignment is only provided in the following cases:
// 1. The member sent a full request. It does so when joining or rejoining the group with zero
// as the member epoch; or on any errors (e.g. timeout). We use all the non-optional fields
// (rebalanceTimeoutMs, subscribedTopicNames and ownedTopicPartitions) to detect a full request
// as those must be set in a full request.
// 2. The member's assignment has been updated.
boolean isFullRequest = memberEpoch == 0 || (rebalanceTimeoutMs != -1 && subscribedTopicNames != null && ownedTopicPartitions != null);
if (isFullRequest || hasAssignedPartitionsChanged(member, updatedMember)) {
response.setAssignment(createConsumerGroupResponseAssignment(updatedMember));
}
return new CoordinatorResult<>(records, response);
} | @Test
public void testShouldThrowUnknownMemberIdExceptionWhenUnknownStaticMemberLeaves() {
String groupId = "fooup";
// Use a static member id as it makes the test easier.
String memberId1 = Uuid.randomUuid().toString();
Uuid fooTopicId = Uuid.randomUuid();
String fooTopicName = "foo";
MockPartitionAssignor assignor = new MockPartitionAssignor("range");
// Consumer group with one static member.
GroupMetadataManagerTestContext context = new GroupMetadataManagerTestContext.Builder()
.withConsumerGroupAssignors(Collections.singletonList(assignor))
.withMetadataImage(new MetadataImageBuilder()
.addTopic(fooTopicId, fooTopicName, 6)
.build())
.withConsumerGroup(new ConsumerGroupBuilder(groupId, 10)
.withMember(new ConsumerGroupMember.Builder(memberId1)
.setState(MemberState.STABLE)
.setInstanceId(memberId1)
.setMemberEpoch(10)
.setPreviousMemberEpoch(9)
.setClientId(DEFAULT_CLIENT_ID)
.setClientHost(DEFAULT_CLIENT_ADDRESS.toString())
.setSubscribedTopicNames(Arrays.asList("foo", "bar"))
.setServerAssignorName("range")
.setAssignedPartitions(mkAssignment(
mkTopicAssignment(fooTopicId, 0, 1, 2)))
.build())
.withAssignment(memberId1, mkAssignment(
mkTopicAssignment(fooTopicId, 0, 1, 2)))
.withAssignmentEpoch(10))
.build();
assertThrows(UnknownMemberIdException.class, () -> context.consumerGroupHeartbeat(
new ConsumerGroupHeartbeatRequestData()
.setGroupId(groupId)
.setMemberId(memberId1)
.setInstanceId("unknown-" + memberId1)
.setMemberEpoch(LEAVE_GROUP_STATIC_MEMBER_EPOCH)
.setRebalanceTimeoutMs(5000)
.setSubscribedTopicNames(Arrays.asList("foo", "bar"))
.setTopicPartitions(Collections.emptyList())));
} |
public MutableTree<K> beginWrite() {
return new MutableTree<>(this);
} | @Test
public void iterationBenchmark() {
final Persistent23Tree.MutableTree<Integer> tree = new Persistent23Tree<Integer>().beginWrite();
final int count = 100000;
for (int i = 0; i < count; ++i) {
tree.add(i);
}
TestUtil.time("Persistent23Tree iteration", () -> {
for (int i = 0; i < 300; ++i) {
int prev = Integer.MIN_VALUE;
Assert.assertFalse(tree.contains(prev));
final Iterator<Integer> it = tree.iterator();
int j = 0;
while (it.hasNext()) {
j = it.next();
Assert.assertTrue(prev < j);
prev = j;
}
Assert.assertEquals(count - 1, j);
}
});
TestUtil.time("Persistent23Tree reverse iteration", () -> {
for (int i = 0; i < 300; ++i) {
int prev = Integer.MAX_VALUE;
Assert.assertFalse(tree.contains(prev));
final Iterator<Integer> it = tree.reverseIterator();
int j = 0;
while (it.hasNext()) {
j = it.next();
Assert.assertTrue(prev > j);
prev = j;
}
Assert.assertEquals(0, j);
}
});
} |
@VisibleForTesting
static Instant getCreationTime(String configuredCreationTime, ProjectProperties projectProperties)
throws DateTimeParseException, InvalidCreationTimeException {
try {
switch (configuredCreationTime) {
case "EPOCH":
return Instant.EPOCH;
case "USE_CURRENT_TIMESTAMP":
projectProperties.log(
LogEvent.debug(
"Setting image creation time to current time; your image may not be reproducible."));
return Instant.now();
default:
DateTimeFormatter formatter =
new DateTimeFormatterBuilder()
.append(DateTimeFormatter.ISO_DATE_TIME) // parses isoStrict
// add ability to parse with no ":" in tz
.optionalStart()
.appendOffset("+HHmm", "+0000")
.optionalEnd()
.toFormatter();
return formatter.parse(configuredCreationTime, Instant::from);
}
} catch (DateTimeParseException ex) {
throw new InvalidCreationTimeException(configuredCreationTime, configuredCreationTime, ex);
}
} | @Test
public void testGetCreationTime_isoDateTimeValue() throws InvalidCreationTimeException {
Instant expected = DateTimeFormatter.ISO_DATE_TIME.parse("2011-12-03T01:15:30Z", Instant::from);
List<String> validTimeStamps =
ImmutableList.of(
"2011-12-03T10:15:30+09:00",
"2011-12-03T10:15:30+09:00[Asia/Tokyo]",
"2011-12-02T16:15:30-09:00",
"2011-12-03T10:15:30+0900",
"2011-12-02T16:15:30-0900",
"2011-12-03T10:15:30+09",
"2011-12-02T16:15:30-09",
"2011-12-03T01:15:30Z");
for (String timeString : validTimeStamps) {
Instant time = PluginConfigurationProcessor.getCreationTime(timeString, projectProperties);
assertThat(time).isEqualTo(expected);
}
} |
public Optional<UserDto> authenticate(Credentials credentials, HttpRequest request, AuthenticationEvent.Method method) {
if (realm == null) {
return Optional.empty();
}
return Optional.of(doAuthenticate(fixCase(credentials), request, method));
} | @Test
public void authenticate() {
executeStartWithoutGroupSync();
when(authenticator.doAuthenticate(any(Authenticator.Context.class))).thenReturn(true);
UserDetails userDetails = new UserDetails();
userDetails.setName("name");
userDetails.setEmail("email");
when(externalUsersProvider.doGetUserDetails(any(ExternalUsersProvider.Context.class))).thenReturn(userDetails);
underTest.authenticate(new Credentials(LOGIN, PASSWORD), request, BASIC);
assertThat(userIdentityAuthenticator.isAuthenticated()).isTrue();
assertThat(userIdentityAuthenticator.getAuthenticatorParameters().getUserIdentity().getProviderLogin()).isEqualTo(LOGIN);
assertThat(userIdentityAuthenticator.getAuthenticatorParameters().getUserIdentity().getProviderId()).isNull();
assertThat(userIdentityAuthenticator.getAuthenticatorParameters().getUserIdentity().getName()).isEqualTo("name");
assertThat(userIdentityAuthenticator.getAuthenticatorParameters().getUserIdentity().getEmail()).isEqualTo("email");
assertThat(userIdentityAuthenticator.getAuthenticatorParameters().getUserIdentity().shouldSyncGroups()).isFalse();
verify(authenticationEvent).loginSuccess(request, LOGIN, Source.realm(BASIC, REALM_NAME));
} |
public PickTableLayoutForPredicate pickTableLayoutForPredicate()
{
return new PickTableLayoutForPredicate(metadata);
} | @Test
public void ruleAddedNewTableLayoutIfTableScanHasEmptyConstraint()
{
tester().assertThat(pickTableLayout.pickTableLayoutForPredicate())
.on(p -> {
p.variable("orderstatus", createVarcharType(1));
return p.filter(p.rowExpression("orderstatus = 'F'"),
p.tableScan(
ordersTableHandle,
ImmutableList.of(p.variable("orderstatus", createVarcharType(1))),
ImmutableMap.of(p.variable("orderstatus", createVarcharType(1)), new TpchColumnHandle("orderstatus", createVarcharType(1)))));
})
.matches(
constrainedTableScanWithTableLayout(
"orders",
ImmutableMap.of("orderstatus", singleValue(createVarcharType(1), utf8Slice("F"))),
ImmutableMap.of("orderstatus", "orderstatus")));
tester().assertThat(pickTableLayout.pickTableLayoutForPredicate())
.on(p -> {
p.variable("orderstatus", createVarcharType(1));
return p.filter(p.rowExpression("orderstatus = 'F'"),
p.tableScan(
ordersTableHandle,
ImmutableList.of(variable("orderstatus", createVarcharType(1))),
ImmutableMap.of(variable("orderstatus", createVarcharType(1)), new TpchColumnHandle("orderstatus", createVarcharType(1)))));
})
.matches(
constrainedTableScanWithTableLayout(
"orders",
ImmutableMap.of("orderstatus", singleValue(createVarcharType(1), utf8Slice("F"))),
ImmutableMap.of("orderstatus", "orderstatus")));
} |
@Description("Get the plan ids of given plan node")
@ScalarFunction("json_presto_query_plan_node_children")
@SqlType("ARRAY<VARCHAR>")
@SqlNullable
public static Block jsonPlanNodeChildren(@TypeParameter("ARRAY<VARCHAR>") ArrayType arrayType,
@SqlType(StandardTypes.JSON) Slice jsonPlan,
@SqlType(StandardTypes.VARCHAR) Slice planId)
{
List<JsonRenderedNode> planFragments = parseJsonPlanFragmentsAsList(jsonPlan);
if (planFragments == null) {
return null;
}
Map<String, List<String>> planMap = extractPlanIds(planFragments);
List<String> planChildren = planMap.get(planId.toStringUtf8());
if (planChildren == null) {
return null;
}
return constructSqlArray(arrayType, planChildren);
} | @Test
public void testJsonPlanNodeChildren()
{
assertFunction("json_presto_query_plan_node_children(null, null)", new ArrayType(VARCHAR), null);
assertFunction("json_presto_query_plan_node_children(null, '1')", new ArrayType(VARCHAR), null);
assertFunction("json_presto_query_plan_node_children(json '" + TestJsonPrestoQueryPlanFunctionUtils.joinPlan.replaceAll("'", "''") + "', '314')",
new ArrayType(VARCHAR), ImmutableList.of());
assertFunction("json_presto_query_plan_node_children(json '" + TestJsonPrestoQueryPlanFunctionUtils.joinPlan.replaceAll("'", "''") + "', '230')",
new ArrayType(VARCHAR), ImmutableList.of("251", "284"));
assertFunction("json_presto_query_plan_node_children(json '" + TestJsonPrestoQueryPlanFunctionUtils.joinPlan.replaceAll("'", "''") + "', 'nonkey')",
new ArrayType(VARCHAR), null);
} |
@Override
public BasicTypeDefine reconvert(Column column) {
BasicTypeDefine.BasicTypeDefineBuilder builder =
BasicTypeDefine.builder()
.name(column.getName())
.nullable(column.isNullable())
.comment(column.getComment())
.defaultValue(column.getDefaultValue());
switch (column.getDataType().getSqlType()) {
case BOOLEAN:
builder.columnType(String.format("%s(%s)", ORACLE_NUMBER, 1));
builder.dataType(ORACLE_NUMBER);
builder.length(1L);
break;
case TINYINT:
case SMALLINT:
case INT:
case BIGINT:
builder.columnType(ORACLE_INTEGER);
builder.dataType(ORACLE_INTEGER);
break;
case FLOAT:
builder.columnType(ORACLE_BINARY_FLOAT);
builder.dataType(ORACLE_BINARY_FLOAT);
break;
case DOUBLE:
builder.columnType(ORACLE_BINARY_DOUBLE);
builder.dataType(ORACLE_BINARY_DOUBLE);
break;
case DECIMAL:
DecimalType decimalType = (DecimalType) column.getDataType();
long precision = decimalType.getPrecision();
int scale = decimalType.getScale();
if (precision <= 0) {
precision = DEFAULT_PRECISION;
scale = DEFAULT_SCALE;
log.warn(
"The decimal column {} type decimal({},{}) is out of range, "
+ "which is precision less than 0, "
+ "it will be converted to decimal({},{})",
column.getName(),
decimalType.getPrecision(),
decimalType.getScale(),
precision,
scale);
} else if (precision > MAX_PRECISION) {
scale = (int) Math.max(0, scale - (precision - MAX_PRECISION));
precision = MAX_PRECISION;
log.warn(
"The decimal column {} type decimal({},{}) is out of range, "
+ "which exceeds the maximum precision of {}, "
+ "it will be converted to decimal({},{})",
column.getName(),
decimalType.getPrecision(),
decimalType.getScale(),
MAX_PRECISION,
precision,
scale);
}
if (scale < 0) {
scale = 0;
log.warn(
"The decimal column {} type decimal({},{}) is out of range, "
+ "which is scale less than 0, "
+ "it will be converted to decimal({},{})",
column.getName(),
decimalType.getPrecision(),
decimalType.getScale(),
precision,
scale);
} else if (scale > MAX_SCALE) {
scale = MAX_SCALE;
log.warn(
"The decimal column {} type decimal({},{}) is out of range, "
+ "which exceeds the maximum scale of {}, "
+ "it will be converted to decimal({},{})",
column.getName(),
decimalType.getPrecision(),
decimalType.getScale(),
MAX_SCALE,
precision,
scale);
}
builder.columnType(String.format("%s(%s,%s)", ORACLE_NUMBER, precision, scale));
builder.dataType(ORACLE_NUMBER);
builder.precision(precision);
builder.scale(scale);
break;
case BYTES:
if (column.getColumnLength() == null || column.getColumnLength() <= 0) {
builder.columnType(ORACLE_BLOB);
builder.dataType(ORACLE_BLOB);
} else if (column.getColumnLength() <= MAX_RAW_LENGTH) {
builder.columnType(
String.format("%s(%s)", ORACLE_RAW, column.getColumnLength()));
builder.dataType(ORACLE_RAW);
} else {
builder.columnType(ORACLE_BLOB);
builder.dataType(ORACLE_BLOB);
}
break;
case STRING:
if (column.getColumnLength() == null || column.getColumnLength() <= 0) {
builder.columnType(
String.format("%s(%s)", ORACLE_VARCHAR2, MAX_VARCHAR_LENGTH));
builder.dataType(ORACLE_VARCHAR2);
} else if (column.getColumnLength() <= MAX_VARCHAR_LENGTH) {
builder.columnType(
String.format("%s(%s)", ORACLE_VARCHAR2, column.getColumnLength()));
builder.dataType(ORACLE_VARCHAR2);
} else {
builder.columnType(ORACLE_CLOB);
builder.dataType(ORACLE_CLOB);
}
break;
case DATE:
builder.columnType(ORACLE_DATE);
builder.dataType(ORACLE_DATE);
break;
case TIMESTAMP:
if (column.getScale() == null || column.getScale() <= 0) {
builder.columnType(ORACLE_TIMESTAMP_WITH_LOCAL_TIME_ZONE);
} else {
int timestampScale = column.getScale();
if (column.getScale() > MAX_TIMESTAMP_SCALE) {
timestampScale = MAX_TIMESTAMP_SCALE;
log.warn(
"The timestamp column {} type timestamp({}) is out of range, "
+ "which exceeds the maximum scale of {}, "
+ "it will be converted to timestamp({})",
column.getName(),
column.getScale(),
MAX_TIMESTAMP_SCALE,
timestampScale);
}
builder.columnType(
String.format("TIMESTAMP(%s) WITH LOCAL TIME ZONE", timestampScale));
builder.scale(timestampScale);
}
builder.dataType(ORACLE_TIMESTAMP_WITH_LOCAL_TIME_ZONE);
break;
default:
throw CommonError.convertToConnectorTypeError(
DatabaseIdentifier.ORACLE,
column.getDataType().getSqlType().name(),
column.getName());
}
return builder.build();
} | @Test
public void testReconvertShort() {
Column column =
PhysicalColumn.builder().name("test").dataType(BasicType.SHORT_TYPE).build();
BasicTypeDefine typeDefine = OracleTypeConverter.INSTANCE.reconvert(column);
Assertions.assertEquals(column.getName(), typeDefine.getName());
Assertions.assertEquals(OracleTypeConverter.ORACLE_INTEGER, typeDefine.getColumnType());
Assertions.assertEquals(OracleTypeConverter.ORACLE_INTEGER, typeDefine.getDataType());
} |
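A decimal case makes the NUMBER(p,s) branch concrete; a sketch assuming ORACLE_NUMBER is the literal "NUMBER":
@Test
public void testReconvertDecimal() {
    Column column = PhysicalColumn.builder()
            .name("test")
            .dataType(new DecimalType(10, 2))
            .build();
    BasicTypeDefine typeDefine = OracleTypeConverter.INSTANCE.reconvert(column);
    // assumption: ORACLE_NUMBER renders as "NUMBER", so columnType becomes "NUMBER(10,2)"
    Assertions.assertEquals("NUMBER(10,2)", typeDefine.getColumnType());
    Assertions.assertEquals(OracleTypeConverter.ORACLE_NUMBER, typeDefine.getDataType());
}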
public static StatementExecutorResponse execute(
final ConfiguredStatement<DescribeFunction> statement,
final SessionProperties sessionProperties,
final KsqlExecutionContext executionContext,
final ServiceContext serviceContext
) {
final DescribeFunction describeFunction = statement.getStatement();
final FunctionName functionName = FunctionName.of(describeFunction.getFunctionName());
if (executionContext.getMetaStore().isAggregate(functionName)) {
return StatementExecutorResponse.handled(Optional.of(
describeAggregateFunction(executionContext, functionName,
statement.getMaskedStatementText())));
}
if (executionContext.getMetaStore().isTableFunction(functionName)) {
return StatementExecutorResponse.handled(Optional.of(
describeTableFunction(executionContext, functionName,
statement.getMaskedStatementText())));
}
return StatementExecutorResponse.handled(Optional.of(
describeNonAggregateFunction(executionContext, functionName,
statement.getMaskedStatementText())));
} | @Test
public void shouldDescribeUDTF() {
// When:
final FunctionDescriptionList functionList = (FunctionDescriptionList)
CustomExecutors.DESCRIBE_FUNCTION.execute(
engine.configure("DESCRIBE FUNCTION TEST_UDTF1;"),
mock(SessionProperties.class),
engine.getEngine(),
engine.getServiceContext()
).getEntity().orElseThrow(IllegalStateException::new);
// Then:
assertThat(functionList, new TypeSafeMatcher<FunctionDescriptionList>() {
@Override
protected boolean matchesSafely(final FunctionDescriptionList item) {
return item.getName().equals("TEST_UDTF1")
&& item.getType().equals(FunctionType.TABLE);
}
@Override
public void describeTo(final Description description) {
description.appendText(functionList.getName());
}
});
assertThat(functionList.getFunctions(), hasSize(2));
FunctionInfo expected1 = new FunctionInfo(
Arrays.asList(new ArgumentInfo("foo", "INT", "", false)),
"INT", "test_udtf1 int");
assertTrue(functionList.getFunctions().contains(expected1));
FunctionInfo expected2 = new FunctionInfo(
Arrays.asList(new ArgumentInfo("foo", "DOUBLE", "", false)),
"DOUBLE", "test_udtf1 double");
assertTrue(functionList.getFunctions().contains(expected2));
} |
public static int[] applyOrder(int[] arr, int[] order) {
if (order.length > arr.length)
throw new IllegalArgumentException("sort order must not be shorter than array");
int[] result = new int[order.length];
for (int i = 0; i < result.length; ++i)
result[i] = arr[order[i]];
return result;
} | @Test
public void testApplyOrder() {
assertEquals(from(0, 6, 3, 1, 4), from(ArrayUtil.applyOrder(new int[]{3, 4, 6, 0, 1}, new int[]{3, 2, 0, 4, 1})));
} |
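The expected array follows directly from result[i] = arr[order[i]]; tracing the test's inputs:
int[] result = ArrayUtil.applyOrder(new int[]{3, 4, 6, 0, 1}, new int[]{3, 2, 0, 4, 1});
// result[0] = arr[3] = 0, result[1] = arr[2] = 6, result[2] = arr[0] = 3,
// result[3] = arr[4] = 1, result[4] = arr[1] = 4  ->  {0, 6, 3, 1, 4}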
@VisibleForTesting
protected static MapOutputFile renameMapOutputForReduce(JobConf conf,
TaskAttemptId mapId, MapOutputFile subMapOutputFile) throws IOException {
FileSystem localFs = FileSystem.getLocal(conf);
// move map output to reduce input
Path mapOut = subMapOutputFile.getOutputFile();
FileStatus mStatus = localFs.getFileStatus(mapOut);
Path reduceIn = subMapOutputFile.getInputFileForWrite(
TypeConverter.fromYarn(mapId).getTaskID(), mStatus.getLen());
Path mapOutIndex = subMapOutputFile.getOutputIndexFile();
Path reduceInIndex = new Path(reduceIn.toString() + ".index");
if (LOG.isDebugEnabled()) {
LOG.debug("Renaming map output file for task attempt "
+ mapId.toString() + " from original location " + mapOut.toString()
+ " to destination " + reduceIn.toString());
}
if (!localFs.mkdirs(reduceIn.getParent())) {
throw new IOException("Mkdirs failed to create "
+ reduceIn.getParent().toString());
}
if (!localFs.rename(mapOut, reduceIn))
throw new IOException("Couldn't rename " + mapOut);
if (!localFs.rename(mapOutIndex, reduceInIndex))
throw new IOException("Couldn't rename " + mapOutIndex);
return new RenamedMapOutputFile(reduceIn);
} | @Test
public void testRenameMapOutputForReduce() throws Exception {
final JobConf conf = new JobConf();
final MROutputFiles mrOutputFiles = new MROutputFiles();
mrOutputFiles.setConf(conf);
// make sure both dirs are distinct
conf.set(MRConfig.LOCAL_DIR, localDirs[0].toString());
final Path mapOut = mrOutputFiles.getOutputFileForWrite(1);
conf.set(MRConfig.LOCAL_DIR, localDirs[1].toString());
final Path mapOutIdx = mrOutputFiles.getOutputIndexFileForWrite(1);
Assert.assertNotEquals("Paths must be different!",
mapOut.getParent(), mapOutIdx.getParent());
// make both dirs part of LOCAL_DIR
conf.setStrings(MRConfig.LOCAL_DIR, localDirs);
final FileContext lfc = FileContext.getLocalFSFileContext(conf);
lfc.create(mapOut, EnumSet.of(CREATE)).close();
lfc.create(mapOutIdx, EnumSet.of(CREATE)).close();
final JobId jobId = MRBuilderUtils.newJobId(12345L, 1, 2);
final TaskId tid = MRBuilderUtils.newTaskId(jobId, 0, TaskType.MAP);
final TaskAttemptId taid = MRBuilderUtils.newTaskAttemptId(tid, 0);
LocalContainerLauncher.renameMapOutputForReduce(conf, taid, mrOutputFiles);
} |
public Preference<Boolean> getBoolean(@StringRes int prefKey, @BoolRes int defaultValue) {
return mRxSharedPreferences.getBoolean(
mResources.getString(prefKey), mResources.getBoolean(defaultValue));
} | @Test
public void testDoesNotSetupFallbackDictionaryToFalseIfWasSetBeforeToTrue() {
SharedPrefsHelper.setPrefsValue("settings_key_always_use_fallback_user_dictionary", true);
SharedPrefsHelper.setPrefsValue(RxSharedPrefs.CONFIGURATION_VERSION, 11);
SharedPreferences preferences =
PreferenceManager.getDefaultSharedPreferences(getApplicationContext());
Assert.assertTrue(preferences.contains("settings_key_always_use_fallback_user_dictionary"));
new RxSharedPrefs(getApplicationContext(), this::testRestoreFunction);
Assert.assertTrue(preferences.contains("settings_key_always_use_fallback_user_dictionary"));
Assert.assertTrue(
preferences.getBoolean("settings_key_always_use_fallback_user_dictionary", false));
} |
@Override
public Mono<SetRegistrationLockResponse> setRegistrationLock(final SetRegistrationLockRequest request) {
final AuthenticatedDevice authenticatedDevice = AuthenticationUtil.requireAuthenticatedPrimaryDevice();
if (request.getRegistrationLock().isEmpty()) {
throw Status.INVALID_ARGUMENT.withDescription("Registration lock secret must not be empty").asRuntimeException();
}
return Mono.fromFuture(() -> accountsManager.getByAccountIdentifierAsync(authenticatedDevice.accountIdentifier()))
.map(maybeAccount -> maybeAccount.orElseThrow(Status.UNAUTHENTICATED::asRuntimeException))
.flatMap(account -> {
// In the previous REST-based API, clients would send hex strings directly. For backward compatibility, we
// convert the registration lock secret to a lowercase hex string before turning it into a salted hash.
final SaltedTokenHash credentials =
SaltedTokenHash.generateFor(HexFormat.of().withLowerCase().formatHex(request.getRegistrationLock().toByteArray()));
return Mono.fromFuture(() -> accountsManager.updateAsync(account,
a -> a.setRegistrationLock(credentials.hash(), credentials.salt())));
})
.map(ignored -> SetRegistrationLockResponse.newBuilder().build());
} | @Test
void setRegistrationLock() {
final Account account = mock(Account.class);
when(accountsManager.getByAccountIdentifierAsync(AUTHENTICATED_ACI))
.thenReturn(CompletableFuture.completedFuture(Optional.of(account)));
final byte[] registrationLockSecret = TestRandomUtil.nextBytes(32);
final SetRegistrationLockResponse ignored =
authenticatedServiceStub().setRegistrationLock(SetRegistrationLockRequest.newBuilder()
.setRegistrationLock(ByteString.copyFrom(registrationLockSecret))
.build());
final ArgumentCaptor<String> hashCaptor = ArgumentCaptor.forClass(String.class);
final ArgumentCaptor<String> saltCaptor = ArgumentCaptor.forClass(String.class);
verify(account).setRegistrationLock(hashCaptor.capture(), saltCaptor.capture());
final SaltedTokenHash registrationLock = new SaltedTokenHash(hashCaptor.getValue(), saltCaptor.getValue());
assertTrue(registrationLock.verify(HexFormat.of().formatHex(registrationLockSecret)));
} |
public final void tag(I input, ScopedSpan span) {
if (input == null) throw new NullPointerException("input == null");
if (span == null) throw new NullPointerException("span == null");
if (span.isNoop()) return;
tag(span, input, span.context());
} | @Test void tag_customizer_withNullContext() {
when(parseValue.apply(eq(input), isNull())).thenReturn("value");
tag.tag(input, null, customizer);
verify(parseValue).apply(input, null);
verifyNoMoreInteractions(parseValue); // doesn't parse twice
verify(customizer).tag("key", "value");
verifyNoMoreInteractions(customizer); // doesn't tag twice
} |
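The null guards are worth pinning too; a sketch in the same mock style (assertThrows assumed available in this test class):
@Test void tag_nullInput() {
    ScopedSpan span = mock(ScopedSpan.class);
    // the `input == null` guard fires before isNoop() or parsing is consulted
    assertThrows(NullPointerException.class, () -> tag.tag(null, span));
    verifyNoInteractions(parseValue);
}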
@Override
public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException {
if(directory.isRoot()) {
return new AttributedList<Path>(Collections.singletonList(
new MantaAccountHomeInfo(session.getHost().getCredentials().getUsername(), session.getHost().getDefaultPath()).getNormalizedHomePath()));
}
final AttributedList<Path> children = new AttributedList<>();
final Iterator<MantaObject> objectsIter;
try {
objectsIter = session.getClient().listObjects(directory.getAbsolute()).iterator();
}
catch(MantaObjectException e) {
throw new MantaExceptionMappingService().map("Listing directory {0} failed", e, directory);
}
catch(MantaClientHttpResponseException e) {
throw new MantaHttpExceptionMappingService().map("Listing directory {0} failed", e, directory);
}
catch(IOException e) {
throw new DefaultIOExceptionMappingService().map("Listing directory {0} failed", e);
}
final MantaObjectAttributeAdapter adapter = new MantaObjectAttributeAdapter(session);
while(objectsIter.hasNext()) {
MantaObject o = objectsIter.next();
final Path file = new Path(directory, PathNormalizer.name(o.getPath()),
EnumSet.of(o.isDirectory() ? Path.Type.directory : Path.Type.file), adapter.toAttributes(o)
);
children.add(file);
listener.chunk(directory, children);
}
return children;
} | @Test(expected = NotfoundException.class)
public void testListNotFoundFolder() throws Exception {
new MantaListService(session).list(new Path(
new MantaHomeFinderFeature(session.getHost()).find(), "notfound", EnumSet.of(Path.Type.directory)), new DisabledListProgressListener());
} |
public Set<String> getPluginsThatSupportsGetUserRoles() {
return getPluginsWithCapabilities(Capabilities::canGetUserRoles);
} | @Test
public void shouldGetPluginsThatSupportsGetUserRolesCall() {
when(plugin1.getCapabilities().canGetUserRoles()).thenReturn(true);
Set<String> pluginsThatSupportsGetUserRoles = store.getPluginsThatSupportsGetUserRoles();
assertThat(pluginsThatSupportsGetUserRoles.size(), is(1));
assertThat(pluginsThatSupportsGetUserRoles, contains(plugin1.getDescriptor().id()));
} |
@Override
public UnitExtension getUnitExtension(String extensionName) {
if (extensionName.equals("CommanderExtension")) {
return Optional.ofNullable(unitExtension).orElseGet(() -> new Commander(this));
}
return super.getUnitExtension(extensionName);
} | @Test
void getUnitExtension() {
final var unit = new CommanderUnit("CommanderUnitName");
assertNull(unit.getUnitExtension("SoldierExtension"));
assertNull(unit.getUnitExtension("SergeantExtension"));
assertNotNull(unit.getUnitExtension("CommanderExtension"));
} |
@Override
public void apply(final Record<KOut, Change<ValueAndTimestamp<VOut>>> record) {
@SuppressWarnings("rawtypes") final ProcessorNode prev = context.currentNode();
context.setCurrentNode(myNode);
try {
context.forward(
record
.withValue(
new Change<>(
getValueOrNull(record.value().newValue),
getValueOrNull(record.value().oldValue),
record.value().isLatest))
.withTimestamp(
record.value().newValue != null ? record.value().newValue.timestamp()
: record.timestamp())
);
} finally {
context.setCurrentNode(prev);
}
} | @Test
public void shouldForwardValueTimestampIfNewValueExists() {
@SuppressWarnings("unchecked")
final InternalProcessorContext<String, Change<String>> context = mock(InternalProcessorContext.class);
doNothing().when(context).forward(
new Record<>(
"key",
new Change<>("newValue", "oldValue"),
42L));
new TimestampedCacheFlushListener<>(context).apply(
new Record<>(
"key",
new Change<>(
ValueAndTimestamp.make("newValue", 42L),
ValueAndTimestamp.make("oldValue", 21L)),
73L));
verify(context, times(2)).setCurrentNode(null);
} |
List<MappingField> resolveFields(
@Nonnull String[] externalName,
@Nullable String dataConnectionName,
@Nonnull Map<String, String> options,
@Nonnull List<MappingField> userFields,
boolean stream
) {
Predicate<MappingField> pkColumnName = Options.getPkColumnChecker(options, stream);
Map<String, DocumentField> dbFields = readFields(externalName, dataConnectionName, options, stream);
List<MappingField> resolvedFields = new ArrayList<>();
if (userFields.isEmpty()) {
for (DocumentField documentField : dbFields.values()) {
MappingField mappingField = new MappingField(
documentField.columnName,
resolveType(documentField.columnType),
documentField.columnName,
documentField.columnType.name()
);
mappingField.setPrimaryKey(pkColumnName.test(mappingField));
resolvedFields.add(mappingField);
}
} else {
for (MappingField f : userFields) {
String prefixIfStream = stream ? "fullDocument." : "";
String nameInMongo = f.externalName() == null ? prefixIfStream + f.name() : f.externalName();
DocumentField documentField = getField(dbFields, f, stream);
if (documentField == null) {
throw new IllegalArgumentException("Could not resolve field with name " + nameInMongo);
}
MappingField mappingField = new MappingField(f.name(), f.type(), documentField.columnName,
documentField.columnType.name());
mappingField.setPrimaryKey(pkColumnName.test(mappingField));
validateType(f, documentField);
resolvedFields.add(mappingField);
}
}
return resolvedFields;
} | @Test
public void testFailsOnNoDatabase() {
String collectionName = "people_3";
FieldResolver resolver = new FieldResolver(null);
Map<String, String> readOpts = new HashMap<>();
readOpts.put("connectionString", mongoContainer.getConnectionString());
try {
resolver.resolveFields(new String[]{collectionName}, null, readOpts, singletonList(
new MappingField("id", QueryDataType.MAP).setExternalName("_id")
), false);
fail("Expected IllegalArgumentException when no database is provided");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage()).isEqualTo("Database must be provided in the mapping or data connection.");
}
} |
public void hasLength(int expectedLength) {
checkArgument(expectedLength >= 0, "expectedLength(%s) must be >= 0", expectedLength);
check("length()").that(checkNotNull(actual).length()).isEqualTo(expectedLength);
} | @Test
public void hasLengthZero() {
assertThat("").hasLength(0);
} |
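The precondition is equally testable; a minimal sketch:
@Test
public void hasLengthNegative() {
    // checkArgument rejects a negative expectedLength before any comparison
    assertThrows(IllegalArgumentException.class, () -> assertThat("").hasLength(-1));
}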
public BatchEventData getBatchEventData() {
return batchEventData;
} | @Test
public void testGetBatchEventData() {
assertEquals(batchEventData, batchIMapEvent.getBatchEventData());
} |
@Override
public void execute(ComputationStep.Context context) {
new DepthTraversalTypeAwareCrawler(
new TypeAwareVisitorAdapter(CrawlerDepthLimit.PROJECT, PRE_ORDER) {
@Override
public void visitProject(Component project) {
executeForProject(project);
}
}).visit(treeRootHolder.getRoot());
} | @Test
void rawMeasure_is_updated_if_present_and_new_measures_are_created_if_project_has_measure_for_metric_of_condition() {
int rawValue = 3;
Condition equals2Condition = createLessThanCondition(INT_METRIC_1, "2");
Measure rawMeasure = newMeasureBuilder().create(rawValue, null);
qualityGateHolder.setQualityGate(new QualityGate(SOME_QG_UUID, SOME_QG_NAME, of(equals2Condition)));
measureRepository.addRawMeasure(PROJECT_REF, INT_METRIC_1_KEY, rawMeasure);
underTest.execute(new TestComputationStepContext());
Optional<Measure> addedRawMeasure = measureRepository.getAddedRawMeasure(PROJECT_COMPONENT, INT_METRIC_1_KEY);
assertThat(addedRawMeasure)
.hasQualityGateLevel(OK)
.hasQualityGateText(dumbResultTextAnswer(equals2Condition, OK, rawValue));
assertThat(getAlertStatusMeasure())
.hasQualityGateLevel(OK)
.hasQualityGateText(dumbResultTextAnswer(equals2Condition, OK, rawValue));
assertThat(getQGDetailsMeasure().get())
.hasValue(new QualityGateDetailsData(OK, of(new EvaluatedCondition(equals2Condition, OK, rawValue)), false).toJson());
QualityGateStatusHolderAssertions.assertThat(qualityGateStatusHolder)
.hasStatus(QualityGateStatus.OK)
.hasConditionCount(1)
.hasCondition(equals2Condition, ConditionStatus.EvaluationStatus.OK, String.valueOf(rawValue));
} |
public void addChild(Entry entry) {
childEntries.add(entry);
entry.setParent(this);
} | @Test
public void findsParentComponent() {
Component component = mock(Component.class);
Entry structureWithEntry = new Entry();
new EntryAccessor().setComponent(structureWithEntry, component);
final Entry child = new Entry();
structureWithEntry.addChild(child);
assertThat(new EntryAccessor().getAncestorComponent(child), equalTo(component));
} |
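addChild also wires the child-to-parent back-reference; a sketch assuming a matching getParent() accessor (hypothetical):
@Test
public void addChildSetsParent() {
    final Entry parent = new Entry();
    final Entry child = new Entry();
    parent.addChild(child);
    // setParent(this) implies the parent is readable back; getParent() is an assumed accessor
    assertThat(child.getParent(), equalTo(parent));
}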
@Nonnull
public static <K, V> Sink<ChangeRecord> map(
@Nonnull String mapName,
@Nonnull FunctionEx<? super ChangeRecord, ? extends K> keyFn,
@Nonnull FunctionEx<? super ChangeRecord, ? extends V> valueFn
) {
String name = "mapCdcSink(" + mapName + ')';
return sink(name, mapName, null, keyFn, valueFn);
} | @Test
@SuppressWarnings({"rawtypes", "unchecked"})
public void reordering() {
SupplierEx[] records = {
SYNC1,
UPDATE1,
() -> changeRecord(10, UPDATE, UPDATE1.get().key().toJson(), null,
UPDATE1.get().value().toJson().replace("sthomas@acme.com", "sthomas2@acme.com")),
() -> changeRecord(11, UPDATE, UPDATE1.get().key().toJson(), null,
UPDATE1.get().value().toJson().replace("sthomas@acme.com", "sthomas3@acme.com")),
() -> changeRecord(12, UPDATE, UPDATE1.get().key().toJson(), null,
UPDATE1.get().value().toJson().replace("sthomas@acme.com", "sthomas4@acme.com")),
() -> changeRecord(13, UPDATE, UPDATE1.get().key().toJson(), null,
UPDATE1.get().value().toJson().replace("sthomas@acme.com", "sthomas5@acme.com"))
};
p.readFrom(items(records))
.rebalance()
.map(r -> r)
.writeTo(localSync());
execute().join();
assertMap(hz(), "sthomas5@acme.com", null);
hz().getMap(MAP).destroy();
} |
@Override
public void handle(SeckillWebMockRequestDTO request) {
// initialize the state machine
stateMachineService.initStateMachine(request.getSeckillId());
// initialize the stock quantity
// use the state machine to control the activity state
if (!stateMachineService.feedMachine(Events.ACTIVITY_RESET, request.getSeckillId())) {
throw new RuntimeException("The activity has not ended yet; please try again after it ends");
}
stateMachineService.feedMachine(Events.ACTIVITY_START, request.getSeckillId());
} | @Test
public void shouldThrowExceptionWhenActivityNotEnded() {
SeckillWebMockRequestDTO request = new SeckillWebMockRequestDTO();
request.setSeckillId(123L);
when(stateMachineService.feedMachine(Events.ACTIVITY_RESET, request.getSeckillId())).thenReturn(false);
try {
stateMachinePreRequestHandler.handle(request);
fail("Expected RuntimeException because the activity has not ended");
} catch (RuntimeException e) {
verify(stateMachineService, times(1)).initStateMachine(request.getSeckillId());
verify(stateMachineService, times(1)).feedMachine(Events.ACTIVITY_RESET, request.getSeckillId());
verify(stateMachineService, times(0)).feedMachine(Events.ACTIVITY_START, request.getSeckillId());
}
} |
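The happy path mirrors the same mock setup; a sketch:
@Test
public void shouldStartActivityWhenResetSucceeds() {
    SeckillWebMockRequestDTO request = new SeckillWebMockRequestDTO();
    request.setSeckillId(123L);
    when(stateMachineService.feedMachine(Events.ACTIVITY_RESET, request.getSeckillId())).thenReturn(true);
    stateMachinePreRequestHandler.handle(request);
    // reset succeeded, so the handler feeds ACTIVITY_START next
    verify(stateMachineService).feedMachine(Events.ACTIVITY_START, request.getSeckillId());
}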
@Override
public AnalysisPhase getAnalysisPhase() {
return ANALYSIS_PHASE;
} | @Test
public void testGetAnalysisPhase() {
assertEquals(AnalysisPhase.INFORMATION_COLLECTION, instance.getAnalysisPhase());
} |
@Override
public boolean hasAnySuperAdmin(Collection<Long> ids) {
if (CollectionUtil.isEmpty(ids)) {
return false;
}
RoleServiceImpl self = getSelf();
return ids.stream().anyMatch(id -> {
RoleDO role = self.getRoleFromCache(id);
return role != null && RoleCodeEnum.isSuperAdmin(role.getCode());
});
} | @Test
public void testHasAnySuperAdmin_true() {
try (MockedStatic<SpringUtil> springUtilMockedStatic = mockStatic(SpringUtil.class)) {
springUtilMockedStatic.when(() -> SpringUtil.getBean(eq(RoleServiceImpl.class)))
.thenReturn(roleService);
// mock data
RoleDO dbRole = randomPojo(RoleDO.class).setCode("super_admin");
roleMapper.insert(dbRole);
// prepare parameters
Long id = dbRole.getId();
// invoke and assert
assertTrue(roleService.hasAnySuperAdmin(singletonList(id)));
}
} |
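The empty-input short circuit needs no mocks; a minimal sketch:
@Test
public void testHasAnySuperAdmin_emptyCollection() {
    // CollectionUtil.isEmpty(ids) returns true, so the method answers false without touching the cache
    assertFalse(roleService.hasAnySuperAdmin(Collections.emptyList()));
}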
public static ParamType getSchemaFromType(final Type type) {
return getSchemaFromType(type, JAVA_TO_ARG_TYPE);
} | @Test
public void shouldGetFloatSchemaForDoubleClass() {
assertThat(
UdfUtil.getSchemaFromType(Double.class),
equalTo(ParamTypes.DOUBLE)
);
} |
@Override
public Map<String, Object> encode(Object object) throws EncodeException {
if (object == null) {
return Collections.emptyMap();
}
ObjectParamMetadata metadata =
classToMetadata.computeIfAbsent(object.getClass(), ObjectParamMetadata::parseObjectType);
return metadata.objectFields.stream()
.map(field -> this.FieldValuePair(object, field))
.filter(fieldObjectPair -> fieldObjectPair.right.isPresent())
.collect(Collectors.toMap(this::fieldName,
fieldObjectPair -> fieldObjectPair.right.get()));
} | @Test
void defaultEncoder_acceptNullValue() {
assertThat(encoder.encode(null)).as("Empty map should be returned")
.isEqualTo(Collections.EMPTY_MAP);
} |
@DeleteMapping
@TpsControl(pointName = "NamingServiceDeregister", name = "HttpNamingServiceDeregister")
@Secured(action = ActionTypes.WRITE)
public String remove(@RequestParam(defaultValue = Constants.DEFAULT_NAMESPACE_ID) String namespaceId,
@RequestParam String serviceName) throws Exception {
getServiceOperator().delete(namespaceId, serviceName);
NotifyCenter.publishEvent(
new DeregisterServiceTraceEvent(System.currentTimeMillis(), namespaceId,
NamingUtils.getGroupName(serviceName), NamingUtils.getServiceName(serviceName)));
return "ok";
} | @Test
void testRemove() {
try {
String res = serviceController.remove(TEST_NAMESPACE, TEST_SERVICE_NAME);
assertEquals("ok", res);
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
} |
@Override
public ConfigDO getConfigByKey(String key) {
return configMapper.selectByKey(key);
} | @Test
public void testGetConfigByKey() {
// mock data
ConfigDO dbConfig = randomConfigDO();
configMapper.insert(dbConfig); // @Sql: insert an existing row first
// prepare parameters
String key = dbConfig.getConfigKey();
// invoke
ConfigDO config = configService.getConfigByKey(key);
// assert
assertNotNull(config);
assertPojoEquals(dbConfig, config);
} |
@Override
public int getOrder() {
return -1;
} | @Test
void shouldOrderToMinusOne() {
ModifyServersOpenApiFilter modifyServersOpenApiFilter = new ModifyServersOpenApiFilter();
assertEquals(modifyServersOpenApiFilter.getOrder(), -1);
} |
@SuppressWarnings("unchecked")
public static <T> Collection<T> getServiceInstances(final Class<T> serviceInterface) {
return (Collection<T>) getRegisteredSPI(serviceInterface).getServiceInstances();
} | @Test
void assertGetServiceInstancesWithMultitonSPI() {
Collection<MultitonSPIFixture> actual = ShardingSphereServiceLoader.getServiceInstances(MultitonSPIFixture.class);
assertThat(actual.size(), is(1));
MultitonSPIFixture actualInstance = actual.iterator().next();
assertThat(actualInstance, instanceOf(MultitonSPIFixtureImpl.class));
assertThat(actualInstance, not(ShardingSphereServiceLoader.getServiceInstances(MultitonSPIFixture.class).iterator().next()));
} |
public void schedule(ExecutableMethod<?, ?> method) {
if (hasParametersOutsideOfJobContext(method.getTargetMethod())) {
throw new IllegalStateException("Methods annotated with " + Recurring.class.getName() + " can only have zero parameters or a single parameter of type JobContext.");
}
String id = getId(method);
String cron = getCron(method);
String interval = getInterval(method);
if (StringUtils.isNullOrEmpty(cron) && StringUtils.isNullOrEmpty(interval))
throw new IllegalArgumentException("Either cron or interval attribute is required.");
if (isNotNullOrEmpty(cron) && isNotNullOrEmpty(interval))
throw new IllegalArgumentException("Both cron and interval attribute provided. Only one is allowed.");
if (Recurring.RECURRING_JOB_DISABLED.equals(cron) || Recurring.RECURRING_JOB_DISABLED.equals(interval)) {
if (id == null) {
LOGGER.warn("You are trying to disable a recurring job using placeholders but did not define an id.");
} else {
jobScheduler.deleteRecurringJob(id);
}
} else {
JobDetails jobDetails = getJobDetails(method);
ZoneId zoneId = getZoneId(method);
if (isNotNullOrEmpty(cron)) {
jobScheduler.scheduleRecurrently(id, jobDetails, CronExpression.create(cron), zoneId);
} else {
jobScheduler.scheduleRecurrently(id, jobDetails, new Interval(interval), zoneId);
}
}
} | @Test
void beansWithMethodsAnnotatedWithRecurringAnnotationCronAndIntervalWillThrowException() {
final ExecutableMethod executableMethod = mock(ExecutableMethod.class);
final Method method = getRequiredMethod(MyServiceWithRecurringJob.class, "myRecurringMethod");
when(executableMethod.getTargetMethod()).thenReturn(method);
when(executableMethod.stringValue(Recurring.class, "id")).thenReturn(Optional.of("my-recurring-job"));
when(executableMethod.stringValue(Recurring.class, "cron")).thenReturn(Optional.of("*/15 * * * *"));
when(executableMethod.stringValue(Recurring.class, "interval")).thenReturn(Optional.of("PT1M"));
assertThatThrownBy(() -> jobRunrRecurringJobScheduler.schedule(executableMethod)).isInstanceOf(IllegalArgumentException.class);
} |
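The mirror-image failure (neither cron nor interval) can be sketched with the same stubs, assuming absent annotation members surface as Optional.empty():
@Test
void beansWithNeitherCronNorIntervalWillThrowException() {
    final ExecutableMethod executableMethod = mock(ExecutableMethod.class);
    final Method method = getRequiredMethod(MyServiceWithRecurringJob.class, "myRecurringMethod");
    when(executableMethod.getTargetMethod()).thenReturn(method);
    when(executableMethod.stringValue(Recurring.class, "id")).thenReturn(Optional.of("my-recurring-job"));
    when(executableMethod.stringValue(Recurring.class, "cron")).thenReturn(Optional.empty());
    when(executableMethod.stringValue(Recurring.class, "interval")).thenReturn(Optional.empty());
    // "Either cron or interval attribute is required." is the expected failure
    assertThatThrownBy(() -> jobRunrRecurringJobScheduler.schedule(executableMethod)).isInstanceOf(IllegalArgumentException.class);
}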
public static IntrinsicMapTaskExecutor withSharedCounterSet(
List<Operation> operations,
CounterSet counters,
ExecutionStateTracker executionStateTracker) {
return new IntrinsicMapTaskExecutor(operations, counters, executionStateTracker);
} | @Test
public void testExceptionInFinishAbortsAllOperations() throws Exception {
Operation o1 = Mockito.mock(Operation.class);
Operation o2 = Mockito.mock(Operation.class);
Operation o3 = Mockito.mock(Operation.class);
Mockito.doThrow(new Exception("in finish")).when(o2).finish();
ExecutionStateTracker stateTracker = ExecutionStateTracker.newForTest();
try (IntrinsicMapTaskExecutor executor =
IntrinsicMapTaskExecutor.withSharedCounterSet(
Arrays.<Operation>asList(o1, o2, o3), counterSet, stateTracker)) {
executor.execute();
fail("Should have thrown");
} catch (Exception e) {
InOrder inOrder = Mockito.inOrder(o1, o2, o3);
inOrder.verify(o3).start();
inOrder.verify(o2).start();
inOrder.verify(o1).start();
inOrder.verify(o1).finish();
inOrder.verify(o2).finish();
// Order of abort doesn't matter
Mockito.verify(o1).abort();
Mockito.verify(o2).abort();
Mockito.verify(o3).abort();
Mockito.verifyNoMoreInteractions(o1, o2, o3);
}
} |
public static String getBasicApiJsonSchema() throws JsonProcessingException {
ObjectMapper mapper = createSchemaObjectMapper();
JsonSchemaGenerator schemaGen = new JsonSchemaGenerator(mapper);
DefaultPackageScanClassResolver packageScanClassResolver = new DefaultPackageScanClassResolver();
Set<Class<?>> schemaClasses = new HashSet<>();
// get non-abstract extensions of AbstractDTOBase
schemaClasses.addAll(packageScanClassResolver.findByFilter(
type -> !Modifier.isAbstract(type.getModifiers()) && AbstractDTOBase.class.isAssignableFrom(type),
"org.apache.camel.component.salesforce.api.dto"));
Set<JsonSchema> allSchemas = new HashSet<>();
for (Class<?> aClass : schemaClasses) {
JsonSchema jsonSchema = schemaGen.generateSchema(aClass);
allSchemas.add(jsonSchema);
}
return getJsonSchemaString(mapper, allSchemas, API_DTO_ID);
} | @Test
public void getBasicApiJsonSchema() throws Exception {
// create basic api dto schema
LOG.info("Basic Api Schema...");
String basicApiJsonSchema = JsonUtils.getBasicApiJsonSchema();
LOG.info(basicApiJsonSchema);
// parse schema to validate
ObjectMapper objectMapper = JsonUtils.createObjectMapper();
JsonSchema jsonSchema = objectMapper.readValue(basicApiJsonSchema, JsonSchema.class);
assertTrue(jsonSchema.isObjectSchema());
assertFalse(((ObjectSchema) jsonSchema).getOneOf().isEmpty());
} |
@Override
public PageResult<ProductCommentDO> getCommentPage(AppCommentPageReqVO pageVO, Boolean visible) {
return productCommentMapper.selectPage(pageVO, visible);
} | @Test
public void testGetCommentPage_success() {
// prepare parameters
ProductCommentDO productComment = randomPojo(ProductCommentDO.class, o -> {
o.setUserNickname("王二狗");
o.setSpuName("感冒药");
o.setScores(ProductCommentScoresEnum.FOUR.getScores());
o.setReplyStatus(Boolean.TRUE);
o.setVisible(Boolean.TRUE);
o.setId(generateId());
o.setUserId(generateId());
o.setAnonymous(Boolean.TRUE);
o.setOrderId(generateId());
o.setOrderItemId(generateId());
o.setSpuId(generateId());
o.setSkuId(generateId());
o.setDescriptionScores(ProductCommentScoresEnum.FOUR.getScores());
o.setBenefitScores(ProductCommentScoresEnum.FOUR.getScores());
o.setContent("真好吃");
o.setReplyUserId(generateId());
o.setReplyContent("确实");
o.setReplyTime(LocalDateTime.now());
o.setCreateTime(LocalDateTime.now());
o.setUpdateTime(LocalDateTime.now());
});
productCommentMapper.insert(productComment);
Long orderId = productComment.getOrderId();
Long spuId = productComment.getSpuId();
        // test userNickname mismatch
        productCommentMapper.insert(cloneIgnoreId(productComment, o -> o.setUserNickname("王三").setScores(ProductCommentScoresEnum.ONE.getScores())));
        // test orderId mismatch
        productCommentMapper.insert(cloneIgnoreId(productComment, o -> o.setOrderId(generateId())));
        // test spuId mismatch
        productCommentMapper.insert(cloneIgnoreId(productComment, o -> o.setSpuId(generateId())));
        // test spuName mismatch
        productCommentMapper.insert(cloneIgnoreId(productComment, o -> o.setSpuName("感康")));
        // test scores mismatch
        productCommentMapper.insert(cloneIgnoreId(productComment, o -> o.setScores(ProductCommentScoresEnum.ONE.getScores())));
        // test replyStatus mismatch
        productCommentMapper.insert(cloneIgnoreId(productComment, o -> o.setReplyStatus(Boolean.FALSE)));
        // test visible mismatch
        productCommentMapper.insert(cloneIgnoreId(productComment, o -> o.setVisible(Boolean.FALSE)));
        // invoke the query
ProductCommentPageReqVO productCommentPageReqVO = new ProductCommentPageReqVO();
productCommentPageReqVO.setUserNickname("王二");
productCommentPageReqVO.setOrderId(orderId);
productCommentPageReqVO.setSpuId(spuId);
productCommentPageReqVO.setSpuName("感冒药");
productCommentPageReqVO.setScores(ProductCommentScoresEnum.FOUR.getScores());
productCommentPageReqVO.setReplyStatus(Boolean.TRUE);
PageResult<ProductCommentDO> commentPage = productCommentService.getCommentPage(productCommentPageReqVO);
PageResult<ProductCommentRespVO> result = BeanUtils.toBean(productCommentMapper.selectPage(productCommentPageReqVO),
ProductCommentRespVO.class);
assertEquals(result.getTotal(), commentPage.getTotal());
PageResult<ProductCommentDO> all = productCommentService.getCommentPage(new ProductCommentPageReqVO());
assertEquals(8, all.getTotal());
        // test fetching a page of comments across all products
PageResult<ProductCommentDO> result1 = productCommentService.getCommentPage(new AppCommentPageReqVO(), Boolean.TRUE);
assertEquals(7, result1.getTotal());
        // test fetching a page of mediocre comments across all products
PageResult<ProductCommentDO> result2 = productCommentService.getCommentPage(new AppCommentPageReqVO().setType(AppCommentPageReqVO.MEDIOCRE_COMMENT), Boolean.TRUE);
assertEquals(2, result2.getTotal());
        // test fetching a page of mediocre comments for the given spuId
PageResult<ProductCommentDO> result3 = productCommentService.getCommentPage(new AppCommentPageReqVO().setSpuId(spuId).setType(AppCommentPageReqVO.MEDIOCRE_COMMENT), Boolean.TRUE);
assertEquals(2, result3.getTotal());
        // test the per-tab counts of the comment page (currently disabled)
//AppCommentStatisticsRespVO tabsCount = productCommentService.getCommentStatistics(spuId, Boolean.TRUE);
//assertEquals(4, tabsCount.getGoodCount());
//assertEquals(2, tabsCount.getMediocreCount());
//assertEquals(0, tabsCount.getNegativeCount());
} |
protected static void checkPayload(Channel channel, long size) throws IOException {
int payload = getPayload(channel);
boolean overPayload = isOverPayload(payload, size);
if (overPayload) {
ExceedPayloadLimitException e = new ExceedPayloadLimitException(
"Data length too large: " + size + ", max payload: " + payload + ", channel: " + channel);
logger.error(TRANSPORT_EXCEED_PAYLOAD_LIMIT, "", "", e.getMessage(), e);
throw e;
}
} | @Test
    void testCheckPayloadMinusPayloadNoLimit() throws Exception {
Channel channel = mock(Channel.class);
given(channel.getUrl()).willReturn(URL.valueOf("dubbo://1.1.1.1?payload=-1"));
AbstractCodec.checkPayload(channel, 15 * 1024 * 1024);
verify(channel, VerificationModeFactory.atLeastOnce()).getUrl();
} |
public Pair<IndexEntry, IndexEntry> lookupLastLogIndexAndPosFromTail() {
final long lastLogIndex = getLastLogIndex();
final long firstLogIndex = getFirstLogIndex();
IndexEntry lastSegmentIndex = null, lastConfIndex = null;
long index = lastLogIndex;
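        // scan backwards from the tail; stop as soon as both the last segment index and the last conf index are found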
while (index >= firstLogIndex) {
final IndexEntry indexEntry = lookupIndex(index);
indexEntry.setLogIndex(index);
if (indexEntry.getLogType() == IndexType.IndexSegment.getType() && lastSegmentIndex == null) {
lastSegmentIndex = indexEntry;
} else if (indexEntry.getLogType() == IndexType.IndexConf.getType() && lastConfIndex == null) {
lastConfIndex = indexEntry;
}
if (lastSegmentIndex != null && lastConfIndex != null) {
break;
}
index--;
}
return Pair.of(lastSegmentIndex, lastConfIndex);
} | @Test
public void testLookupLastLogIndexAndPosFromTail() {
this.indexDB.startServiceManager();
{
this.indexDB.appendIndexAsync(1, 1, IndexType.IndexSegment);
this.indexDB.appendIndexAsync(2, 2, IndexType.IndexSegment);
final Pair<Integer, Long> posPair = this.indexDB.appendIndexAsync(3, 3, IndexType.IndexConf);
this.indexDB.appendIndexAsync(4, 4, IndexType.IndexSegment);
this.indexDB.waitForFlush(posPair.getSecond(), 100);
}
final Pair<IndexFile.IndexEntry, IndexFile.IndexEntry> indexPair = this.indexDB
.lookupLastLogIndexAndPosFromTail();
final IndexFile.IndexEntry lastSegmentIndex = indexPair.getFirst();
final IndexFile.IndexEntry lastConfIndex = indexPair.getSecond();
assert (lastSegmentIndex.getLogIndex() == 4);
assert (lastConfIndex.getLogIndex() == 3);
} |
public static ThriftType fromJSON(String json) {
return JSON.fromJSON(json, ThriftType.class);
} | @Test
public void testParseUnionInfo() throws Exception {
StructType st = (StructType)
StructType.fromJSON("{\"id\": \"STRUCT\", \"children\":[], \"structOrUnionType\": \"UNION\"}");
assertEquals(st.getStructOrUnionType(), StructOrUnionType.UNION);
st = (StructType)
StructType.fromJSON("{\"id\": \"STRUCT\", \"children\":[], \"structOrUnionType\": \"STRUCT\"}");
assertEquals(st.getStructOrUnionType(), StructOrUnionType.STRUCT);
st = (StructType) StructType.fromJSON("{\"id\": \"STRUCT\", \"children\":[]}");
assertEquals(st.getStructOrUnionType(), StructOrUnionType.STRUCT);
st = (StructType)
StructType.fromJSON("{\"id\": \"STRUCT\", \"children\":[], \"structOrUnionType\": \"UNKNOWN\"}");
assertEquals(st.getStructOrUnionType(), StructOrUnionType.UNKNOWN);
} |
public static BadRequestException userNotExists(String userName) {
return new BadRequestException("user not exists for userName:%s", userName);
} | @Test
    public void testUserNotExists() {
BadRequestException userNotExists = BadRequestException.userNotExists("user");
assertEquals("user not exists for userName:user", userNotExists.getMessage());
} |
@Override
public Resource parseResource(HttpServletRequest request, Secured secured) {
if (StringUtils.isNotBlank(secured.resource())) {
return parseSpecifiedResource(secured);
}
String type = secured.signType();
if (!resourceParserMap.containsKey(type)) {
            Loggers.AUTH.warn("Can't find HTTP request resource parser for type {}, falling back to the specified resource parser", type);
return useSpecifiedParserToParse(secured, request);
}
return resourceParserMap.get(type).parse(request, secured);
} | @Test
@Secured(resource = "testResource")
void testParseResourceWithSpecifiedResource() throws NoSuchMethodException {
Secured secured = getMethodSecure("testParseResourceWithSpecifiedResource");
Resource actual = httpProtocolAuthService.parseResource(request, secured);
assertEquals("testResource", actual.getName());
assertEquals(SignType.SPECIFIED, actual.getType());
assertNull(actual.getNamespaceId());
assertNull(actual.getGroup());
assertNull(actual.getProperties());
} |
public void writeEncodedValue(EncodedValue encodedValue) throws IOException {
switch (encodedValue.getValueType()) {
case ValueType.BOOLEAN:
writer.write(Boolean.toString(((BooleanEncodedValue) encodedValue).getValue()));
break;
case ValueType.BYTE:
writer.write(
String.format("0x%x", ((ByteEncodedValue)encodedValue).getValue()));
break;
case ValueType.CHAR:
writer.write(
String.format("0x%x", (int)((CharEncodedValue)encodedValue).getValue()));
break;
case ValueType.SHORT:
writer.write(
String.format("0x%x", ((ShortEncodedValue)encodedValue).getValue()));
break;
case ValueType.INT:
writer.write(
String.format("0x%x", ((IntEncodedValue)encodedValue).getValue()));
break;
case ValueType.LONG:
writer.write(
String.format("0x%x", ((LongEncodedValue)encodedValue).getValue()));
break;
case ValueType.FLOAT:
writer.write(Float.toString(((FloatEncodedValue)encodedValue).getValue()));
break;
case ValueType.DOUBLE:
writer.write(Double.toString(((DoubleEncodedValue)encodedValue).getValue()));
break;
case ValueType.ANNOTATION:
writeAnnotation((AnnotationEncodedValue)encodedValue);
break;
case ValueType.ARRAY:
writeArray((ArrayEncodedValue)encodedValue);
break;
case ValueType.STRING:
writeQuotedString(((StringEncodedValue)encodedValue).getValue());
break;
case ValueType.FIELD:
writeFieldDescriptor(((FieldEncodedValue)encodedValue).getValue());
break;
case ValueType.ENUM:
writeFieldDescriptor(((EnumEncodedValue)encodedValue).getValue());
break;
case ValueType.METHOD:
writeMethodDescriptor(((MethodEncodedValue)encodedValue).getValue());
break;
case ValueType.TYPE:
writeType(((TypeEncodedValue)encodedValue).getValue());
break;
case ValueType.METHOD_TYPE:
writeMethodProtoDescriptor(((MethodTypeEncodedValue)encodedValue).getValue());
break;
case ValueType.METHOD_HANDLE:
writeMethodHandle(((MethodHandleEncodedValue)encodedValue).getValue());
break;
case ValueType.NULL:
writer.write("null");
break;
default:
throw new IllegalArgumentException("Unknown encoded value type");
}
} | @Test
public void testWriteEncodedValue_type() throws IOException {
DexFormattedWriter writer = new DexFormattedWriter(output);
writer.writeEncodedValue(new ImmutableTypeEncodedValue("Ltest/type;"));
Assert.assertEquals(
"Ltest/type;",
output.toString());
} |
@Override
public String toString() {
return MoreObjects.toStringHelper(this)
.add("type", type)
.add("key", key)
.add("value", value instanceof byte[] ? new ByteArraySizeHashPrinter((byte[]) value) : value)
.add("version", version)
.toString();
} | @Test
public void testToString() {
assertThat(stats1.toString(), is(stats1.toString()));
} |
public String toXmlPartial(Object domainObject) {
bombIf(!isAnnotationPresent(domainObject.getClass(), ConfigTag.class), () -> "Object " + domainObject + " does not have a ConfigTag");
Element element = elementFor(domainObject.getClass());
write(domainObject, element, configCache, registry);
if (isAnnotationPresent(domainObject.getClass(), ConfigCollection.class) && domainObject instanceof Collection) {
for (Object item : (Collection<?>) domainObject) {
if (isAnnotationPresent(item.getClass(), ConfigCollection.class) && item instanceof Collection) {
new ExplicitCollectionXmlFieldWithValue(domainObject.getClass(), null, (Collection<?>) item, configCache, registry).populate(element);
continue;
}
Element childElement = elementFor(item.getClass());
element.addContent(childElement);
write(item, childElement, configCache, registry);
}
}
try (ByteArrayOutputStream output = new ByteArrayOutputStream(32 * 1024)) {
XmlUtils.writeXml(element, output);
// FIXME the lack of charset here looks rather suspicious. But unclear how to fix without possible regressions.
// Related to similar issue in GoConfigMigration?
return output.toString();
} catch (IOException e) {
throw bomb("Unable to write xml to String");
}
} | @Test
public void shouldWriteEmptyOnCancelTaskWhenDefined() throws Exception {
String partial = """
<job name="functional">
<tasks>
<exec command="echo">
<oncancel />
</exec>
</tasks>
</job>""";
JobConfig jobConfig = xmlLoader.fromXmlPartial(partial, JobConfig.class);
assertThat(xmlWriter.toXmlPartial(jobConfig), is(partial));
} |
@DELETE
@Path("status")
public Response deleteStatusUpdate(
@HeaderParam(PeerEurekaNode.HEADER_REPLICATION) String isReplication,
@QueryParam("value") String newStatusValue,
@QueryParam("lastDirtyTimestamp") String lastDirtyTimestamp) {
try {
if (registry.getInstanceByAppAndId(app.getName(), id) == null) {
logger.warn("Instance not found: {}/{}", app.getName(), id);
return Response.status(Status.NOT_FOUND).build();
}
InstanceStatus newStatus = newStatusValue == null ? InstanceStatus.UNKNOWN : InstanceStatus.valueOf(newStatusValue);
boolean isSuccess = registry.deleteStatusOverride(app.getName(), id,
newStatus, lastDirtyTimestamp, "true".equals(isReplication));
if (isSuccess) {
logger.info("Status override removed: {} - {}", app.getName(), id);
return Response.ok().build();
} else {
logger.warn("Unable to remove status override: {} - {}", app.getName(), id);
return Response.serverError().build();
}
} catch (Throwable e) {
logger.error("Error removing instance's {} status override", id);
return Response.serverError().build();
}
} | @Test
public void testStatusOverrideDeleteReturnsNotFoundErrorCodeIfInstanceNotRegistered() throws Exception {
        Response response = instanceResource.deleteStatusUpdate("false", InstanceStatus.OUT_OF_SERVICE.name(), "0");
assertThat(response.getStatus(), is(equalTo(Status.NOT_FOUND.getStatusCode())));
} |
@Override
public boolean supportsCoreSQLGrammar() {
return false;
} | @Test
void assertSupportsCoreSQLGrammar() {
assertFalse(metaData.supportsCoreSQLGrammar());
} |
public static Optional<String> getTargetFieldName(final List<Field<?>> fields, Model model) {
return getTargetFields(fields, model).stream().map(KiePMMLNameOpType::getName).findFirst();
} | @Test
void getTargetFieldName() {
final String fieldName = "fieldName";
MiningField.UsageType usageType = MiningField.UsageType.ACTIVE;
MiningField miningField = getMiningField(fieldName, usageType);
final DataField dataField = getDataField(fieldName, OpType.CATEGORICAL, DataType.STRING);
final DataDictionary dataDictionary = new DataDictionary();
dataDictionary.addDataFields(dataField);
MiningSchema miningSchema = new MiningSchema();
miningSchema.addMiningFields(miningField);
final Model model = new RegressionModel();
model.setMiningSchema(miningSchema);
final List<Field<?>> fields = getFieldsFromDataDictionary(dataDictionary);
Optional<String> retrieved = org.kie.pmml.compiler.api.utils.ModelUtils.getTargetFieldName(fields, model);
assertThat(retrieved.isPresent()).isFalse();
usageType = MiningField.UsageType.PREDICTED;
miningField = getMiningField(fieldName, usageType);
miningSchema = new MiningSchema();
miningSchema.addMiningFields(miningField);
model.setMiningSchema(miningSchema);
retrieved = org.kie.pmml.compiler.api.utils.ModelUtils.getTargetFieldName(fields, model);
assertThat(retrieved.isPresent()).isTrue();
assertThat(retrieved.get()).isEqualTo(fieldName);
} |
@SuppressWarnings("unchecked")
@Override
public boolean setFlushListener(final CacheFlushListener<Windowed<K>, V> listener,
final boolean sendOldValues) {
final SessionStore<Bytes, byte[]> wrapped = wrapped();
if (wrapped instanceof CachedStateStore) {
return ((CachedStateStore<byte[], byte[]>) wrapped).setFlushListener(
record -> listener.apply(
record.withKey(SessionKeySchema.from(record.key(), serdes.keyDeserializer(), serdes.topic()))
.withValue(new Change<>(
record.value().newValue != null ? serdes.valueFrom(record.value().newValue) : null,
record.value().oldValue != null ? serdes.valueFrom(record.value().oldValue) : null,
record.value().isLatest
))
),
sendOldValues);
}
return false;
} | @Test
    public void shouldNotSetFlushListenerOnWrappedNonCachingStore() {
setUpWithoutContext();
assertFalse(store.setFlushListener(null, false));
} |
@Override
public Double getDoubleAndRemove(K name) {
return null;
} | @Test
public void testGetDoubleAndRemove() {
assertNull(HEADERS.getDoubleAndRemove("name1"));
} |
@Override
public Option<IndexedRecord> combineAndGetUpdateValue(IndexedRecord currentValue, Schema schema, Properties properties) throws IOException {
// Specific to Postgres: If the updated record has TOASTED columns,
// we will need to keep the previous value for those columns
// see https://debezium.io/documentation/reference/connectors/postgresql.html#postgresql-toasted-values
Option<IndexedRecord> insertOrDeleteRecord = super.combineAndGetUpdateValue(currentValue, schema, properties);
if (insertOrDeleteRecord.isPresent()) {
mergeToastedValuesIfPresent(insertOrDeleteRecord.get(), currentValue);
}
return insertOrDeleteRecord;
} | @Test
public void testMergeWithDelete() throws IOException {
GenericRecord deleteRecord = createRecord(2, Operation.DELETE, 100L);
PostgresDebeziumAvroPayload payload = new PostgresDebeziumAvroPayload(deleteRecord, 100L);
assertTrue(payload.isDeleted(avroSchema, new Properties()));
GenericRecord existingRecord = createRecord(2, Operation.UPDATE, 99L);
Option<IndexedRecord> mergedRecord = payload.combineAndGetUpdateValue(existingRecord, avroSchema);
// expect nothing to be committed to table
assertFalse(mergedRecord.isPresent());
GenericRecord lateRecord = createRecord(2, Operation.DELETE, 98L);
payload = new PostgresDebeziumAvroPayload(lateRecord, 98L);
mergedRecord = payload.combineAndGetUpdateValue(existingRecord, avroSchema);
validateRecord(mergedRecord, 2, Operation.UPDATE, 99L);
} |
@Override
public int getOrder() {
return PluginEnum.HYSTRIX.getCode();
} | @Test
public void testGetOrder() {
assertEquals(hystrixPlugin.getOrder(), PluginEnum.HYSTRIX.getCode());
} |
public JdbcUrl parse(final String jdbcUrl) {
Matcher matcher = CONNECTION_URL_PATTERN.matcher(jdbcUrl);
ShardingSpherePreconditions.checkState(matcher.matches(), () -> new UnrecognizedDatabaseURLException(jdbcUrl, CONNECTION_URL_PATTERN.pattern().replaceAll("%", "%%")));
String authority = matcher.group(AUTHORITY_GROUP_KEY);
ShardingSpherePreconditions.checkNotNull(authority, () -> new UnrecognizedDatabaseURLException(jdbcUrl, CONNECTION_URL_PATTERN.pattern().replaceAll("%", "%%")));
return new JdbcUrl(parseHostname(authority), parsePort(authority), matcher.group(PATH_GROUP_KEY), parseQueryProperties(matcher.group(QUERY_GROUP_KEY)));
} | @Test
void assertParseIncorrectURL() {
assertThrows(UnrecognizedDatabaseURLException.class, () -> new StandardJdbcUrlParser().parse("jdbc:h2:mem:test;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=false;MODE=MySQL"));
} |
@Override
public void iterateStatus(AlluxioURI path, ListStatusPOptions options,
Consumer<? super URIStatus> action)
throws FileDoesNotExistException, IOException, AlluxioException {
if (options.getRecursive()) {
      // Do not cache the results of a recursive list status,
      // because some results might otherwise be cached multiple times.
      // Avoiding that would need more complicated logic inside the cache,
      // which might not be worth the effort.
mDelegatedFileSystem.iterateStatus(path, options, action);
return;
}
List<URIStatus> cachedStatuses = mMetadataCache.listStatus(path);
if (cachedStatuses == null) {
List<URIStatus> statuses = new ArrayList<>();
mDelegatedFileSystem.iterateStatus(path, options, status -> {
statuses.add(status);
action.accept(status);
});
mMetadataCache.put(path, statuses);
return;
}
cachedStatuses.forEach(action);
} | @Test
public void iterateStatusRecursive() throws Exception {
mFs.iterateStatus(DIR, LIST_STATUS_OPTIONS.toBuilder().setRecursive(true).build(), ignored -> {
});
assertEquals(1, mRpcCountingFs.listStatusRpcCount(DIR));
mFs.iterateStatus(DIR, LIST_STATUS_OPTIONS.toBuilder().setRecursive(true).build(), ignored -> {
});
assertEquals(2, mRpcCountingFs.listStatusRpcCount(DIR));
} |
@Override
public void execute(Context context) {
try (CloseableIterator<DefaultIssue> issues = protoIssueCache.traverse()) {
while (issues.hasNext()) {
DefaultIssue issue = issues.next();
if (shouldUpdateIndexForIssue(issue)) {
changedIssuesRepository.addIssueKey(issue.key());
}
}
}
} | @Test
  public void execute_whenIssueIsCopied_shouldLoadIssue() {
protoIssueCache.newAppender()
.append(newDefaultIssue().setCopied(true))
.close();
underTest.execute(mock(ComputationStep.Context.class));
verify(changedIssuesRepository).addIssueKey("issueKey1");
} |
public static SeaTunnelDataType<?> covertHiveTypeToSeaTunnelType(String name, String hiveType) {
if (hiveType.contains("varchar")) {
return BasicType.STRING_TYPE;
}
if (hiveType.contains("char")) {
throw CommonError.convertToSeaTunnelTypeError(
HiveConstants.CONNECTOR_NAME, PluginType.SOURCE, hiveType, name);
}
if (hiveType.contains("binary")) {
return PrimitiveByteArrayType.INSTANCE;
}
if (hiveType.contains("struct")) {
LinkedHashMap<String, Object> fields = new LinkedHashMap<>();
int start = hiveType.indexOf("<");
int end = hiveType.lastIndexOf(">");
String[] columns = hiveType.substring(start + 1, end).split(",");
for (String column : columns) {
String[] splits = column.split(":");
fields.put(
splits[0], covertHiveTypeToSeaTunnelType(splits[0], splits[1]).toString());
}
return SeaTunnelDataTypeConvertorUtil.deserializeSeaTunnelDataType(
name, JsonUtils.toJsonString(fields));
}
return SeaTunnelDataTypeConvertorUtil.deserializeSeaTunnelDataType(name, hiveType);
} | @Test
void convertHiveStructType() {
SeaTunnelDataType<?> structType =
HiveTypeConvertor.covertHiveTypeToSeaTunnelType(
"structType", "struct<country:String,city:String>");
assertEquals(SqlType.ROW, structType.getSqlType());
SeaTunnelRowType seaTunnelRowType = (SeaTunnelRowType) structType;
assertEquals(BasicType.STRING_TYPE, seaTunnelRowType.getFieldType(0));
        assertEquals(BasicType.STRING_TYPE, seaTunnelRowType.getFieldType(1));
} |
public static Optional<Integer> getPreviousSequence(final List<Integer> sequences, final int currentSequence) {
if (sequences.size() <= 1) {
return Optional.empty();
}
sequences.sort(Integer::compareTo);
Integer index = null;
for (int i = 0; i < sequences.size(); i++) {
if (sequences.get(i) == currentSequence) {
index = i;
break;
}
}
if (null == index) {
return Optional.empty();
}
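        // index 0 means currentSequence is the smallest; its predecessor wraps around to MAX_SEQUENCE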
return Optional.of(index >= 1 ? sequences.get(index - 1) : MAX_SEQUENCE);
} | @Test
void assertGetPreviousSequence() {
List<Integer> sequences = Arrays.asList(2, 3, 1);
Optional<Integer> previousSequence = ConsistencyCheckSequence.getPreviousSequence(sequences, 3);
assertTrue(previousSequence.isPresent());
assertThat(previousSequence.get(), is(2));
previousSequence = ConsistencyCheckSequence.getPreviousSequence(sequences, 2);
assertTrue(previousSequence.isPresent());
assertThat(previousSequence.get(), is(1));
previousSequence = ConsistencyCheckSequence.getPreviousSequence(sequences, 1);
assertTrue(previousSequence.isPresent());
assertThat(previousSequence.get(), is(3));
previousSequence = ConsistencyCheckSequence.getPreviousSequence(sequences, 4);
assertFalse(previousSequence.isPresent());
} |
public static int read(
final UnsafeBuffer termBuffer,
final int termOffset,
final FragmentHandler handler,
final int fragmentsLimit,
final Header header,
final ErrorHandler errorHandler,
final long currentPosition,
final Position subscriberPosition)
{
int fragmentsRead = 0;
int offset = termOffset;
final int capacity = termBuffer.capacity();
header.buffer(termBuffer);
try
{
while (fragmentsRead < fragmentsLimit && offset < capacity)
{
final int frameLength = frameLengthVolatile(termBuffer, offset);
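                // a non-positive frame length means there is no complete frame to read yet (the tail has been reached)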
if (frameLength <= 0)
{
break;
}
final int frameOffset = offset;
offset += BitUtil.align(frameLength, FRAME_ALIGNMENT);
if (!isPaddingFrame(termBuffer, frameOffset))
{
++fragmentsRead;
header.offset(frameOffset);
handler.onFragment(termBuffer, frameOffset + HEADER_LENGTH, frameLength - HEADER_LENGTH, header);
}
}
}
catch (final Exception ex)
{
errorHandler.onError(ex);
}
finally
{
final long newPosition = currentPosition + (offset - termOffset);
if (newPosition > currentPosition)
{
subscriberPosition.setOrdered(newPosition);
}
}
return fragmentsRead;
} | @Test
void shouldNotReadPastTail()
{
final int termOffset = 0;
final int readOutcome = TermReader.read(
termBuffer, termOffset, handler, Integer.MAX_VALUE, header, errorHandler, 0, subscriberPosition);
assertEquals(0, readOutcome);
verify(subscriberPosition, never()).setOrdered(anyLong());
verify(termBuffer).getIntVolatile(0);
verify(handler, never()).onFragment(any(), anyInt(), anyInt(), any());
} |
public void ready() {
sync.releaseShared(UNUSED);
} | @Test
public void testAwaitAfterReady() throws InterruptedException {
StartingGun sg = new StartingGun();
sg.ready();
Thread[] threads = startWaitingThreads(sg);
allThreadsDead(threads);
} |
public boolean transitionToFailed(Throwable throwable)
{
requireNonNull(throwable, "throwable is null");
failureCause.compareAndSet(null, Failures.toFailure(throwable));
boolean failed = state.setIf(FAILED, currentState -> !currentState.isDone());
if (failed) {
log.error(throwable, "Stage execution %s failed", stageExecutionId);
}
else {
log.debug(throwable, "Failure after stage execution %s finished", stageExecutionId);
}
return failed;
} | @Test
public void testFailed()
{
StageExecutionStateMachine stateMachine = createStageStateMachine();
assertTrue(stateMachine.transitionToFailed(FAILED_CAUSE));
assertFinalState(stateMachine, StageExecutionState.FAILED);
} |
@Override
public StoredEntityListPreferences get(final StoredEntityListPreferencesId preferencesId) {
return this.db.findOneById(preferencesId);
} | @Test
public void returnsNullWhenFetchingPreferenceFromEmptyDB() {
final StoredEntityListPreferences storedEntityListPreferences = toTest.get(wrongId);
assertNull(storedEntityListPreferences);
} |
@Override
public void write(final PostgreSQLPacketPayload payload, final Object value) {
throw new UnsupportedSQLOperationException("PostgreSQLInt4ArrayBinaryProtocolValue.write()");
} | @Test
void assertWrite() {
assertThrows(UnsupportedSQLOperationException.class, () -> newInstance().write(new PostgreSQLPacketPayload(null, StandardCharsets.UTF_8), "val"));
} |
public Strength getScore(final String password) {
if(StringUtils.isEmpty(password)) {
return Strength.veryweak;
}
else {
final int score = zxcvbn.measure(password, Collections.singletonList(
PreferencesFactory.get().getProperty("application.name"))).getScore();
switch(score) {
case 0:
return Strength.veryweak;
case 1:
return Strength.weak;
case 2:
return Strength.fair;
case 3:
return Strength.strong;
case 4:
default:
return Strength.verystrong;
}
}
} | @Test
public void testGetScore() {
assertEquals(PasswordStrengthValidator.Strength.veryweak, new PasswordStrengthValidator().getScore(""));
assertEquals(PasswordStrengthValidator.Strength.veryweak, new PasswordStrengthValidator().getScore("Cyberduck"));
assertEquals(PasswordStrengthValidator.Strength.verystrong, new PasswordStrengthValidator().getScore("ahvae7faY3ae"));
} |
@Override
public String id() {
return blob.getGeneratedId();
} | @Test
public void testId() {
String id = "test-id";
when(blob.getGeneratedId()).thenReturn(id);
assertThat(artifact.id()).isEqualTo(id);
} |
@Override
public ResourceAllocationResult tryFulfillRequirements(
Map<JobID, Collection<ResourceRequirement>> missingResources,
TaskManagerResourceInfoProvider taskManagerResourceInfoProvider,
BlockedTaskManagerChecker blockedTaskManagerChecker) {
final ResourceAllocationResult.Builder resultBuilder = ResourceAllocationResult.builder();
final List<InternalResourceInfo> registeredResources =
getAvailableResources(
taskManagerResourceInfoProvider, resultBuilder, blockedTaskManagerChecker);
final List<InternalResourceInfo> pendingResources =
getPendingResources(taskManagerResourceInfoProvider, resultBuilder);
ResourceProfile totalCurrentResources =
Stream.concat(registeredResources.stream(), pendingResources.stream())
.map(internalResourceInfo -> internalResourceInfo.totalProfile)
.reduce(ResourceProfile.ZERO, ResourceProfile::merge);
for (Map.Entry<JobID, Collection<ResourceRequirement>> resourceRequirements :
missingResources.entrySet()) {
final JobID jobId = resourceRequirements.getKey();
final Collection<ResourceRequirement> unfulfilledJobRequirements =
tryFulfillRequirementsForJobWithResources(
jobId, resourceRequirements.getValue(), registeredResources);
if (!unfulfilledJobRequirements.isEmpty()) {
totalCurrentResources =
totalCurrentResources.merge(
tryFulfillRequirementsForJobWithPendingResources(
jobId,
unfulfilledJobRequirements,
pendingResources,
resultBuilder));
}
}
// Unlike tryFulfillRequirementsForJobWithPendingResources, which updates pendingResources
// to the latest state after a new PendingTaskManager is created,
// tryFulFillRequiredResources will not update pendingResources even after new
// PendingTaskManagers are created.
// This is because the pendingResources are no longer needed afterward.
tryFulFillRequiredResources(
registeredResources, pendingResources, totalCurrentResources, resultBuilder);
return resultBuilder.build();
} | @Test
void testFulfillRequirementWithRegisteredResourcesEvenly() {
final TaskManagerInfo taskManager1 =
new TestingTaskManagerInfo(
DEFAULT_SLOT_RESOURCE.multiply(10),
DEFAULT_SLOT_RESOURCE.multiply(10),
DEFAULT_SLOT_RESOURCE);
final TaskManagerInfo taskManager2 =
new TestingTaskManagerInfo(
DEFAULT_SLOT_RESOURCE.multiply(10),
DEFAULT_SLOT_RESOURCE.multiply(10),
DEFAULT_SLOT_RESOURCE);
final TaskManagerInfo taskManager3 =
new TestingTaskManagerInfo(
DEFAULT_SLOT_RESOURCE.multiply(10),
DEFAULT_SLOT_RESOURCE.multiply(10),
DEFAULT_SLOT_RESOURCE);
final JobID jobId = new JobID();
final List<ResourceRequirement> requirements = new ArrayList<>();
final ResourceProfile largeResource = DEFAULT_SLOT_RESOURCE.multiply(5);
final TaskManagerResourceInfoProvider taskManagerResourceInfoProvider =
TestingTaskManagerResourceInfoProvider.newBuilder()
.setRegisteredTaskManagersSupplier(
() -> Arrays.asList(taskManager1, taskManager2, taskManager3))
.build();
requirements.add(ResourceRequirement.create(largeResource, 4));
requirements.add(ResourceRequirement.create(ResourceProfile.UNKNOWN, 2));
final ResourceAllocationResult result =
EVENLY_STRATEGY.tryFulfillRequirements(
Collections.singletonMap(jobId, requirements),
taskManagerResourceInfoProvider,
resourceID -> false);
assertThat(result.getUnfulfillableJobs()).isEmpty();
assertThat(result.getAllocationsOnPendingResources()).isEmpty();
assertThat(result.getPendingTaskManagersToAllocate()).isEmpty();
assertThat(result.getAllocationsOnRegisteredResources().get(jobId).values())
.allSatisfy(
resourceCounter ->
assertThat(resourceCounter.getTotalResourceCount()).isEqualTo(2));
assertThat(result.getAllocationsOnRegisteredResources().get(jobId).values())
.allSatisfy(
resourceCounter ->
assertThat(resourceCounter.containsResource(largeResource))
.isTrue());
} |
public final synchronized List<E> getAllAddOns() {
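    // lazily loads the add-ons on first access; the returned list is unmodifiable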
Logger.d(mTag, "getAllAddOns has %d add on for %s", mAddOns.size(), getClass().getName());
if (mAddOns.size() == 0) {
loadAddOns();
}
    Logger.d(
        mTag, "getAllAddOns will return %d add-ons for %s", mAddOns.size(), getClass().getName());
return unmodifiableList(mAddOns);
} | @Test(expected = IllegalStateException.class)
  public void testMustSupplyNonEmptyBuiltIns() throws Exception {
AddOnsFactory.SingleAddOnsFactory<TestAddOn> singleAddOnsFactory =
new AddOnsFactory.SingleAddOnsFactory<>(
getApplicationContext(),
SharedPrefsHelper.getSharedPreferences(),
"ASK_KT",
"com.anysoftkeyboard.plugin.TEST",
"com.anysoftkeyboard.plugindata.TEST",
"TestAddOns",
"TestAddOn",
"test",
R.xml.test_add_ons_empty,
R.string.test_default_test_addon_id,
true,
true) {
@Override
public void setAddOnEnabled(String addOnId, boolean enabled) {}
@Override
protected TestAddOn createConcreteAddOn(
Context askContext,
Context context,
int apiVersion,
CharSequence prefId,
CharSequence name,
CharSequence description,
boolean isHidden,
int sortIndex,
AttributeSet attrs) {
return null;
}
};
Assert.assertNotNull(singleAddOnsFactory.getAllAddOns());
} |
public static int checkPositiveOrZero(int i, String name) {
if (i < INT_ZERO) {
throw new IllegalArgumentException(name + " : " + i + " (expected: >= 0)");
}
return i;
} | @Test
public void testCheckPositiveOrZeroIntString() {
Exception actualEx = null;
try {
ObjectUtil.checkPositiveOrZero(POS_ONE_INT, NUM_POS_NAME);
} catch (Exception e) {
actualEx = e;
}
assertNull(actualEx, TEST_RESULT_NULLEX_NOK);
actualEx = null;
try {
ObjectUtil.checkPositiveOrZero(ZERO_INT, NUM_ZERO_NAME);
} catch (Exception e) {
actualEx = e;
}
assertNull(actualEx, TEST_RESULT_NULLEX_NOK);
actualEx = null;
try {
ObjectUtil.checkPositiveOrZero(NEG_ONE_INT, NUM_NEG_NAME);
} catch (Exception e) {
actualEx = e;
}
assertNotNull(actualEx, TEST_RESULT_NULLEX_OK);
assertTrue(actualEx instanceof IllegalArgumentException, TEST_RESULT_EXTYPE_NOK);
} |
@Override
public Optional<DevOpsProjectCreator> getDevOpsProjectCreator(DbSession dbSession, Map<String, String> characteristics) {
String githubApiUrl = characteristics.get(DEVOPS_PLATFORM_URL);
String githubRepository = characteristics.get(DEVOPS_PLATFORM_PROJECT_IDENTIFIER);
if (githubApiUrl == null || githubRepository == null) {
return Optional.empty();
}
DevOpsProjectDescriptor devOpsProjectDescriptor = new DevOpsProjectDescriptor(ALM.GITHUB, githubApiUrl, githubRepository, null);
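    // match the configured GitHub ALM settings by URL; the first one with a resolvable installation wins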
return dbClient.almSettingDao().selectByAlm(dbSession, ALM.GITHUB).stream()
.filter(almSettingDto -> devOpsProjectDescriptor.url().equals(almSettingDto.getUrl()))
.map(almSettingDto -> findInstallationIdAndCreateDevOpsProjectCreator(devOpsProjectDescriptor, almSettingDto))
.flatMap(Optional::stream)
.findFirst();
} | @Test
public void getDevOpsProjectCreatorFromImport_shouldInstantiateDevOpsProjectCreator() {
AlmSettingDto mockAlmSettingDto = mockAlmSettingDto(true);
mockAlmPatDto(mockAlmSettingDto);
mockSuccessfulGithubInteraction();
when(devOpsProjectService.create(mockAlmSettingDto, GITHUB_PROJECT_DESCRIPTOR)).thenReturn(DEV_OPS_PROJECT);
DevOpsProjectCreator devOpsProjectCreator = githubProjectCreatorFactory.getDevOpsProjectCreator(mockAlmSettingDto, GITHUB_PROJECT_DESCRIPTOR).orElseThrow();
GithubProjectCreator expectedGithubProjectCreator = getExpectedGithubProjectCreator(false);
assertThat(devOpsProjectCreator).usingRecursiveComparison().isEqualTo(expectedGithubProjectCreator);
} |
@VisibleForTesting
void checkSourceFileField( String sourceFilenameFieldName, SFTPPutData data ) throws KettleStepException {
// Sourcefilename field
sourceFilenameFieldName = environmentSubstitute( sourceFilenameFieldName );
if ( Utils.isEmpty( sourceFilenameFieldName ) ) {
// source filename field is missing
throw new KettleStepException( BaseMessages.getString( PKG, "SFTPPut.Error.SourceFileNameFieldMissing" ) );
}
data.indexOfSourceFileFieldName = getInputRowMeta().indexOfValue( sourceFilenameFieldName );
if ( data.indexOfSourceFileFieldName == -1 ) {
      // source filename field not found in the input row
throw new KettleStepException( BaseMessages.getString(
PKG, "SFTPPut.Error.CanNotFindField", sourceFilenameFieldName ) );
}
} | @Test( expected = KettleStepException.class )
public void checkSourceFileField_NameIsSet_NotFound() throws Exception {
step.setInputRowMeta( new RowMeta() );
step.checkSourceFileField( "sourceFile", new SFTPPutData() );
} |
public static MacAddress valueOf(final String address) {
if (!isValid(address)) {
throw new IllegalArgumentException(
"Specified MAC Address must contain 12 hex digits"
+ " separated pairwise by :'s.");
}
final String[] elements = address.split(":");
final byte[] addressInBytes = new byte[MacAddress.MAC_ADDRESS_LENGTH];
for (int i = 0; i < MacAddress.MAC_ADDRESS_LENGTH; i++) {
final String element = elements[i];
addressInBytes[i] = (byte) Integer.parseInt(element, 16);
}
return new MacAddress(addressInBytes);
} | @Test(expected = IllegalArgumentException.class)
public void testValueOfInvalidByte() throws Exception {
MacAddress.valueOf(INVALID_BYTE);
} |
@Override
public Sensor addLatencyRateTotalSensor(final String scopeName,
final String entityName,
final String operationName,
final Sensor.RecordingLevel recordingLevel,
final String... tags) {
final String threadId = Thread.currentThread().getName();
final String group = groupNameFromScope(scopeName);
final Map<String, String> tagMap = customizedTags(threadId, scopeName, entityName, tags);
final Sensor sensor =
customInvocationRateAndCountSensor(threadId, group, entityName, operationName, tagMap, recordingLevel);
addAvgAndMaxToSensor(
sensor,
group,
tagMap,
operationName + LATENCY_SUFFIX,
AVG_LATENCY_DESCRIPTION + operationName,
MAX_LATENCY_DESCRIPTION + operationName
);
return sensor;
} | @Test
public void shouldThrowIfLatencyRateTotalSensorIsAddedWithOddTags() {
final IllegalArgumentException exception = assertThrows(
IllegalArgumentException.class,
() -> streamsMetrics.addLatencyRateTotalSensor(
SCOPE_NAME,
ENTITY_NAME,
OPERATION_NAME,
RecordingLevel.DEBUG,
"bad-tag")
);
assertThat(exception.getMessage(), is("Tags needs to be specified in key-value pairs"));
} |
@Override
public Set<String> findClassNames(String pluginId) {
Set<String> classNames = getEntries().get(pluginId);
if (classNames == null) {
return Collections.emptySet();
}
return classNames;
} | @Test
public void testFindClassNames() {
ExtensionFinder instance = new AbstractExtensionFinder(pluginManager) {
@Override
public Map<String, Set<String>> readPluginsStorages() {
Map<String, Set<String>> entries = new LinkedHashMap<>();
Set<String> bucket = new HashSet<>();
bucket.add("org.pf4j.plugin.TestExtension");
entries.put("plugin1", bucket);
return entries;
}
@Override
public Map<String, Set<String>> readClasspathStorages() {
Map<String, Set<String>> entries = new LinkedHashMap<>();
Set<String> bucket = new HashSet<>();
bucket.add("org.pf4j.plugin.TestExtension");
bucket.add("org.pf4j.plugin.FailTestExtension");
entries.put(null, bucket);
return entries;
}
};
Set<String> result = instance.findClassNames(null);
assertEquals(2, result.size());
result = instance.findClassNames("plugin1");
assertEquals(1, result.size());
} |
public boolean containsDataSource() {
return !resourceMetaData.getStorageUnits().isEmpty();
} | @Test
void assertNotContainsDataSource() {
ResourceMetaData resourceMetaData = new ResourceMetaData(Collections.emptyMap());
RuleMetaData ruleMetaData = new RuleMetaData(Collections.singleton(mock(ShardingSphereRule.class)));
assertFalse(new ShardingSphereDatabase("foo_db", mock(DatabaseType.class), resourceMetaData, ruleMetaData, Collections.emptyMap()).containsDataSource());
} |
@Override
public ObjectNode encode(Criterion criterion, CodecContext context) {
EncodeCriterionCodecHelper encoder = new EncodeCriterionCodecHelper(criterion, context);
return encoder.encode();
} | @Test
public void matchUdpDstMaskedTest() {
Criterion criterion = Criteria.matchUdpDstMasked(tpPort, tpPortMask);
ObjectNode result = criterionCodec.encode(criterion, context);
assertThat(result, matchesCriterion(criterion));
} |
@PostConstruct
public void applyPluginMetadata() {
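    // flag each configuration property as secure (or not) according to the plugin's task metadata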
if (taskPreference() != null) {
for (ConfigurationProperty configurationProperty : configuration) {
if (isValidPluginConfiguration(configurationProperty.getConfigKeyName())) {
Boolean isSecure = pluginConfigurationFor(configurationProperty.getConfigKeyName()).getOption(Property.SECURE);
configurationProperty.handleSecureValueConfiguration(isSecure);
}
}
}
} | @Test
    public void postConstructShouldDoNothingForAnInvalidConfigurationProperty() throws Exception {
TaskPreference taskPreference = mock(TaskPreference.class);
ConfigurationProperty configurationProperty = ConfigurationPropertyMother.create("KEY1");
Configuration configuration = new Configuration(configurationProperty);
PluggableTaskConfigStore.store().setPreferenceFor("abc.def", taskPreference);
TaskConfig taskConfig = new TaskConfig();
when(taskPreference.getConfig()).thenReturn(taskConfig);
PluggableTask task = new PluggableTask(new PluginConfiguration("abc.def", "1"), configuration);
assertFalse(configurationProperty.isSecure());
task.applyPluginMetadata();
assertFalse(configurationProperty.isSecure());
} |
@Override
public void execute(SensorContext context) {
for (InputFile file : context.fileSystem().inputFiles(context.fileSystem().predicates().hasLanguages(Xoo.KEY))) {
processSignificantCodeFile(file, context);
}
} | @Test
public void testNoExceptionIfNoFileWithOffsets() {
context.fileSystem().add(inputFile);
sensor.execute(context);
} |
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof MultiValuedTimestamp)) {
return false;
}
MultiValuedTimestamp that = (MultiValuedTimestamp) obj;
return Objects.equals(this.value1, that.value1) &&
Objects.equals(this.value2, that.value2);
} | @Test
public void testEquals() {
new EqualsTester()
.addEqualityGroup(stats1, stats1)
.addEqualityGroup(stats2)
.testEquals();
} |
public DrlxParseResult drlxParse(Class<?> patternType, String bindingId, String expression) {
return drlxParse(patternType, bindingId, expression, false);
} | @Test
public void testNullSafeExpressionsWithIn() {
SingleDrlxParseSuccess result = (SingleDrlxParseSuccess) parser.drlxParse(Person.class, "$p", "address!.city in (\"Milan\", \"Tokyo\")");
List<Expression> nullSafeExpressions = result.getNullSafeExpressions();
assertThat(nullSafeExpressions).hasSize(1);
assertThat(nullSafeExpressions.get(0).toString()).isEqualTo("_this.getAddress() != null");
// null check is done after the first constraint
assertThat(result.getExpr().toString()).isEqualTo("D.eval(org.drools.model.operators.InOperator.INSTANCE, _this.getAddress().getCity(), \"Milan\", \"Tokyo\")");
} |
@Override
public void reconcileExecutionDeployments(
ResourceID taskExecutorHost,
ExecutionDeploymentReport executionDeploymentReport,
Map<ExecutionAttemptID, ExecutionDeploymentState> expectedDeployedExecutions) {
final Set<ExecutionAttemptID> unknownExecutions =
new HashSet<>(executionDeploymentReport.getExecutions());
final Set<ExecutionAttemptID> missingExecutions = new HashSet<>();
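        // executions reported by the task executor but not expected here are "unknown";
        // expected executions (past the PENDING state) that were not reported are "missing"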
for (Map.Entry<ExecutionAttemptID, ExecutionDeploymentState> execution :
expectedDeployedExecutions.entrySet()) {
boolean deployed = unknownExecutions.remove(execution.getKey());
if (!deployed && execution.getValue() != ExecutionDeploymentState.PENDING) {
missingExecutions.add(execution.getKey());
}
}
if (!unknownExecutions.isEmpty()) {
handler.onUnknownDeploymentsOf(unknownExecutions, taskExecutorHost);
}
if (!missingExecutions.isEmpty()) {
handler.onMissingDeploymentsOf(missingExecutions, taskExecutorHost);
}
} | @Test
void testPendingDeployments() {
TestingExecutionDeploymentReconciliationHandler handler =
new TestingExecutionDeploymentReconciliationHandler();
DefaultExecutionDeploymentReconciler reconciler =
new DefaultExecutionDeploymentReconciler(handler);
ResourceID resourceId = generate();
ExecutionAttemptID matchingId = createExecutionAttemptId();
ExecutionAttemptID unknownId = createExecutionAttemptId();
ExecutionAttemptID missingId = createExecutionAttemptId();
reconciler.reconcileExecutionDeployments(
resourceId,
new ExecutionDeploymentReport(new HashSet<>(Arrays.asList(matchingId, unknownId))),
Stream.of(matchingId, missingId)
.collect(Collectors.toMap(x -> x, x -> ExecutionDeploymentState.PENDING)));
assertThat(handler.getMissingExecutions()).isEmpty();
assertThat(handler.getUnknownExecutions()).contains(unknownId);
} |
@VisibleForTesting
static RowGroup createRowGroup(int groupId, long rowsInStripe, long rowsInRowGroup, Map<StreamId, List<RowGroupIndex>> columnIndexes, Map<StreamId, ValueInputStream<?>> valueStreams, Map<StreamId, StreamCheckpoint> checkpoints)
{
long totalRowGroupBytes = columnIndexes
.values()
.stream()
.mapToLong(e -> e.get(groupId)
.getColumnStatistics()
.getTotalValueSizeInBytes())
.sum();
long rowOffset = multiplyExact(groupId, rowsInRowGroup);
int rowCount = toIntExact(Math.min(rowsInStripe - rowOffset, rowsInRowGroup));
ImmutableMap.Builder<StreamId, InputStreamSource<?>> builder = ImmutableMap.builder();
for (Entry<StreamId, StreamCheckpoint> entry : checkpoints.entrySet()) {
StreamId streamId = entry.getKey();
StreamCheckpoint checkpoint = entry.getValue();
// skip streams without data
ValueInputStream<?> valueStream = valueStreams.get(streamId);
if (valueStream == null) {
continue;
}
builder.put(streamId, createCheckpointStreamSource(valueStream, checkpoint));
}
InputStreamSources rowGroupStreams = new InputStreamSources(builder.build());
return new RowGroup(groupId, rowOffset, rowCount, totalRowGroupBytes, rowGroupStreams);
} | @Test(expectedExceptions = ArithmeticException.class)
public void testRowGroupOverflow()
{
StripeReader.createRowGroup(Integer.MAX_VALUE, Long.MAX_VALUE, Long.MAX_VALUE, ImmutableMap.of(), ImmutableMap.of(), ImmutableMap.of());
} |
@Override
public void handlerPlugin(final PluginData pluginData) {
if (Objects.nonNull(pluginData) && Boolean.TRUE.equals(pluginData.getEnabled())) {
//init redis
RedisConfigProperties redisConfigProperties = GsonUtils.getInstance().fromJson(pluginData.getConfig(), RedisConfigProperties.class);
//spring data redisTemplate
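            // (re)create the shared ReactiveRedisTemplate only when it is absent or the redis config changed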
if (Objects.isNull(Singleton.INST.get(ReactiveRedisTemplate.class))
|| Objects.isNull(Singleton.INST.get(RedisConfigProperties.class))
|| !redisConfigProperties.equals(Singleton.INST.get(RedisConfigProperties.class))) {
final RedisConnectionFactory redisConnectionFactory = new RedisConnectionFactory(redisConfigProperties);
ReactiveRedisTemplate<String, String> reactiveRedisTemplate = new ShenyuReactiveRedisTemplate<>(
redisConnectionFactory.getLettuceConnectionFactory(),
ShenyuRedisSerializationContext.stringSerializationContext());
Singleton.INST.single(ReactiveRedisTemplate.class, reactiveRedisTemplate);
Singleton.INST.single(RedisConfigProperties.class, redisConfigProperties);
}
}
} | @Test
public void handlerPluginTest() {
RedisConfigProperties redisConfigProperties = generateRedisConfig(generateDefaultUrl());
PluginData pluginData = new PluginData();
pluginData.setEnabled(true);
pluginData.setConfig(GsonUtils.getInstance().toJson(redisConfigProperties));
new RateLimiterPluginDataHandler().handlerPlugin(pluginData);
assertEquals(redisConfigProperties.getUrl(), Singleton.INST.get(RedisConfigProperties.class).getUrl());
assertNotNull(Singleton.INST.get(ReactiveRedisTemplate.class));
} |
public Map<String, Map<String, String>> getConfigAsMap() {
Map<String, Map<String, String>> configMap = new HashMap<>();
for (ConfigurationProperty property : configuration) {
Map<String, String> mapValue = new HashMap<>();
mapValue.put(VALUE_KEY, property.getValue());
if (!property.errors().isEmpty()) {
mapValue.put(ERRORS_KEY, StringUtils.join(property.errors().getAll(), ", "));
}
configMap.put(property.getConfigKeyName(), mapValue);
}
return configMap;
} | @Test
void shouldGetConfigAsMap() throws Exception {
PluginConfiguration pluginConfiguration = new PluginConfiguration("test-plugin-id", "13.4");
GoCipher cipher = new GoCipher();
List<String> keys = List.of("Avengers 1", "Avengers 2", "Avengers 3", "Avengers 4");
List<String> values = List.of("Iron man", "Hulk", "Thor", "Captain America");
Configuration configuration = new Configuration(
new ConfigurationProperty(new ConfigurationKey(keys.get(0)), new ConfigurationValue(values.get(0))),
new ConfigurationProperty(new ConfigurationKey(keys.get(1)), new ConfigurationValue(values.get(1))),
new ConfigurationProperty(new ConfigurationKey(keys.get(2)), new ConfigurationValue(values.get(2))),
new ConfigurationProperty(new ConfigurationKey(keys.get(3)), new ConfigurationValue(values.get(3)),
new EncryptedConfigurationValue(cipher.encrypt(values.get(3))), cipher));
SCM scm = new SCM("scm-id", pluginConfiguration, configuration);
Map<String, Map<String, String>> configMap = scm.getConfigAsMap();
assertThat(configMap.keySet().size()).isEqualTo(keys.size());
assertThat(configMap.values().size()).isEqualTo(values.size());
assertThat(configMap.keySet().containsAll(keys)).isTrue();
for (int i = 0; i < keys.size(); i++) {
assertThat(configMap.get(keys.get(i)).get(SCM.VALUE_KEY)).isEqualTo(values.get(i));
}
} |
public static Metric metric(String name) {
return MetricsImpl.metric(name, Unit.COUNT);
} | @Test
public void metricsDisabled() {
Long[] input = {0L, 1L, 2L, 3L, 4L};
pipeline.readFrom(TestSources.items(input))
.map(l -> {
Metrics.metric("mapped").increment();
Metrics.metric("total", Unit.COUNT).set(input.length);
return l;
})
.writeTo(Sinks.noop());
Job job = instance.getJet().newJob(pipeline, new JobConfig().setMetricsEnabled(false));
job.join();
JobMetrics metrics = job.getMetrics();
assertTrue(metrics.get("mapped").isEmpty());
assertTrue(metrics.get("total").isEmpty());
} |
@Override
protected String selectorHandler(final MetaDataRegisterDTO metaDataDTO) {
return "";
} | @Test
public void testSelectorHandler() {
MetaDataRegisterDTO metaDataRegisterDTO = MetaDataRegisterDTO.builder().build();
assertEquals(StringUtils.EMPTY, shenyuClientRegisterDivideService.selectorHandler(metaDataRegisterDTO));
} |
@Override
public Object handle(ProceedingJoinPoint proceedingJoinPoint, RateLimiter rateLimiter,
String methodName) throws Throwable {
RateLimiterOperator<?> rateLimiterOperator = RateLimiterOperator.of(rateLimiter);
Object returnValue = proceedingJoinPoint.proceed();
return executeRxJava2Aspect(rateLimiterOperator, returnValue);
} | @Test
    public void testRxJava2Types() throws Throwable {
RateLimiter rateLimiter = RateLimiter.ofDefaults("test");
when(proceedingJoinPoint.proceed()).thenReturn(Single.just("Test"));
assertThat(
rxJava2RateLimiterAspectExt.handle(proceedingJoinPoint, rateLimiter, "testMethod"))
.isNotNull();
when(proceedingJoinPoint.proceed()).thenReturn(Flowable.just("Test"));
assertThat(
rxJava2RateLimiterAspectExt.handle(proceedingJoinPoint, rateLimiter, "testMethod"))
.isNotNull();
when(proceedingJoinPoint.proceed()).thenReturn(Completable.complete());
assertThat(
rxJava2RateLimiterAspectExt.handle(proceedingJoinPoint, rateLimiter, "testMethod"))
.isNotNull();
when(proceedingJoinPoint.proceed()).thenReturn(Maybe.just("Test"));
assertThat(
rxJava2RateLimiterAspectExt.handle(proceedingJoinPoint, rateLimiter, "testMethod"))
.isNotNull();
when(proceedingJoinPoint.proceed()).thenReturn(Observable.just("Test"));
assertThat(
rxJava2RateLimiterAspectExt.handle(proceedingJoinPoint, rateLimiter, "testMethod"))
.isNotNull();
} |
public static Path getStagingDir(Cluster cluster, Configuration conf)
throws IOException, InterruptedException {
UserGroupInformation user = UserGroupInformation.getLoginUser();
return getStagingDir(cluster, conf, user);
} | @Test(expected = IOException.class)
public void testGetStagingWhenFileOwnerNameAndCurrentUserNameDoesNotMatch()
throws IOException, InterruptedException {
Cluster cluster = mock(Cluster.class);
Configuration conf = new Configuration();
String stagingDirOwner = "someuser";
Path stagingPath = mock(Path.class);
UserGroupInformation user = UserGroupInformation
.createUserForTesting(USER_1, GROUP_NAMES);
assertEquals(USER_1, user.getUserName());
FileSystem fs = new FileSystemTestHelper.MockFileSystem();
FileStatus fileStatus = new FileStatus(1, true, 1, 1, 100L, 100L,
FsPermission.getDefault(), stagingDirOwner, stagingDirOwner,
stagingPath);
when(stagingPath.getFileSystem(conf)).thenReturn(fs);
when(fs.getFileStatus(stagingPath)).thenReturn(fileStatus);
when(cluster.getStagingAreaDir()).thenReturn(stagingPath);
assertEquals(stagingPath,
JobSubmissionFiles.getStagingDir(cluster, conf, user));
} |
public synchronized <U> Versioned<U> map(Function<V, U> transformer) {
return new Versioned<>(value != null ? transformer.apply(value) : null, version, creationTime);
} | @Test
public void testMap() {
Versioned<String> tempObj = stats1.map(VersionedTest::transform);
assertThat(tempObj.value(), is("1"));
} |
@Override
public void initSession(T param) throws ExtensionException {
// remove session before init
session.remove();
IBusiness<T> matchedBusiness = null;
List<String> matchedBusinessCodes = new ArrayList<>();
for (IBusiness<T> business : businessManager.listAllBusinesses()) {
if (business.match(param)) {
if (matchedBusiness == null) {
matchedBusiness = business; // first matched
}
matchedBusinessCodes.add(business.code());
}
}
if (matchBusinessStrict) {
if (matchedBusiness == null) {
throw new ExtensionException("business not found");
}
if (matchedBusinessCodes.size() > 1) {
throw new ExtensionException("multiple business found, matched business codes: [" + String.join(", ", matchedBusinessCodes) + "]");
}
}
if (matchedBusiness != null) {
if (enableLogger) {
logger.info("[ExtensionFactory] init session match business: " + matchedBusiness.code() + " priority: " + matchedBusiness.priority());
}
session.setMatchedCode(matchedBusiness.code(), matchedBusiness.priority());
for (UsedAbility usedAbility : matchedBusiness.usedAbilities()) {
IAbility<T> ability = abilityManager.getAbility(usedAbility.code());
if (ability == null) {
throw new ExtensionException("business " + matchedBusiness.code() + " used ability " + usedAbility.code() + " not found");
}
if (ability.match(param)) {
if (enableLogger) {
logger.info("[ExtensionFactory] init session match ability: " + usedAbility.code() + " priority: " + usedAbility.priority());
}
session.setMatchedCode(usedAbility.code(), usedAbility.priority());
}
}
}
IAbility<T> defaultAbility = abilityManager.getAbility(BaseDefaultAbility.DEFAULT_CODE);
session.setMatchedCode(defaultAbility.code(), defaultAbilityPriority);
if (enableLogger) {
logger.info("[ExtensionFactory] init session match default ability: " + defaultAbility.code() + " priority: " + defaultAbilityPriority);
}
} | @Test
public void testInitSession() throws Exception {
ExtensionException e;
DefaultExtContext<Object> context = new DefaultExtContext<>(false, true);
context.registerBusiness(new BusinessX());
context.registerBusiness(new BusinessY());
context.registerBusiness(new BusinessZ());
context.registerBusiness(new BusinessZZ());
e = assertThrows(ExtensionException.class, () -> context.initSession("UnknownBiz"));
assertEquals("business not found", e.getMessage());
e = assertThrows(ExtensionException.class, () -> context.initSession("BusinessZZ"));
assertEquals("multiple business found, matched business codes: [BusinessZ, BusinessZZ]", e.getMessage());
e = assertThrows(ExtensionException.class, () -> context.initSession("BusinessX"));
assertEquals("ability AbilityM not found", e.getMessage());
context.registerAbility(new AbilityM());
e = assertThrows(ExtensionException.class, () -> context.initSession("BusinessX"));
assertEquals("ability ability.application.default not found", e.getMessage());
context.registerAbility(new ExtDefaultAbility());
context.initSession("BusinessX");
context.validateContext();
} |
public List<String> tokenize(String text)
{
List<String> tokens = new ArrayList<>();
Matcher regexMatcher = regexExpression.matcher(text);
int lastIndexOfPrevMatch = 0;
while (regexMatcher.find(lastIndexOfPrevMatch)) // this is where the magic happens:
// the regexp is used to find a matching pattern for substitution
{
int beginIndexOfNextMatch = regexMatcher.start();
String prevToken = text.substring(lastIndexOfPrevMatch, beginIndexOfNextMatch);
if (!prevToken.isEmpty())
{
tokens.add(prevToken);
}
String currentMatch = regexMatcher.group();
tokens.add(currentMatch);
lastIndexOfPrevMatch = regexMatcher.end();
if (lastIndexOfPrevMatch < text.length() && text.charAt(lastIndexOfPrevMatch) != '_')
{
         // because the match end is sometimes positioned after the "_",
         // but it should be positioned before the "_"
--lastIndexOfPrevMatch;
}
}
String tail = text.substring(lastIndexOfPrevMatch);
if (!tail.isEmpty())
{
tokens.add(tail);
}
return tokens;
} | @Test
void testTokenize_happyPath_7()
{
// given
CompoundCharacterTokenizer tokenizer = new CompoundCharacterTokenizer(
new HashSet<>(Arrays.asList("_100_101_", "_102_", "_103_104_")));
String text = "_100_101_102_103_104_";
// when
List<String> tokens = tokenizer.tokenize(text);
// then
assertEquals(Arrays.asList("_100_101_", "_102_", "_103_104_"), tokens);
} |
Flux<Post> findAll() {
return Flux.fromIterable(data.values());
} | @Test
public void testGetAllPosts() {
StepVerifier.create(posts.findAll())
.consumeNextWith(p -> assertTrue(p.getTitle().equals("post one")))
.consumeNextWith(p -> assertTrue(p.getTitle().equals("post two")))
.expectComplete()
.verify();
} |
public void runExtractor(Message msg) {
try(final Timer.Context ignored = completeTimer.time()) {
final String field;
try (final Timer.Context ignored2 = conditionTimer.time()) {
// We can only work on Strings.
if (!(msg.getField(sourceField) instanceof String)) {
conditionMissesCounter.inc();
return;
}
field = (String) msg.getField(sourceField);
// Decide if to extract at all.
if (conditionType.equals(ConditionType.STRING)) {
if (field.contains(conditionValue)) {
conditionHitsCounter.inc();
} else {
conditionMissesCounter.inc();
return;
}
} else if (conditionType.equals(ConditionType.REGEX)) {
if (regexConditionPattern.matcher(field).find()) {
conditionHitsCounter.inc();
} else {
conditionMissesCounter.inc();
return;
}
}
}
try (final Timer.Context ignored2 = executionTimer.time()) {
Result[] results;
try {
results = run(field);
} catch (ExtractorException e) {
final String error = "Could not apply extractor <" + getTitle() + " (" + getId() + ")>";
msg.addProcessingError(new Message.ProcessingError(
ProcessingFailureCause.ExtractorException, error, ExceptionUtils.getRootCauseMessage(e)));
return;
}
if (results == null || results.length == 0 || Arrays.stream(results).anyMatch(result -> result.getValue() == null)) {
return;
} else if (results.length == 1 && results[0].target == null) {
// results[0].target is null if this extractor cannot produce multiple fields; use targetField in that case
msg.addField(targetField, results[0].getValue());
} else {
for (final Result result : results) {
msg.addField(result.getTarget(), result.getValue());
}
}
// Remove original from message?
if (cursorStrategy.equals(CursorStrategy.CUT) && !targetField.equals(sourceField) && !Message.RESERVED_FIELDS.contains(sourceField) && results[0].beginIndex != -1) {
final StringBuilder sb = new StringBuilder(field);
final List<Result> reverseList = Arrays.stream(results)
.sorted(Comparator.<Result>comparingInt(result -> result.endIndex).reversed())
.collect(Collectors.toList());
// delete matches in reverse order so that the remaining indices stay valid
for (final Result result : reverseList) {
sb.delete(result.getBeginIndex(), result.getEndIndex());
}
final String builtString = sb.toString();
final String finalResult = builtString.trim().isEmpty() ? "fullyCutByExtractor" : builtString;
msg.removeField(sourceField);
// TODO don't add an empty field back, or rather don't add fullyCutByExtractor
msg.addField(sourceField, finalResult);
}
runConverters(msg);
}
}
} | @Test
public void testWithEmptyResultArray() throws Exception {
final TestExtractor extractor = new TestExtractor.Builder()
.callback(new Callable<Result[]>() {
@Override
public Result[] call() throws Exception {
return new Result[0];
}
})
.build();
final Message msg = createMessage("the hello");
extractor.runExtractor(msg);
assertThat(msg.hasField("target")).isFalse();
} |
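A companion sketch for the null-value guard in runExtractor, assuming Result exposes the (value, beginIndex, endIndex) constructor that TestExtractor callbacks use elsewhere in this suite; the test name is illustrative.
@Test
public void testWithNullValueResult() throws Exception {
    final TestExtractor extractor = new TestExtractor.Builder()
            .callback(new Callable<Result[]>() {
                @Override
                public Result[] call() throws Exception {
                    // a null value should make runExtractor bail out before adding any fields
                    return new Result[]{new Result(null, -1, -1)};
                }
            })
            .build();
    final Message msg = createMessage("the hello");
    extractor.runExtractor(msg);
    assertThat(msg.hasField("target")).isFalse();
}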
public List<ResContainer> makeResourcesXml(JadxArgs args) {
Map<String, ICodeWriter> contMap = new HashMap<>();
for (ResourceEntry ri : resStorage.getResources()) {
if (SKIP_RES_TYPES.contains(ri.getTypeName())) {
continue;
}
String fn = getFileName(ri);
ICodeWriter cw = contMap.get(fn);
if (cw == null) {
cw = new SimpleCodeWriter(args);
cw.add("<?xml version=\"1.0\" encoding=\"utf-8\"?>");
cw.startLine("<resources>");
cw.incIndent();
contMap.put(fn, cw);
}
addValue(cw, ri);
}
List<ResContainer> files = new ArrayList<>(contMap.size());
for (Map.Entry<String, ICodeWriter> entry : contMap.entrySet()) {
String fileName = entry.getKey();
ICodeWriter content = entry.getValue();
content.decIndent();
content.startLine("</resources>");
ICodeInfo codeInfo = content.finish();
files.add(ResContainer.textResource(fileName, codeInfo));
}
Collections.sort(files);
return files;
} | @Test
void testString() {
ResourceStorage resStorage = new ResourceStorage();
ResourceEntry re = new ResourceEntry(2130903103, "jadx.gui.app", "string", "app_name", "");
re.setSimpleValue(new RawValue(3, 0));
re.setNamedValues(Lists.list());
resStorage.add(re);
BinaryXMLStrings strings = new BinaryXMLStrings();
strings.put(0, "Jadx Decompiler App");
ValuesParser vp = new ValuesParser(strings, resStorage.getResourcesNames());
ResXmlGen resXmlGen = new ResXmlGen(resStorage, vp);
List<ResContainer> files = resXmlGen.makeResourcesXml(args);
assertThat(files).hasSize(1);
assertThat(files.get(0).getName()).isEqualTo("res/values/strings.xml");
String input = files.get(0).getText().toString();
assertThat(input).isEqualTo("<?xml version=\"1.0\" encoding=\"utf-8\"?>\n"
+ "<resources>\n"
+ " <string name=\"app_name\">Jadx Decompiler App</string>\n"
+ "</resources>");
} |
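A two-entry variant of the test above, hedged: the second resource ID and the app_ver name are made up, and contains() is used instead of a full-document comparison to avoid assuming the entry order in the generated file.
@Test
void testTwoStrings() {
    ResourceStorage resStorage = new ResourceStorage();
    ResourceEntry re1 = new ResourceEntry(2130903103, "jadx.gui.app", "string", "app_name", "");
    re1.setSimpleValue(new RawValue(3, 0));
    re1.setNamedValues(Lists.list());
    resStorage.add(re1);
    ResourceEntry re2 = new ResourceEntry(2130903104, "jadx.gui.app", "string", "app_ver", ""); // hypothetical entry
    re2.setSimpleValue(new RawValue(3, 1));
    re2.setNamedValues(Lists.list());
    resStorage.add(re2);
    BinaryXMLStrings strings = new BinaryXMLStrings();
    strings.put(0, "Jadx Decompiler App");
    strings.put(1, "1.0");
    ValuesParser vp = new ValuesParser(strings, resStorage.getResourcesNames());
    ResXmlGen resXmlGen = new ResXmlGen(resStorage, vp);
    List<ResContainer> files = resXmlGen.makeResourcesXml(args);
    // both string entries should land in the same values file
    assertThat(files).hasSize(1);
    String input = files.get(0).getText().toString();
    assertThat(input).contains("<string name=\"app_name\">Jadx Decompiler App</string>");
    assertThat(input).contains("<string name=\"app_ver\">1.0</string>");
}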
@Override
public RecoverableFsDataOutputStream open(Path path) throws IOException {
LOGGER.trace("Opening output stream for path {}", path);
Preconditions.checkNotNull(path);
GSBlobIdentifier finalBlobIdentifier = BlobUtils.parseUri(path.toUri());
return new GSRecoverableFsDataOutputStream(storage, options, finalBlobIdentifier);
} | @Test
public void testOpen() throws IOException {
Path path = new Path("gs://foo/bar");
GSRecoverableFsDataOutputStream stream =
(GSRecoverableFsDataOutputStream) writer.open(path);
assertNotNull(stream);
} |
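A negative-path sketch, assuming a JUnit assertThrows is available in this suite; the Preconditions.checkNotNull in open should reject a null path before any blob parsing happens.
@Test
public void testOpenWithNullPath() {
    assertThrows(NullPointerException.class, () -> writer.open(null));
}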
@Override
public Duration convert(String source) {
try {
if (ISO8601.matcher(source).matches()) {
return Duration.parse(source);
}
Matcher matcher = SIMPLE.matcher(source);
Assert.state(matcher.matches(), "'" + source + "' is not a valid duration");
long amount = Long.parseLong(matcher.group(1));
ChronoUnit unit = getUnit(matcher.group(2));
return Duration.of(amount, unit);
} catch (Exception ex) {
throw new IllegalStateException("'" + source + "' is not a valid duration", ex);
}
} | @Test
public void convertWhenSimpleMillisShouldReturnDuration() {
assertThat(convert("10ms")).isEqualTo(Duration.ofMillis(10));
assertThat(convert("10MS")).isEqualTo(Duration.ofMillis(10));
assertThat(convert("+10ms")).isEqualTo(Duration.ofMillis(10));
assertThat(convert("-10ms")).isEqualTo(Duration.ofMillis(-10));
} |
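Two hedged companion sketches: the first assumes the ISO8601 pattern accepts standard signed PT… forms (so Duration.parse handles them), the second that malformed input trips the Assert.state check; the method names are illustrative.
@Test
public void convertWhenIso8601ShouldReturnDuration() {
    assertThat(convert("PT10S")).isEqualTo(Duration.ofSeconds(10));
    assertThat(convert("-PT10S")).isEqualTo(Duration.ofSeconds(10).negated());
}

@Test
public void convertWhenBadFormatShouldThrowIllegalStateException() {
    // "abc" is neither ISO-8601 nor a recognized simple unit suffix
    assertThatIllegalStateException().isThrownBy(() -> convert("10abc"))
            .withMessageContaining("'10abc' is not a valid duration");
}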
public static boolean isCompositeType(LogicalType logicalType) {
if (logicalType instanceof DistinctType) {
return isCompositeType(((DistinctType) logicalType).getSourceType());
}
LogicalTypeRoot typeRoot = logicalType.getTypeRoot();
return typeRoot == STRUCTURED_TYPE || typeRoot == ROW;
} | @Test
void testIsCompositeTypeLegacyCompositeType() {
DataType dataType =
TypeConversions.fromLegacyInfoToDataType(new RowTypeInfo(Types.STRING, Types.INT));
assertThat(LogicalTypeChecks.isCompositeType(dataType.getLogicalType())).isTrue();
} |
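Two further sketches, assuming RowType, IntType, and VarCharType from the same logical-type package are on the classpath; they exercise the ROW branch directly and the negative case.
@Test
void testIsCompositeTypeRowType() {
    // ROW is one of the two composite type roots
    assertThat(LogicalTypeChecks.isCompositeType(RowType.of(new IntType(), new VarCharType()))).isTrue();
}

@Test
void testIsCompositeTypeAtomicType() {
    // a plain atomic type is not composite
    assertThat(LogicalTypeChecks.isCompositeType(new IntType())).isFalse();
}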