focal_method | test_case |
---|---|
public void merge(RegisterSet that) {
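// Note: each 32-bit word packs several 5-bit registers (the 0x1f mask selects one);
// merging keeps the per-register maximum, the union rule for HyperLogLog-style register sets.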
for (int bucket = 0; bucket < M.length; bucket++) {
int word = 0;
for (int j = 0; j < LOG2_BITS_PER_WORD; j++) {
int mask = 0x1f << (REGISTER_SIZE * j);
int thisVal = (this.M[bucket] & mask);
int thatVal = (that.M[bucket] & mask);
word |= (thisVal < thatVal) ? thatVal : thisVal;
}
this.M[bucket] = word;
}
} | @Test
public void testMerge() {
Random rand = new Random(2);
int count = 32;
RegisterSet rs = new RegisterSet(count);
RegisterSet[] rss = new RegisterSet[5];
for (int i = 0; i < rss.length; i++) {
rss[i] = new RegisterSet(count);
for (int pos = 0; pos < rs.count; pos++) {
int val = rand.nextInt(10);
rs.updateIfGreater(pos, val);
rss[i].set(pos, val);
}
}
RegisterSet merged = new RegisterSet(count);
for (int i = 0; i < rss.length; i++) {
merged.merge(rss[i]);
}
for (int pos = 0; pos < rs.count; pos++) {
assertEquals(rs.get(pos), merged.get(pos));
}
} |
@PostMapping
@Secured(resource = AuthConstants.CONSOLE_RESOURCE_NAME_PREFIX + "namespaces", action = ActionTypes.WRITE)
public Boolean createNamespace(@RequestParam("customNamespaceId") String namespaceId,
@RequestParam("namespaceName") String namespaceName,
@RequestParam(value = "namespaceDesc", required = false) String namespaceDesc) {
if (StringUtils.isBlank(namespaceId)) {
namespaceId = UUID.randomUUID().toString();
} else {
namespaceId = namespaceId.trim();
if (!namespaceIdCheckPattern.matcher(namespaceId).matches()) {
return false;
}
if (namespaceId.length() > NAMESPACE_ID_MAX_LENGTH) {
return false;
}
// check unique
if (namespacePersistService.tenantInfoCountByTenantId(namespaceId) > 0) {
return false;
}
}
// contains illegal chars
if (!namespaceNameCheckPattern.matcher(namespaceName).matches()) {
return false;
}
try {
return namespaceOperationService.createNamespace(namespaceId, namespaceName, namespaceDesc);
} catch (NacosException e) {
return false;
}
} | @Test
void testCreateNamespaceFailure() throws NacosException {
when(namespaceOperationService.createNamespace(anyString(), anyString(), anyString())).thenThrow(new NacosException(500, "test"));
assertFalse(namespaceController.createNamespace("", "testName", "testDesc"));
} |
public static List<Object> getFieldValues(Class<? extends Enum<?>> clazz, String fieldName) {
if(null == clazz || StrUtil.isBlank(fieldName)){
return null;
}
final Enum<?>[] enums = clazz.getEnumConstants();
if (null == enums) {
return null;
}
final List<Object> list = new ArrayList<>(enums.length);
for (Enum<?> e : enums) {
list.add(ReflectUtil.getFieldValue(e, fieldName));
}
return list;
} | @Test
public void getFieldValuesTest() {
List<Object> types = EnumUtil.getFieldValues(TestEnum.class, "type");
assertEquals(CollUtil.newArrayList("type1", "type2", "type3"), types);
} |
@Override
public RegisterRMRequestProto convert2Proto(RegisterRMRequest registerRMRequest) {
final short typeCode = registerRMRequest.getTypeCode();
final AbstractMessageProto abstractMessage = AbstractMessageProto.newBuilder().setMessageType(
MessageTypeProto.forNumber(typeCode)).build();
final String extraData = registerRMRequest.getExtraData();
AbstractIdentifyRequestProto abstractIdentifyRequestProto = AbstractIdentifyRequestProto.newBuilder()
.setAbstractMessage(abstractMessage).setApplicationId(registerRMRequest.getApplicationId()).setExtraData(
extraData == null ? "" : extraData).setTransactionServiceGroup(
registerRMRequest.getTransactionServiceGroup()).setVersion(registerRMRequest.getVersion()).build();
RegisterRMRequestProto result = RegisterRMRequestProto.newBuilder().setAbstractIdentifyRequest(
abstractIdentifyRequestProto).setResourceIds(
registerRMRequest.getResourceIds() == null ? "" : registerRMRequest.getResourceIds()).build();
return result;
} | @Test
public void convert2Proto() {
RegisterRMRequest registerRMRequest = new RegisterRMRequest();
registerRMRequest.setResourceIds("res1");
registerRMRequest.setVersion("123");
registerRMRequest.setTransactionServiceGroup("group");
registerRMRequest.setExtraData("extraData");
registerRMRequest.setApplicationId("appId");
RegisterRMRequestConvertor convertor = new RegisterRMRequestConvertor();
RegisterRMRequestProto proto = convertor.convert2Proto(registerRMRequest);
RegisterRMRequest real = convertor.convert2Model(proto);
assertThat((real.getTypeCode())).isEqualTo(registerRMRequest.getTypeCode());
assertThat((real.getResourceIds())).isEqualTo(registerRMRequest.getResourceIds());
assertThat((real.getVersion())).isEqualTo(registerRMRequest.getVersion());
assertThat((real.getTransactionServiceGroup())).isEqualTo(registerRMRequest.getTransactionServiceGroup());
assertThat((real.getExtraData())).isEqualTo(registerRMRequest.getExtraData());
assertThat((real.getApplicationId())).isEqualTo(registerRMRequest.getApplicationId());
} |
public static boolean isIpV6Endpoint(NetworkEndpoint networkEndpoint) {
return hasIpAddress(networkEndpoint)
&& networkEndpoint.getIpAddress().getAddressFamily().equals(AddressFamily.IPV6);
} | @Test
public void isIpV6Endpoint_withIpV6AndPortEndpoint_returnsTrue() {
NetworkEndpoint ipV6AndPortEndpoint =
NetworkEndpoint.newBuilder()
.setType(NetworkEndpoint.Type.IP_PORT)
.setPort(Port.newBuilder().setPortNumber(8888))
.setIpAddress(
IpAddress.newBuilder().setAddress("3ffe::1").setAddressFamily(AddressFamily.IPV6))
.build();
assertThat(NetworkEndpointUtils.isIpV6Endpoint(ipV6AndPortEndpoint)).isTrue();
} |
@VisibleForTesting
Entity exportNativeEntity(LookupTableDto lookupTableDto, EntityDescriptorIds entityDescriptorIds) {
final String tableId = entityDescriptorIds.get(EntityDescriptor.create(lookupTableDto.id(), ModelTypes.LOOKUP_TABLE_V1))
.orElseThrow(() -> new ContentPackException("Couldn't find lookup table entity " + lookupTableDto.id()));
final String cacheId = entityDescriptorIds.get(cacheDescriptor(lookupTableDto.cacheId()))
.orElseThrow(() -> new ContentPackException("Couldn't find lookup cache entity " + lookupTableDto.cacheId()));
final String adapterId = entityDescriptorIds.get(adapterDescriptor(lookupTableDto.dataAdapterId()))
.orElseThrow(() -> new ContentPackException("Couldn't find lookup data adapter entity " + lookupTableDto.dataAdapterId()));
final LookupTableEntity lookupTableEntity = LookupTableEntity.create(
ValueReference.of(lookupTableDto.scope()),
ValueReference.of(lookupTableDto.name()),
ValueReference.of(lookupTableDto.title()),
ValueReference.of(lookupTableDto.description()),
ValueReference.of(cacheId),
ValueReference.of(adapterId),
ValueReference.of(lookupTableDto.defaultSingleValue()),
ValueReference.of(lookupTableDto.defaultSingleValueType()),
ValueReference.of(lookupTableDto.defaultMultiValue()),
ValueReference.of(lookupTableDto.defaultMultiValueType()));
final JsonNode data = objectMapper.convertValue(lookupTableEntity, JsonNode.class);
return EntityV1.builder()
.id(ModelId.of(tableId))
.type(ModelTypes.LOOKUP_TABLE_V1)
.data(data)
.build();
} | @Test
@MongoDBFixtures("LookupTableFacadeTest.json")
public void exportNativeEntity() {
final EntityDescriptor tableDescriptor = EntityDescriptor.create("5adf24dd4b900a0fdb4e530d", ModelTypes.LOOKUP_TABLE_V1);
final EntityDescriptor adapterDescriptor = EntityDescriptor.create("5adf24a04b900a0fdb4e52c8", ModelTypes.LOOKUP_ADAPTER_V1);
final EntityDescriptor cacheDescriptor = EntityDescriptor.create("5adf24b24b900a0fdb4e52dd", ModelTypes.LOOKUP_CACHE_V1);
final EntityDescriptorIds entityDescriptorIds = EntityDescriptorIds.of(
tableDescriptor,
adapterDescriptor,
cacheDescriptor
);
final Entity entity = facade.exportEntity(tableDescriptor, entityDescriptorIds).orElseThrow(AssertionError::new);
assertThat(entity).isInstanceOf(EntityV1.class);
assertThat(entity.id()).isEqualTo(ModelId.of(entityDescriptorIds.get(tableDescriptor).orElse(null)));
assertThat(entity.type()).isEqualTo(ModelTypes.LOOKUP_TABLE_V1);
final EntityV1 entityV1 = (EntityV1) entity;
final LookupTableEntity lookupTableEntity = objectMapper.convertValue(entityV1.data(), LookupTableEntity.class);
assertThat(lookupTableEntity.name()).isEqualTo(ValueReference.of("http-dsv-no-cache"));
assertThat(lookupTableEntity.title()).isEqualTo(ValueReference.of("HTTP DSV without Cache"));
assertThat(lookupTableEntity.description()).isEqualTo(ValueReference.of("HTTP DSV without Cache"));
assertThat(lookupTableEntity.dataAdapterName()).isEqualTo(ValueReference.of(entityDescriptorIds.get(adapterDescriptor).orElse(null)));
assertThat(lookupTableEntity.cacheName()).isEqualTo(ValueReference.of(entityDescriptorIds.get(cacheDescriptor).orElse(null)));
assertThat(lookupTableEntity.defaultSingleValue()).isEqualTo(ValueReference.of("Default single value"));
assertThat(lookupTableEntity.defaultSingleValueType()).isEqualTo(ValueReference.of(LookupDefaultValue.Type.STRING));
assertThat(lookupTableEntity.defaultMultiValue()).isEqualTo(ValueReference.of("Default multi value"));
assertThat(lookupTableEntity.defaultMultiValueType()).isEqualTo(ValueReference.of(LookupDefaultValue.Type.OBJECT));
} |
@Override
public Buffer allocate() {
return allocate(this.pageSize);
} | @Test
public void testWithBiggerMinimumCapacity() throws Exception {
final PooledBufferAllocatorImpl allocator = new PooledBufferAllocatorImpl(4096);
final Buffer buffer = allocator.allocate(10000);
assertEquals(0, buffer.offset());
assertEquals(0, buffer.limit());
assertEquals(16384, buffer.capacity());
buffer.release();
} |
public String reqApi(String api, Map<String, String> params, String method) throws NacosException {
return reqApi(api, params, Collections.emptyMap(), method);
} | @Test
void testReqApiForEmptyServer() throws NacosException {
assertThrows(NacosException.class, () -> {
Map<String, String> params = new HashMap<>();
clientProxy.reqApi("api", params, Collections.emptyMap(), Collections.emptyList(), HttpMethod.GET);
});
} |
@VisibleForTesting
void startKsql(final KsqlConfig ksqlConfigWithPort) {
cleanupOldState();
initialize(ksqlConfigWithPort);
} | @Test
public void shouldSendCreateStreamRequestBeforeSettingReady() {
// When:
app.startKsql(ksqlConfig);
// Then:
final InOrder inOrder = Mockito.inOrder(ksqlResource, serverState);
verify(ksqlResource).handleKsqlStatements(
securityContextArgumentCaptor.capture(),
eq(new KsqlRequest(logCreateStatement, Collections.emptyMap(), Collections.emptyMap(), null))
);
assertThat(securityContextArgumentCaptor.getValue().getUserPrincipal(), is(Optional.empty()));
assertThat(securityContextArgumentCaptor.getValue().getServiceContext(), is(serviceContext));
inOrder.verify(serverState).setReady();
} |
@Override
public CacheAppender<O> newAppender() {
return new JavaSerializationCacheAppender();
} | @Test
public void fail_to_serialize() throws Exception {
class Unserializable implements Serializable {
private void writeObject(ObjectOutputStream out) {
throw new UnsupportedOperationException("expected error");
}
}
DiskCache<Serializable> cache = new JavaSerializationDiskCache<>(temp.newFile(), System2.INSTANCE);
try {
cache.newAppender().append(new Unserializable());
fail();
} catch (UnsupportedOperationException e) {
assertThat(e).hasMessage("expected error");
}
} |
public DynamicInputChunkContext<K, V> getChunkContext(
Configuration configuration) throws IOException {
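// Note: the chunk context is created lazily and cached, so repeated calls on the same
// DynamicInputFormat instance return the same context (as the test below verifies).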
if(chunkContext == null) {
chunkContext = new DynamicInputChunkContext<K, V>(configuration);
}
return chunkContext;
} | @Test
public void testDynamicInputChunkContext() throws IOException {
Configuration configuration = new Configuration();
configuration.set(DistCpConstants.CONF_LABEL_LISTING_FILE_PATH,
"/tmp/test/file1.seq");
DynamicInputFormat firstInputFormat = new DynamicInputFormat();
DynamicInputFormat secondInputFormat = new DynamicInputFormat();
DynamicInputChunkContext firstContext =
firstInputFormat.getChunkContext(configuration);
DynamicInputChunkContext secondContext =
firstInputFormat.getChunkContext(configuration);
DynamicInputChunkContext thirdContext =
secondInputFormat.getChunkContext(configuration);
DynamicInputChunkContext fourthContext =
secondInputFormat.getChunkContext(configuration);
Assert.assertTrue("Chunk contexts from the same DynamicInputFormat " +
"object should be the same.",firstContext.equals(secondContext));
Assert.assertTrue("Chunk contexts from the same DynamicInputFormat " +
"object should be the same.",thirdContext.equals(fourthContext));
Assert.assertTrue("Contexts from different DynamicInputFormat " +
"objects should be different.",!firstContext.equals(thirdContext));
} |
@VisibleForTesting
int getSchedulerConf(String webAppAddress, WebResource resource)
throws Exception {
ClientResponse response = null;
resource = (resource != null) ? resource :
initializeWebResource(webAppAddress);
try {
Builder builder;
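// Note: when security is enabled the caller is authenticated by the transport (presumably
// SPNEGO); otherwise Hadoop's simple auth passes the caller via the "user.name" query param.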
if (UserGroupInformation.isSecurityEnabled()) {
builder = resource
.path("ws").path("v1").path("cluster")
.path("scheduler-conf").accept(MediaType.APPLICATION_XML);
} else {
builder = resource
.path("ws").path("v1").path("cluster").path("scheduler-conf")
.queryParam("user.name", UserGroupInformation.getCurrentUser()
.getShortUserName()).accept(MediaType.APPLICATION_XML);
}
response = builder.get(ClientResponse.class);
if (response != null) {
if (response.getStatus() == Status.OK.getStatusCode()) {
ConfInfo schedulerConf = response.getEntity(ConfInfo.class);
JAXBContext jaxbContext = JAXBContext.newInstance(ConfInfo.class);
Marshaller jaxbMarshaller = jaxbContext.createMarshaller();
StringWriter sw = new StringWriter();
jaxbMarshaller.marshal(schedulerConf, sw);
prettyFormatWithIndent(sw.toString(), 2);
return 0;
} else {
System.err.println("Failed to get scheduler configuration: "
+ response.getEntity(String.class));
}
} else {
System.err.println("Failed to get scheduler configuration: " +
"null response");
}
return -1;
} finally {
if (response != null) {
response.close();
}
destroyClient();
}
} | @Test(timeout = 10000)
public void testGetSchedulerConf() throws Exception {
ByteArrayOutputStream sysOutStream = new ByteArrayOutputStream();
PrintStream sysOut = new PrintStream(sysOutStream);
System.setOut(sysOut);
try {
super.setUp();
GuiceServletConfig.setInjector(
Guice.createInjector(new WebServletModule()));
int exitCode = cli.getSchedulerConf("", resource());
assertEquals("SchedConfCLI failed to run", 0, exitCode);
assertTrue("Failed to get scheduler configuration",
sysOutStream.toString().contains("testqueue"));
} finally {
cleanUp();
}
} |
@Operation(summary = "Get the status of a mijn digid session [VALID, INVALID]")
@GetMapping("/session_status/{mijn_digid_session_id}")
public ResponseEntity<MijnDigidSessionStatus> sessionStatus(@PathVariable(name = "mijn_digid_session_id") String mijnDigiDSessionId) {
if(mijnDigiDSessionId == null) {
return ResponseEntity.badRequest().build();
}
return ResponseEntity.ok(mijnDigiDSessionService.sessionStatus(mijnDigiDSessionId));
} | @Test
void validateBadRequestOnNoSessionId() {
ResponseEntity<MijnDigidSessionStatus> response = mijnDigiDSessionController.sessionStatus(null);
assertEquals(response.getStatusCode(), HttpStatus.BAD_REQUEST);
} |
@Override
public ListenableFuture<?> execute(Commit statement, TransactionManager transactionManager, Metadata metadata, AccessControl accessControl, QueryStateMachine stateMachine, List<Expression> parameters)
{
Session session = stateMachine.getSession();
if (!session.getTransactionId().isPresent()) {
throw new PrestoException(NOT_IN_TRANSACTION, "No transaction in progress");
}
TransactionId transactionId = session.getTransactionId().get();
stateMachine.clearTransactionId();
return transactionManager.asyncCommit(transactionId);
} | @Test
public void testNoTransactionCommit()
{
TransactionManager transactionManager = createTestTransactionManager();
Session session = sessionBuilder()
.build();
QueryStateMachine stateMachine = createQueryStateMachine("COMMIT", session, true, transactionManager, executor, metadata);
try {
CommitTask commitTask = new CommitTask();
getFutureValue(commitTask.execute(new Commit(), transactionManager, metadata, new AllowAllAccessControl(), stateMachine, emptyList()));
fail();
}
catch (PrestoException e) {
assertEquals(e.getErrorCode(), NOT_IN_TRANSACTION.toErrorCode());
}
assertFalse(stateMachine.getQueryInfo(Optional.empty()).isClearTransactionId());
assertFalse(stateMachine.getQueryInfo(Optional.empty()).getStartedTransactionId().isPresent());
assertTrue(transactionManager.getAllTransactionInfos().isEmpty());
} |
@Override
public CompletableFuture<List<Long>> getSplitBoundary(BundleSplitOption bundleSplitOptionTmp) {
FlowOrQpsEquallyDivideBundleSplitOption bundleSplitOption =
(FlowOrQpsEquallyDivideBundleSplitOption) bundleSplitOptionTmp;
NamespaceService service = bundleSplitOption.getService();
NamespaceBundle bundle = bundleSplitOption.getBundle();
Map<String, TopicStatsImpl> topicStatsMap = bundleSplitOption.getTopicStatsMap();
int loadBalancerNamespaceBundleMaxMsgRate = bundleSplitOption.getLoadBalancerNamespaceBundleMaxMsgRate();
double diffThreshold = bundleSplitOption.getFlowOrQpsDifferenceThresholdPercentage() / 100.0;
long loadBalancerNamespaceBundleMaxBandwidthBytes = bundleSplitOption
.getLoadBalancerNamespaceBundleMaxBandwidthMbytes() * MBytes;
return service.getOwnedTopicListForNamespaceBundle(bundle).thenCompose(topics -> {
if (topics == null || topics.size() <= 1) {
return CompletableFuture.completedFuture(null);
}
double bundleThroughput = 0;
double bundleMsgRate = 0;
Map<Long, TopicInfo> topicInfoMap = new HashMap<>();
List<Long> topicHashList = new ArrayList<>(topics.size());
for (String topic : topics) {
TopicStatsImpl topicStats = topicStatsMap.get(topic);
if (topicStats == null) {
continue;
}
double msgRateIn = topicStats.getMsgRateIn();
double msgRateOut = topicStats.getMsgRateOut();
double msgThroughputIn = topicStats.getMsgThroughputIn();
double msgThroughputOut = topicStats.getMsgThroughputOut();
double msgRate = msgRateIn + msgRateOut;
double throughput = msgThroughputIn + msgThroughputOut;
if (msgRate <= 0 && throughput <= 0) {
// Skip empty topic
continue;
}
Long hashCode = bundle.getNamespaceBundleFactory().getLongHashCode(topic);
topicHashList.add(hashCode);
topicInfoMap.put(hashCode, new TopicInfo(topic, msgRate, throughput));
bundleThroughput += throughput;
bundleMsgRate += msgRate;
}
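// Note: split only when there are at least two non-empty topics and the bundle's
// msg rate or throughput exceeds the configured cap by more than the threshold percentage.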
if (topicInfoMap.size() < 2
|| (bundleMsgRate < (loadBalancerNamespaceBundleMaxMsgRate * (1 + diffThreshold))
&& bundleThroughput < (loadBalancerNamespaceBundleMaxBandwidthBytes * (1 + diffThreshold)))) {
return CompletableFuture.completedFuture(null);
}
Collections.sort(topicHashList);
List<Long> splitResults = new ArrayList<>();
double bundleMsgRateTmp = topicInfoMap.get(topicHashList.get(0)).msgRate;
double bundleThroughputTmp = topicInfoMap.get(topicHashList.get(0)).throughput;
for (int i = 1; i < topicHashList.size(); i++) {
long topicHashCode = topicHashList.get(i);
double msgRate = topicInfoMap.get(topicHashCode).msgRate;
double throughput = topicInfoMap.get(topicHashCode).throughput;
if ((bundleMsgRateTmp + msgRate) > loadBalancerNamespaceBundleMaxMsgRate
|| (bundleThroughputTmp + throughput) > loadBalancerNamespaceBundleMaxBandwidthBytes) {
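// Note: the boundary is placed halfway between the two adjacent topic hashes;
// the +1 keeps it strictly greater than the left topic's hash.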
long splitStart = topicHashList.get(i - 1);
long splitEnd = topicHashList.get(i);
long splitMiddle = splitStart + (splitEnd - splitStart) / 2 + 1;
splitResults.add(splitMiddle);
bundleMsgRateTmp = msgRate;
bundleThroughputTmp = throughput;
} else {
bundleMsgRateTmp += msgRate;
bundleThroughputTmp += throughput;
}
}
return CompletableFuture.completedFuture(splitResults);
});
} | @Test
public void testFirstPositionIsOverLoad() {
FlowOrQpsEquallyDivideBundleSplitAlgorithm algorithm = new FlowOrQpsEquallyDivideBundleSplitAlgorithm();
int loadBalancerNamespaceBundleMaxMsgRate = 1010;
int loadBalancerNamespaceBundleMaxBandwidthMbytes = 100;
int flowOrQpsDifferenceThresholdPercentage = 10;
int topicNum = 5;
List<String> mockTopics = new ArrayList<>();
List<Long> topicHashList = new ArrayList<>(topicNum);
Map<Long, String> hashAndTopic = new HashMap<>();
for (int i = 0; i < topicNum; i++) {
String topicName = "persistent://test-tenant1/test-namespace1/test-partition-" + i;
mockTopics.add(topicName);
long hashValue = Hashing.crc32().hashString(topicName, UTF_8).padToLong();
topicHashList.add(hashValue);
hashAndTopic.put(hashValue, topicName);
}
Collections.sort(topicHashList);
Map<String, TopicStatsImpl> topicStatsMap = new HashMap<>();
long hashValue = topicHashList.get(0);
String topicName = hashAndTopic.get(hashValue);
TopicStatsImpl topicStats0 = new TopicStatsImpl();
topicStats0.msgRateIn = 1000;
topicStats0.msgThroughputIn = 1000;
topicStats0.msgRateOut = 1000;
topicStats0.msgThroughputOut = 1000;
topicStatsMap.put(topicName, topicStats0);
for (int i = 1; i < topicHashList.size(); i++) {
hashValue = topicHashList.get(i);
topicName = hashAndTopic.get(hashValue);
TopicStatsImpl topicStats = new TopicStatsImpl();
topicStats.msgRateIn = 24.5;
topicStats.msgThroughputIn = 1000;
topicStats.msgRateOut = 25;
topicStats.msgThroughputOut = 1000;
topicStatsMap.put(topicName, topicStats);
}
// -- do test
NamespaceService mockNamespaceService = mock(NamespaceService.class);
NamespaceBundle mockNamespaceBundle = mock(NamespaceBundle.class);
doReturn(CompletableFuture.completedFuture(mockTopics))
.when(mockNamespaceService).getOwnedTopicListForNamespaceBundle(mockNamespaceBundle);
NamespaceBundleFactory mockNamespaceBundleFactory = mock(NamespaceBundleFactory.class);
doReturn(mockNamespaceBundleFactory)
.when(mockNamespaceBundle).getNamespaceBundleFactory();
mockTopics.forEach((topic) -> {
long hash = Hashing.crc32().hashString(topic, UTF_8).padToLong();
doReturn(hash)
.when(mockNamespaceBundleFactory).getLongHashCode(topic);
});
List<Long> splitPositions = algorithm.getSplitBoundary(new FlowOrQpsEquallyDivideBundleSplitOption(mockNamespaceService, mockNamespaceBundle,
null, topicStatsMap, loadBalancerNamespaceBundleMaxMsgRate,
loadBalancerNamespaceBundleMaxBandwidthMbytes, flowOrQpsDifferenceThresholdPercentage)).join();
long splitStart = topicHashList.get(0);
long splitEnd = topicHashList.get(1);
long splitMiddle = splitStart + (splitEnd - splitStart) / 2 + 1;
assertTrue(splitPositions.get(0) == splitMiddle);
} |
public static void mergeOutputDataParams(
Map<String, Parameter> allParams, Map<String, Parameter> params) {
params.forEach(
(name, param) -> {
if (!allParams.containsKey(name)) {
throw new MaestroValidationException(
"Invalid output parameter [%s], not defined in params", name);
}
MergeContext context = MergeContext.stepCreate(ParamSource.OUTPUT_PARAMETER);
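// Note: literal MAP and STRING_MAP params are merged entry-by-entry into the base
// definition below; every other param type effectively replaces the base param.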
if (param.getType() == ParamType.MAP && param.isLiteral()) {
ParamDefinition baseDef = allParams.get(name).toDefinition();
Map<String, ParamDefinition> baseMap = baseDef.asMapParamDef().getValue();
ParamDefinition toMergeDef = param.toDefinition();
Map<String, ParamDefinition> toMergeMap = toMergeDef.asMapParamDef().getValue();
mergeParams(baseMap, toMergeMap, context);
Parameter mergedParam =
buildMergedParamDefinition(name, toMergeDef, baseDef, context, baseMap)
.toParameter();
populateEvaluatedResultAndTime(mergedParam, param.getEvaluatedTime());
allParams.put(name, mergedParam);
} else if (param.getType() == ParamType.STRING_MAP && param.isLiteral()) {
ParamDefinition baseDef = allParams.get(name).toDefinition();
Map<String, String> baseMap = baseDef.asStringMapParamDef().getValue();
ParamDefinition toMergeDef = param.toDefinition();
Map<String, String> toMergeMap = toMergeDef.asStringMapParamDef().getValue();
baseMap.putAll(toMergeMap);
Parameter mergedParam =
buildMergedParamDefinition(name, toMergeDef, baseDef, context, baseMap)
.toParameter();
populateEvaluatedResultAndTime(mergedParam, param.getEvaluatedTime());
allParams.put(name, mergedParam);
} else {
ParamDefinition paramDefinition =
ParamsMergeHelper.buildMergedParamDefinition(
name,
param.toDefinition(),
allParams.get(name).toDefinition(),
MergeContext.stepCreate(ParamSource.OUTPUT_PARAMETER),
param.getValue());
Parameter parameter = paramDefinition.toParameter();
parameter.setEvaluatedResult(param.getEvaluatedResult());
parameter.setEvaluatedTime(param.getEvaluatedTime());
allParams.put(name, parameter);
}
});
} | @Test
public void testMergeOutputDataParamsMapParam() throws JsonProcessingException {
Map<String, Parameter> allParams =
parseParamMap(
"{\"map_param\":{\"value\":{\"string_array_param\":{\"value\":[\"p1\",\"p2\"],\"type\":\"STRING_ARRAY\"},\"long_param\":{\"value\":123,\"type\":\"LONG\"},\"nested_map_param\":{\"value\":{\"nested_1\":{\"value\":\"val1\",\"type\":\"STRING\"}},\"type\":\"MAP\"}},\"type\":\"MAP\",\"evaluated_result\":{\"string_array_param\":[\"p1\",\"p2\"],\"long_param\":123,\"nested_map_param\":{\"nested_1\":\"val1\"}},\"evaluated_time\":1626893775979}}");
Map<String, Parameter> paramsToMerge =
parseParamMap(
"{\"map_param\":{\"value\":{\"string_array_param\":{\"value\":[\"p3\",\"p4\"],\"type\":\"STRING_ARRAY\"},\"nested_map_param\":{\"value\":{\"nested_2\":{\"value\":\"val2\",\"type\":\"STRING\"}},\"type\":\"MAP\"}},\"type\":\"MAP\",\"evaluated_result\":{\"string_array_param\":[\"p3\",\"p4\"],\"nested_map_param\":{\"nested_2\":\"val2\"}},\"evaluated_time\":1626893775979}}");
ParamsMergeHelper.mergeOutputDataParams(allParams, paramsToMerge);
Map<String, ParamDefinition> mergedMapParamDef = allParams.get("map_param").getValue();
Map<String, Object> mergedMapEvaluated = allParams.get("map_param").getEvaluatedResult();
// verify string array is replaced in both def and evaluated result.
assertArrayEquals(
new String[] {"p3", "p4"},
mergedMapParamDef.get("string_array_param").asStringArrayParamDef().getValue());
assertArrayEquals(
new String[] {"p3", "p4"}, (String[]) mergedMapEvaluated.get("string_array_param"));
// verify the long param is retained in both def and evaluated result
assertEquals(123, (long) mergedMapParamDef.get("long_param").asLongParamDef().getValue());
assertEquals(123, (long) mergedMapEvaluated.get("long_param"));
// verify the nested map param is merged in both def and evaluated result
Map<String, ParamDefinition> nestedMap =
mergedMapParamDef.get("nested_map_param").asMapParamDef().getValue();
assertEquals(2, nestedMap.size());
assertTrue(nestedMap.containsKey("nested_1"));
assertTrue(nestedMap.containsKey("nested_2"));
Map<String, Object> nestedEvaluated =
(Map<String, Object>) mergedMapEvaluated.get("nested_map_param");
assertEquals("val1", nestedEvaluated.get("nested_1"));
assertEquals("val2", nestedEvaluated.get("nested_2"));
} |
@Override
public Page<ConfigInfoTagWrapper> findAllConfigInfoTagForDumpAll(final int pageNo, final int pageSize) {
final int startRow = (pageNo - 1) * pageSize;
ConfigInfoTagMapper configInfoTagMapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
TableConstant.CONFIG_INFO_TAG);
String sqlCountRows = configInfoTagMapper.count(null);
MapperResult sqlFetchRows = configInfoTagMapper.findAllConfigInfoTagForDumpAllFetchRows(
new MapperContext(startRow, pageSize));
PaginationHelper<ConfigInfoTagWrapper> helper = createPaginationHelper();
return helper.fetchPageLimit(sqlCountRows, sqlFetchRows.getSql(), sqlFetchRows.getParamList().toArray(), pageNo,
pageSize, CONFIG_INFO_TAG_WRAPPER_ROW_MAPPER);
} | @Test
void testFindAllConfigInfoTagForDumpAll() {
//mock count
Mockito.when(databaseOperate.queryOne(anyString(), eq(Integer.class))).thenReturn(308);
List<ConfigInfoTagWrapper> mockTagList = new ArrayList<>();
mockTagList.add(new ConfigInfoTagWrapper());
mockTagList.add(new ConfigInfoTagWrapper());
mockTagList.add(new ConfigInfoTagWrapper());
mockTagList.get(0).setLastModified(System.currentTimeMillis());
mockTagList.get(1).setLastModified(System.currentTimeMillis());
mockTagList.get(2).setLastModified(System.currentTimeMillis());
//mock query list
Mockito.when(databaseOperate.queryMany(anyString(), eq(new Object[] {}), eq(CONFIG_INFO_TAG_WRAPPER_ROW_MAPPER)))
.thenReturn(mockTagList);
int pageNo = 3;
int pageSize = 100;
//execute & verify
Page<ConfigInfoTagWrapper> returnTagPage = embeddedConfigInfoTagPersistService.findAllConfigInfoTagForDumpAll(pageNo, pageSize);
assertEquals(308, returnTagPage.getTotalCount());
assertEquals(mockTagList, returnTagPage.getPageItems());
} |
public static ShutdownArgs parse(final String[] args) {
try {
CommandLineParser parser = new DefaultParser();
CommandLine cmd = parser.parse(createShutdownOptions(), args);
return new ShutdownArgs(StartArgsParser.getPort(cmd.getOptionValue("s")));
} catch (ParseException e) {
throw new ParseArgException("fail to parse arguments", e);
}
} | @Test
public void should_throw_exception_for_missing_shutdown_port_value() {
assertThrows(ParseArgException.class, () -> {
parse(new String[]{"shutdown", "-s"});
});
} |
public void build(Entry target) {
process(target, VisitorType.BUILDER);
} | @Test(expected = IllegalStateException.class)
public void defaultBuilderIsNotSetException() {
final RecursiveMenuStructureProcessor recursiveMenuStructureBuilder = new RecursiveMenuStructureProcessor();
final Entry childEntry = new Entry();
recursiveMenuStructureBuilder.build(childEntry);
} |
@Override
public Map<String, Map<StreamMessageId, Map<K, V>>> read(StreamMultiReadArgs args) {
return get(readAsync(args));
} | @Test
public void testReadMultiKeysEmpty() {
RStream<String, String> stream = redisson.getStream("test2");
Map<String, Map<StreamMessageId, Map<String, String>>> s = stream.read(StreamMultiReadArgs.greaterThan(new StreamMessageId(0), "test1", new StreamMessageId(0))
.count(10));
assertThat(s).isEmpty();
} |
public ConnectionFactory connectionFactory(ConnectionFactory connectionFactory) {
// It is common to implement both interfaces
if (connectionFactory instanceof XAConnectionFactory) {
return (ConnectionFactory) xaConnectionFactory((XAConnectionFactory) connectionFactory);
}
return TracingConnectionFactory.create(connectionFactory, this);
} | @Test void connectionFactory_wrapsXaInput() {
abstract class Both implements XAConnectionFactory, ConnectionFactory {
}
assertThat(jmsTracing.connectionFactory(mock(Both.class)))
.isInstanceOf(XAConnectionFactory.class);
} |
public void go(PrintStream out) {
KieServices ks = KieServices.Factory.get();
KieRepository kr = ks.getRepository();
KieFileSystem kfs = ks.newKieFileSystem();
kfs.write("src/main/resources/org/kie/example5/HAL5.drl", getRule());
KieBuilder kb = ks.newKieBuilder(kfs);
kb.buildAll(); // kieModule is automatically deployed to KieRepository if successfully built.
if (kb.getResults().hasMessages(Level.ERROR)) {
throw new RuntimeException("Build Errors:\n" + kb.getResults().toString());
}
KieContainer kContainer = ks.newKieContainer(kr.getDefaultReleaseId());
KieSession kSession = kContainer.newKieSession();
kSession.setGlobal("out", out);
kSession.insert(new Message("Dave", "Hello, HAL. Do you read me, HAL?"));
kSession.fireAllRules();
} | @Test
public void testGo() {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream ps = new PrintStream(baos);
new KieFileSystemExample().go(ps);
ps.close();
String actual = baos.toString();
String expected = "" +
"Dave: Hello, HAL. Do you read me, HAL?" + NL +
"HAL: Dave. I read you." + NL;
assertEquals(expected, actual);
} |
public static File saveJunitXml(String targetDir, FeatureResult result, String fileName) {
DecimalFormat formatter = (DecimalFormat) NumberFormat.getNumberInstance(Locale.US);
formatter.applyPattern("0.######");
Document doc = XmlUtils.newDocument();
Element root = doc.createElement("testsuite");
doc.appendChild(root);
root.setAttribute("tests", result.getScenarioCount() + "");
root.setAttribute("failures", result.getFailedCount() + "");
root.setAttribute("time", formatter.format(result.getDurationMillis() / 1000));
root.setAttribute("name", result.getDisplayName()); // will be uri
root.setAttribute("skipped", "0");
StringBuilder xmlString = new StringBuilder();
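// Note: XmlUtils renders the empty suite as a self-closing element, so "/>" is rewritten
// to ">" here and the matching </testsuite> tag is appended manually further below.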
xmlString.append(XmlUtils.toString(doc, false).replace("/>", ">"));
String baseName = result.getFeature().getPackageQualifiedName();
Iterator<ScenarioResult> iterator = result.getScenarioResults().iterator();
while (iterator.hasNext()) {
ScenarioResult sr = iterator.next();
Element testCase = doc.createElement("testcase");
testCase.setAttribute("classname", baseName);
StringBuilder sb = new StringBuilder();
Throwable error = appendSteps(sr.getStepResults(), sb);
String name = sr.getScenario().getName();
if (StringUtils.isBlank(name)) {
name = sr.getScenario().getUniqueId();
}
testCase.setAttribute("name", name);
testCase.setAttribute("time", formatter.format(sr.getDurationMillis() / 1000));
Element stepsHolder;
if (error != null) {
stepsHolder = doc.createElement("failure");
stepsHolder.setAttribute("message", error.getMessage());
} else {
stepsHolder = doc.createElement("system-out");
}
Element properties = null;
properties = addCustomTags(testCase, doc, sr);
if(properties != null && properties.getChildNodes().getLength() > 0){
testCase.appendChild(properties);
}
testCase.appendChild(stepsHolder);
stepsHolder.setTextContent(sb.toString());
xmlString.append(XmlUtils.toString(testCase)).append('\n');
}
xmlString.append("</testsuite>");
if (fileName == null) {
fileName = baseName + ".xml";
}
File file = new File(targetDir + File.separator + fileName);
FileUtils.writeToFile(file, xmlString.toString());
return file;
} | @Test
void testCustomTags() {
String expectedCustomTags = "<properties><property name=\"requirement\" value=\"CALC-2\"/><property name=\"test_key\" value=\"CALC-2\"/></properties>";
Feature feature = Feature.read("classpath:com/intuit/karate/report/customTags.feature");
FeatureRuntime fr = FeatureRuntime.of(new Suite(), new FeatureCall(feature));
fr.run();
File file = ReportUtils.saveJunitXml("target", fr.result, null);
assertTrue(FileUtils.toString(file).contains(expectedCustomTags));
} |
@VisibleForTesting
static TableSchema protoTableSchemaFromBeamSchema(Schema schema) {
Preconditions.checkState(schema.getFieldCount() > 0);
TableSchema.Builder builder = TableSchema.newBuilder();
for (Field field : schema.getFields()) {
builder.addFields(fieldDescriptorFromBeamField(field));
}
return builder.build();
} | @Test
public void testNestedFromSchema() {
DescriptorProto descriptor =
TableRowToStorageApiProto.descriptorSchemaFromTableSchema(
BeamRowToStorageApiProto.protoTableSchemaFromBeamSchema((NESTED_SCHEMA)), true, false);
Map<String, Type> expectedBaseTypes =
BASE_SCHEMA_PROTO.getFieldList().stream()
.collect(
Collectors.toMap(FieldDescriptorProto::getName, FieldDescriptorProto::getType));
Map<String, Type> types =
descriptor.getFieldList().stream()
.collect(
Collectors.toMap(FieldDescriptorProto::getName, FieldDescriptorProto::getType));
Map<String, String> typeNames =
descriptor.getFieldList().stream()
.collect(
Collectors.toMap(FieldDescriptorProto::getName, FieldDescriptorProto::getTypeName));
Map<String, Label> typeLabels =
descriptor.getFieldList().stream()
.collect(
Collectors.toMap(FieldDescriptorProto::getName, FieldDescriptorProto::getLabel));
assertEquals(3, types.size());
Map<String, DescriptorProto> nestedTypes =
descriptor.getNestedTypeList().stream()
.collect(Collectors.toMap(DescriptorProto::getName, Functions.identity()));
assertEquals(3, nestedTypes.size());
assertEquals(Type.TYPE_MESSAGE, types.get("nested"));
assertEquals(Label.LABEL_OPTIONAL, typeLabels.get("nested"));
String nestedTypeName1 = typeNames.get("nested");
Map<String, Type> nestedTypes1 =
nestedTypes.get(nestedTypeName1).getFieldList().stream()
.collect(
Collectors.toMap(FieldDescriptorProto::getName, FieldDescriptorProto::getType));
assertEquals(expectedBaseTypes, nestedTypes1);
assertEquals(Type.TYPE_MESSAGE, types.get("nestedarray"));
assertEquals(Label.LABEL_REPEATED, typeLabels.get("nestedarray"));
String nestedTypeName2 = typeNames.get("nestedarray");
Map<String, Type> nestedTypes2 =
nestedTypes.get(nestedTypeName2).getFieldList().stream()
.collect(
Collectors.toMap(FieldDescriptorProto::getName, FieldDescriptorProto::getType));
assertEquals(expectedBaseTypes, nestedTypes2);
assertEquals(Type.TYPE_MESSAGE, types.get("nestediterable"));
assertEquals(Label.LABEL_REPEATED, typeLabels.get("nestediterable"));
String nestedTypeName3 = typeNames.get("nestediterable");
Map<String, Type> nestedTypes3 =
nestedTypes.get(nestedTypeName3).getFieldList().stream()
.collect(
Collectors.toMap(FieldDescriptorProto::getName, FieldDescriptorProto::getType));
assertEquals(expectedBaseTypes, nestedTypes3);
} |
public boolean matches(String input) {
return MATCHER.matches(input, pattern);
} | @Test
public void testMatchesOnSingleCharacter() throws Exception {
GlobMatcher matcher = new GlobMatcher("A*");
assertTrue(matcher.matches("AABBCC"));
assertFalse(matcher.matches("FFFF"));
} |
public Optional<ContentPack> findByIdAndRevision(ModelId id, int revision) {
final DBQuery.Query query = DBQuery.is(Identified.FIELD_META_ID, id).is(Revisioned.FIELD_META_REVISION, revision);
return Optional.ofNullable(dbCollection.findOne(query));
} | @Test
@MongoDBFixtures("ContentPackPersistenceServiceTest.json")
public void findByIdAndRevisionWithInvalidId() {
final Optional<ContentPack> contentPack = contentPackPersistenceService.findByIdAndRevision(ModelId.of("does-not-exist"), 2);
assertThat(contentPack).isEmpty();
} |
public void isInstanceOf(Class<?> clazz) {
if (clazz == null) {
throw new NullPointerException("clazz");
}
if (actual == null) {
failWithActual("expected instance of", clazz.getName());
return;
}
if (!isInstanceOfType(actual, clazz)) {
if (Platform.classMetadataUnsupported()) {
throw new UnsupportedOperationException(
actualCustomStringRepresentation()
+ ", an instance of "
+ actual.getClass().getName()
+ ", may or may not be an instance of "
+ clazz.getName()
+ ". Under -XdisableClassMetadata, we do not have enough information to tell.");
}
failWithoutActual(
fact("expected instance of", clazz.getName()),
fact("but was instance of", actual.getClass().getName()),
fact("with value", actualCustomStringRepresentation()));
}
} | @SuppressWarnings("IsInstanceString") // test is an intentional trivially true check
@Test
public void isInstanceOfExactType() {
assertThat("a").isInstanceOf(String.class);
} |
public LocationIndex prepareIndex() {
return prepareIndex(EdgeFilter.ALL_EDGES);
} | @Test
public void testMoreReal() {
BaseGraph graph = new BaseGraph.Builder(encodingManager).create();
NodeAccess na = graph.getNodeAccess();
na.setNode(1, 51.2492152, 9.4317166);
na.setNode(0, 52, 9);
na.setNode(2, 51.2, 9.4);
na.setNode(3, 49, 10);
graph.edge(1, 0).set(speedEnc, 60, 60);
graph.edge(0, 2).set(speedEnc, 60, 60);
graph.edge(0, 3).set(speedEnc, 60, 60).setWayGeometry(Helper.createPointList(51.21, 9.43));
LocationIndex index = createIndexNoPrepare(graph, 500000).prepareIndex();
assertEquals(1, findClosestEdge(index, 51.2, 9.4));
} |
public static <T, PredicateT extends ProcessFunction<T, Boolean>> Filter<T> by(
PredicateT predicate) {
return new Filter<>(predicate);
} | @Test
@Category(NeedsRunner.class)
public void testIdentityFilterByPredicateWithLambda() {
PCollection<Integer> output =
p.apply(Create.of(591, 11789, 1257, 24578, 24799, 307)).apply(Filter.by(i -> true));
PAssert.that(output).containsInAnyOrder(591, 11789, 1257, 24578, 24799, 307);
p.run();
} |
private static Schema optional(Schema original) {
// null is first in the union because Parquet's default is always null
return Schema.createUnion(Arrays.asList(Schema.create(Schema.Type.NULL), original));
} | @Test
public void testOldAvroListOfLists() throws Exception {
Schema listOfLists = optional(Schema.createArray(Schema.createArray(Schema.create(INT))));
Schema schema = Schema.createRecord("AvroCompatListInList", null, null, false);
schema.setFields(
Lists.newArrayList(new Schema.Field("listOfLists", listOfLists, null, JsonProperties.NULL_VALUE)));
System.err.println("Avro schema: " + schema.toString(true));
testRoundTripConversion(
schema,
"message AvroCompatListInList {\n" + " optional group listOfLists (LIST) {\n"
+ " repeated group array (LIST) {\n"
+ " repeated int32 array;\n"
+ " }\n"
+ " }\n"
+ "}");
// Cannot use round-trip assertion because 3-level representation is used
testParquetToAvroConversion(
NEW_BEHAVIOR,
schema,
"message AvroCompatListInList {\n" + " optional group listOfLists (LIST) {\n"
+ " repeated group array (LIST) {\n"
+ " repeated int32 array;\n"
+ " }\n"
+ " }\n"
+ "}");
} |
@Override
public void start() {
DatabaseVersion.Status status = version.getStatus();
checkState(status == UP_TO_DATE || status == FRESH_INSTALL, "Compute Engine can't start unless Database is up to date");
} | @Test
public void start_has_no_effect_if_status_is_FRESH_INSTALL() {
when(databaseVersion.getStatus()).thenReturn(DatabaseVersion.Status.FRESH_INSTALL);
underTest.start();
verify(databaseVersion).getStatus();
verifyNoMoreInteractions(databaseVersion);
} |
@Override
public ClientDetailsEntity updateClient(ClientDetailsEntity oldClient, ClientDetailsEntity newClient) throws IllegalArgumentException {
if (oldClient != null && newClient != null) {
for (String uri : newClient.getRegisteredRedirectUri()) {
if (blacklistedSiteService.isBlacklisted(uri)) {
throw new IllegalArgumentException("Client URI is blacklisted: " + uri);
}
}
// if the client is flagged to allow for refresh tokens, make sure it's got the right scope
ensureRefreshTokenConsistency(newClient);
// make sure we don't have both a JWKS and a JWKS URI
ensureKeyConsistency(newClient);
// check consistency when using HEART mode
checkHeartMode(newClient);
// check the sector URI
checkSectorIdentifierUri(newClient);
// make sure a client doesn't get any special system scopes
ensureNoReservedScopes(newClient);
return clientRepository.updateClient(oldClient.getId(), newClient);
}
throw new IllegalArgumentException("Neither old client or new client can be null!");
} | @Test
public void updateClient_noOfflineAccess() {
ClientDetailsEntity oldClient = new ClientDetailsEntity();
oldClient.getScope().add(SystemScopeService.OFFLINE_ACCESS);
ClientDetailsEntity client = new ClientDetailsEntity();
client = service.updateClient(oldClient, client);
Mockito.verify(scopeService, Mockito.atLeastOnce()).removeReservedScopes(Matchers.anySet());
assertThat(client.getScope().contains(SystemScopeService.OFFLINE_ACCESS), is(equalTo(false)));
} |
public static Object eval(Parameter param, Map<String, Parameter> params) {
if (!param.isEvaluated()) {
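// Note: only unevaluated literals are interpolated here; params that already carry
// an evaluated result return it unchanged (see the else branch below).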
switch (param.getType()) {
case STRING:
return interpolate(param.asStringParam().getValue(), params);
case STRING_ARRAY:
return Arrays.stream(param.asStringArrayParam().getValue())
.map(v -> interpolate(v, params))
.toArray(String[]::new);
case STRING_MAP:
try {
return param.asStringMapParam().getValue().entrySet().stream()
.collect(
MapHelper.toListMap(
e -> interpolate(e.getKey(), params),
e -> interpolate(e.getValue(), params)));
} catch (Exception e) {
LOG.error(
"Failed to evaluate literal param: {} due to ",
param.asStringMapParam().getValue(),
e);
throw e;
}
default:
return param.getValue();
}
} else {
return param.getEvaluatedResult();
}
} | @Test
public void testStringInterpolationJsonError() {
Parameter param = StringParameter.builder().name("test").value("test $invalidMap").build();
Map<String, Parameter> params = new LinkedHashMap<>();
params.put(
"invalidMap",
MapParameter.builder()
.evaluatedResult(Collections.singletonMap("key", new Object()))
.evaluatedTime(123L)
.build());
AssertHelper.assertThrows(
"Throw an error if param cannot be json serialized",
MaestroInternalError.class,
"INTERNAL_ERROR - Cannot evaluate [invalidMap] as string due to",
() -> LiteralEvaluator.eval(param, params));
} |
@VisibleForTesting
static Map<String, ExternalResourceDriver> externalResourceDriversFromConfig(
Configuration config, PluginManager pluginManager) {
final Set<String> resourceSet = getExternalResourceSet(config);
if (resourceSet.isEmpty()) {
return Collections.emptyMap();
}
final Iterator<ExternalResourceDriverFactory> factoryIterator =
pluginManager.load(ExternalResourceDriverFactory.class);
final Map<String, ExternalResourceDriverFactory> externalResourceFactories =
new HashMap<>();
factoryIterator.forEachRemaining(
externalResourceDriverFactory ->
externalResourceFactories.put(
externalResourceDriverFactory.getClass().getName(),
externalResourceDriverFactory));
final Map<String, ExternalResourceDriver> externalResourceDrivers = new HashMap<>();
for (String resourceName : resourceSet) {
final ConfigOption<String> driverClassOption =
key(ExternalResourceOptions
.getExternalResourceDriverFactoryConfigOptionForResource(
resourceName))
.stringType()
.noDefaultValue();
final String driverFactoryClassName = config.get(driverClassOption);
if (StringUtils.isNullOrWhitespaceOnly(driverFactoryClassName)) {
LOG.warn(
"Could not find driver class name for {}. Please make sure {} is configured.",
resourceName,
driverClassOption.key());
continue;
}
ExternalResourceDriverFactory externalResourceDriverFactory =
externalResourceFactories.get(driverFactoryClassName);
if (externalResourceDriverFactory != null) {
DelegatingConfiguration delegatingConfiguration =
new DelegatingConfiguration(
config,
ExternalResourceOptions
.getExternalResourceParamConfigPrefixForResource(
resourceName));
try {
externalResourceDrivers.put(
resourceName,
externalResourceDriverFactory.createExternalResourceDriver(
delegatingConfiguration));
LOG.info("Add external resources driver for {}.", resourceName);
} catch (Exception e) {
LOG.warn(
"Could not instantiate driver with factory {} for {}. {}",
driverFactoryClassName,
resourceName,
e);
}
} else {
LOG.warn(
"Could not find factory class {} for {}.",
driverFactoryClassName,
resourceName);
}
}
return externalResourceDrivers;
} | @Test
public void testFactoryPluginDoesNotExist() {
final Configuration config = new Configuration();
final String driverFactoryClassName = TestingExternalResourceDriverFactory.class.getName();
final PluginManager testingPluginManager = new TestingPluginManager(Collections.emptyMap());
config.set(
ExternalResourceOptions.EXTERNAL_RESOURCE_LIST,
Collections.singletonList(RESOURCE_NAME_1));
config.setString(
ExternalResourceOptions.getExternalResourceDriverFactoryConfigOptionForResource(
RESOURCE_NAME_1),
driverFactoryClassName);
final Map<String, ExternalResourceDriver> externalResourceDrivers =
ExternalResourceUtils.externalResourceDriversFromConfig(
config, testingPluginManager);
assertThat(externalResourceDrivers.entrySet(), is(empty()));
} |
@Override
public void put(ExecutionGraphInfo executionGraphInfo) throws IOException {
final JobID jobId = executionGraphInfo.getJobId();
final ArchivedExecutionGraph archivedExecutionGraph =
executionGraphInfo.getArchivedExecutionGraph();
final JobStatus jobStatus = archivedExecutionGraph.getState();
final String jobName = archivedExecutionGraph.getJobName();
Preconditions.checkArgument(
jobStatus.isTerminalState(),
"The job "
+ jobName
+ '('
+ jobId
+ ") is not in a terminal state. Instead it is in state "
+ jobStatus
+ '.');
switch (jobStatus) {
case FINISHED:
numFinishedJobs++;
break;
case CANCELED:
numCanceledJobs++;
break;
case FAILED:
numFailedJobs++;
break;
case SUSPENDED:
break;
default:
throw new IllegalStateException(
"The job "
+ jobName
+ '('
+ jobId
+ ") should have been in a known terminal state. "
+ "Instead it was in state "
+ jobStatus
+ '.');
}
// write the ArchivedExecutionGraph to disk
storeExecutionGraphInfo(executionGraphInfo);
final JobDetails detailsForJob = JobDetails.createDetailsForJob(archivedExecutionGraph);
jobDetailsCache.put(jobId, detailsForJob);
executionGraphInfoCache.put(jobId, executionGraphInfo);
} | @Test
public void testPut() throws IOException {
assertPutJobGraphWithStatus(JobStatus.FINISHED);
} |
@Override
public String toString() {
return StringUtils.join(columns, separator);
} | @Test
public void testToString() {
assertEquals("a,b", new SampleMetadata(',', "a", "b").toString());
} |
@Override
public RateLimiter rateLimiter(final String name) {
return rateLimiter(name, getDefaultConfig());
} | @Test
@SuppressWarnings("unchecked")
public void rateLimiterPositiveWithSupplier() throws Exception {
RateLimiterRegistry registry = new InMemoryRateLimiterRegistry(config);
Supplier<RateLimiterConfig> rateLimiterConfigSupplier = mock(Supplier.class);
when(rateLimiterConfigSupplier.get())
.thenReturn(config);
RateLimiter firstRateLimiter = registry.rateLimiter("test", rateLimiterConfigSupplier);
verify(rateLimiterConfigSupplier, times(1)).get();
RateLimiter sameAsFirst = registry.rateLimiter("test", rateLimiterConfigSupplier);
verify(rateLimiterConfigSupplier, times(1)).get();
RateLimiter anotherLimit = registry.rateLimiter("test1", rateLimiterConfigSupplier);
verify(rateLimiterConfigSupplier, times(2)).get();
then(firstRateLimiter).isEqualTo(sameAsFirst);
then(firstRateLimiter).isNotEqualTo(anotherLimit);
} |
@Override
public KTable<Windowed<K>, V> aggregate(final Initializer<V> initializer) {
return aggregate(initializer, Materialized.with(null, null));
} | @Test
public void shouldNotHaveNullInitializerTwoOptionMaterializedOnAggregate() {
assertThrows(NullPointerException.class, () -> windowedCogroupedStream.aggregate(null, Materialized.as("test")));
} |
public void writeTrailingBytes(byte[] value) {
if ((value == null) || (value.length == 0)) {
throw new IllegalArgumentException("Value cannot be null or have 0 elements");
}
encodedArrays.add(value);
} | @Test
public void testWriteTrailingBytes() {
byte[] escapeChars =
new byte[] {
OrderedCode.ESCAPE1,
OrderedCode.NULL_CHARACTER,
OrderedCode.SEPARATOR,
OrderedCode.ESCAPE2,
OrderedCode.INFINITY,
OrderedCode.FF_CHARACTER
};
byte[] anotherArray = new byte[] {'a', 'b', 'c', 'd', 'e'};
OrderedCode orderedCode = new OrderedCode();
orderedCode.writeTrailingBytes(escapeChars);
assertArrayEquals(orderedCode.getEncodedBytes(), escapeChars);
assertArrayEquals(orderedCode.readTrailingBytes(), escapeChars);
try {
orderedCode.readInfinity();
fail("Expected IllegalArgumentException.");
} catch (IllegalArgumentException e) {
// expected
}
orderedCode = new OrderedCode();
orderedCode.writeTrailingBytes(anotherArray);
assertArrayEquals(orderedCode.getEncodedBytes(), anotherArray);
assertArrayEquals(orderedCode.readTrailingBytes(), anotherArray);
} |
@Override
public Object getObject(final int columnIndex) throws SQLException {
return mergeResultSet.getValue(columnIndex, Object.class);
} | @Test
void assertGetObjectWithBigInteger() throws SQLException {
BigInteger result = BigInteger.valueOf(0L);
when(mergeResultSet.getValue(1, BigInteger.class)).thenReturn(result);
assertThat(shardingSphereResultSet.getObject(1, BigInteger.class), is(result));
} |
@Override
public TopicAssignment place(
PlacementSpec placement,
ClusterDescriber cluster
) throws InvalidReplicationFactorException {
RackList rackList = new RackList(random, cluster.usableBrokers());
throwInvalidReplicationFactorIfNonPositive(placement.numReplicas());
throwInvalidReplicationFactorIfZero(rackList.numUnfencedBrokers());
throwInvalidReplicationFactorIfTooFewBrokers(placement.numReplicas(),
rackList.numTotalBrokers());
List<List<Integer>> placements = new ArrayList<>(placement.numPartitions());
for (int partition = 0; partition < placement.numPartitions(); partition++) {
placements.add(rackList.place(placement.numReplicas()));
}
return new TopicAssignment(
placements.stream().map(replicas -> new PartitionAssignment(replicas, cluster)).collect(Collectors.toList())
);
} | @Test
public void testRackListAllBrokersFenced() {
// placing on a rack list in which every broker is fenced should fail with InvalidReplicationFactorException
MockRandom random = new MockRandom();
RackList rackList = new RackList(random, Arrays.asList(
new UsableBroker(0, Optional.empty(), true),
new UsableBroker(1, Optional.empty(), true),
new UsableBroker(2, Optional.empty(), true)).iterator());
assertEquals(3, rackList.numTotalBrokers());
assertEquals(0, rackList.numUnfencedBrokers());
assertEquals(Collections.singletonList(Optional.empty()), rackList.rackNames());
assertEquals("All brokers are currently fenced.",
assertThrows(InvalidReplicationFactorException.class,
() -> rackList.place(3)).getMessage());
} |
@Override
public boolean removeAll(Collection<?> c) {
return get(removeAllAsync(c));
} | @Test
public void testRemoveAll() {
RScoredSortedSet<Integer> set = redisson.getScoredSortedSet("simple");
set.add(0.1, 1);
set.add(0.2, 2);
set.add(0.3, 3);
Assertions.assertTrue(set.removeAll(Arrays.asList(1, 2)));
assertThat(set).containsOnly(3);
Assertions.assertEquals(1, set.size());
} |
@Override
public int getConnectTimeout() {
return clientConfig.getPropertyAsInteger(IClientConfigKey.Keys.ConnectTimeout, DEFAULT_CONNECT_TIMEOUT);
} | @Test
void testGetConnectTimeoutOverride() {
clientConfig.set(IClientConfigKey.Keys.ConnectTimeout, 1000);
assertEquals(1000, connectionPoolConfig.getConnectTimeout());
} |
public static UInstanceOf create(UExpression expression, UTree<?> type) {
return new AutoValue_UInstanceOf(expression, type);
} | @Test
public void equality() {
new EqualsTester()
.addEqualityGroup(
UInstanceOf.create(UFreeIdent.create("o"), UClassIdent.create("java.lang.String")))
.addEqualityGroup(
UInstanceOf.create(UFreeIdent.create("o"), UClassIdent.create("java.lang.Integer")))
.testEquals();
} |
@Override
public void run() {
if (processor != null) {
processor.execute();
} else {
if (!beforeHook()) {
logger.info("before-feature hook returned [false], aborting: {}", this);
} else {
scenarios.forEachRemaining(this::processScenario);
}
afterFeature();
}
} | @Test
void testCopyAndClone() {
run("copy.feature");
} |
public KsqlGenericRecord build(
final List<ColumnName> columnNames,
final List<Expression> expressions,
final LogicalSchema schema,
final DataSourceType dataSourceType
) {
final List<ColumnName> columns = columnNames.isEmpty()
? implicitColumns(schema)
: columnNames;
if (columns.size() != expressions.size()) {
throw new KsqlException(
"Expected a value for each column."
+ " Expected Columns: " + columnNames
+ ". Got " + expressions);
}
final LogicalSchema schemaWithPseudoColumns = withPseudoColumns(schema);
for (ColumnName col : columns) {
if (!schemaWithPseudoColumns.findColumn(col).isPresent()) {
throw new KsqlException("Column name " + col + " does not exist.");
}
if (SystemColumns.isDisallowedForInsertValues(col)) {
throw new KsqlException("Inserting into column " + col + " is not allowed.");
}
}
final Map<ColumnName, Object> values = resolveValues(
columns,
expressions,
schemaWithPseudoColumns,
functionRegistry,
config
);
if (dataSourceType == DataSourceType.KTABLE) {
final String noValue = schemaWithPseudoColumns.key().stream()
.map(Column::name)
.filter(colName -> !values.containsKey(colName))
.map(ColumnName::text)
.collect(Collectors.joining(", "));
if (!noValue.isEmpty()) {
throw new KsqlException("Value for primary key column(s) "
+ noValue + " is required for tables");
}
}
final long ts = (long) values.getOrDefault(SystemColumns.ROWTIME_NAME, clock.getAsLong());
final GenericKey key = buildKey(schema, values);
final GenericRow value = buildValue(schema, values);
return KsqlGenericRecord.of(key, value, ts);
} | @Test
public void shouldBuildWithRowtime() {
// Given:
final LogicalSchema schema = LogicalSchema.builder()
.keyColumn(KEY, SqlTypes.STRING)
.valueColumn(COL0, SqlTypes.STRING)
.build();
final List<ColumnName> names = ImmutableList.of(SystemColumns.ROWTIME_NAME, KEY, COL0);
final Expression exp = new StringLiteral("a");
// When:
final KsqlGenericRecord record = recordFactory.build(
names, ImmutableList.of(new LongLiteral(1L), exp, exp), schema, DataSourceType.KSTREAM
);
// Then:
assertThat(record, is(KsqlGenericRecord.of(
GenericKey.genericKey("a"),
GenericRow.genericRow("a"),
1
)));
} |
@Override
public Integer getIntAndRemove(K name) {
return null;
} | @Test
public void testGetIntAndRemove() {
assertNull(HEADERS.getIntAndRemove("name1"));
} |
@Override
public short getShort(int index) {
checkIndex(index, 2);
return _getShort(index);
} | @Test
public void getShortBoundaryCheck2() {
assertThrows(IndexOutOfBoundsException.class, new Executable() {
@Override
public void execute() {
buffer.getShort(buffer.capacity() - 1);
}
});
} |
public static String dealRule(final String content, final MockRequest mockRequest) {
String afterDeal = content;
String placeHolder = getPlaceholder(content);
while (placeHolder != null) {
Object generateData = generate(placeHolder, mockRequest);
if (Objects.equals(generateData, placeHolder)) {
generateData = ERROR_PARSE;
}
String toString = String.valueOf(generateData);
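// Note: regex metacharacters in the raw placeholder are escaped so it can be used
// verbatim as the replaceFirst pattern on the next line.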
placeHolder = placeHolder.replaceAll("([$|{}()\\]\\[])", "\\\\$1");
afterDeal = afterDeal.replaceFirst(placeHolder, toString);
placeHolder = getPlaceholder(afterDeal);
}
return afterDeal;
} | @Test
public void testDealRule() {
String dealedContent = GeneratorFactory.dealRule("${phone}", null);
assertThat(dealedContent, matchesRegex("^\"1[3-9]\\d{9}\"$"));
} |
@Override
public String execute(CommandContext commandContext, String[] args) {
if (ArrayUtils.isEmpty(args)) {
return "Please input the index of the method you want to invoke, eg: \r\n select 1";
}
Channel channel = commandContext.getRemote();
String message = args[0];
List<Method> methodList =
channel.attr(InvokeTelnet.INVOKE_METHOD_LIST_KEY).get();
if (CollectionUtils.isEmpty(methodList)) {
return "Please use the invoke command first.";
}
if (!StringUtils.isNumber(message)
|| Integer.parseInt(message) < 1
|| Integer.parseInt(message) > methodList.size()) {
return "Illegal index ,please input select 1~" + methodList.size();
}
Method method = methodList.get(Integer.parseInt(message) - 1);
channel.attr(SELECT_METHOD_KEY).set(method);
channel.attr(SELECT_KEY).set(Boolean.TRUE);
String invokeMessage = channel.attr(InvokeTelnet.INVOKE_MESSAGE_KEY).get();
return invokeTelnet.execute(commandContext, new String[] {invokeMessage});
} | @Test
void testInvokeWithNull() throws RemotingException {
defaultAttributeMap.attr(ChangeTelnet.SERVICE_KEY).set(DemoService.class.getName());
defaultAttributeMap.attr(InvokeTelnet.INVOKE_METHOD_LIST_KEY).set(methods);
given(mockChannel.attr(ChangeTelnet.SERVICE_KEY))
.willReturn(defaultAttributeMap.attr(ChangeTelnet.SERVICE_KEY));
given(mockChannel.attr(InvokeTelnet.INVOKE_METHOD_LIST_KEY))
.willReturn(defaultAttributeMap.attr(InvokeTelnet.INVOKE_METHOD_LIST_KEY));
registerProvider(DemoService.class.getName(), new DemoServiceImpl(), DemoService.class);
String result = select.execute(mockCommandContext, new String[0]);
assertTrue(result.contains("Please input the index of the method you want to invoke"));
defaultAttributeMap.attr(ChangeTelnet.SERVICE_KEY).remove();
defaultAttributeMap.attr(InvokeTelnet.INVOKE_METHOD_LIST_KEY).remove();
} |
public long periodBarriersCrossed(long start, long end) {
if (start > end)
throw new IllegalArgumentException("Start cannot come before end");
long startFloored = getStartOfCurrentPeriodWithGMTOffsetCorrection(start, getTimeZone());
long endFloored = getStartOfCurrentPeriodWithGMTOffsetCorrection(end, getTimeZone());
long diff = endFloored - startFloored;
switch (periodicityType) {
case TOP_OF_MILLISECOND:
return diff;
case TOP_OF_SECOND:
return diff / MILLIS_IN_ONE_SECOND;
case TOP_OF_MINUTE:
return diff / MILLIS_IN_ONE_MINUTE;
case TOP_OF_HOUR:
return diff / MILLIS_IN_ONE_HOUR;
case TOP_OF_DAY:
return diff / MILLIS_IN_ONE_DAY;
case TOP_OF_WEEK:
return diff / MILLIS_IN_ONE_WEEK;
case TOP_OF_MONTH:
return diffInMonths(start, end);
default:
throw new IllegalStateException("Unknown periodicity type.");
}
} | @Test
public void testPeriodBarriersCrossedJustBeforeEnteringDaylightSaving() {
RollingCalendar rc = new RollingCalendar(dailyPattern, TimeZone.getTimeZone("CET"), Locale.US);
// Sun Mar 26 22:18:38 CEST 2017, GMT offset = +2h
long start = 1490559518333L;
System.out.println(new Date(start));
// Mon Mar 27 00:05:18 CEST 2017, GMT offset = +2h
long end = 1490565918333L;
System.out.println(new Date(end));
assertEquals(1, rc.periodBarriersCrossed(start, end));
} |
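
For comparison, a java.time sketch that counts daily barriers across the same DST transition; the zone rules absorb the missing hour, so no manual GMT-offset correction is needed. An illustration of the idea, not the RollingCalendar implementation.

import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.temporal.ChronoUnit;

public class PeriodBarriersDemo {
    // Counts how many local-day boundaries lie between two instants in the
    // given zone; DST shifts are handled by the zone rules, not by hand.
    static long dailyBarriersCrossed(long startMillis, long endMillis, ZoneId zone) {
        LocalDate startDay = Instant.ofEpochMilli(startMillis).atZone(zone).toLocalDate();
        LocalDate endDay = Instant.ofEpochMilli(endMillis).atZone(zone).toLocalDate();
        return ChronoUnit.DAYS.between(startDay, endDay);
    }

    public static void main(String[] args) {
        ZoneId cet = ZoneId.of("CET");
        // Sun Mar 26 22:18:38 2017 CEST -> Mon Mar 27 00:05:18 2017 CEST
        System.out.println(dailyBarriersCrossed(1490559518333L, 1490565918333L, cet)); // 1
    }
}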
public ConsumerGroupDescribeResponseData.Member asConsumerGroupDescribeMember(
Assignment targetAssignment,
TopicsImage topicsImage
) {
return new ConsumerGroupDescribeResponseData.Member()
.setMemberEpoch(memberEpoch)
.setMemberId(memberId)
.setAssignment(new ConsumerGroupDescribeResponseData.Assignment()
.setTopicPartitions(topicPartitionsFromMap(assignedPartitions, topicsImage)))
.setTargetAssignment(new ConsumerGroupDescribeResponseData.Assignment()
.setTopicPartitions(topicPartitionsFromMap(
targetAssignment != null ? targetAssignment.partitions() : Collections.emptyMap(),
topicsImage
)))
.setClientHost(clientHost)
.setClientId(clientId)
.setInstanceId(instanceId)
.setRackId(rackId)
.setSubscribedTopicNames(subscribedTopicNames == null ? null : new ArrayList<>(subscribedTopicNames))
.setSubscribedTopicRegex(subscribedTopicRegex);
} | @Test
public void testAsConsumerGroupDescribeMember() {
Uuid topicId1 = Uuid.randomUuid();
Uuid topicId2 = Uuid.randomUuid();
Uuid topicId3 = Uuid.randomUuid();
Uuid topicId4 = Uuid.randomUuid();
MetadataImage metadataImage = new MetadataImageBuilder()
.addTopic(topicId1, "topic1", 3)
.addTopic(topicId2, "topic2", 3)
.addTopic(topicId3, "topic3", 3)
.addTopic(topicId4, "topic4", 3)
.build();
List<Integer> assignedPartitions = Arrays.asList(0, 1, 2);
int epoch = 10;
ConsumerGroupCurrentMemberAssignmentValue record = new ConsumerGroupCurrentMemberAssignmentValue()
.setMemberEpoch(epoch)
.setPreviousMemberEpoch(epoch - 1)
.setAssignedPartitions(Collections.singletonList(new ConsumerGroupCurrentMemberAssignmentValue.TopicPartitions()
.setTopicId(topicId1)
.setPartitions(assignedPartitions)))
.setPartitionsPendingRevocation(Collections.singletonList(new ConsumerGroupCurrentMemberAssignmentValue.TopicPartitions()
.setTopicId(topicId2)
.setPartitions(Arrays.asList(3, 4, 5))));
String memberId = Uuid.randomUuid().toString();
String clientId = "clientId";
String instanceId = "instanceId";
String rackId = "rackId";
String clientHost = "clientHost";
List<String> subscribedTopicNames = Arrays.asList("topic1", "topic2");
String subscribedTopicRegex = "topic.*";
Map<Uuid, Set<Integer>> assignmentMap = new HashMap<>();
assignmentMap.put(topicId4, new HashSet<>(assignedPartitions));
Assignment targetAssignment = new Assignment(assignmentMap);
ConsumerGroupMember member = new ConsumerGroupMember.Builder(memberId)
.updateWith(record)
.setClientId(clientId)
.setInstanceId(instanceId)
.setRackId(rackId)
.setClientHost(clientHost)
.setSubscribedTopicNames(subscribedTopicNames)
.setSubscribedTopicRegex(subscribedTopicRegex)
.build();
ConsumerGroupDescribeResponseData.Member actual = member.asConsumerGroupDescribeMember(targetAssignment, metadataImage.topics());
ConsumerGroupDescribeResponseData.Member expected = new ConsumerGroupDescribeResponseData.Member()
.setMemberId(memberId)
.setMemberEpoch(epoch)
.setClientId(clientId)
.setInstanceId(instanceId)
.setRackId(rackId)
.setClientHost(clientHost)
.setSubscribedTopicNames(new ArrayList<>(subscribedTopicNames))
.setSubscribedTopicRegex(subscribedTopicRegex)
.setAssignment(
new ConsumerGroupDescribeResponseData.Assignment()
.setTopicPartitions(Collections.singletonList(new ConsumerGroupDescribeResponseData.TopicPartitions()
.setTopicId(topicId1)
.setTopicName("topic1")
.setPartitions(assignedPartitions)
))
)
.setTargetAssignment(
new ConsumerGroupDescribeResponseData.Assignment()
.setTopicPartitions(targetAssignment.partitions().entrySet().stream().map(
item -> new ConsumerGroupDescribeResponseData.TopicPartitions()
.setTopicId(item.getKey())
.setTopicName("topic4")
.setPartitions(new ArrayList<>(item.getValue()))
).collect(Collectors.toList()))
);
assertEquals(expected, actual);
} |
public static String getPinyin(char c) {
return getEngine().getPinyin(c);
} | @Test
public void getPinyinTest(){
final String pinyin = PinyinUtil.getPinyin("你好怡", " ");
assertEquals("ni hao yi", pinyin);
} |
public void dropIndex(Bson keys) {
delegate.dropIndex(keys);
} | @Test
void dropIndex() {
final var collection = jacksonCollection("simple", Simple.class);
collection.createIndex(new BasicDBObject("name", 1));
collection.createIndex(new BasicDBObject("name", 1).append("_id", 1));
assertThat(mongoCollection("simple").listIndexes()).extracting("name")
.containsExactlyInAnyOrder("_id_", "name_1", "name_1__id_1");
collection.dropIndex("name_1");
assertThat(mongoCollection("simple").listIndexes()).extracting("name")
.containsExactlyInAnyOrder("_id_", "name_1__id_1");
collection.dropIndex(new BasicDBObject("name", 1).append("_id", 1));
assertThat(mongoCollection("simple").listIndexes()).extracting("name")
.containsExactlyInAnyOrder("_id_");
} |
@VisibleForTesting
void validateParentDept(Long id, Long parentId) {
if (parentId == null || DeptDO.PARENT_ID_ROOT.equals(parentId)) {
return;
}
// 1. A department cannot be its own parent
if (Objects.equals(id, parentId)) {
throw exception(DEPT_PARENT_ERROR);
}
// 2. The parent department must exist
DeptDO parentDept = deptMapper.selectById(parentId);
if (parentDept == null) {
throw exception(DEPT_PARENT_NOT_EXITS);
}
// 3. Recursively validate the parent chain; if the parent is one of this department's children, fail to avoid creating a cycle
if (id == null) { // a null id means this is a new department, so no cycle is possible
return;
}
for (int i = 0; i < Short.MAX_VALUE; i++) {
// 3.1 Check for a cycle
parentId = parentDept.getParentId();
if (Objects.equals(id, parentId)) {
throw exception(DEPT_PARENT_IS_CHILD);
}
// 3.2 Continue up to the next parent
if (parentId == null || DeptDO.PARENT_ID_ROOT.equals(parentId)) {
break;
}
parentDept = deptMapper.selectById(parentId);
if (parentDept == null) {
break;
}
}
} | @Test
public void testValidateParentDept_parentError() {
// Prepare parameters
Long id = randomLongId();
// Invoke and assert the expected exception
assertServiceException(() -> deptService.validateParentDept(id, id),
DEPT_PARENT_ERROR);
} |
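
The cycle check above reduces to a bounded walk up the parent chain. A self-contained sketch of just that walk, with an in-memory parent map standing in for deptMapper.selectById:

import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

public class DeptCycleCheckDemo {
    static final Long ROOT = 0L;
    static final Map<Long, Long> PARENTS = new HashMap<>(); // id -> parentId

    // Walks up from parentId; reaching id means the new link would close a
    // cycle. The loop is bounded, mirroring the Short.MAX_VALUE guard above.
    static boolean wouldCreateCycle(Long id, Long parentId) {
        for (int i = 0; i < Short.MAX_VALUE && parentId != null && !ROOT.equals(parentId); i++) {
            if (Objects.equals(id, parentId)) {
                return true;
            }
            parentId = PARENTS.get(parentId);
        }
        return false;
    }

    public static void main(String[] args) {
        PARENTS.put(2L, 1L); // dept 2's parent is dept 1
        PARENTS.put(3L, 2L);
        System.out.println(wouldCreateCycle(1L, 3L)); // true: 3 -> 2 -> 1
        System.out.println(wouldCreateCycle(4L, 3L)); // false
    }
}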
@Override
public List<String> getInsertColumns() {
List<SQLExpr> columnSQLExprs = ast.getColumns();
if (columnSQLExprs.isEmpty()) {
// INSERT INTO ta VALUES (...), without fields clarified
return null;
}
List<String> list = new ArrayList<>(columnSQLExprs.size());
for (SQLExpr expr : columnSQLExprs) {
if (expr instanceof SQLIdentifierExpr) {
list.add(((SQLIdentifierExpr) expr).getName());
} else {
wrapSQLParsingException(expr);
}
}
return list;
} | @Test
public void testGetInsertColumns() {
//test for no column
String sql = "insert into t values (?)";
SQLStatement ast = getSQLStatement(sql);
SqlServerInsertRecognizer recognizer = new SqlServerInsertRecognizer(sql, ast);
List<String> insertColumns = recognizer.getInsertColumns();
Assertions.assertNull(insertColumns);
//test for normal
sql = "insert into t(a) values (?)";
ast = getSQLStatement(sql);
recognizer = new SqlServerInsertRecognizer(sql, ast);
insertColumns = recognizer.getInsertColumns();
Assertions.assertEquals(1, insertColumns.size());
Assertions.assertEquals(Collections.singletonList("a"), insertColumns);
//test for exception
Assertions.assertThrows(SQLParsingException.class, () -> {
String s = "insert into t(a) values (?)";
SQLStatement sqlStatement = getSQLStatement(s);
SQLInsertStatement sqlInsertStatement = (SQLInsertStatement) sqlStatement;
sqlInsertStatement.getColumns().add(new SQLDateExpr());
SqlServerInsertRecognizer sqlServerInsertRecognizer = new SqlServerInsertRecognizer(s, sqlInsertStatement);
sqlServerInsertRecognizer.getInsertColumns();
});
} |
@Override
public Double getNumber( Object object ) throws KettleValueException {
Long timestampAsInteger = getInteger( object );
if ( null != timestampAsInteger ) {
return timestampAsInteger.doubleValue();
} else {
return null;
}
} | @Test
public void testConvertTimestampToNumber_Null() throws KettleValueException {
ValueMetaTimestamp valueMetaTimestamp = new ValueMetaTimestamp();
assertNull( valueMetaTimestamp.getNumber( null ) );
} |
@Override
public void close() {
try {
restHighLevelClient.close();
} catch (IOException e) {
throw new ElasticsearchException("Could not close ES Rest high level client", e);
}
} | @Test
public void should_close_client() throws IOException {
underTest.close();
verify(restClient).close();
} |
@RequiresApi(Build.VERSION_CODES.R)
@Override
public boolean onInlineSuggestionsResponse(@NonNull InlineSuggestionsResponse response) {
final List<InlineSuggestion> inlineSuggestions = response.getInlineSuggestions();
if (inlineSuggestions.size() > 0) {
mInlineSuggestionAction.onNewSuggestions(inlineSuggestions);
getInputViewContainer().addStripAction(mInlineSuggestionAction, true);
getInputViewContainer().setActionsStripVisibility(true);
}
return !inlineSuggestions.isEmpty();
} | @Test
public void testPrioritizePinnedSuggestions() {
simulateOnStartInputFlow();
var inlineView1 = Mockito.mock(InlineContentView.class);
var inlineView2 = Mockito.mock(InlineContentView.class);
var inlineView3Pinned = Mockito.mock(InlineContentView.class);
var inlineView4 = Mockito.mock(InlineContentView.class);
var inlineView5Pinned = Mockito.mock(InlineContentView.class);
var response =
mockResponse(inlineView1, inlineView2, inlineView3Pinned, inlineView4, inlineView5Pinned);
var inlineSuggestion3 = response.getInlineSuggestions().get(2).getInfo();
Mockito.doReturn(true).when(inlineSuggestion3).isPinned();
var inlineSuggestion5 = response.getInlineSuggestions().get(4).getInfo();
Mockito.doReturn(true).when(inlineSuggestion5).isPinned();
mAnySoftKeyboardUnderTest.onInlineSuggestionsResponse(response);
var rootView =
mAnySoftKeyboardUnderTest
.getInputViewContainer()
.findViewById(R.id.inline_suggestions_strip_root);
Shadows.shadowOf(rootView).getOnClickListener().onClick(rootView);
var scroller =
(ScrollViewAsMainChild)
mAnySoftKeyboardUnderTest
.getInputViewContainer()
.findViewById(R.id.inline_suggestions_list);
Assert.assertNotNull(scroller);
Assert.assertEquals(5, scroller.getItemsCount());
var itemsHolder = (ViewGroup) scroller.getChildAt(0);
Assert.assertSame(inlineView3Pinned, itemsHolder.getChildAt(0));
Assert.assertSame(inlineView5Pinned, itemsHolder.getChildAt(1));
Assert.assertSame(inlineView1, itemsHolder.getChildAt(2));
Assert.assertSame(inlineView2, itemsHolder.getChildAt(3));
Assert.assertSame(inlineView4, itemsHolder.getChildAt(4));
} |
public String anonymize(final ParseTree tree) {
return build(tree);
} | @Test
public void shouldAnonymizeDefineUndefineProperty() {
Assert.assertEquals("DEFINE variable='[string]';",
anon.anonymize("DEFINE format = 'JSON';"));
Assert.assertEquals("UNDEFINE variable;",
anon.anonymize("UNDEFINE format;"));
} |
@Override
public V get() throws InterruptedException, ExecutionException {
try {
return resolve(future.get());
} catch (HazelcastSerializationException e) {
throw new ExecutionException(e);
}
} | @Test
public void test_get_whenData_andMultipleTimesInvoked_thenSameInstanceReturned() throws Exception {
Object value = "value";
Data data = serializationService.toData(value);
Future<Object> future = new DelegatingCompletableFuture<>(serializationService, newCompletedFuture(data));
Object result1 = future.get();
Object result2 = future.get();
assertSame(result1, result2);
} |
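
A minimal sketch of the caching behavior the test pins down: resolve the raw value once, store it, and hand back the same instance on every get(). The real Hazelcast class resolves through its SerializationService; here a plain Function stands in.

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function;

public class DelegatingFutureDemo<V> {
    private final CompletableFuture<Object> delegate;
    private final Function<Object, V> resolver;
    private final AtomicReference<V> cached = new AtomicReference<>();

    DelegatingFutureDemo(CompletableFuture<Object> delegate, Function<Object, V> resolver) {
        this.delegate = delegate;
        this.resolver = resolver;
    }

    // Resolves at most one value into the cache, so repeated get() calls
    // return the same resolved instance.
    V get() throws InterruptedException, ExecutionException {
        V value = cached.get();
        if (value == null) {
            cached.compareAndSet(null, resolver.apply(delegate.get()));
            value = cached.get();
        }
        return value;
    }

    public static void main(String[] args) throws Exception {
        DelegatingFutureDemo<String> f = new DelegatingFutureDemo<>(
                CompletableFuture.completedFuture((Object) "raw"),
                raw -> new String(((String) raw).toUpperCase())); // new String forces a distinct instance
        System.out.println(f.get() == f.get()); // true: same instance both times
    }
}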
public void installIntents(Optional<IntentData> toUninstall, Optional<IntentData> toInstall) {
// If there are no Intents to uninstall or install, ignore the request.
if (!toUninstall.isPresent() && !toInstall.isPresent()) {
return;
}
// Classify installable Intents into different installers.
ArrayListMultimap<IntentInstaller, Intent> uninstallInstallers;
ArrayListMultimap<IntentInstaller, Intent> installInstallers;
Set<IntentInstaller> allInstallers = Sets.newHashSet();
if (toUninstall.isPresent()) {
uninstallInstallers = getInstallers(toUninstall.get());
allInstallers.addAll(uninstallInstallers.keySet());
} else {
uninstallInstallers = ArrayListMultimap.create();
}
if (toInstall.isPresent()) {
installInstallers = getInstallers(toInstall.get());
allInstallers.addAll(installInstallers.keySet());
} else {
installInstallers = ArrayListMultimap.create();
}
// Generates an installation context for the high level Intent.
IntentInstallationContext installationContext =
new IntentInstallationContext(toUninstall.orElse(null), toInstall.orElse(null));
// Generate a separate operation context for each installer's installable Intents.
Map<IntentInstaller, IntentOperationContext> contexts = Maps.newHashMap();
allInstallers.forEach(installer -> {
List<Intent> intentsToUninstall = uninstallInstallers.get(installer);
List<Intent> intentsToInstall = installInstallers.get(installer);
// Connect context to high level installation context
IntentOperationContext context =
new IntentOperationContext(intentsToUninstall, intentsToInstall,
installationContext);
installationContext.addPendingContext(context);
contexts.put(installer, context);
});
// Apply contexts to installers
contexts.forEach((installer, context) -> {
installer.apply(context);
});
} | @Test
public void testUninstallAndInstallIntent() {
IntentData toUninstall = new IntentData(createTestIntent(),
IntentState.INSTALLED,
new WallClockTimestamp());
IntentData toInstall = new IntentData(createTestIntent(),
IntentState.INSTALLING,
new WallClockTimestamp());
List<Intent> intentsToUninstall = Lists.newArrayList();
List<Intent> intentsToInstall = Lists.newArrayList();
IntStream.range(0, 10).forEach(val -> {
intentsToUninstall.add(new TestInstallableIntent(val));
});
IntStream.range(10, 20).forEach(val -> {
intentsToInstall.add(new TestInstallableIntent(val));
});
toUninstall = IntentData.compiled(toUninstall, intentsToUninstall);
toInstall = IntentData.compiled(toInstall, intentsToInstall);
installCoordinator.installIntents(Optional.of(toUninstall), Optional.of(toInstall));
Intent toInstallIntent = toInstall.intent();
TestTools.assertAfter(INSTALL_DELAY, INSTALL_DURATION, () -> {
IntentData newData = intentStore.newData;
assertEquals(toInstallIntent, newData.intent());
assertEquals(IntentState.INSTALLED, newData.state());
assertEquals(intentsToInstall, newData.installables());
});
} |
@Override
public <A extends Annotation> MergedAnnotation<A> get(Class<A> annotationType) {
return get(annotationType, null, null);
} | @Test
void adaptFromEmptyArrayToAnyComponentType() {
AttributeMethods methods = AttributeMethods.forAnnotationType(ArrayTypes.class);
Map<String, Object> attributes = new HashMap<>();
for (int i = 0; i < methods.size(); i++) {
attributes.put(methods.get(i).getName(), new Object[] {});
}
MergedAnnotation<ArrayTypes> annotation = TypeMappedAnnotation.of(null, null,
ArrayTypes.class, attributes);
assertThat(annotation.getValue("stringValue")).contains(new String[] {});
assertThat(annotation.getValue("byteValue")).contains(new byte[] {});
assertThat(annotation.getValue("shortValue")).contains(new short[] {});
assertThat(annotation.getValue("intValue")).contains(new int[] {});
assertThat(annotation.getValue("longValue")).contains(new long[] {});
assertThat(annotation.getValue("booleanValue")).contains(new boolean[] {});
assertThat(annotation.getValue("charValue")).contains(new char[] {});
assertThat(annotation.getValue("doubleValue")).contains(new double[] {});
assertThat(annotation.getValue("floatValue")).contains(new float[] {});
assertThat(annotation.getValue("classValue")).contains(new Class<?>[] {});
assertThat(annotation.getValue("annotationValue")).contains(new MergedAnnotation<?>[] {});
assertThat(annotation.getValue("enumValue")).contains(new ExampleEnum[] {});
} |
@Override
public AttributedList<Path> read(final Path directory, final List<String> replies) throws FTPInvalidListException {
final AttributedList<Path> children = new AttributedList<Path>();
// At least one entry successfully parsed
boolean success = false;
// Call hook for those implementors which need to perform some action upon the list after it has been created
// from the server stream, but before any clients see the list
parser.preParse(replies);
for(String line : replies) {
final FTPFile f = parser.parseFTPEntry(line);
if(null == f) {
continue;
}
final String name = f.getName();
if(!success) {
if(lenient) {
// Workaround for #2410. STAT only returns ls of directory itself
// Workaround for #2434. STAT of symbolic link directory only lists the directory itself.
if(directory.getName().equals(name)) {
log.warn(String.format("Skip %s matching parent directory name", f.getName()));
continue;
}
if(name.contains(String.valueOf(Path.DELIMITER))) {
if(!name.startsWith(directory.getAbsolute() + Path.DELIMITER)) {
// Workaround for #2434.
log.warn(String.format("Skip %s with delimiter in name", name));
continue;
}
}
}
}
success = true;
if(name.equals(".") || name.equals("..")) {
if(log.isDebugEnabled()) {
log.debug(String.format("Skip %s", f.getName()));
}
continue;
}
final Path parsed = new Path(directory, PathNormalizer.name(name), f.getType() == FTPFile.DIRECTORY_TYPE ? EnumSet.of(Path.Type.directory) : EnumSet.of(Path.Type.file));
switch(f.getType()) {
case FTPFile.SYMBOLIC_LINK_TYPE:
parsed.setType(EnumSet.of(Path.Type.file, Path.Type.symboliclink));
// Symbolic link target may be an absolute or relative path
final String target = f.getLink();
if(StringUtils.isBlank(target)) {
log.warn(String.format("Missing symbolic link target for %s", parsed));
final EnumSet<Path.Type> type = parsed.getType();
type.remove(Path.Type.symboliclink);
}
else if(StringUtils.startsWith(target, String.valueOf(Path.DELIMITER))) {
parsed.setSymlinkTarget(new Path(PathNormalizer.normalize(target), EnumSet.of(Path.Type.file)));
}
else if(StringUtils.equals("..", target)) {
parsed.setSymlinkTarget(directory);
}
else if(StringUtils.equals(".", target)) {
parsed.setSymlinkTarget(parsed);
}
else {
parsed.setSymlinkTarget(new Path(directory, target, EnumSet.of(Path.Type.file)));
}
break;
}
if(parsed.isFile()) {
parsed.attributes().setSize(f.getSize());
}
parsed.attributes().setOwner(f.getUser());
parsed.attributes().setGroup(f.getGroup());
Permission.Action u = Permission.Action.none;
if(f.hasPermission(FTPFile.USER_ACCESS, FTPFile.READ_PERMISSION)) {
u = u.or(Permission.Action.read);
}
if(f.hasPermission(FTPFile.USER_ACCESS, FTPFile.WRITE_PERMISSION)) {
u = u.or(Permission.Action.write);
}
if(f.hasPermission(FTPFile.USER_ACCESS, FTPFile.EXECUTE_PERMISSION)) {
u = u.or(Permission.Action.execute);
}
Permission.Action g = Permission.Action.none;
if(f.hasPermission(FTPFile.GROUP_ACCESS, FTPFile.READ_PERMISSION)) {
g = g.or(Permission.Action.read);
}
if(f.hasPermission(FTPFile.GROUP_ACCESS, FTPFile.WRITE_PERMISSION)) {
g = g.or(Permission.Action.write);
}
if(f.hasPermission(FTPFile.GROUP_ACCESS, FTPFile.EXECUTE_PERMISSION)) {
g = g.or(Permission.Action.execute);
}
Permission.Action o = Permission.Action.none;
if(f.hasPermission(FTPFile.WORLD_ACCESS, FTPFile.READ_PERMISSION)) {
o = o.or(Permission.Action.read);
}
if(f.hasPermission(FTPFile.WORLD_ACCESS, FTPFile.WRITE_PERMISSION)) {
o = o.or(Permission.Action.write);
}
if(f.hasPermission(FTPFile.WORLD_ACCESS, FTPFile.EXECUTE_PERMISSION)) {
o = o.or(Permission.Action.execute);
}
final Permission permission = new Permission(u, g, o);
if(f instanceof FTPExtendedFile) {
permission.setSetuid(((FTPExtendedFile) f).isSetuid());
permission.setSetgid(((FTPExtendedFile) f).isSetgid());
permission.setSticky(((FTPExtendedFile) f).isSticky());
}
if(!Permission.EMPTY.equals(permission)) {
parsed.attributes().setPermission(permission);
}
final Calendar timestamp = f.getTimestamp();
if(timestamp != null) {
parsed.attributes().setModificationDate(timestamp.getTimeInMillis());
}
children.add(parsed);
}
if(!success) {
throw new FTPInvalidListException(children);
}
return children;
} | @Test
public void testParseAbsolutePaths() throws Exception {
Path path = new Path(
"/data/FTP_pub", EnumSet.of(Path.Type.directory));
String[] replies = new String[]{
"- [RWCEAFMS] Petersm 0 May 05 2004 /data/FTP_pub/WelcomeTo_PeakFTP"
};
final CompositeFileEntryParser parser = new FTPParserSelector().getParser("NETWARE Type : L8");
final AttributedList<Path> list = new FTPListResponseReader(parser).read(path, Arrays.asList(replies)
);
assertEquals(1, list.size());
final Path parsed = list.get(0);
assertEquals("WelcomeTo_PeakFTP", parsed.getName());
assertEquals("/data/FTP_pub", parsed.getParent().getAbsolute());
assertFalse(parsed.attributes().getPermission().isSticky());
assertFalse(parsed.attributes().getPermission().isSetuid());
assertFalse(parsed.attributes().getPermission().isSetgid());
} |
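
The symbolic-link branch above distinguishes four target shapes. A standalone sketch of the same decision table using java.nio paths (illustrative only; the focal code builds Cyberduck Path objects, not java.nio ones):

import java.nio.file.Path;
import java.nio.file.Paths;

public class SymlinkTargetDemo {
    // Absolute targets stand alone, "." points at the link itself,
    // ".." at the listed directory, and anything else is resolved
    // relative to that directory.
    static Path resolveTarget(Path directory, Path link, String target) {
        if (target.startsWith("/")) {
            return Paths.get(target).normalize();
        }
        if ("..".equals(target)) {
            return directory;
        }
        if (".".equals(target)) {
            return link;
        }
        return directory.resolve(target);
    }

    public static void main(String[] args) {
        Path dir = Paths.get("/data/FTP_pub");
        Path link = dir.resolve("current");
        System.out.println(resolveTarget(dir, link, "/etc/motd"));   // /etc/motd
        System.out.println(resolveTarget(dir, link, ".."));          // /data/FTP_pub
        System.out.println(resolveTarget(dir, link, "releases/v2")); // /data/FTP_pub/releases/v2
    }
}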
public String getTag() {
return tag;
} | @Override
@Test
public void testDeserialize() throws JsonProcessingException {
String json = "{\"headers\":{\"notify\":\"true\"},\"dataId\":\"test_data\",\"group\":\"group\","
+ "\"tenant\":\"test_tenant\",\"notify\":true,\"module\":\"config\",\"tag\":\"tag\"}";
ConfigQueryRequest actual = mapper.readValue(json, ConfigQueryRequest.class);
assertEquals(DATA_ID, actual.getDataId());
assertEquals(GROUP, actual.getGroup());
assertEquals(TENANT, actual.getTenant());
assertEquals(TAG, actual.getTag());
assertEquals(Constants.Config.CONFIG_MODULE, actual.getModule());
} |
@Override
public List<KsqlPartitionLocation> locate(
final List<KsqlKey> keys,
final RoutingOptions routingOptions,
final RoutingFilterFactory routingFilterFactory,
final boolean isRangeScan
) {
if (isRangeScan && keys.isEmpty()) {
throw new IllegalStateException("Query is range scan but found no range keys.");
}
final ImmutableList.Builder<KsqlPartitionLocation> partitionLocations = ImmutableList.builder();
final Set<Integer> filterPartitions = routingOptions.getPartitions();
final Optional<Set<KsqlKey>> keySet = keys.isEmpty() ? Optional.empty() :
Optional.of(Sets.newHashSet(keys));
// Depending on whether this is a key-based lookup, determine which metadata method to use.
// If we don't have keys, find the metadata for all partitions since we'll run the query for
// all partitions of the state store rather than a particular one.
// For issue #7174: temporarily turn off per-key metadata lookup for a partition
// when there is more than one key.
final List<PartitionMetadata> metadata;
if (keys.size() == 1 && keys.get(0).getKey().size() == 1 && !isRangeScan) {
metadata = getMetadataForKeys(keys, filterPartitions);
} else {
metadata = getMetadataForAllPartitions(filterPartitions, keySet);
}
if (metadata.isEmpty()) {
final MaterializationException materializationException = new MaterializationException(
"Cannot determine which host contains the required partitions to serve the pull query. \n"
+ "The underlying persistent query may be restarting (e.g. as a result of "
+ "ALTER SYSTEM) view the status of your by issuing <DESCRIBE foo>.");
LOG.debug(materializationException.getMessage());
throw materializationException;
}
// Go through the metadata and group them by partition.
for (PartitionMetadata partitionMetadata : metadata) {
LOG.debug("Handling pull query for partition {} of state store {}.",
partitionMetadata.getPartition(), storeName);
final HostInfo activeHost = partitionMetadata.getActiveHost();
final Set<HostInfo> standByHosts = partitionMetadata.getStandbyHosts();
final int partition = partitionMetadata.getPartition();
final Optional<Set<KsqlKey>> partitionKeys = partitionMetadata.getKeys();
LOG.debug("Active host {}, standby {}, partition {}.",
activeHost, standByHosts, partition);
// For a given partition, find the ordered, filtered list of hosts to consider
final List<KsqlNode> filteredHosts = getFilteredHosts(routingOptions, routingFilterFactory,
activeHost, standByHosts, partition);
partitionLocations.add(new PartitionLocation(partitionKeys, partition, filteredHosts));
}
return partitionLocations.build();
} | @Test
public void shouldThrowIfMetadataNotAvailable() {
// Given:
getEmtpyMetadata();
// When:
final Exception e = assertThrows(
MaterializationException.class,
() -> locator.locate(ImmutableList.of(KEY), routingOptions, routingFilterFactoryActive, false)
);
// Then:
assertThat(e.getMessage(), containsString(
    "Cannot determine which host contains the required partitions to serve the pull query"));
} |
public static Expression convert(Predicate[] predicates) {
Expression expression = Expressions.alwaysTrue();
for (Predicate predicate : predicates) {
Expression converted = convert(predicate);
Preconditions.checkArgument(
converted != null, "Cannot convert Spark predicate to Iceberg expression: %s", predicate);
expression = Expressions.and(expression, converted);
}
return expression;
} | @Test
public void testUnsupportedUDFConvert() {
ScalarFunction<UTF8String> icebergVersionFunc =
(ScalarFunction<UTF8String>) new IcebergVersionFunction().bind(new StructType());
UserDefinedScalarFunc udf =
new UserDefinedScalarFunc(
icebergVersionFunc.name(),
icebergVersionFunc.canonicalName(),
new org.apache.spark.sql.connector.expressions.Expression[] {});
LiteralValue literalValue = new LiteralValue("1.3.0", DataTypes.StringType);
Predicate predicate = new Predicate("=", expressions(udf, literalValue));
Expression icebergExpr = SparkV2Filters.convert(predicate);
assertThat(icebergExpr).isNull();
} |
@Override
public ValidationTaskResult validateImpl(Map<String, String> optionMap) {
// Skip this test if NOSASL
if (mConf.get(PropertyKey.SECURITY_AUTHENTICATION_TYPE)
.equals(AuthType.NOSASL)) {
return new ValidationTaskResult(ValidationUtils.State.SKIPPED, getName(),
    "Impersonation validation is skipped for NOSASL", "");
}
ValidationTaskResult loadConfig = loadHdfsConfig();
if (loadConfig.getState() != ValidationUtils.State.OK) {
mAdvice.insert(0, "Validating the proxy user requires additional HDFS "
+ "configuration. ");
return loadConfig.setAdvice(mAdvice.toString());
}
// TODO(jiacheng): validate proxyuser.hosts for the cluster
// Validate proxyuser config for the current Alluxio user
try {
String alluxioUser = getCurrentUser();
return validateProxyUsers(alluxioUser);
} catch (UnauthenticatedException e) {
mMsg.append("Failed to authenticate in Alluxio: ");
mMsg.append(ExceptionUtils.asPlainText(e));
mAdvice.append("Please fix the authentication issue.");
return new ValidationTaskResult(ValidationUtils.State.FAILED, getName(),
mMsg.toString(), mAdvice.toString());
}
} | @Test
public void proxyUsersAndGroupsAllMissing() {
String userName = System.getProperty("user.name");
// Proxyuser configured for bob, not the running user
prepareHdfsConfFiles(ImmutableMap.of("hadoop.proxyuser.bob.users", "user1,user3",
"hadoop.proxyuser.bob.groups", "*"));
HdfsProxyUserValidationTask task =
new HdfsProxyUserValidationTask("hdfs://namenode:9000/alluxio", mConf);
ValidationTaskResult result = task.validateImpl(ImmutableMap.of());
assertEquals(ValidationUtils.State.FAILED, result.getState());
assertThat(result.getResult(), containsString(
"Alluxio is not able to perform impersonation."));
assertThat(result.getAdvice(), containsString(
String.format("Please enable Alluxio user %s to impersonate", userName)));
} |
public List<T> toList() {
List<T> result = new LinkedList<>();
for (int i = 0; i < size; i++) {
result.add((T) elements[i]);
}
return result;
} | @Test
void testOfferMoreThanSizeWithShuffle() {
List<Integer> testCase = new ArrayList<>(50);
for (int i = 0; i < 50; i++) {
testCase.add(i);
}
Collections.shuffle(testCase);
FixedSizePriorityQueue<Integer> queue = new FixedSizePriorityQueue<>(10, Comparator.<Integer>naturalOrder());
testCase.forEach(queue::offer);
List<Integer> list = queue.toList();
assertEquals(10, list.size());
for (int i = 49; i > 39; i--) {
assertTrue(list.contains(i));
}
} |
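
The structure under test keeps the N largest of a stream of offers. A sketch of the standard min-heap technique behind that behavior, built on java.util.PriorityQueue; the class name is hypothetical, not the tested implementation.

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.PriorityQueue;

public class TopNQueueDemo<T> {
    private final int capacity;
    private final PriorityQueue<T> heap; // min-heap: root is the weakest kept element

    TopNQueueDemo(int capacity, Comparator<T> comparator) {
        this.capacity = capacity;
        this.heap = new PriorityQueue<>(capacity, comparator);
    }

    // Keeps only the "largest" capacity elements: once full, a new element
    // replaces the heap root when it outranks it.
    void offer(T element) {
        if (heap.size() < capacity) {
            heap.add(element);
        } else if (heap.comparator().compare(element, heap.peek()) > 0) {
            heap.poll();
            heap.add(element);
        }
    }

    List<T> toList() {
        return new ArrayList<>(heap);
    }

    public static void main(String[] args) {
        TopNQueueDemo<Integer> q = new TopNQueueDemo<>(3, Comparator.naturalOrder());
        for (int i = 0; i < 50; i++) {
            q.offer(i);
        }
        System.out.println(q.toList()); // 47, 48, 49 in heap order
    }
}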
public static int[] toIntArray(Collection<Integer> collection) {
int[] collectionArray = new int[collection.size()];
int index = 0;
for (Integer item : collection) {
collectionArray[index++] = item;
}
return collectionArray;
} | @Test(expected = NullPointerException.class)
public void testToIntArray_whenNull_thenThrowNPE() {
toIntArray(null);
} |
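
An equivalent one-liner with streams; like the loop above, it throws NullPointerException for a null collection, which is exactly what the test pins down.

import java.util.Collection;
import java.util.List;

public class ToIntArrayDemo {
    // Stream-based equivalent of the manual copy loop (also NPEs on null elements).
    static int[] toIntArray(Collection<Integer> collection) {
        return collection.stream().mapToInt(Integer::intValue).toArray();
    }

    public static void main(String[] args) {
        System.out.println(toIntArray(List.of(1, 2, 3)).length); // 3
    }
}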
@Override
public void loadConfiguration(NacosLoggingProperties loggingProperties) {
String location = loggingProperties.getLocation();
configurator.setLoggingProperties(loggingProperties);
LoggerContext loggerContext = loadConfigurationOnStart(location);
if (hasNoListener(loggerContext)) {
addListener(loggerContext, location);
}
} | @Test
void testLoadConfigurationStop() {
LoggerContext loggerContext = (LoggerContext) LoggerFactory.getILoggerFactory();
loggerContext.putObject(CoreConstants.RECONFIGURE_ON_CHANGE_TASK, new ReconfigureOnChangeTask());
logbackNacosLoggingAdapter.loadConfiguration(loggingProperties);
loggerContext.stop();
verify(loggerContextListener).onReset(loggerContext);
verify(loggerContextListener, never()).onStop(loggerContext);
for (Logger each : loggerContext.getLoggerList()) {
if (!"com.alibaba.nacos.client.naming".equals(each.getName())) {
continue;
}
assertNotNull(each.getAppender("ASYNC-NAMING"));
}
assertTrue(loggerContext.getCopyOfListenerList().isEmpty());
} |
public void execute(){
logger.debug("[" + getOperationName() + "] Starting execution of paged operation. maximum time: " + maxTime + ", maximum pages: " + maxPages);
long startTime = System.currentTimeMillis();
long executionTime = 0;
int i = 0;
int exceptionsSwallowedCount = 0;
int operationsCompleted = 0;
Set<String> exceptionsSwallowedClasses = new HashSet<String>();
while (i< maxPages && executionTime < maxTime){
Collection<T> page = fetchPage();
if(page == null || page.size() == 0){
break;
}
for (T item : page) {
try {
doOperation(item);
operationsCompleted++;
} catch (Exception e){
if(swallowExceptions){
exceptionsSwallowedCount++;
exceptionsSwallowedClasses.add(e.getClass().getName());
logger.debug("Swallowing exception " + e.getMessage(), e);
} else {
logger.debug("Rethrowing exception " + e.getMessage());
throw e;
}
}
}
i++;
executionTime = System.currentTimeMillis() - startTime;
}
finalReport(operationsCompleted, exceptionsSwallowedCount, exceptionsSwallowedClasses);
} | @Test(timeout = 1000L)
public void execute_emptypage(){
CountingPageOperation op = new EmptyPageCountingPageOperation(Integer.MAX_VALUE, Long.MAX_VALUE);
op.execute();
assertEquals(0L, op.getCounter());
} |
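
Stripped to its control flow, the executor is a loop with three exit conditions: the page budget, the time budget, and an empty page. A minimal sketch; PageSource is a hypothetical stand-in for fetchPage.

import java.util.List;

public class PagedOperationDemo {
    interface PageSource {
        List<String> fetchPage(int page);
    }

    // Processes pages until a page is empty, the page limit is hit, or the
    // time budget runs out -- the same exits as the focal execute(), minus
    // the exception-swallowing bookkeeping.
    static int execute(PageSource source, int maxPages, long maxTimeMillis) {
        long start = System.currentTimeMillis();
        int completed = 0;
        for (int page = 0; page < maxPages
                && System.currentTimeMillis() - start < maxTimeMillis; page++) {
            List<String> items = source.fetchPage(page);
            if (items == null || items.isEmpty()) {
                break;
            }
            for (String item : items) {
                completed++; // doOperation(item) would go here
            }
        }
        return completed;
    }

    public static void main(String[] args) {
        PageSource twoPages = page -> page < 2 ? List.of("a", "b") : List.of();
        System.out.println(execute(twoPages, Integer.MAX_VALUE, 1000L)); // 4
    }
}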
@Override
public void execute(ComputationStep.Context context) {
new DepthTraversalTypeAwareCrawler(
new TypeAwareVisitorAdapter(CrawlerDepthLimit.PROJECT, PRE_ORDER) {
@Override
public void visitProject(Component project) {
executeForProject(project);
}
}).visit(treeRootHolder.getRoot());
} | @Test
void mutableQualityGateStatusHolder_is_not_populated_if_there_is_no_qualitygate() {
qualityGateHolder.setQualityGate(null);
underTest.execute(new TestComputationStepContext());
assertThatThrownBy(() -> qualityGateStatusHolder.getStatus())
.isInstanceOf(IllegalStateException.class)
.hasMessage("Quality gate status has not been set yet");
} |
public HollowHashIndexResult findMatches(Object... query) {
if (hashStateVolatile == null) {
throw new IllegalStateException(this + " wasn't initialized");
}
int hashCode = 0;
for(int i=0;i<query.length;i++) {
if(query[i] == null)
throw new IllegalArgumentException("querying by null unsupported; i=" + i);
hashCode ^= HashCodes.hashInt(keyHashCode(query[i], i));
}
HollowHashIndexResult result;
HollowHashIndexState hashState;
do {
result = null;
hashState = hashStateVolatile;
long bucket = hashCode & hashState.getMatchHashMask();
long hashBucketBit = bucket * hashState.getBitsPerMatchHashEntry();
boolean bucketIsEmpty = hashState.getMatchHashTable().getElementValue(hashBucketBit, hashState.getBitsPerTraverserField()[0]) == 0;
while (!bucketIsEmpty) {
if (matchIsEqual(hashState.getMatchHashTable(), hashBucketBit, query)) {
int selectSize = (int) hashState.getMatchHashTable().getElementValue(hashBucketBit + hashState.getBitsPerMatchHashKey(), hashState.getBitsPerSelectTableSize());
long selectBucketPointer = hashState.getMatchHashTable().getElementValue(hashBucketBit + hashState.getBitsPerMatchHashKey() + hashState.getBitsPerSelectTableSize(), hashState.getBitsPerSelectTablePointer());
result = new HollowHashIndexResult(hashState, selectBucketPointer, selectSize);
break;
}
bucket = (bucket + 1) & hashState.getMatchHashMask();
hashBucketBit = bucket * hashState.getBitsPerMatchHashEntry();
bucketIsEmpty = hashState.getMatchHashTable().getElementValue(hashBucketBit, hashState.getBitsPerTraverserField()[0]) == 0;
}
} while (hashState != hashStateVolatile);
return result;
} | @Test
public void testIndexingStringTypeFieldWithNullValues() throws Exception {
mapper.add(new TypeB(null));
mapper.add(new TypeB("onez:"));
roundTripSnapshot();
HollowHashIndex index = new HollowHashIndex(readStateEngine, "TypeB", "", "b1.value");
Assert.assertNull(index.findMatches("one:"));
assertIteratorContainsAll(index.findMatches("onez:").iterator(), 1);
} |
public void print(final ByteBuffer encodedMessage, final StringBuilder output)
{
final UnsafeBuffer buffer = new UnsafeBuffer(encodedMessage);
print(output, buffer, 0);
} | @Test
void exampleMessagePrintedAsJson() throws Exception
{
final ByteBuffer encodedSchemaBuffer = ByteBuffer.allocate(SCHEMA_BUFFER_CAPACITY);
encodeSchema(encodedSchemaBuffer);
final ByteBuffer encodedMsgBuffer = ByteBuffer.allocate(MSG_BUFFER_CAPACITY);
encodeTestMessage(encodedMsgBuffer);
encodedSchemaBuffer.flip();
final Ir ir = decodeIr(encodedSchemaBuffer);
final JsonPrinter printer = new JsonPrinter(ir);
final String result = printer.print(encodedMsgBuffer);
assertEquals(
"{\n" +
" \"serialNumber\": 1234,\n" +
" \"modelYear\": 2013,\n" +
" \"available\": \"T\",\n" +
" \"code\": \"A\",\n" +
" \"someNumbers\": [0, 1, 2, 3, 4],\n" +
" \"vehicleCode\": \"ab\\\"def\",\n" +
" \"extras\": { \"sunRoof\": false, \"sportsPack\": true, \"cruiseControl\": true },\n" +
" \"engine\": \n" +
" {\n" +
" \"capacity\": 2000,\n" +
" \"numCylinders\": 4,\n" +
" \"maxRpm\": 9000,\n" +
" \"manufacturerCode\": \"123\",\n" +
" \"fuel\": \"Petrol\"\n" +
" },\n" +
" \"uuid\": [7, 3],\n" +
" \"cupHolderCount\": 5,\n" +
" \"fuelFigures\": [\n" +
" {\n" +
" \"speed\": 30,\n" +
" \"mpg\": 35.9\n" +
" },\n" +
" {\n" +
" \"speed\": 55,\n" +
" \"mpg\": 49.0\n" +
" },\n" +
" {\n" +
" \"speed\": 75,\n" +
" \"mpg\": 40.0\n" +
" }],\n" +
" \"performanceFigures\": [\n" +
" {\n" +
" \"octaneRating\": 95,\n" +
" \"acceleration\": [\n" +
" {\n" +
" \"mph\": 30,\n" +
" \"seconds\": 4.0\n" +
" },\n" +
" {\n" +
" \"mph\": 60,\n" +
" \"seconds\": 7.5\n" +
" },\n" +
" {\n" +
" \"mph\": 100,\n" +
" \"seconds\": 12.2\n" +
" }]\n" +
" },\n" +
" {\n" +
" \"octaneRating\": 99,\n" +
" \"acceleration\": [\n" +
" {\n" +
" \"mph\": 30,\n" +
" \"seconds\": 3.8\n" +
" },\n" +
" {\n" +
" \"mph\": 60,\n" +
" \"seconds\": 7.1\n" +
" },\n" +
" {\n" +
" \"mph\": 100,\n" +
" \"seconds\": 11.8\n" +
" }]\n" +
" }],\n" +
" \"manufacturer\": \"Honda\",\n" +
" \"model\": \"Civic VTi\",\n" +
" \"activationCode\": \"315\\\\8\"\n" +
"}",
result);
} |
public Statement buildStatement(final ParserRuleContext parseTree) {
return build(Optional.of(getSources(parseTree)), parseTree);
} | @Test
public void shouldBuildAssertSchemaWithSubject() {
// Given:
final SingleStatementContext stmt
= givenQuery("ASSERT SCHEMA SUBJECT X;");
// When:
final AssertSchema assertSchema = (AssertSchema) builder.buildStatement(stmt);
// Then:
assertThat(assertSchema.getSubject(), is(Optional.of("X")));
assertThat(assertSchema.getId(), is(Optional.empty()));
assertThat(assertSchema.getTimeout(), is(Optional.empty()));
assertThat(assertSchema.checkExists(), is(true));
} |
@Override
public void run() {
JobConfig jobConfig = null;
Serializable taskArgs = null;
try {
jobConfig = (JobConfig) SerializationUtils.deserialize(
mRunTaskCommand.getJobConfig().toByteArray());
if (mRunTaskCommand.hasTaskArgs()) {
taskArgs = SerializationUtils.deserialize(mRunTaskCommand.getTaskArgs().toByteArray());
}
} catch (IOException | ClassNotFoundException e) {
  fail(e, jobConfig, null);
  return;
}
PlanDefinition<JobConfig, Serializable, Serializable> definition;
try {
definition = PlanDefinitionRegistry.INSTANCE.getJobDefinition(jobConfig);
} catch (JobDoesNotExistException e) {
LOG.error("The job definition for config {} does not exist.", jobConfig.getName());
fail(e, jobConfig, taskArgs);
return;
}
mTaskExecutorManager.notifyTaskRunning(mJobId, mTaskId);
Serializable result;
try {
result = definition.runTask(jobConfig, taskArgs, mContext);
} catch (InterruptedException | CancelledException e) {
// Cleanup around the interruption should already have been handled by a different thread
Thread.currentThread().interrupt();
return;
} catch (Throwable t) {
fail(t, jobConfig, taskArgs);
return;
}
mTaskExecutorManager.notifyTaskCompletion(mJobId, mTaskId, result);
} | @Test
public void runFailure() throws Exception {
long jobId = 1;
long taskId = 2;
JobConfig jobConfig = new SleepJobConfig(10);
Serializable taskArgs = Lists.newArrayList(1);
RunTaskContext context = mock(RunTaskContext.class);
@SuppressWarnings("unchecked")
PlanDefinition<JobConfig, Serializable, Serializable> planDefinition =
mock(PlanDefinition.class);
when(mRegistry.getJobDefinition(eq(jobConfig))).thenReturn(planDefinition);
when(planDefinition.runTask(eq(jobConfig), any(Serializable.class), any(RunTaskContext.class)))
.thenThrow(new UnsupportedOperationException("failure"));
RunTaskCommand command = RunTaskCommand.newBuilder()
.setJobConfig(ByteString.copyFrom(SerializationUtils.serialize(jobConfig)))
.setTaskArgs(ByteString.copyFrom(SerializationUtils.serialize(taskArgs))).build();
TaskExecutor executor =
new TaskExecutor(jobId, taskId, command, context, mTaskExecutorManager);
executor.run();
verify(mTaskExecutorManager).notifyTaskFailure(eq(jobId), eq(taskId), any());
} |
public String route(final ReadwriteSplittingDataSourceGroupRule rule) {
return rule.getLoadBalancer().getTargetName(rule.getName(), getFilteredReadDataSources(rule));
} | @Test
void assertRouteWithFilter() {
rule.disableDataSource("read_ds_0");
assertThat(new StandardReadwriteSplittingDataSourceRouter().route(rule), is("read_ds_1"));
} |
@Override
public List<MetricFamilySamples> collect() {
List<MetricFamilySamples> result = new ArrayList<>();
result.addAll(clientTotal.collect());
result.addAll(clientFail.collect());
result.addAll(serverTotal.collect());
result.addAll(serverFail.collect());
result.addAll(requestSize.collect());
result.addAll(responseSize.collect());
result.addAll(providerCounter.collect());
result.addAll(consumerCounter.collect());
ServerConfig serverConfig = serverConfigReference.get();
ThreadPoolExecutor threadPoolExecutor = executorReference.get();
if (serverConfig != null) {
threadPoolConfigCore.labels(commonLabelValues)
.set(serverConfig.getCoreThreads());
result.addAll(threadPoolConfigCore.collect());
threadPoolConfigMax.labels(commonLabelValues)
.set(serverConfig.getMaxThreads());
result.addAll(threadPoolConfigMax.collect());
threadPoolConfigQueue.labels(commonLabelValues)
.set(serverConfig.getQueues());
result.addAll(threadPoolConfigQueue.collect());
}
if (threadPoolExecutor != null) {
threadPoolActive.labels(commonLabelValues)
.set(threadPoolExecutor.getActiveCount());
result.addAll(threadPoolActive.collect());
threadPoolIdle.labels(commonLabelValues)
.set(threadPoolExecutor.getPoolSize() - threadPoolExecutor.getActiveCount());
result.addAll(threadPoolIdle.collect());
threadPoolQueue.labels(commonLabelValues)
.set(threadPoolExecutor.getQueue().size());
result.addAll(threadPoolQueue.collect());
}
return result;
} | @Test
public void testPrometheusMetricsCollect2() throws Exception {
MetricsBuilder metricsBuilder = new MetricsBuilder();
// set buckets
metricsBuilder.getClientTotalBuilder()
.exponentialBuckets(1, 2, 15);
metricsBuilder.getClientFailBuilder()
.linearBuckets(0, 5, 15);
Map<String, String> testLabels = new HashMap<>();
testLabels.put("from", "test");
try (SofaRpcMetricsCollector collector = new SofaRpcMetricsCollector(testLabels, metricsBuilder)) {
CollectorRegistry registry = new CollectorRegistry();
collector.register(registry);
SofaRequest request = buildRequest();
SofaResponse successResponse = buildSuccessResponse();
SofaResponse failResponse = buildFailResponse();
RpcInternalContext.getContext()
.setAttachment(RpcConstants.INTERNAL_KEY_CLIENT_ELAPSE, 100)
.setAttachment(RpcConstants.INTERNAL_KEY_IMPL_ELAPSE, 10)
.setAttachment(RpcConstants.INTERNAL_KEY_REQ_SIZE, 3)
.setAttachment(RpcConstants.INTERNAL_KEY_RESP_SIZE, 4);
List<Collector.MetricFamilySamples> samplesList;
EventBus.post(new ClientEndInvokeEvent(request, successResponse, null));
EventBus.post(new ClientEndInvokeEvent(request, failResponse, null));
EventBus.post(new ProviderPubEvent(new ProviderConfig<>()));
EventBus.post(new ConsumerSubEvent(new ConsumerConfig<>()));
ServerConfig serverConfig = new ServerConfig();
EventBus.post(new ServerStartedEvent(serverConfig, new ThreadPoolExecutor(1, 1,
0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>())));
samplesList = collector.collect();
Assert.assertEquals(samplesList.size(), 14);
}
} |
public static boolean pathIsFromNamespace(String path) {
return path.startsWith(BASE_POLICIES_PATH + "/")
&& path.substring(BASE_POLICIES_PATH.length() + 1).contains("/");
} | @Test
public void test_pathIsFromNamespace() {
assertFalse(NamespaceResources.pathIsFromNamespace("/admin/clusters"));
assertFalse(NamespaceResources.pathIsFromNamespace("/admin/policies"));
assertFalse(NamespaceResources.pathIsFromNamespace("/admin/policies/my-tenant"));
assertTrue(NamespaceResources.pathIsFromNamespace("/admin/policies/my-tenant/my-ns"));
} |
public Authentication getAuthentication(String token) throws AccessException {
if (!tokenMap.containsKey(token)) {
return jwtTokenManager.getAuthentication(token);
}
return tokenMap.get(token).getAuthentication();
} | @Test
void testGetAuthentication() throws AccessException {
assertNotNull(cachedJwtTokenManager.getAuthentication("token"));
} |
public static <K, E> Collector<E, ImmutableSetMultimap.Builder<K, E>, ImmutableSetMultimap<K, E>> unorderedIndex(Function<? super E, K> keyFunction) {
return unorderedIndex(keyFunction, Function.identity());
} | @Test
public void unorderedIndex_returns_SetMultimap() {
SetMultimap<Integer, MyObj> multimap = LIST.stream().collect(unorderedIndex(MyObj::getId));
assertThat(multimap.size()).isEqualTo(3);
Map<Integer, Collection<MyObj>> map = multimap.asMap();
assertThat(map.get(1)).containsOnly(MY_OBJ_1_A);
assertThat(map.get(2)).containsOnly(MY_OBJ_2_B);
assertThat(map.get(3)).containsOnly(MY_OBJ_3_C);
} |
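
Without Guava, the same unordered index can be approximated with plain JDK collectors. A sketch, assuming a recent JDK (records need Java 16+):

import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

public class UnorderedIndexDemo {
    record MyObj(int id, String name) {}

    public static void main(String[] args) {
        List<MyObj> list = List.of(new MyObj(1, "A"), new MyObj(2, "B"), new MyObj(3, "C"));
        // groupingBy + toSet yields Map<K, Set<E>> -- the shape of a SetMultimap.
        Map<Integer, Set<MyObj>> index = list.stream()
                .collect(Collectors.groupingBy(MyObj::id, Collectors.toSet()));
        System.out.println(index.get(1)); // [MyObj[id=1, name=A]]
    }
}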
public Long getValue(final List<Object> params) {
return getValueFromExpression(expressionSegment, params);
} | @Test
void assertGetValueWithMixed() {
ExpressionRowNumberValueSegment actual = new ExpressionRowNumberValueSegment(0, 0, new BinaryOperationExpression(0, 0, new LiteralExpressionSegment(0, 0, 1),
new BinaryOperationExpression(0, 0, new ParameterMarkerExpressionSegment(0, 0, 0), new LiteralExpressionSegment(0, 0, 2), "+", "? + 2"), "+", "1 + ? + 2"), false);
assertThat(actual.getValue(Collections.singletonList(1)), is(4L));
} |
@Override
public READ3Response read(XDR xdr, RpcInfo info) {
return read(xdr, getSecurityHandler(info), info.remoteAddress());
} | @Test(timeout = 60000)
public void testRead() throws Exception {
HdfsFileStatus status = nn.getRpcServer().getFileInfo("/tmp/bar");
long dirId = status.getFileId();
int namenodeId = Nfs3Utils.getNamenodeId(config);
FileHandle handle = new FileHandle(dirId, namenodeId);
READ3Request readReq = new READ3Request(handle, 0, 5);
XDR xdr_req = new XDR();
readReq.serialize(xdr_req);
// Attempt by an unprivileged user should fail.
READ3Response response1 = nfsd.read(xdr_req.asReadOnlyWrap(),
securityHandlerUnpriviledged,
new InetSocketAddress("localhost", 1234));
assertEquals("Incorrect return code:", Nfs3Status.NFS3ERR_ACCES,
response1.getStatus());
// Attempt by a privileged user should pass.
READ3Response response2 = nfsd.read(xdr_req.asReadOnlyWrap(),
securityHandler, new InetSocketAddress("localhost", 1234));
assertEquals("Incorrect return code:", Nfs3Status.NFS3_OK,
response2.getStatus());
} |
@Nonnull
public static String uppercaseFirstChar(@Nonnull String name) {
int len = name.length();
if (len == 1)
return name.toUpperCase();
else if (len > 1)
return name.substring(0, 1).toUpperCase() + name.substring(1);
else
return name;
} | @Test
void testUppercaseFirstChar() {
assertEquals("", StringUtil.uppercaseFirstChar(""));
assertEquals("F", StringUtil.uppercaseFirstChar("f"));
assertEquals("Foo", StringUtil.uppercaseFirstChar("foo"));
assertEquals("FOO", StringUtil.uppercaseFirstChar("fOO"));
} |
public void schedule(BeanContainer container, String id, String cron, String interval, String zoneId, String className, String methodName, List<JobParameter> parameterList) {
JobScheduler scheduler = container.beanInstance(JobScheduler.class);
String jobId = getId(id);
String optionalCronExpression = getCronExpression(cron);
String optionalInterval = getInterval(interval);
if (StringUtils.isNullOrEmpty(cron) && StringUtils.isNullOrEmpty(optionalInterval))
throw new IllegalArgumentException("Either cron or interval attribute is required.");
if (StringUtils.isNotNullOrEmpty(cron) && StringUtils.isNotNullOrEmpty(optionalInterval))
throw new IllegalArgumentException("Both cron and interval attribute provided. Only one is allowed.");
if (Recurring.RECURRING_JOB_DISABLED.equals(optionalCronExpression) || Recurring.RECURRING_JOB_DISABLED.equals(optionalInterval)) {
if (isNullOrEmpty(jobId)) {
LOGGER.warn("You are trying to disable a recurring job using placeholders but did not define an id.");
} else {
scheduler.deleteRecurringJob(jobId);
}
} else {
JobDetails jobDetails = new JobDetails(className, null, methodName, parameterList);
jobDetails.setCacheable(true);
if (isNotNullOrEmpty(optionalCronExpression)) {
scheduler.scheduleRecurrently(id, jobDetails, CronExpression.create(optionalCronExpression), getZoneId(zoneId));
} else {
scheduler.scheduleRecurrently(id, jobDetails, new Interval(optionalInterval), getZoneId(zoneId));
}
}
} | @Test
void beansWithMethodsAnnotatedWithRecurringAnnotationCronAndIntervalWillThrowException() {
final String id = "my-job-id";
final JobDetails jobDetails = jobDetails().build();
final String cron = "*/15 * * * *";
final String interval = "PT10M";
final String zoneId = null;
assertThatThrownBy(() -> jobRunrRecurringJobRecorder.schedule(beanContainer, id, cron, interval, zoneId, jobDetails.getClassName(), jobDetails.getMethodName(), jobDetails.getJobParameters())).isInstanceOf(IllegalArgumentException.class);
} |
public UiTopoOverlayFactory topoOverlayFactory() {
return topoOverlayFactory;
} | @Test
public void topoOverlayFactory() {
viewList = ImmutableList.of(HIDDEN_VIEW);
ext = new UiExtension.Builder(cl, viewList)
.topoOverlayFactory(TO_FACTORY)
.build();
assertNull("unexpected message handler factory", ext.messageHandlerFactory());
assertEquals("wrong topo overlay factory", TO_FACTORY,
ext.topoOverlayFactory());
} |
public static String getEffectivePath(String path, int port) {
return path.replace("_PORT", String.valueOf(port));
} | @Test(timeout=180000)
public void testSocketPathSetGet() throws IOException {
Assert.assertEquals("/var/run/hdfs/sock.100",
DomainSocket.getEffectivePath("/var/run/hdfs/sock._PORT", 100));
} |
@Override
public Class<? extends AvgPercentileFunctionBuilder> builder() {
return AvgPercentileFunctionBuilder.class;
} | @Test
public void testBuilder() throws IllegalAccessException, InstantiationException {
PercentileFunctionInst inst = new PercentileFunctionInst();
inst.accept(
MeterEntity.newService("service-test", Layer.GENERAL),
new PercentileArgument(
new BucketedValues(
BUCKETS,
new long[] {
10,
20,
30,
40
}
),
RANKS
)
);
inst.calculate();
final StorageBuilder storageBuilder = inst.builder().newInstance();
// Simulate the storage layer do, convert the datatable to string.
final HashMapConverter.ToStorage toStorage = new HashMapConverter.ToStorage();
storageBuilder.entity2Storage(inst, toStorage);
final Map<String, Object> map = toStorage.obtain();
map.put(
AvgHistogramPercentileFunction.COUNT,
((DataTable) map.get(AvgHistogramPercentileFunction.COUNT)).toStorageData()
);
map.put(
AvgHistogramPercentileFunction.SUMMATION,
((DataTable) map.get(AvgHistogramPercentileFunction.SUMMATION)).toStorageData()
);
map.put(
AvgHistogramPercentileFunction.DATASET,
((DataTable) map.get(AvgHistogramPercentileFunction.DATASET)).toStorageData()
);
map.put(
AvgHistogramPercentileFunction.VALUE,
((DataTable) map.get(AvgHistogramPercentileFunction.VALUE)).toStorageData()
);
map.put(
AvgHistogramPercentileFunction.RANKS,
((IntList) map.get(AvgHistogramPercentileFunction.RANKS)).toStorageData()
);
final AvgHistogramPercentileFunction inst2 = (AvgHistogramPercentileFunction) storageBuilder.storage2Entity(
new HashMapConverter.ToEntity(map));
assertEquals(inst, inst2);
// HistogramFunction equal doesn't include dataset.
assertEquals(inst.getDataset(), inst2.getDataset());
assertEquals(inst.getPercentileValues(), inst2.getPercentileValues());
assertEquals(inst.getRanks(), inst2.getRanks());
} |
protected FEEL newFeelEvaluator(AtomicReference<FEELEvent> errorHolder) {
// cleanup existing error
errorHolder.set(null);
FEEL feel = FEELBuilder.builder().withProfiles(singletonList(new ExtendedDMNProfile())).build();
feel.addListener(event -> {
FEELEvent feelEvent = errorHolder.get();
if (!(feelEvent instanceof SyntaxErrorEvent) &&
ERROR.equals(event.getSeverity())) {
errorHolder.set(event);
}
});
return feel;
} | @Test
public void listener_sintaxErrorAsFirst() {
FEELEvent syntaxErrorEvent = new SyntaxErrorEvent(Severity.ERROR, "test", null, 0, 0, null);
FEELEvent genericError = new FEELEventBase(Severity.ERROR, "error", null);
AtomicReference<FEELEvent> error = new AtomicReference<>();
FEEL feel = expressionEvaluator.newFeelEvaluator(error);
applyEvents(List.of(syntaxErrorEvent, genericError), feel);
assertThat(error.get()).isEqualTo(syntaxErrorEvent);
} |
public boolean checkFeExistByRPCPort(String host, int rpcPort) {
try {
tryLock(true);
return frontends
.values()
.stream()
.anyMatch(fe -> fe.getHost().equals(host) && fe.getRpcPort() == rpcPort);
} finally {
unlock();
}
} | @Test
public void testCheckFeExistByRpcPort() {
NodeMgr nodeMgr = new NodeMgr();
Frontend fe = new Frontend(FrontendNodeType.FOLLOWER, "node1", "10.0.0.3", 9010);
fe.handleHbResponse(new FrontendHbResponse("node1", 9030, 9020, 1,
System.currentTimeMillis(), System.currentTimeMillis(), "v1"), true);
nodeMgr.replayAddFrontend(fe);
Assert.assertTrue(nodeMgr.checkFeExistByRPCPort("10.0.0.3", 9020));
Assert.assertFalse(nodeMgr.checkFeExistByRPCPort("10.0.0.3", 9030));
Assert.assertFalse(nodeMgr.checkFeExistByRPCPort("10.0.0.2", 9020));
} |
@Override
public void doFilter(ServletRequest req, ServletResponse resp, FilterChain chain) throws IOException, ServletException {
String path = ((HttpServletRequest) req).getRequestURI().replaceFirst(((HttpServletRequest) req).getContextPath(), "");
MAX_AGE_BY_PATH.entrySet().stream()
.filter(m -> path.startsWith(m.getKey()))
.map(Map.Entry::getValue)
.findFirst()
.ifPresent(maxAge -> ((HttpServletResponse) resp).addHeader(CACHE_CONTROL_HEADER, format(MAX_AGE_TEMPLATE, maxAge)));
chain.doFilter(req, resp);
} | @Test
public void max_age_is_set_to_five_minutes_on_images() throws Exception {
HttpServletRequest request = newRequest("/images/logo.png");
underTest.doFilter(request, response, chain);
verify(response).addHeader("Cache-Control", format("max-age=%s", 300));
} |
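
The filter's core is a first-match prefix lookup over an ordered map. A sketch of just that lookup, detached from the servlet API; the "/js/" entry and its one-year age are assumptions, while the five-minute image budget matches the test.

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Optional;

public class CacheControlLookupDemo {
    // Insertion order matters: as in the focal filter, the first matching
    // prefix wins, so more specific paths should be registered first.
    static final Map<String, Integer> MAX_AGE_BY_PATH = new LinkedHashMap<>();
    static {
        MAX_AGE_BY_PATH.put("/js/", 31536000);  // hypothetical one-year budget
        MAX_AGE_BY_PATH.put("/images/", 300);   // five minutes, as in the test
    }

    static Optional<String> cacheControlHeader(String path) {
        return MAX_AGE_BY_PATH.entrySet().stream()
                .filter(e -> path.startsWith(e.getKey()))
                .map(e -> String.format("max-age=%s", e.getValue()))
                .findFirst();
    }

    public static void main(String[] args) {
        System.out.println(cacheControlHeader("/images/logo.png")); // Optional[max-age=300]
        System.out.println(cacheControlHeader("/api/issues"));      // Optional.empty
    }
}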
public void convert(FSConfigToCSConfigConverterParams params)
throws Exception {
validateParams(params);
this.clusterResource = getClusterResource(params);
this.convertPlacementRules = params.isConvertPlacementRules();
this.outputDirectory = params.getOutputDirectory();
this.rulesToFile = params.isPlacementRulesToFile();
this.usePercentages = params.isUsePercentages();
this.preemptionMode = params.getPreemptionMode();
prepareOutputFiles(params.isConsole());
loadConversionRules(params.getConversionRulesConfig());
Configuration inputYarnSiteConfig = getInputYarnSiteConfig(params);
handleFairSchedulerConfig(params, inputYarnSiteConfig);
convert(inputYarnSiteConfig);
} | @Test
public void testInvalidFairSchedulerXml() throws Exception {
FSConfigToCSConfigConverterParams params = createDefaultParamsBuilder()
.withClusterResource(CLUSTER_RESOURCE_STRING)
.withFairSchedulerXmlConfig(FAIR_SCHEDULER_XML_INVALID)
.build();
expectedException.expect(RuntimeException.class);
converter.convert(params);
} |
public static boolean isEmpty(CharSequence str) {
return str == null || str.length() == 0;
} | @Test
public void assertIsEmpty() {
String string = "";
Assert.assertTrue(StringUtil.isEmpty(string));
} |
public ServiceResponse getServiceConfigGeneration(Application application, String hostAndPortToCheck, Duration timeout) {
Long wantedGeneration = application.getApplicationGeneration();
try (CloseableHttpAsyncClient client = createHttpClient()) {
client.start();
if ( ! hostInApplication(application, hostAndPortToCheck))
return new ServiceResponse(ServiceResponse.Status.hostNotFound, wantedGeneration);
long currentGeneration = getServiceGeneration(client, URI.create("http://" + hostAndPortToCheck), timeout).get();
boolean converged = currentGeneration >= wantedGeneration;
return new ServiceResponse(ServiceResponse.Status.ok, wantedGeneration, currentGeneration, converged);
} catch (InterruptedException | ExecutionException | CancellationException e) { // e.g. if we cannot connect to the service to find generation
return new ServiceResponse(ServiceResponse.Status.notFound, wantedGeneration, e.getMessage());
} catch (Exception e) {
return new ServiceResponse(ServiceResponse.Status.error, wantedGeneration, e.getMessage());
}
} | @Test
public void service_convergence() {
{ // Known service
wireMock.stubFor(get(urlEqualTo("/state/v1/config")).willReturn(okJson("{\"config\":{\"generation\":3}}")));
ServiceResponse response = checker.getServiceConfigGeneration(application, hostAndPort(this.service), clientTimeout);
assertEquals(3, response.wantedGeneration.longValue());
assertEquals(3, response.currentGeneration.longValue());
assertTrue(response.converged);
assertEquals(ServiceResponse.Status.ok, response.status);
}
{ // Missing service
ServiceResponse response = checker.getServiceConfigGeneration(application, "notPresent:1337", clientTimeout);
assertEquals(3, response.wantedGeneration.longValue());
assertEquals(ServiceResponse.Status.hostNotFound, response.status);
}
} |
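A hedged sketch of consuming the response; checker, application, and clientTimeout are the test's fixtures, and the enum constants follow the statuses used in the method above.

ServiceResponse response = checker.getServiceConfigGeneration(application, "host:4080", clientTimeout);
switch (response.status) {
    case ok:           break; // reachable; converged iff currentGeneration >= wantedGeneration
    case hostNotFound: break; // host:port is not part of the application
    case notFound:     break; // service unreachable (interrupted/cancelled/failed lookup)
    case error:        break; // any other failure
}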
public Date parseString(String dateString) throws ParseException {
if (dateString == null || dateString.isEmpty()) {
return null;
}
Matcher xep82WoMillisMatcher = xep80DateTimeWoMillisPattern.matcher(dateString);
Matcher xep82Matcher = xep80DateTimePattern.matcher(dateString);
if (xep82WoMillisMatcher.matches() || xep82Matcher.matches()) {
String rfc822Date;
// Convert the ISO 8601 time zone string to a RFC822 compatible format
// since SimpleDateFormat supports ISO8601 only with Java7 or higher
if (dateString.charAt(dateString.length() - 1) == 'Z') {
rfc822Date = dateString.replace("Z", "+0000");
} else {
                // If the time zone wasn't specified with 'Z', then it's in
                // ISO 8601 format (i.e. '(+|-)HH:mm').
                // RFC 822 needs a similar format, just without the colon
                // (i.e. '(+|-)HHmm'), so remove it.
int lastColon = dateString.lastIndexOf(':');
rfc822Date = dateString.substring(0, lastColon) + dateString.substring(lastColon + 1);
}
if (xep82WoMillisMatcher.matches()) {
synchronized (dateTimeFormatWoMillies) {
return dateTimeFormatWoMillies.parse(rfc822Date);
}
} else {
// OF-898: Replace any number of millisecond-characters with at most three of them.
rfc822Date = rfc822Date.replaceAll("(\\.[0-9]{3})[0-9]*", "$1");
synchronized (dateTimeFormat) {
return dateTimeFormat.parse(rfc822Date);
}
}
}
throw new ParseException("Date String could not be parsed: \"" + dateString + "\"", 0);
} | @Test
public void testFormatThreeSecondFractions() throws Exception
{
// Setup fixture
final String testValue = "2015-03-19T22:54:15.841+00:00"; // Thu, 19 Mar 2015 22:54:15.841 GMT
// Execute system under test
final Date result = xmppDateTimeFormat.parseString(testValue);
// Verify results
long expected = 1426805655841L; // Epoch value of Thu, 19 Mar 2015 22:54:15.841 GMT
assertEquals( expected, result.getTime() );
} |
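Both XEP-0082 shapes the regex branches accept, in a hedged sketch; XMPPDateTimeFormat as the owning class is an assumption inferred from the test's xmppDateTimeFormat fixture.

XMPPDateTimeFormat fmt = new XMPPDateTimeFormat();
Date a = fmt.parseString("2015-03-19T22:54:15Z");            // no millis; 'Z' rewritten to +0000
Date b = fmt.parseString("2015-03-19T22:54:15.841+00:00");   // millis; offset colon removed
Date c = fmt.parseString("2015-03-19T22:54:15.84123+00:00"); // OF-898: fraction truncated to 3 digits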
public static FEEL_1_1Parser parse(FEELEventListenersManager eventsManager, String source, Map<String, Type> inputVariableTypes, Map<String, Object> inputVariables, Collection<FEELFunction> additionalFunctions, List<FEELProfile> profiles, FEELTypeRegistry typeRegistry) {
CharStream input = CharStreams.fromString(source);
FEEL_1_1Lexer lexer = new FEEL_1_1Lexer( input );
CommonTokenStream tokens = new CommonTokenStream( lexer );
FEEL_1_1Parser parser = new FEEL_1_1Parser( tokens );
ParserHelper parserHelper = new ParserHelper(eventsManager);
additionalFunctions.forEach(f -> parserHelper.getSymbolTable().getBuiltInScope().define(f.getSymbol()));
parser.setHelper(parserHelper);
parser.setErrorHandler( new FEELErrorHandler() );
parser.removeErrorListeners(); // removes the error listener that prints to the console
parser.addErrorListener( new FEELParserErrorListener( eventsManager ) );
// pre-loads the parser with symbols
defineVariables( inputVariableTypes, inputVariables, parser );
if (typeRegistry != null) {
parserHelper.setTypeRegistry(typeRegistry);
}
return parser;
} | @Disabled("dropped since DMNv1.2")
@Test
void functionDecisionTableInvocation() {
String inputExpression = "decision table( "
+ " outputs: \"Applicant Risk Rating\","
+ " input expression list: [\"Applicant Age\", \"Medical History\"],"
+ " rule list: ["
+ " [ >60 , \"good\" , \"Medium\" ],"
+ " [ >60 , \"bad\" , \"High\" ],"
+ " [ [25..60] , - , \"Medium\" ]," // also another problem is the - operator cannot be inside of expression.
+ " [ <25 , \"good\" , \"Low\" ],"
+ " [ <25 , \"bad\" , \"Medium\" ] ],"
+ " hit policy: \"Unique\" )";
// need to call parse passing in the input variables
BaseNode functionBase = parse( inputExpression );
assertThat( functionBase).isInstanceOf(FunctionInvocationNode.class);
assertThat( functionBase.getText()).isEqualTo(inputExpression);
FunctionInvocationNode function = (FunctionInvocationNode) functionBase;
assertThat( function.getName()).isInstanceOf(NameRefNode.class);
assertThat( function.getName().getText()).isEqualTo( "decision table");
assertThat( function.getParams()).isInstanceOf(ListNode.class);
assertThat( function.getParams().getElements()).hasSize(4);
assertThat( function.getParams().getElements().get( 0 )).isInstanceOf(NamedParameterNode.class);
assertThat( function.getParams().getElements().get( 1 )).isInstanceOf(NamedParameterNode.class);
assertThat( function.getParams().getElements().get( 2 )).isInstanceOf(NamedParameterNode.class);
assertThat( function.getParams().getElements().get( 3 )).isInstanceOf(NamedParameterNode.class);
NamedParameterNode named = (NamedParameterNode) function.getParams().getElements().get( 0 );
assertThat( named.getText()).isEqualTo( "outputs: \"Applicant Risk Rating\"");
assertThat( named.getName().getText()).isEqualTo("outputs");
assertThat( named.getExpression()).isInstanceOf(StringNode.class);
assertThat( named.getExpression().getText()).isEqualTo( "\"Applicant Risk Rating\"");
named = (NamedParameterNode) function.getParams().getElements().get( 1 );
assertThat( named.getName().getText()).isEqualTo( "input expression list");
assertThat( named.getExpression()).isInstanceOf(ListNode.class);
ListNode list = (ListNode) named.getExpression();
assertThat( list.getElements()).hasSize(2);
assertThat( list.getElements().get( 0 )).isInstanceOf(StringNode.class);
assertThat( list.getElements().get( 0 ).getText()).isEqualTo( "\"Applicant Age\"");
assertThat( list.getElements().get( 1 )).isInstanceOf(StringNode.class);
assertThat( list.getElements().get( 1 ).getText()).isEqualTo( "\"Medical History\"");
named = (NamedParameterNode) function.getParams().getElements().get( 2 );
assertThat( named.getName().getText()).isEqualTo( "rule list");
assertThat( named.getExpression()).isInstanceOf(ListNode.class);
list = (ListNode) named.getExpression();
assertThat(list.getElements()).hasSize(5); // asserts on all 5 rows; the third row contains the '-' operator, which is not allowed in expressions.
assertThat( list.getElements().get( 0 )).isInstanceOf(ListNode.class);
ListNode rule = (ListNode) list.getElements().get( 0 );
assertThat( rule.getElements()).hasSize(3);
assertThat( rule.getElements().get( 0 )).isInstanceOf(RangeNode.class);
assertThat( rule.getElements().get( 0 ).getText()).isEqualTo( ">60");
assertThat( rule.getElements().get( 1 )).isInstanceOf(StringNode.class);
assertThat( rule.getElements().get( 1 ).getText()).isEqualTo( "\"good\"");
assertThat( rule.getElements().get( 2 )).isInstanceOf(StringNode.class);
assertThat( rule.getElements().get( 2 ).getText()).isEqualTo( "\"Medium\"");
named = (NamedParameterNode) function.getParams().getElements().get( 3 );
assertThat( named.getName().getText()).isEqualTo( "hit policy");
assertThat( named.getExpression()).isInstanceOf(StringNode.class);
assertThat( named.getExpression().getText()).isEqualTo( "\"Unique\"");
} |
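A hedged sketch of driving the entry point directly; FEELParser as the owning class and compilation_unit as the grammar's start rule are assumptions, and the test's parse(...) helper presumably wraps something similar.

FEELEventListenersManager mgr = new FEELEventListenersManager();
FEEL_1_1Parser parser = FEELParser.parse(mgr, "1 + 2",
        Collections.emptyMap(), Collections.emptyMap(),
        Collections.emptyList(), Collections.emptyList(), null);
ParseTree tree = parser.compilation_unit(); // assumed start rule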
@Override
public boolean hasNext() {
try {
if (this.nextElement == null) {
if (this.readPhase) {
// read phase, get next element from buffer
T tmp = getNextRecord(this.reuseElement);
if (tmp != null) {
this.nextElement = tmp;
return true;
} else {
return false;
}
} else {
if (this.input.hasNext()) {
final T next = this.input.next();
if (writeNextRecord(next)) {
this.nextElement = next;
return true;
} else {
this.leftOverElement = next;
return false;
}
} else {
this.noMoreBlocks = true;
return false;
}
}
} else {
return true;
}
} catch (IOException ioex) {
throw new RuntimeException(
"Error (de)serializing record in block resettable iterator.", ioex);
}
} | @Test
void testDoubleBufferedBlockResettableIterator() throws Exception {
final AbstractInvokable memOwner = new DummyInvokable();
// create the resettable Iterator
final ReusingBlockResettableIterator<Record> iterator =
new ReusingBlockResettableIterator<Record>(
this.memman, this.reader, this.serializer, 2, memOwner);
// open the iterator
iterator.open();
// now test walking through the iterator
int lower = 0;
int upper = 0;
do {
            lower = upper;
// find the upper bound
while (iterator.hasNext()) {
Record target = iterator.next();
int val = target.getField(0, IntValue.class).getValue();
assertThat(val).isEqualTo(upper++);
}
// now reset the buffer a few times
for (int i = 0; i < 5; ++i) {
iterator.reset();
int count = 0;
while (iterator.hasNext()) {
Record target = iterator.next();
int val = target.getField(0, IntValue.class).getValue();
assertThat(val).isEqualTo(lower + (count++));
}
assertThat(count).isEqualTo(upper - lower);
}
} while (iterator.nextBlock());
assertThat(upper).isEqualTo(NUM_VALUES);
// close the iterator
iterator.close();
} |
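The consumption pattern the test exercises, condensed; process(...) is a placeholder for whatever the caller does with each record.

iterator.open();
do {
    while (iterator.hasNext()) {       // write phase: spills records into the block
        process(iterator.next());
    }
    iterator.reset();                  // read phase: replays the same block from the start
    while (iterator.hasNext()) {
        process(iterator.next());
    }
} while (iterator.nextBlock());        // advance to the next block of input, if any
iterator.close();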
public String error(Message errorMessage, Locale locale) {
var localizedErrorMessage = formatLocalizedErrorMessage(errorMessage, locale);
return renderer.render(
"error.html.mustache", Map.of("errorMessage", localizedErrorMessage), locale);
} | @Test
void error_withFixture() {
var sut = new Pages(renderer);
var rendered = sut.error(new Message("error.serverError", ""), Locale.US);
assertEquals(Fixtures.getUtf8String("pages_golden_error.bin"), rendered);
} |
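A hedged sketch of rendering the same page for another locale, reusing the shapes the test shows; renderer is the test fixture.

Pages pages = new Pages(renderer);
String html = pages.error(new Message("error.serverError", ""), Locale.GERMANY);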
@Override
public boolean userDefinedIndexMode(boolean enable) {
if (meters.isEmpty() && meterIdGenerators.isEmpty()) {
userDefinedIndexMode = enable;
} else {
log.warn("Unable to {} user defined index mode as store did" +
"already some allocations", enable ? "activate" : "deactivate");
}
return userDefinedIndexMode;
} | @Test
public void testEnableUserDefinedIndex() {
initMeterStore(false);
assertTrue(meterStore.userDefinedIndexMode(true));
} |
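The toggle contract in a hedged sketch: the return value is the mode actually in effect, so a rejected toggle is observable by the caller.

boolean nowEnabled = meterStore.userDefinedIndexMode(true);
if (!nowEnabled) {
    // The store already held meters or allocated ids; the mode was left unchanged.
}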