focal_method | test_case |
---|---|
@Override
public String toString() {
return "RangeSet(from=" + from + " (inclusive), to=" + to + " (exclusive))";
} | @Test
void testToString() {
RangeSet rangeSet = new RangeSet(5, 8);
assertEquals("RangeSet(from=5 (inclusive), to=8 (exclusive))", rangeSet.toString());
} |
public int getResourceProfileFailedRetrieved() {
return numGetResourceProfileFailedRetrieved.value();
} | @Test
public void testGetResourceProfileRetrievedFailed() {
long totalBadBefore = metrics.getResourceProfileFailedRetrieved();
badSubCluster.getResourceProfileFailed();
Assert.assertEquals(totalBadBefore + 1,
metrics.getResourceProfileFailedRetrieved());
} |
@Override
public boolean isUserConsentRequiredAfterUpgrade() {
return configuration.getBoolean(GITLAB_USER_CONSENT_FOR_PERMISSION_PROVISIONING_REQUIRED).isPresent();
} | @Test
public void isUserConsentRequiredForPermissionProvisioning_returnsTrueWhenPropertyPresent() {
settings.setProperty(GITLAB_USER_CONSENT_FOR_PERMISSION_PROVISIONING_REQUIRED, "");
assertThat(config.isUserConsentRequiredAfterUpgrade()).isTrue();
} |
@Override
public Object getObject(final int columnIndex) throws SQLException {
return mergeResultSet.getValue(columnIndex, Object.class);
} | @Test
void assertGetObjectWithTimestamp() throws SQLException {
Timestamp result = mock(Timestamp.class);
when(mergeResultSet.getValue(1, Timestamp.class)).thenReturn(result);
assertThat(shardingSphereResultSet.getObject(1, Timestamp.class), is(result));
} |
@Override
public String toString() {
return "ResourceConfig{" +
"url=" + url +
", id='" + id + '\'' +
", resourceType=" + resourceType +
'}';
} | @Test
public void when_addDuplicateZipOfJarWithPath_then_throwsException() throws Exception {
// Given
String resourceId = "zipFile";
String path1 = createFile("path/to/" + resourceId).toString();
String path2 = createFile("path/to/another/" + resourceId).toString();
config.addJarsInZip(path1);
// When
assertThrows(IllegalArgumentException.class, () -> config.addJarsInZip(path2));
} |
static int inferParallelism(
ReadableConfig readableConfig, long limitCount, Supplier<Integer> splitCountProvider) {
int parallelism =
readableConfig.get(ExecutionConfigOptions.TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM);
if (readableConfig.get(FlinkConfigOptions.TABLE_EXEC_ICEBERG_INFER_SOURCE_PARALLELISM)) {
int maxInferParallelism =
readableConfig.get(FlinkConfigOptions.TABLE_EXEC_ICEBERG_INFER_SOURCE_PARALLELISM_MAX);
Preconditions.checkState(
maxInferParallelism >= 1,
FlinkConfigOptions.TABLE_EXEC_ICEBERG_INFER_SOURCE_PARALLELISM_MAX.key()
+ " cannot be less than 1");
parallelism = Math.min(splitCountProvider.get(), maxInferParallelism);
}
if (limitCount > 0) {
int limit = limitCount >= Integer.MAX_VALUE ? Integer.MAX_VALUE : (int) limitCount;
parallelism = Math.min(parallelism, limit);
}
// parallelism must be positive.
parallelism = Math.max(1, parallelism);
return parallelism;
} | @Test
public void testInferredParallelism() throws IOException {
Configuration configuration = new Configuration();
// Empty table, infer parallelism should be at least 1
int parallelism = SourceUtil.inferParallelism(configuration, -1L, () -> 0);
assertThat(parallelism).isEqualTo(1);
// 2 splits (max infer parallelism is the default 100, max > splits num), so the parallelism
// is the splits num: 2
parallelism = SourceUtil.inferParallelism(configuration, -1L, () -> 2);
assertThat(parallelism).isEqualTo(2);
// 2 splits and limit is 1, max infer parallelism is the default 100,
// which is greater than both the splits num and the limit, so the parallelism is the limit value: 1
parallelism = SourceUtil.inferParallelism(configuration, 1, () -> 2);
assertThat(parallelism).isEqualTo(1);
// 2 splits and max infer parallelism is 1 (max < splits num), the parallelism is 1
configuration.setInteger(FlinkConfigOptions.TABLE_EXEC_ICEBERG_INFER_SOURCE_PARALLELISM_MAX, 1);
parallelism = SourceUtil.inferParallelism(configuration, -1L, () -> 2);
assertThat(parallelism).isEqualTo(1);
// 2 splits, max infer parallelism is 1, limit is 3, so the parallelism is the max infer
// parallelism: 1
parallelism = SourceUtil.inferParallelism(configuration, 3, () -> 2);
assertThat(parallelism).isEqualTo(1);
// 2 splits, infer parallelism is disabled, the parallelism is flink default parallelism 1
configuration.setBoolean(FlinkConfigOptions.TABLE_EXEC_ICEBERG_INFER_SOURCE_PARALLELISM, false);
parallelism = SourceUtil.inferParallelism(configuration, 3, () -> 2);
assertThat(parallelism).isEqualTo(1);
} |
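The clamping order above (inference cap, then LIMIT, then a floor of 1) is easy to lose in the config plumbing. A minimal standalone sketch, assuming hypothetical plain-int parameters in place of the Flink/Iceberg config options:

// Hedged sketch: reproduces only the clamping order of inferParallelism, with
// plain parameters (defaultParallelism, maxInfer) standing in for the config options.
static int inferParallelismSketch(boolean inferEnabled, int defaultParallelism,
                                  int maxInfer, long limitCount, int splitCount) {
    int parallelism = defaultParallelism;
    if (inferEnabled) {
        if (maxInfer < 1) {
            throw new IllegalStateException("max infer parallelism cannot be less than 1");
        }
        parallelism = Math.min(splitCount, maxInfer);   // cap inference by the configured max
    }
    if (limitCount > 0) {
        int limit = limitCount >= Integer.MAX_VALUE ? Integer.MAX_VALUE : (int) limitCount;
        parallelism = Math.min(parallelism, limit);     // a LIMIT never needs more parallelism
    }
    return Math.max(1, parallelism);                    // parallelism must stay positive
}

Under this sketch, inferParallelismSketch(true, 1, 100, 1, 2) yields 1, mirroring the third assertion in the test.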
@Override
public Path move(final Path source, final Path target, final TransferStatus status, final Delete.Callback callback,
final ConnectionCallback connectionCallback) throws BackgroundException {
if(containerService.isContainer(source)) {
if(new SimplePathPredicate(source.getParent()).test(target.getParent())) {
// Rename only
return proxy.move(source, target, status, callback, connectionCallback);
}
}
if(new SDSTripleCryptEncryptorFeature(session, nodeid).isEncrypted(source) ^ new SDSTripleCryptEncryptorFeature(session, nodeid).isEncrypted(containerService.getContainer(target))) {
// Moving into or from an encrypted room
final Copy copy = new SDSDelegatingCopyFeature(session, nodeid, new SDSCopyFeature(session, nodeid));
if(log.isDebugEnabled()) {
log.debug(String.format("Move %s to %s using copy feature %s", source, target, copy));
}
final Path c = copy.copy(source, target, status, connectionCallback, new DisabledStreamListener());
// Delete source file after copy is complete
final Delete delete = new SDSDeleteFeature(session, nodeid);
if(delete.isSupported(source)) {
log.warn(String.format("Delete source %s copied to %s", source, target));
delete.delete(Collections.singletonMap(source, status), connectionCallback, callback);
}
return c;
}
else {
return proxy.move(source, target, status, callback, connectionCallback);
}
} | @Test
public void testMoveEncryptedDataRoom() throws Exception {
final SDSNodeIdProvider nodeid = new SDSNodeIdProvider(session);
final String roomName = new AlphanumericRandomStringService().random();
final Path room = new SDSDirectoryFeature(session, nodeid).mkdir(new Path(roomName, EnumSet.of(Path.Type.directory, Path.Type.volume)), new TransferStatus());
final EncryptRoomRequest encrypt = new EncryptRoomRequest().isEncrypted(true);
new NodesApi(session.getClient()).encryptRoom(encrypt, Long.parseLong(new SDSNodeIdProvider(session).getVersionId(room)), StringUtils.EMPTY, null);
final AttributedList<Path> list = new SDSListService(session, nodeid).list(room.getParent(), new DisabledListProgressListener());
final Path encrypted = list.get(room);
// create file inside encrypted room
final Path folder = new SDSDirectoryFeature(session, nodeid).mkdir(
new Path(encrypted, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus());
final Path renamed = new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory, Path.Type.volume));
new SDSDelegatingMoveFeature(session, nodeid, new SDSMoveFeature(session, nodeid)).move(encrypted, renamed, new TransferStatus(), new Delete.DisabledCallback(), new DisabledConnectionCallback());
assertEquals(0, session.getMetrics().get(Copy.class));
assertFalse(new SDSFindFeature(session, nodeid).find(new Path(roomName, EnumSet.of(Path.Type.directory, Path.Type.volume))));
assertTrue(new SDSFindFeature(session, nodeid).find(renamed));
assertTrue(new SDSFindFeature(session, nodeid).find(new Path(renamed, folder.getName(), EnumSet.of(Path.Type.directory))));
new SDSDeleteFeature(session, nodeid).delete(Collections.singletonList(renamed), new DisabledLoginCallback(), new Delete.DisabledCallback());
} |
@Override
public final void calculate() {
} | @Test
public void testCalculate() {
function.accept(MeterEntity.newService("service-test", Layer.GENERAL), HTTP_CODE_COUNT_1);
function.accept(MeterEntity.newService("service-test", Layer.GENERAL), HTTP_CODE_COUNT_2);
function.calculate();
assertThat(function.getValue()).isEqualTo(HTTP_CODE_COUNT_3);
} |
public static boolean test(byte[] bloomBytes, byte[]... topics) {
Bloom bloom = new Bloom(bloomBytes);
if (topics == null) {
throw new IllegalArgumentException("topics can not be null");
}
for (byte[] topic : topics) {
if (!bloom.test(topic)) {
return false;
}
}
return true;
} | @Test
public void testStaticMethodTestWhenAllTopicsIsInBloomForBytesInput() {
boolean result =
Bloom.test(
Numeric.hexStringToByteArray(ethereumSampleLogsBloom),
Numeric.hexStringToByteArray(ethereumSampleLogs.get(0)),
Numeric.hexStringToByteArray(ethereumSampleLogs.get(100)));
assertTrue(result, "must return true");
} |
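The Ethereum-specific details (a 2048-bit log bloom with Keccak-derived bit positions) live outside this row. A generic sketch of the membership check the loop above relies on, using java.util.BitSet and an illustrative double-hashing scheme that is not Ethereum's:

import java.util.BitSet;

// Hedged generic sketch of bloom membership: an item "may be present" only if
// every derived bit is set; a single clear bit proves absence. The hash mixing
// below is illustrative only, not Ethereum's Keccak-based bit selection.
final class BloomSketch {
    private final BitSet bits;
    private final int k;
    BloomSketch(int sizeBits, int k) { this.bits = new BitSet(sizeBits); this.k = k; }
    private int bitIndex(byte[] item, int i) {
        int h1 = java.util.Arrays.hashCode(item);
        int h2 = h1 * 0x9E3779B1;  // cheap second hash for double hashing
        return Math.floorMod(h1 + i * h2, bits.size());
    }
    void add(byte[] item) { for (int i = 0; i < k; i++) bits.set(bitIndex(item, i)); }
    boolean test(byte[] item) {
        for (int i = 0; i < k; i++) if (!bits.get(bitIndex(item, i))) return false;
        return true;  // all bits set: possibly present (false positives allowed)
    }
}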
@Override
public Serializable read(final MySQLBinlogColumnDef columnDef, final MySQLPacketPayload payload) {
long datetime = payload.readInt8();
return 0L == datetime ? MySQLTimeValueUtils.DATETIME_OF_ZERO : readDateTime(datetime);
} | @Test
void assertReadNullTime() {
when(payload.readInt8()).thenReturn(0L);
assertThat(new MySQLDatetimeBinlogProtocolValue().read(columnDef, payload), is(MySQLTimeValueUtils.DATETIME_OF_ZERO));
} |
public static void addListPopulationByObjectCreationExpr(final List<ObjectCreationExpr> toAdd,
final BlockStmt body,
final String listName) {
toAdd.forEach(objectCreationExpr -> {
NodeList<Expression> arguments = NodeList.nodeList(objectCreationExpr);
MethodCallExpr methodCallExpr = new MethodCallExpr();
methodCallExpr.setScope(new NameExpr(listName));
methodCallExpr.setName("add");
methodCallExpr.setArguments(arguments);
ExpressionStmt expressionStmt = new ExpressionStmt();
expressionStmt.setExpression(methodCallExpr);
body.addStatement(expressionStmt);
});
} | @Test
void addListPopulation() {
final List<ObjectCreationExpr> toAdd = IntStream.range(0, 5)
.mapToObj(i -> {
ObjectCreationExpr toReturn = new ObjectCreationExpr();
toReturn.setType(String.class);
Expression value = new StringLiteralExpr("String" + i);
toReturn.setArguments(NodeList.nodeList(value));
return toReturn;
})
.collect(Collectors.toList());
BlockStmt body = new BlockStmt();
String listName = "LIST_NAME";
CommonCodegenUtils.addListPopulationByObjectCreationExpr(toAdd, body, listName);
NodeList<Statement> statements = body.getStatements();
assertThat(statements).hasSameSizeAs(toAdd);
for (Statement statement : statements) {
assertThat(statement).isInstanceOf(ExpressionStmt.class);
ExpressionStmt expressionStmt = (ExpressionStmt) statement;
assertThat(expressionStmt.getExpression()).isInstanceOf(MethodCallExpr.class);
MethodCallExpr methodCallExpr = (MethodCallExpr) expressionStmt.getExpression();
assertThat(methodCallExpr.getScope().get().asNameExpr().getNameAsString()).isEqualTo(listName);
NodeList<com.github.javaparser.ast.expr.Expression> arguments = methodCallExpr.getArguments();
assertThat(arguments).hasSize(1);
assertThat(arguments.get(0)).isInstanceOf(ObjectCreationExpr.class);
ObjectCreationExpr objectCreationExpr = (ObjectCreationExpr) arguments.get(0);
assertThat(objectCreationExpr.getType().asString()).isEqualTo(String.class.getSimpleName());
arguments = objectCreationExpr.getArguments();
assertThat(arguments).hasSize(1);
assertThat(arguments.get(0)).isInstanceOf(StringLiteralExpr.class);
}
for (ObjectCreationExpr entry : toAdd) {
int matchingDeclarations = (int) statements.stream().filter(statement -> {
ExpressionStmt expressionStmt = (ExpressionStmt) statement;
com.github.javaparser.ast.expr.Expression expression = expressionStmt.getExpression();
MethodCallExpr methodCallExpr = (MethodCallExpr) expression;
final NodeList<com.github.javaparser.ast.expr.Expression> arguments = methodCallExpr.getArguments();
return entry.equals(arguments.get(0).asObjectCreationExpr());
}).count();
assertThat(matchingDeclarations).isEqualTo(1);
}
} |
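Reconstructed from the scope/name/arguments wired up above, the statements the helper emits for the test's five entries look like:

LIST_NAME.add(new String("String0"));
LIST_NAME.add(new String("String1"));
// ... and so on through "String4"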
public static String computeQueryHash(String query)
{
requireNonNull(query, "query is null");
if (query.isEmpty()) {
return "";
}
byte[] queryBytes = query.getBytes(UTF_8);
long queryHash = new XxHash64().update(queryBytes).hash();
return toHexString(queryHash);
} | @Test
public void testComputeQueryHashEquivalentStrings()
{
String query1 = "SELECT * FROM REPORTS WHERE YEAR >= 2000 AND ID = 293";
String query2 = "SELECT * FROM REPORTS WHERE YEAR >= 2000 AND ID = 293";
assertEquals(computeQueryHash(query1), computeQueryHash(query2));
} |
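XxHash64 and toHexString here come from the surrounding Presto/airlift code; a self-contained sketch of the same contract, with java.util.zip.CRC32 as a stand-in hash (so the hex digests themselves differ):

import java.nio.charset.StandardCharsets;
import java.util.zip.CRC32;

// Hedged sketch: same null/empty handling as computeQueryHash, but CRC32
// stands in for XxHash64.
static String computeQueryHashSketch(String query) {
    java.util.Objects.requireNonNull(query, "query is null");
    if (query.isEmpty()) {
        return "";
    }
    CRC32 hash = new CRC32();
    hash.update(query.getBytes(StandardCharsets.UTF_8));
    return Long.toHexString(hash.getValue());
}

Equal strings still map to equal digests, which is all the equivalence test asserts.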
public static String getDigitsOnly( String input ) {
if ( Utils.isEmpty( input ) ) {
return null;
}
StringBuilder digitsOnly = new StringBuilder();
char c;
for ( int i = 0; i < input.length(); i++ ) {
c = input.charAt( i );
if ( Character.isDigit( c ) ) {
digitsOnly.append( c );
}
}
return digitsOnly.toString();
} | @Test
public void testGetDigitsOnly() {
assertNull( Const.getDigitsOnly( null ) );
assertEquals( "123456789", Const.getDigitsOnly( "123foo456bar789" ) );
} |
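Design note: the character loop avoids regex machinery; the same filtering is a one-liner if that tradeoff is acceptable. A sketch (which, unlike getDigitsOnly, returns "" rather than null for empty input):

// Hedged alternative sketch: strip every non-digit via regex.
// Note: returns "" for an empty string, where getDigitsOnly returns null.
static String digitsOnlyRegex(String input) {
    return input == null ? null : input.replaceAll("\\D", "");
}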
@Override
public boolean isSingleton() {
return true;
} | @Test
public final void isSingletonShouldAlwaysReturnTrue() {
objectUnderTest = new SpringRemoteCacheManagerFactoryBean();
assertTrue(
"isSingleton() should always return true since each SpringRemoteCacheManagerFactoryBean will always produce "
+ "the same SpringRemoteCacheManager instance. However,it returned false.",
objectUnderTest.isSingleton());
} |
public T send() throws IOException {
return web3jService.send(this, responseType);
} | @Test
public void testEthNewPendingTransactionFilter() throws Exception {
web3j.ethNewPendingTransactionFilter().send();
verifyResult(
"{\"jsonrpc\":\"2.0\",\"method\":\"eth_newPendingTransactionFilter\","
+ "\"params\":[],\"id\":1}");
} |
@Override
public void deleteCouponTemplate(Long id) {
// Validate that the template exists
validateCouponTemplateExists(id);
// Delete it
couponTemplateMapper.deleteById(id);
} | @Test
public void testDeleteCouponTemplate_notExists() {
// Prepare the parameter
Long id = randomLongId();
// Invoke and assert the expected exception
assertServiceException(() -> couponTemplateService.deleteCouponTemplate(id), COUPON_TEMPLATE_NOT_EXISTS);
} |
public static void convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(Pipeline pipeline) {
if (!(ExperimentalOptions.hasExperiment(pipeline.getOptions(), "use_sdf_read")
|| ExperimentalOptions.hasExperiment(
pipeline.getOptions(), "use_unbounded_sdf_wrapper"))
|| ExperimentalOptions.hasExperiment(
pipeline.getOptions(), "beam_fn_api_use_deprecated_read")
|| ExperimentalOptions.hasExperiment(pipeline.getOptions(), "use_deprecated_read")) {
convertReadBasedSplittableDoFnsToPrimitiveReads(pipeline);
}
} | @Test
public void testConvertIsSkippedWhenUsingUseUnboundedSDFWrapper() {
PipelineOptions pipelineOptions = PipelineOptionsFactory.create();
pipelineOptions.setRunner(CrashingRunner.class);
ExperimentalOptions.addExperiment(
pipelineOptions.as(ExperimentalOptions.class), "use_unbounded_sdf_wrapper");
Pipeline sdfRead = Pipeline.create(pipelineOptions);
sdfRead.apply(Read.from(new FakeBoundedSource()));
sdfRead.apply(Read.from(new BoundedToUnboundedSourceAdapter<>(new FakeBoundedSource())));
SplittableParDo.convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(sdfRead);
sdfRead.traverseTopologically(
new Defaults() {
@Override
public void visitPrimitiveTransform(Node node) {
assertThat(
node.getTransform(), not(instanceOf(SplittableParDo.PrimitiveBoundedRead.class)));
assertThat(
node.getTransform(), not(instanceOf(SplittableParDo.PrimitiveUnboundedRead.class)));
}
});
} |
@Override
public Iterator<Text> search(String term) {
if (invertedFile.containsKey(term)) {
ArrayList<Text> hits = new ArrayList<>(invertedFile.get(term));
return hits.iterator();
} else {
return Collections.emptyIterator();
}
} | @Test
public void testSearchNoHits() {
System.out.println("search 'no hits'");
String[] terms = {"thisisnotaword"};
Iterator<Relevance> hits = corpus.search(new BM25(), terms);
assertFalse(hits.hasNext());
} |
@Override
public MapperResult getCapacityList4CorrectUsage(MapperContext context) {
String sql = "SELECT id, tenant_id FROM tenant_capacity WHERE id>? OFFSET 0 ROWS FETCH NEXT ? ROWS ONLY";
return new MapperResult(sql, CollectionUtils.list(context.getWhereParameter(FieldConstant.ID),
context.getWhereParameter(FieldConstant.LIMIT_SIZE)));
} | @Test
void testGetCapacityList4CorrectUsage() {
Object id = 1;
Object limit = 10;
context.putWhereParameter(FieldConstant.ID, id);
context.putWhereParameter(FieldConstant.LIMIT_SIZE, limit);
MapperResult mapperResult = tenantCapacityMapperByDerby.getCapacityList4CorrectUsage(context);
assertEquals("SELECT id, tenant_id FROM tenant_capacity WHERE id>? OFFSET 0 ROWS FETCH NEXT ? ROWS ONLY", mapperResult.getSql());
assertArrayEquals(new Object[] {id, limit}, mapperResult.getParamList().toArray());
} |
@Around(CLIENT_INTERFACE_REMOVE_CONFIG_RPC)
Object removeConfigAroundRpc(ProceedingJoinPoint pjp, ConfigRemoveRequest request, RequestMeta meta)
throws Throwable {
final ConfigChangePointCutTypes configChangePointCutType = ConfigChangePointCutTypes.REMOVE_BY_RPC;
final List<ConfigChangePluginService> pluginServices = getPluginServices(
configChangePointCutType);
// no relevant plugin is enabled or registered for this pointcut type
if (pluginServices.isEmpty()) {
return pjp.proceed();
}
ConfigChangeRequest configChangeRequest = new ConfigChangeRequest(configChangePointCutType);
configChangeRequest.setArg("dataId", request.getDataId());
configChangeRequest.setArg("group", request.getGroup());
configChangeRequest.setArg("tenant", request.getTenant());
configChangeRequest.setArg("appName", request.getHeader("appName"));
configChangeRequest.setArg("srcIp", meta.getClientIp());
configChangeRequest.setArg("requestIpApp", request.getHeader("requestIpApp"));
configChangeRequest.setArg("srcUser", request.getHeader("src_user"));
configChangeRequest.setArg("use", request.getHeader("use"));
return configChangeServiceHandle(pjp, pluginServices, configChangeRequest);
} | @Test
void testRemoveConfigAroundRpc() throws Throwable {
Mockito.when(configChangePluginService.executeType()).thenReturn(ConfigChangeExecuteTypes.EXECUTE_BEFORE_TYPE);
ProceedingJoinPoint proceedingJoinPoint = Mockito.mock(ProceedingJoinPoint.class);
ConfigRemoveRequest request = new ConfigRemoveRequest();
RequestMeta requestMeta = new RequestMeta();
ConfigPublishResponse configPublishResponse = ConfigPublishResponse.buildSuccessResponse();
Mockito.when(proceedingJoinPoint.proceed(any())).thenReturn(configPublishResponse);
//execute
Object o = configChangeAspect.removeConfigAroundRpc(proceedingJoinPoint, request, requestMeta);
//expect
Mockito.verify(configChangePluginService, Mockito.times(1))
.execute(any(ConfigChangeRequest.class), any(ConfigChangeResponse.class));
assertEquals(configPublishResponse, o);
} |
public int indexOf(final String str) {
return indexOf(str, 0);
} | @Test
public void testIndexOf2() {
final UnicodeHelper lh = new UnicodeHelper("a", Method.GRAPHEME);
assertEquals(-1, lh.indexOf("b"));
final UnicodeHelper lh2 = new UnicodeHelper(
"a" + new String(Character.toChars(0x1f600)) + "a" + UCSTR + "A" + "k\u035fh" + "z"
+ "a" + new String(Character.toChars(0x1f600)) + "a" + UCSTR + "A"
+ "k\u035fh" + "z",
Method.GRAPHEME);
assertEquals(1, lh2.indexOf(new String(Character.toChars(0x1f600))));
assertEquals(9, lh2.indexOf(new String(Character.toChars(0x1f600)), 8));
assertEquals(3, lh2.indexOf(UCSTR));
assertEquals(11, lh2.indexOf(UCSTR, 8));
final UnicodeHelper lh3 = new UnicodeHelper("mm̂mm̂m", Method.GRAPHEME);
assertEquals(0, lh3.indexOf("m"));
assertEquals(2, lh3.indexOf("m", 1));
assertEquals(3, lh3.indexOf("m̂", 2));
} |
public static boolean isNotEmpty(String str) {
return !isEmpty(str);
} | @Test
void testIsNotEmpty() throws Exception {
assertFalse(StringUtils.isNotEmpty(null));
assertFalse(StringUtils.isNotEmpty(""));
assertTrue(StringUtils.isNotEmpty("abc"));
} |
public static Map<String, String> parseCondaCommonStdout(String out)
throws IOException, InterruptedException {
Map<String, String> kv = new LinkedHashMap<String, String>();
String[] lines = out.split("\n");
for (String s : lines) {
if (s == null || s.isEmpty() || s.startsWith("#")) {
continue;
}
Matcher match = PATTERN_OUTPUT_ENV_LIST.matcher(s);
if (!match.matches()) {
continue;
}
kv.put(match.group(1), match.group(2));
}
return kv;
} | @Test
void testParseCondaCommonStdout()
throws IOException, InterruptedException {
StringBuilder sb = new StringBuilder()
.append("# comment1\n")
.append("# comment2\n")
.append("env1 /location1\n")
.append("env2 /location2\n");
Map<String, String> locationPerEnv =
PythonCondaInterpreter.parseCondaCommonStdout(sb.toString());
assertEquals("/location1", locationPerEnv.get("env1"));
assertEquals("/location2", locationPerEnv.get("env2"));
} |
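PATTERN_OUTPUT_ENV_LIST is defined outside this row; a plausible reconstruction consistent with the test's "env1 /location1" lines (an assumption, including the optional '*' conda prints for the active env):

import java.util.regex.Pattern;

// Hypothetical reconstruction (an assumption, not Zeppelin's actual constant):
// env name, an optional '*' marking the active environment, then its location.
static final Pattern PATTERN_OUTPUT_ENV_LIST =
        Pattern.compile("(\\S+)\\s+\\*?\\s*(\\S+)");

// e.g. PATTERN_OUTPUT_ENV_LIST.matcher("env1 /location1").matches() -> true,
// with group(1) = "env1" and group(2) = "/location1".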
public static long findAndVerifyWindowGrace(final GraphNode graphNode) {
return findAndVerifyWindowGrace(graphNode, "");
} | @Test
public void shouldExtractGraceFromSessionAncestorThroughStatefulParent() {
final SessionWindows windows = SessionWindows.ofInactivityGapAndGrace(ofMillis(10L), ofMillis(1234L));
final StatefulProcessorNode<String, Long> graceGrandparent = new StatefulProcessorNode<>(
"asdf",
new ProcessorParameters<>(new KStreamSessionWindowAggregate<String, Long, Integer>(
windows, "asdf", EmitStrategy.onWindowUpdate(), null, null, null
), "asdf"),
(StoreFactory) null
);
final StatefulProcessorNode<String, Long> statefulParent = new StatefulProcessorNode<>(
"stateful",
new ProcessorParameters<>(
() -> new Processor<String, Long, String, Long>() {
@Override
public void process(final Record<String, Long> record) {}
},
"dummy"
),
(StoreFactory) null
);
graceGrandparent.addChild(statefulParent);
final ProcessorGraphNode<String, Long> node = new ProcessorGraphNode<>("stateless", null);
statefulParent.addChild(node);
final long extracted = GraphGraceSearchUtil.findAndVerifyWindowGrace(node);
assertThat(extracted, is(windows.gracePeriodMs() + windows.inactivityGap()));
} |
public Filter parseSingleExpression(final String filterExpression, final List<EntityAttribute> attributes) {
if (!filterExpression.contains(FIELD_AND_VALUE_SEPARATOR)) {
throw new IllegalArgumentException(WRONG_FILTER_EXPR_FORMAT_ERROR_MSG);
}
final String[] split = filterExpression.split(FIELD_AND_VALUE_SEPARATOR, 2);
final String fieldPart = split[0];
if (fieldPart == null || fieldPart.isEmpty()) {
throw new IllegalArgumentException(WRONG_FILTER_EXPR_FORMAT_ERROR_MSG);
}
final String valuePart = split[1];
if (valuePart == null || valuePart.isEmpty()) {
throw new IllegalArgumentException(WRONG_FILTER_EXPR_FORMAT_ERROR_MSG);
}
final EntityAttribute attributeMetaData = getAttributeMetaData(attributes, fieldPart);
final SearchQueryField.Type fieldType = attributeMetaData.type();
if (isRangeValueExpression(valuePart, fieldType)) {
if (valuePart.startsWith(RANGE_VALUES_SEPARATOR)) {
return new RangeFilter(attributeMetaData.id(),
null,
extractValue(fieldType, valuePart.substring(RANGE_VALUES_SEPARATOR.length()))
);
} else if (valuePart.endsWith(RANGE_VALUES_SEPARATOR)) {
return new RangeFilter(attributeMetaData.id(),
extractValue(fieldType, valuePart.substring(0, valuePart.length() - RANGE_VALUES_SEPARATOR.length())),
null
);
} else {
final String[] ranges = valuePart.split(RANGE_VALUES_SEPARATOR);
return new RangeFilter(attributeMetaData.id(),
extractValue(fieldType, ranges[0]),
extractValue(fieldType, ranges[1])
);
}
} else {
return new SingleValueFilter(attributeMetaData.id(), extractValue(fieldType, valuePart));
}
} | @Test
void parsesFilterExpressionCorrectlyForOpenDateRanges() {
final String dateString = "2012-12-12 12:12:12";
final DateTime dateObject = new DateTime(2012, 12, 12, 12, 12, 12, DateTimeZone.UTC);
final List<EntityAttribute> entityAttributes = List.of(EntityAttribute.builder()
.id("created_at")
.title("Creation Date")
.type(SearchQueryField.Type.DATE)
.filterable(true)
.build());
assertEquals(
new RangeFilter("created_at", dateObject.toDate(), null),
toTest.parseSingleExpression("created_at:" + dateString + RANGE_VALUES_SEPARATOR,
entityAttributes
));
assertEquals(
new RangeFilter("created_at", null, dateObject.toDate()),
toTest.parseSingleExpression("created_at:" + RANGE_VALUES_SEPARATOR + dateString,
entityAttributes
));
} |
public static boolean isCompositeURI(URI uri) {
String ssp = stripPrefix(uri.getRawSchemeSpecificPart().trim(), "//").trim();
if (ssp.indexOf('(') == 0 && checkParenthesis(ssp)) {
return true;
}
return false;
} | @Test
public void testIsCompositeURIWithQueryNoSlashes() throws URISyntaxException {
URI[] compositeURIs = new URI[] { new URI("test:(part1://host?part1=true)?outside=true"), new URI("broker:(tcp://localhost:61616)?name=foo") };
for (URI uri : compositeURIs) {
assertTrue(uri + " must be detected as composite URI", URISupport.isCompositeURI(uri));
}
} |
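checkParenthesis is not shown in this row; a minimal balanced-parenthesis sketch of what it plausibly verifies:

// Hedged sketch of a balance check like the checkParenthesis used above:
// every '(' must be matched by a later ')', with depth never going negative.
static boolean checkParenthesisSketch(String str) {
    int depth = 0;
    for (int i = 0; i < str.length(); i++) {
        char c = str.charAt(i);
        if (c == '(') depth++;
        else if (c == ')' && --depth < 0) return false;  // closing before opening
    }
    return depth == 0;
}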
@Override
public List<String> getServerList() {
return serverList.isEmpty() ? serversFromEndpoint : serverList;
} | @Test
void testConstructWithEndpointAndRefreshEmpty() throws Exception {
Properties properties = new Properties();
properties.put(PropertyKeyConst.ENDPOINT, "127.0.0.1");
serverListManager = new ServerListManager(properties);
List<String> serverList = serverListManager.getServerList();
assertEquals(1, serverList.size());
assertEquals("127.0.0.1:8848", serverList.get(0));
httpRestResult.setData("");
mockThreadInvoke(serverListManager, true);
serverList = serverListManager.getServerList();
assertEquals(1, serverList.size());
assertEquals("127.0.0.1:8848", serverList.get(0));
} |
@Override
public void acknowledge(List<? extends Acknowledgeable> messages) {
@SuppressWarnings("ConstantConditions")
final Optional<Long> max =
messages.stream()
.map(Acknowledgeable::getMessageQueueId)
.filter(this::isValidMessageQueueId)
.map(Long.class::cast)
.max(Long::compare);
max.ifPresent(this::doAcknowledge);
metrics.acknowledgedMessages().mark(messages.size());
} | @Test
void acknowledgeMessageWithWrongTypeOfMessageQueueId(MessageFactory messageFactory) {
final Message message = messageFactory.createMessage("message", "source", DateTime.now(UTC));
message.setMessageQueueId("foo");
acknowledger.acknowledge(message);
verifyNoMoreInteractions(kafkaJournal);
} |
public void setName(String name) {
this.name = name;
} | @Test
public void testSetName() {
String name = "name";
Model instance = new Model();
instance.setName(name);
assertEquals("name", instance.getName());
} |
public DataSchemaParser.ParseResult parseSources(String[] rawSources) throws IOException
{
Set<String> fileExtensions = _parserByFileExtension.keySet();
Map<String, List<String>> byExtension = new HashMap<>(fileExtensions.size());
for (String fileExtension : fileExtensions)
{
byExtension.put(fileExtension, new ArrayList<>());
}
String[] sortedSources = Arrays.copyOf(rawSources, rawSources.length);
Arrays.sort(sortedSources);
// Extract all schema files from the given source paths and group by extension (JARs are handled specially)
for (String source : sortedSources)
{
final File sourceFile = new File(source);
if (sourceFile.exists())
{
if (sourceFile.isDirectory())
{
// Source path is a directory, so recursively find all schema files contained therein
final FileExtensionFilter filter = new FileExtensionFilter(fileExtensions);
final List<File> sourceFilesInDirectory = FileUtil.listFiles(sourceFile, filter);
// Add each schema to the corresponding extension's source list
for (File f : sourceFilesInDirectory)
{
String ext = FilenameUtils.getExtension(f.getName());
List<String> filesForExtension = byExtension.get(ext);
if (filesForExtension != null)
{
filesForExtension.add(f.getAbsolutePath());
}
}
}
else if (sourceFile.getName().endsWith(".jar"))
{
// Source path is a JAR, so add it to each extension's source list.
// The file-based parser for each extension will extract the JAR and process only files matching the extension
byExtension.values().forEach(files -> files.add(sourceFile.getAbsolutePath()));
}
else
{
// Source path is a non-JAR file, so add it to the corresponding extension's source list
String ext = FilenameUtils.getExtension(sourceFile.getName());
List<String> filesForExtension = byExtension.get(ext);
if (filesForExtension != null)
{
filesForExtension.add(sourceFile.getAbsolutePath());
}
}
}
}
// Parse all schema files and JARs using the appropriate file format parser
final ParseResult result = new ParseResult();
for (Map.Entry<String, List<String>> entry : byExtension.entrySet())
{
String ext = entry.getKey();
List<String> files = entry.getValue();
_parserByFileExtension.get(ext).parseSources(files.toArray(new String[files.size()]), result);
}
return result;
} | @Test(dataProvider = "inputFiles")
public void testCustomSourceSchemaDirectory(String pegasusFilename, String[] expectedSchemas) throws Exception
{
String tempDirectoryPath = _tempDir.getAbsolutePath();
String jarFile = tempDirectoryPath + FS + "test.jar";
SchemaDirectory customSchemaDirectory = () -> "custom";
String pegasusFile = TEST_RESOURCES_DIR + FS + pegasusFilename;
String pegasusFileInJar = customSchemaDirectory.getName() + "/" + pegasusFilename;
createTempJarFile(Collections.singletonMap(pegasusFile, pegasusFileInJar), jarFile);
// Load with the default parser; this will find zero schemas.
DataSchemaParser parser = new DataSchemaParser.Builder(tempDirectoryPath).build();
DataSchemaParser.ParseResult parseResult = parser.parseSources(new String[]{jarFile});
assertEquals(parseResult.getSchemaAndLocations().size(), 0);
// Now create a parser with custom directory as source
parser = new DataSchemaParser.Builder(tempDirectoryPath)
.setSourceDirectories(Collections.singletonList(customSchemaDirectory))
.build();
parseResult = parser.parseSources(new String[]{jarFile});
assertEquals(parseResult.getSchemaAndLocations().size(), expectedSchemas.length);
Set<String> schemaNames = parseResult.getSchemaAndLocations().keySet().stream().map(DataSchema::getUnionMemberKey).collect(
Collectors.toSet());
for (String schema : expectedSchemas)
{
assertTrue(schemaNames.contains(schema));
}
parseResult.getSchemaAndLocations().values().forEach(loc -> assertEquals(loc.getSourceFile().getAbsolutePath(), jarFile));
} |
public void incQueueGetNums(final String group, final String topic, final Integer queueId, final int incValue) {
if (enableQueueStat) {
final String statsKey = buildStatsKey(topic, queueId, group);
this.statsTable.get(Stats.QUEUE_GET_NUMS).addValue(statsKey, incValue, 1);
}
} | @Test
public void testIncQueueGetNums() {
brokerStatsManager.incQueueGetNums(GROUP_NAME, TOPIC, QUEUE_ID, 1);
final String statsKey = brokerStatsManager.buildStatsKey(brokerStatsManager.buildStatsKey(TOPIC, String.valueOf(QUEUE_ID)), GROUP_NAME);
assertThat(brokerStatsManager.getStatsItem(QUEUE_GET_NUMS, statsKey).getValue().doubleValue()).isEqualTo(1L);
} |
public static int toInt(String str) {
return toInt(str, 0);
} | @Test
void testToInt() {
assertEquals(0, NumberUtils.toInt(null));
assertEquals(0, NumberUtils.toInt(StringUtils.EMPTY));
assertEquals(1, NumberUtils.toInt("1"));
} |
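The two-argument overload it delegates to is not shown in this row; a conventional sketch of what toInt(str, defaultValue) presumably does, consistent with the test (null and "" both fall back to 0):

// Hedged sketch of the delegated overload: parse, falling back to the
// supplied default on null or malformed input.
static int toIntSketch(String str, int defaultValue) {
    if (str == null) {
        return defaultValue;
    }
    try {
        return Integer.parseInt(str);
    } catch (NumberFormatException e) {
        return defaultValue;
    }
}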
@Override
public boolean equals(Object obj) {
if ( this == obj ) {
return true;
}
if ( obj == null ) {
return false;
}
if ( getClass() != obj.getClass() ) {
return false;
}
final SelectionParameters other = (SelectionParameters) obj;
if ( !equals( this.qualifiers, other.qualifiers ) ) {
return false;
}
if ( !Objects.equals( this.qualifyingNames, other.qualifyingNames ) ) {
return false;
}
if ( !Objects.equals( this.conditionQualifiers, other.conditionQualifiers ) ) {
return false;
}
if ( !Objects.equals( this.conditionQualifyingNames, other.conditionQualifyingNames ) ) {
return false;
}
if ( !Objects.equals( this.sourceRHS, other.sourceRHS ) ) {
return false;
}
return equals( this.resultType, other.resultType );
} | @Test
public void testEqualsQualifiersOneNull() {
List<String> qualifyingNames = Arrays.asList( "language", "german" );
TypeMirror resultType = new TestTypeMirror( "resultType" );
List<TypeMirror> qualifiers = new ArrayList<>();
qualifiers.add( new TestTypeMirror( "org.mapstruct.test.SomeType" ) );
qualifiers.add( new TestTypeMirror( "org.mapstruct.test.SomeOtherType" ) );
SelectionParameters params = new SelectionParameters( qualifiers, qualifyingNames, resultType, typeUtils );
SelectionParameters params2 = new SelectionParameters( null, qualifyingNames, resultType, typeUtils );
assertThat( params.equals( params2 ) ).as( "Second null qualifiers" ).isFalse();
assertThat( params2.equals( params ) ).as( "First null qualifiers" ).isFalse();
} |
@Description("Returns the type of the geometry")
@ScalarFunction("ST_GeometryType")
@SqlType(VARCHAR)
public static Slice stGeometryType(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
{
return EsriGeometrySerde.getGeometryType(input).standardName();
} | @Test
public void testSTGeometryType()
{
assertFunction("ST_GeometryType(ST_Point(1, 4))", VARCHAR, "ST_Point");
assertFunction("ST_GeometryType(ST_GeometryFromText('LINESTRING (1 1, 2 2)'))", VARCHAR, "ST_LineString");
assertFunction("ST_GeometryType(ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1, 1 1))'))", VARCHAR, "ST_Polygon");
assertFunction("ST_GeometryType(ST_GeometryFromText('MULTIPOINT (1 1, 2 2)'))", VARCHAR, "ST_MultiPoint");
assertFunction("ST_GeometryType(ST_GeometryFromText('MULTILINESTRING ((1 1, 2 2), (3 3, 4 4))'))", VARCHAR, "ST_MultiLineString");
assertFunction("ST_GeometryType(ST_GeometryFromText('MULTIPOLYGON (((1 1, 1 4, 4 4, 4 1, 1 1)), ((1 1, 1 4, 4 4, 4 1, 1 1)))'))", VARCHAR, "ST_MultiPolygon");
assertFunction("ST_GeometryType(ST_GeometryFromText('GEOMETRYCOLLECTION(POINT(4 6),LINESTRING(4 6, 7 10))'))", VARCHAR, "ST_GeomCollection");
assertFunction("ST_GeometryType(ST_Envelope(ST_GeometryFromText('LINESTRING (1 1, 2 2)')))", VARCHAR, "ST_Polygon");
} |
@Override
public String toString(final RouteUnit routeUnit) {
StringBuilder result = new StringBuilder();
appendInsertValue(routeUnit, result);
result.delete(result.length() - 2, result.length());
return result.toString();
} | @Test
void assertToStringWithoutRouteUnit() {
assertThat(shardingInsertValuesToken.toString(), is("('shardingsphere', 'test')"));
} |
@Override
public Path move(final Path file, final Path renamed, final TransferStatus status, final Delete.Callback delete, final ConnectionCallback callback) throws BackgroundException {
try {
if(status.isExists()) {
if(log.isWarnEnabled()) {
log.warn(String.format("Delete file %s to be replaced with %s", renamed, file));
}
new BoxDeleteFeature(session, fileid).delete(Collections.singletonList(renamed), callback, delete);
}
final String id = fileid.getFileId(file);
if(file.isDirectory()) {
final Folder result = new FoldersApi(new BoxApiClient(session.getClient())).putFoldersId(
id, new FoldersFolderIdBody()
.name(renamed.getName())
.parent(new FoldersfolderIdParent()
.id(fileid.getFileId(renamed.getParent()))),
null, BoxAttributesFinderFeature.DEFAULT_FIELDS);
fileid.cache(file, null);
fileid.cache(renamed, id);
return renamed.withAttributes(new BoxAttributesFinderFeature(session, fileid).toAttributes(result));
}
final File result = new FilesApi(new BoxApiClient(session.getClient())).putFilesId(
id, new FilesFileIdBody()
.name(renamed.getName())
.parent(new FilesfileIdParent()
.id(fileid.getFileId(renamed.getParent()))),
null, BoxAttributesFinderFeature.DEFAULT_FIELDS);
fileid.cache(file, null);
fileid.cache(renamed, id);
return renamed.withAttributes(new BoxAttributesFinderFeature(session, fileid).toAttributes(result));
}
catch(ApiException e) {
throw new BoxExceptionMappingService(fileid).map("Cannot rename {0}", e, file);
}
} | @Test(expected = NotfoundException.class)
public void testMoveNotFound() throws Exception {
final BoxFileidProvider fileid = new BoxFileidProvider(session);
final Path test = new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
new BoxMoveFeature(session, fileid).move(test, new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus(), new Delete.DisabledCallback(), new DisabledConnectionCallback());
} |
public static boolean deactivate(Operation op) {
return op.deactivate();
} | @Test
public void testDeactivate() {
Operation operation = new DummyOperation();
setCallId(operation, 10);
assertTrue(deactivate(operation));
assertFalse(operation.isActive());
} |
@Override
public void write(final PostgreSQLPacketPayload payload, final Object value) {
payload.writeBytes((byte[]) value);
} | @Test
void assertWrite() {
byte[] bytes = new byte[5];
ByteBuf byteBuf = Unpooled.wrappedBuffer(bytes).writerIndex(0);
PostgreSQLPacketPayload payload = new PostgreSQLPacketPayload(byteBuf, StandardCharsets.UTF_8);
byte[] expected = "value".getBytes(StandardCharsets.UTF_8);
new PostgreSQLByteaBinaryProtocolValue().write(payload, expected);
assertThat(bytes, is(expected));
} |
public static String substVars(String val, PropertyContainer pc1) throws ScanException {
return substVars(val, pc1, null);
} | @Test
public void leftAccoladeFollowedByDefaultStateWithNoLiteral() throws ScanException {
Exception e = assertThrows(ScanException.class, () -> {
OptionHelper.substVars("x{:-a}", context);
});
String expectedMessage = EXPECTING_DATA_AFTER_LEFT_ACCOLADE;
assertEquals(expectedMessage, e.getMessage());
} |
public CompletableFuture<QueryAssignmentResponse> queryAssignment(ProxyContext ctx,
QueryAssignmentRequest request) {
CompletableFuture<QueryAssignmentResponse> future = new CompletableFuture<>();
try {
validateTopicAndConsumerGroup(request.getTopic(), request.getGroup());
List<org.apache.rocketmq.proxy.common.Address> addressList = this.convertToAddressList(request.getEndpoints());
ProxyTopicRouteData proxyTopicRouteData = this.messagingProcessor.getTopicRouteDataForProxy(
ctx,
addressList,
request.getTopic().getName());
boolean fifo = false;
SubscriptionGroupConfig config = this.messagingProcessor.getSubscriptionGroupConfig(ctx,
request.getGroup().getName());
if (config != null && config.isConsumeMessageOrderly()) {
fifo = true;
}
List<Assignment> assignments = new ArrayList<>();
Map<String, Map<Long, Broker>> brokerMap = buildBrokerMap(proxyTopicRouteData.getBrokerDatas());
for (QueueData queueData : proxyTopicRouteData.getQueueDatas()) {
if (PermName.isReadable(queueData.getPerm()) && queueData.getReadQueueNums() > 0) {
Map<Long, Broker> brokerIdMap = brokerMap.get(queueData.getBrokerName());
if (brokerIdMap != null) {
Broker broker = brokerIdMap.get(MixAll.MASTER_ID);
Permission permission = this.convertToPermission(queueData.getPerm());
if (fifo) {
for (int i = 0; i < queueData.getReadQueueNums(); i++) {
MessageQueue defaultMessageQueue = MessageQueue.newBuilder()
.setTopic(request.getTopic())
.setId(i)
.setPermission(permission)
.setBroker(broker)
.build();
assignments.add(Assignment.newBuilder()
.setMessageQueue(defaultMessageQueue)
.build());
}
} else {
MessageQueue defaultMessageQueue = MessageQueue.newBuilder()
.setTopic(request.getTopic())
.setId(-1)
.setPermission(permission)
.setBroker(broker)
.build();
assignments.add(Assignment.newBuilder()
.setMessageQueue(defaultMessageQueue)
.build());
}
}
}
}
QueryAssignmentResponse response;
if (assignments.isEmpty()) {
response = QueryAssignmentResponse.newBuilder()
.setStatus(ResponseBuilder.getInstance().buildStatus(Code.FORBIDDEN, "no readable queue"))
.build();
} else {
response = QueryAssignmentResponse.newBuilder()
.addAllAssignments(assignments)
.setStatus(ResponseBuilder.getInstance().buildStatus(Code.OK, Code.OK.name()))
.build();
}
future.complete(response);
} catch (Throwable t) {
future.completeExceptionally(t);
}
return future;
} | @Test
public void testQueryAssignmentWithNoReadQueue() throws Throwable {
when(this.messagingProcessor.getTopicRouteDataForProxy(any(), any(), anyString()))
.thenReturn(createProxyTopicRouteData(0, 2, 6));
QueryAssignmentResponse response = this.routeActivity.queryAssignment(
createContext(),
QueryAssignmentRequest.newBuilder()
.setEndpoints(grpcEndpoints)
.setTopic(GRPC_TOPIC)
.setGroup(GRPC_GROUP)
.build()
).get();
assertEquals(Code.FORBIDDEN, response.getStatus().getCode());
} |
public static Result parse(String body) throws ParseException {
Matcher m;
Result result = new Result();
m = PATTERN_IMAGE_URL.matcher(body);
if (m.find()) {
result.imageUrl = StringUtils.unescapeXml(StringUtils.trim(m.group(1)));
}
m = PATTERN_SKIP_HATH_KEY.matcher(body);
if (m.find()) {
result.skipHathKey = StringUtils.unescapeXml(StringUtils.trim(m.group(1)));
}
m = PATTERN_ORIGIN_IMAGE_URL.matcher(body);
if (m.find()) {
result.originImageUrl = StringUtils.unescapeXml(m.group(1)) + "fullimg" + StringUtils.unescapeXml(m.group(2));
}
m = PATTERN_SHOW_KEY.matcher(body);
if (m.find()) {
result.showKey = m.group(1);
}
if (!TextUtils.isEmpty(result.imageUrl) && !TextUtils.isEmpty(result.showKey)) {
return result;
} else {
throw new ParseException("Parse image url and show key error", body);
}
} | @Test
public void testParse() throws IOException, ParseException {
InputStream resource = GalleryPageParserTest.class.getResourceAsStream("GalleryPageParserTest.GalleryTopListEX.html");
BufferedSource source = Okio.buffer(Okio.source(resource));
String body = source.readUtf8();
GalleryPageParser.Result result = GalleryPageParser.parse(body);
assertEquals("http://108.6.41.160:2688/h/5c63e9a5810d8d9c873d9e0dfaadc4a0d70a13bf-188862-1280-879-jpg/keystamp=1550291700-145ecbbb10;fileindex=67290651;xres=1280/10.jpg", result.imageUrl);
assertEquals("26664-430636", result.skipHathKey);
assertEquals("https://e-hentai.org/fullimg.php?gid=1363978&page=10&key=qt2hwrx98a4", result.originImageUrl);
assertEquals("ghz0e5m98a4", result.showKey);
} |
@Override
public Buffer allocate() {
return allocate(this.pageSize);
} | @Test
public void testDefault() throws Exception {
final PooledBufferAllocatorImpl allocator = new PooledBufferAllocatorImpl(4096);
final Buffer buffer = allocator.allocate();
assertEquals(0, buffer.offset());
assertEquals(0, buffer.limit());
assertEquals(4096, buffer.capacity());
buffer.release();
} |
Future<Boolean> canRoll(int podId) {
LOGGER.debugCr(reconciliation, "Determining whether broker {} can be rolled", podId);
return canRollBroker(descriptions, podId);
} | @Test
public void testMinIsrMoreThanReplicas(VertxTestContext context) {
KSB ksb = new KSB()
.addNewTopic("A", false)
.addToConfig(TopicConfig.MIN_IN_SYNC_REPLICAS_CONFIG, "2")
.addNewPartition(0)
.replicaOn(0)
.leader(0)
.isr(0)
.endPartition()
.endTopic()
.addBroker(3);
KafkaAvailability kafkaAvailability = new KafkaAvailability(new Reconciliation("dummy", "kind", "namespace", "A"), ksb.ac());
Checkpoint a = context.checkpoint(ksb.brokers.size());
for (Integer brokerId : ksb.brokers.keySet()) {
kafkaAvailability.canRoll(brokerId).onComplete(context.succeeding(canRoll -> context.verify(() -> {
assertTrue(canRoll,
"broker " + brokerId + " should be rollable, being minisr = 2, but only 1 replicas");
a.flag();
})));
}
} |
@Override
public void removeDevicePorts(DeviceId deviceId) {
checkNotNull(deviceId, DEVICE_ID_NULL);
if (isAvailable(deviceId)) {
log.debug("Cannot remove ports of device {} while it is available.", deviceId);
return;
}
List<PortDescription> portDescriptions = ImmutableList.of();
List<DeviceEvent> events = store.updatePorts(getProvider(deviceId).id(),
deviceId, portDescriptions);
if (events != null) {
for (DeviceEvent event : events) {
post(event);
}
}
} | @Test
public void removeDevicePorts() {
connectDevice(DID1, SW1);
List<PortDescription> pds = new ArrayList<>();
pds.add(DefaultPortDescription.builder().withPortNumber(P1).isEnabled(true).build());
pds.add(DefaultPortDescription.builder().withPortNumber(P2).isEnabled(true).build());
pds.add(DefaultPortDescription.builder().withPortNumber(P3).isEnabled(true).build());
providerService.updatePorts(DID1, pds);
validateEvents(DEVICE_ADDED, PORT_ADDED, PORT_ADDED, PORT_ADDED);
// Try removing ports while device is available/connected; it should be a no-op.
admin.removeDevicePorts(DID1);
assertEquals("wrong port count", 3, service.getPorts(DID1).size());
// Disconnect device
providerService.deviceDisconnected(DID1);
assertFalse("device should not be available", service.isAvailable(DID1));
validateEvents(DEVICE_AVAILABILITY_CHANGED);
// Now remove ports for real
admin.removeDevicePorts(DID1);
validateEvents(PORT_REMOVED, PORT_REMOVED, PORT_REMOVED);
assertEquals("wrong port count", 0, service.getPorts(DID1).size());
} |
@Override
public boolean supportsStoredFunctionsUsingCallSyntax() {
return false;
} | @Test
void assertSupportsStoredFunctionsUsingCallSyntax() {
assertFalse(metaData.supportsStoredFunctionsUsingCallSyntax());
} |
protected boolean clearMetricsHeaders(Message in) {
return in.removeHeaders(HEADER_PATTERN);
} | @Test
public void testClearRealHeaders() {
Message msg = new DefaultMessage(new DefaultCamelContext());
Object val = new Object();
msg.setHeader(HEADER_HISTOGRAM_VALUE, 109L);
msg.setHeader(HEADER_METRIC_NAME, "the metric");
msg.setHeader("notRemoved", val);
assertThat(msg.getHeaders().size(), is(3));
assertThat(msg.getHeader(HEADER_HISTOGRAM_VALUE, Long.class), is(109L));
assertThat(msg.getHeader(HEADER_METRIC_NAME, String.class), is("the metric"));
assertThat(msg.getHeader("notRemoved"), is(val));
okProducer.clearMetricsHeaders(msg);
assertThat(msg.getHeaders().size(), is(1));
assertThat(msg.getHeader("notRemoved"), is(val));
} |
@SuppressWarnings("unchecked")
public QueryMetadataHolder handleStatement(
final ServiceContext serviceContext,
final Map<String, Object> configOverrides,
final Map<String, Object> requestProperties,
final PreparedStatement<?> statement,
final Optional<Boolean> isInternalRequest,
final MetricsCallbackHolder metricsCallbackHolder,
final Context context,
final boolean excludeTombstones
) {
if (statement.getStatement() instanceof Query) {
return handleQuery(
serviceContext,
(PreparedStatement<Query>) statement,
isInternalRequest,
metricsCallbackHolder,
configOverrides,
requestProperties,
context,
excludeTombstones
);
} else {
return QueryMetadataHolder.unhandled();
}
} | @Test
public void queryLoggerShouldNotReceiveStatementsWhenHandlePullQuery() {
when(mockDataSource.getDataSourceType()).thenReturn(DataSourceType.KTABLE);
when(ksqlEngine.executeTablePullQuery(any(), any(), any(), any(), any(), any(), any(),
anyBoolean(), any()))
.thenReturn(pullQueryResult);
try (MockedStatic<QueryLogger> logger = Mockito.mockStatic(QueryLogger.class)) {
queryExecutor.handleStatement(serviceContext, ImmutableMap.of(), ImmutableMap.of(),
pullQuery, Optional.empty(), metricsCallbackHolder, context, false);
logger.verify(() -> QueryLogger.info("Transient query created",
PULL_QUERY_STRING), never());
}
} |
public RuleRestResponse toRuleRestResponse(RuleInformation ruleInformation) {
RuleRestResponse.Builder builder = RuleRestResponse.Builder.builder();
RuleDto ruleDto = ruleInformation.ruleDto();
builder
.setId(ruleDto.getUuid())
.setKey(ruleDto.getKey().toString())
.setRepositoryKey(ruleDto.getRepositoryKey())
.setName(ruleDto.getName())
.setSeverity(ruleDto.getSeverityString())
.setType(RuleTypeRestEnum.from(RuleType.valueOf(ruleDto.getType())))
.setImpacts(toImpactRestResponse(ruleDto.getDefaultImpacts()))
.setCleanCodeAttribute(CleanCodeAttributeRestEnum.from(ruleDto.getCleanCodeAttribute()))
.setCleanCodeAttributeCategory(ofNullable(ruleDto.getCleanCodeAttribute())
.map(CleanCodeAttribute::getAttributeCategory)
.map(CleanCodeAttributeCategoryRestEnum::from)
.orElse(null))
.setStatus(RuleStatusRestEnum.from(ruleDto.getStatus()))
.setExternal(ruleDto.isExternal())
.setCreatedAt(toDateTime(ruleDto.getCreatedAt()))
.setGapDescription(ruleDto.getGapDescription())
.setHtmlNote(ofNullable(ruleDto.getNoteData()).map(n -> macroInterpreter.interpret(Markdown.convertToHtml(n))).orElse(null))
.setMarkdownNote(ruleDto.getNoteData())
.setEducationPrinciples(new ArrayList<>(ruleDto.getEducationPrinciples()))
.setTemplate(ruleDto.isTemplate())
.setTemplateId(ruleDto.getTemplateUuid())
.setTags(new ArrayList<>(ruleDto.getTags()))
.setSystemTags(new ArrayList<>(ruleDto.getSystemTags()))
.setLanguageKey(ruleDto.getLanguage())
.setLanguageName(getLanguageName(ruleDto.getLanguage()))
.setParameters(toRuleParameterResponse(ruleInformation.params()));
setDescriptionFields(builder, ruleDto);
setRemediationFunctionFields(builder, ruleDto);
if (ruleDto.isAdHoc()) {
ofNullable(ruleDto.getAdHocName()).ifPresent(builder::setName);
ofNullable(ruleDto.getAdHocDescription())
.map(this::toDescriptionSectionResponse)
.ifPresent(section -> builder.setDescriptionSections(List.of(section)));
ofNullable(ruleDto.getAdHocSeverity()).ifPresent(builder::setSeverity);
ofNullable(ruleDto.getAdHocType()).ifPresent(type -> builder.setType(RuleTypeRestEnum.from(RuleType.valueOf(type))));
}
return builder.build();
} | @Test
public void toRuleRestResponse_shouldReturnNullFields_whenRuleIsEmpty() {
RuleDto dto = new RuleDto().setRuleKey("key").setRepositoryKey("repoKey").setStatus(RuleStatus.READY).setType(RuleType.BUG.getDbConstant());
RuleRestResponse ruleRestResponse = ruleRestResponseGenerator.toRuleRestResponse(new RuleInformation(dto, List.of()));
assertThat(ruleRestResponse.cleanCodeAttribute()).isNull();
assertThat(ruleRestResponse.cleanCodeAttributeCategory()).isNull();
assertThat(ruleRestResponse.htmlNote()).isNull();
} |
public JobId enqueue(JobLambda job) {
return enqueue(null, job);
} | @Test
void onSaveJobCreatingAndCreatedAreCalled() {
when(storageProvider.save(any(Job.class))).thenAnswer(invocation -> invocation.getArgument(0));
jobScheduler.enqueue(() -> testService.doWork());
assertThat(jobClientLogFilter.onCreating).isTrue();
assertThat(jobClientLogFilter.onCreated).isTrue();
} |
public static List<String> splitOnCharacterAsList(String value, char needle, int count) {
// skip leading and trailing needles
int end = value.length() - 1;
boolean skipStart = value.charAt(0) == needle;
boolean skipEnd = value.charAt(end) == needle;
if (skipStart && skipEnd) {
value = value.substring(1, end);
count = count - 2;
} else if (skipStart) {
value = value.substring(1);
count = count - 1;
} else if (skipEnd) {
value = value.substring(0, end);
count = count - 1;
}
List<String> rc = new ArrayList<>(count);
int pos = 0;
for (int i = 0; i < count; i++) {
end = value.indexOf(needle, pos);
if (end != -1) {
String part = value.substring(pos, end);
pos = end + 1;
rc.add(part);
} else {
rc.add(value.substring(pos));
break;
}
}
return rc;
} | @Test
public void testSplitOnCharacterAsList() {
List<String> list = splitOnCharacterAsList("foo", ',', 1);
assertEquals(1, list.size());
assertEquals("foo", list.get(0));
list = splitOnCharacterAsList("foo,bar", ',', 2);
assertEquals(2, list.size());
assertEquals("foo", list.get(0));
assertEquals("bar", list.get(1));
list = splitOnCharacterAsList("foo,bar,", ',', 3);
assertEquals(2, list.size());
assertEquals("foo", list.get(0));
assertEquals("bar", list.get(1));
list = splitOnCharacterAsList(",foo,bar", ',', 3);
assertEquals(2, list.size());
assertEquals("foo", list.get(0));
assertEquals("bar", list.get(1));
list = splitOnCharacterAsList(",foo,bar,", ',', 4);
assertEquals(2, list.size());
assertEquals("foo", list.get(0));
assertEquals("bar", list.get(1));
StringBuilder sb = new StringBuilder();
for (int i = 0; i < 100; i++) {
sb.append(i);
sb.append(",");
}
String value = sb.toString();
int count = StringHelper.countChar(value, ',') + 1;
list = splitOnCharacterAsList(value, ',', count);
assertEquals(100, list.size());
assertEquals("0", list.get(0));
assertEquals("50", list.get(50));
assertEquals("99", list.get(99));
} |
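StringHelper.countChar, used above to size the split, is not shown in this row; a minimal sketch:

// Hedged sketch of the countChar helper used to compute the split count.
static int countCharSketch(String value, char ch) {
    int count = 0;
    for (int i = 0; i < value.length(); i++) {
        if (value.charAt(i) == ch) {
            count++;
        }
    }
    return count;
}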
@VisibleForTesting
protected String buildShortMessage(Map<String, Object> fields) {
final StringBuilder shortMessage = new StringBuilder();
shortMessage.append("JSON API poll result: ");
if (!flatten) {
shortMessage.append(jsonPath.getPath());
}
shortMessage.append(" -> ");
if (fields.toString().length() > 50) {
shortMessage.append(fields.toString().substring(0, 50)).append("[...]");
} else {
shortMessage.append(fields.toString());
}
return shortMessage.toString();
} | @Test
public void testBuildShortMessageThatGetsCut() throws Exception {
Map<String, Object> fields = Maps.newLinkedHashMap();
fields.put("baz", 9001);
fields.put("foo", "bargggdzrtdfgfdgldfsjgkfdlgjdflkjglfdjgljslfperitperoujglkdnfkndsbafdofhasdpfoöadjsFOO");
JsonPathCodec selector = new JsonPathCodec(configOf(CK_PATH, "$.download_count", CK_FLATTEN, false), objectMapperProvider.get(), messageFactory);
assertThat(selector.buildShortMessage(fields)).isEqualTo("JSON API poll result: $['download_count'] -> {baz=9001, foo=bargggdzrtdfgfdgldfsjgkfdlgjdflkjgl[...]");
} |
public static String getNamespaceFromResource(String resource) {
if (StringUtils.isEmpty(resource) || isSystemResource(resource)) {
return STRING_BLANK;
}
String resourceWithoutRetryAndDLQ = withOutRetryAndDLQ(resource);
int index = resourceWithoutRetryAndDLQ.indexOf(NAMESPACE_SEPARATOR);
return index > 0 ? resourceWithoutRetryAndDLQ.substring(0, index) : STRING_BLANK;
} | @Test
public void testGetNamespaceFromResource() {
String namespaceExpectBlank = NamespaceUtil.getNamespaceFromResource(TOPIC);
Assert.assertEquals(namespaceExpectBlank, NamespaceUtil.STRING_BLANK);
String namespace = NamespaceUtil.getNamespaceFromResource(TOPIC_WITH_NAMESPACE);
Assert.assertEquals(namespace, INSTANCE_ID);
String namespaceFromRetryTopic = NamespaceUtil.getNamespaceFromResource(RETRY_TOPIC_WITH_NAMESPACE);
Assert.assertEquals(namespaceFromRetryTopic, INSTANCE_ID);
} |
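NAMESPACE_SEPARATOR and the retry/DLQ prefix handling live outside this row. Assuming '%' as the separator (an assumption) and omitting the prefix stripping done by withOutRetryAndDLQ, the core extraction reduces to:

// Hedged sketch: '%' as the namespace separator is an assumption here, and
// the retry/DLQ prefix stripping is omitted.
static String namespaceOf(String resource) {
    if (resource == null || resource.isEmpty()) {
        return "";
    }
    int index = resource.indexOf('%');
    return index > 0 ? resource.substring(0, index) : "";
}

For a resource shaped like MY_INSTANCE%MY_TOPIC, the sketch yields MY_INSTANCE.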
@Override
public void deleteArticleCategory(Long id) {
// Validate that the category exists
validateArticleCategoryExists(id);
// Check whether any articles are associated with this category
Long count = articleService.getArticleCountByCategoryId(id);
if (count > 0) {
throw exception(ARTICLE_CATEGORY_DELETE_FAIL_HAVE_ARTICLES);
}
// Delete it
articleCategoryMapper.deleteById(id);
} | @Test
public void testDeleteArticleCategory_notExists() {
// Prepare the parameter
Long id = randomLongId();
// Invoke and assert the expected exception
assertServiceException(() -> articleCategoryService.deleteArticleCategory(id), ARTICLE_CATEGORY_NOT_EXISTS);
} |
@Override
public <T> @NonNull Schema schemaFor(TypeDescriptor<T> typeDescriptor) {
return schemaFor(typeDescriptor.getRawType());
} | @Test
public void testUnionSchema() {
final Schema schema = defaultSchemaProvider.schemaFor(TypeDescriptor.of(TestThriftUnion.class));
assertNotNull(schema);
assertEquals(TypeName.LOGICAL_TYPE, schema.getField("camelCaseEnum").getType().getTypeName());
assertEquals(
EnumerationType.IDENTIFIER,
schema.getField("camelCaseEnum").getType().getLogicalType().getIdentifier());
assertEquals(TypeName.ROW, schema.getField("snake_case_nested_struct").getType().getTypeName());
} |
public void project() {
srcPotentialIndex = 0;
trgPotentialIndex = 0;
recurse(0, 0);
BayesAbsorption.normalize(trgPotentials);
} | @Test
public void testProjection4() {
// Projects from node1 into sep. A, B and C are in node1. B and C are in the sep.
// This tests a non-separator var that comes before the separator vars.
BayesVariable a = new BayesVariable<String>( "A", 0, new String[] {"A1", "A2"}, new double[][] {{0.1, 0.2}});
BayesVariable b = new BayesVariable<String>( "B", 1, new String[] {"B1", "B2"}, new double[][] {{0.1, 0.2}});
BayesVariable c = new BayesVariable<String>( "C", 2, new String[] {"C1", "C2"}, new double[][] {{0.1, 0.2}});
Graph<BayesVariable> graph = new BayesNetwork();
GraphNode x0 = addNode(graph);
GraphNode x1 = addNode(graph);
GraphNode x2 = addNode(graph);
GraphNode x3 = addNode(graph);
x0.setContent( a );
x1.setContent( b );
x2.setContent( c );
JunctionTreeClique node1 = new JunctionTreeClique(0, graph, bitSet("0111") );
JunctionTreeClique node2 = new JunctionTreeClique(1, graph, bitSet("0110") );
SeparatorState sep = new JunctionTreeSeparator(0, node1, node2, bitSet("0101"), graph).createState();
double v = 0.1;
for ( int i = 0; i < node1.getPotentials().length; i++ ) {
node1.getPotentials()[i] = v;
v = scaleDouble(3, v + 0.1 );
}
BayesVariable[] vars = new BayesVariable[] {a, b, c};
BayesVariable[] sepVars = new BayesVariable[] { b, c };
int[] sepVarPos = PotentialMultiplier.createSubsetVarPos(vars, sepVars);
int sepVarNumberOfStates = PotentialMultiplier.createNumberOfStates(sepVars);
int[] sepVarMultipliers = PotentialMultiplier.createIndexMultipliers(sepVars, sepVarNumberOfStates);
double[] projectedSepPotentials = new double[ sep.getPotentials().length];
BayesProjection p = new BayesProjection(vars, node1.getPotentials(), sepVarPos, sepVarMultipliers, projectedSepPotentials);
p.project();
// remember it's been normalized, from 0.6 0.8 1.0 1.2
assertArray(new double[]{0.167, 0.222, 0.278, 0.333}, scaleDouble(3, projectedSepPotentials));
} |
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj instanceof DefaultQueueDescription) {
final DefaultQueueDescription other = (DefaultQueueDescription) obj;
return Objects.equals(this.queueId, other.queueId) &&
Objects.equals(this.type, other.type) &&
Objects.equals(this.dscp, other.dscp) &&
Objects.equals(this.maxRate, other.maxRate) &&
Objects.equals(this.minRate, other.minRate) &&
Objects.equals(this.burst, other.burst) &&
Objects.equals(this.priority, other.priority);
}
return false;
} | @Test
public void testEquals() {
new EqualsTester()
.addEqualityGroup(queueDescription1, sameAsQueueDescription1)
.addEqualityGroup(queueDescription2)
.testEquals();
} |
@Override
public void onHeartbeatSuccess(ShareGroupHeartbeatResponseData response) {
if (response.errorCode() != Errors.NONE.code()) {
String errorMessage = String.format(
"Unexpected error in Heartbeat response. Expected no error, but received: %s",
Errors.forCode(response.errorCode())
);
throw new IllegalArgumentException(errorMessage);
}
MemberState state = state();
if (state == MemberState.LEAVING) {
log.debug("Ignoring heartbeat response received from broker. Member {} with epoch {} is " +
"already leaving the group.", memberId, memberEpoch);
return;
}
if (state == MemberState.UNSUBSCRIBED && maybeCompleteLeaveInProgress()) {
log.debug("Member {} with epoch {} received a successful response to the heartbeat " +
"to leave the group and completed the leave operation. ", memberId, memberEpoch);
return;
}
if (isNotInGroup()) {
log.debug("Ignoring heartbeat response received from broker. Member {} is in {} state" +
" so it's not a member of the group. ", memberId, state);
return;
}
// Update the group member id label in the client telemetry reporter if the member id has
// changed. Initially the member id is empty, and it is updated when the member joins the
// group. This is done here to avoid updating the label on every heartbeat response. Also
// check if the member id is null, as the schema defines it as nullable.
if (response.memberId() != null && !response.memberId().equals(memberId)) {
clientTelemetryReporter.ifPresent(reporter -> reporter.updateMetricsLabels(
Collections.singletonMap(ClientTelemetryProvider.GROUP_MEMBER_ID, response.memberId())));
}
this.memberId = response.memberId();
updateMemberEpoch(response.memberEpoch());
ShareGroupHeartbeatResponseData.Assignment assignment = response.assignment();
if (assignment != null) {
if (!state.canHandleNewAssignment()) {
// New assignment received but member is in a state where it cannot take new
// assignments (ex. preparing to leave the group)
log.debug("Ignoring new assignment {} received from server because member is in {} state.",
assignment, state);
return;
}
Map<Uuid, SortedSet<Integer>> newAssignment = new HashMap<>();
assignment.topicPartitions().forEach(topicPartition -> newAssignment.put(topicPartition.topicId(), new TreeSet<>(topicPartition.partitions())));
processAssignmentReceived(newAssignment);
}
} | @Test
public void testMemberKeepsUnresolvedAssignmentWaitingForMetadataUntilResolved() {
// Assignment with 2 topics, only 1 found in metadata
Uuid topic1 = Uuid.randomUuid();
String topic1Name = "topic1";
Uuid topic2 = Uuid.randomUuid();
ShareGroupHeartbeatResponseData.Assignment assignment = new ShareGroupHeartbeatResponseData.Assignment()
.setTopicPartitions(Arrays.asList(
new ShareGroupHeartbeatResponseData.TopicPartitions()
.setTopicId(topic1)
.setPartitions(Collections.singletonList(0)),
new ShareGroupHeartbeatResponseData.TopicPartitions()
.setTopicId(topic2)
.setPartitions(Arrays.asList(1, 3))
));
when(metadata.topicNames()).thenReturn(Collections.singletonMap(topic1, topic1Name));
// Receive assignment partly in metadata - reconcile+ack what's in metadata, keep the
// unresolved and request metadata update.
ShareMembershipManager membershipManager = mockJoinAndReceiveAssignment(true, assignment);
assertEquals(MemberState.ACKNOWLEDGING, membershipManager.state());
verify(metadata).requestUpdate(anyBoolean());
assertEquals(Collections.singleton(topic2), membershipManager.topicsAwaitingReconciliation());
// When the ack is sent the member should go back to RECONCILING because it still has
// unresolved assignment to be reconciled.
membershipManager.onHeartbeatRequestGenerated();
assertEquals(MemberState.RECONCILING, membershipManager.state());
// Target assignment received again with the same unresolved topic. Client should keep it
// as unresolved.
clearInvocations(subscriptionState);
membershipManager.onHeartbeatSuccess(createShareGroupHeartbeatResponse(assignment).data());
assertEquals(MemberState.RECONCILING, membershipManager.state());
assertEquals(Collections.singleton(topic2), membershipManager.topicsAwaitingReconciliation());
verify(subscriptionState, never()).assignFromSubscribed(anyCollection());
} |
public static String getMediaType(String fileName) {
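        // Looks up the media type for the file's extension (case-insensitive);
        // returns null when there is no name, no extension, or no mapping.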
if (fileName == null) return null;
        int idx = fileName.lastIndexOf(".");
        // lastIndexOf can never return fileName.length(), so compare against length() - 1 to catch a trailing dot
        if (idx == -1 || idx == fileName.length() - 1) return null;
        return FILE_MAP.get(fileName.toLowerCase().substring(idx + 1));
} | @Test
public void testResolver() {
assertNull(MediaTypeResolver.getMediaType(null));
assertNull(MediaTypeResolver.getMediaType("noextension"));
assertNull(MediaTypeResolver.getMediaType("124."));
assertEquals("application/javascript", MediaTypeResolver.getMediaType("file.js"));
assertEquals("image/jpeg", MediaTypeResolver.getMediaType("file.jpg"));
assertEquals("image/jpeg", MediaTypeResolver.getMediaType("file.jpeg"));
assertEquals("image/jpeg", MediaTypeResolver.getMediaType("file.jpe"));
assertEquals("text/css", MediaTypeResolver.getMediaType("file.css"));
assertEquals("text/html", MediaTypeResolver.getMediaType("file.htm"));
assertEquals("text/html", MediaTypeResolver.getMediaType("file.html"));
assertEquals("application/java-archive", MediaTypeResolver.getMediaType("file.JAR"));
} |
public void setEnabled(boolean enabled) {
this.enabled = enabled;
} | @Test
public void setEnabled() {
properties.setEnabled(false);
assertThat(properties.isEnabled()).isEqualTo(false);
} |
@Override
public String rpcType() {
return RpcTypeEnum.DUBBO.getName();
} | @Test
public void testBuildDivideUpstreamList() {
List<URIRegisterDTO> list = new ArrayList<>();
list.add(URIRegisterDTO.builder().appName("test1")
.rpcType(RpcTypeEnum.DUBBO.getName())
.host(LOCALHOST).port(8090).build());
list.add(URIRegisterDTO.builder().appName("test2")
.rpcType(RpcTypeEnum.DUBBO.getName())
.host(LOCALHOST).port(8091).build());
try {
Method testMethod = shenyuClientRegisterDubboService.getClass().getDeclaredMethod("buildDubboUpstreamList", List.class);
testMethod.setAccessible(true);
List<DivideUpstream> result = (List<DivideUpstream>) testMethod.invoke(shenyuClientRegisterDubboService, list);
assertEquals(result.size(), 2);
} catch (Exception e) {
throw new ShenyuException(e.getCause());
}
} |
@Override
public OutputStream create(String path, CreateOptions options) throws IOException {
if (!options.isEnsureAtomic()) {
return createDirect(path, options);
}
return new AtomicFileOutputStream(path, this, options);
} | @Test
public void create() throws IOException {
String filepath = PathUtils.concatPath(mLocalUfsRoot, getUniqueFileName());
OutputStream os = mLocalUfs.create(filepath);
os.close();
assertTrue(mLocalUfs.isFile(filepath));
File file = new File(filepath);
assertTrue(file.exists());
} |
public ClusterStateBundle cloneWithMapper(Function<ClusterState, ClusterState> mapper) {
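        // Applies the mapper to a cloned baseline state and to each derived bucket-space
        // state, leaving distribution config, feed block and deferred activation intact.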
AnnotatedClusterState clonedBaseline = baselineState.cloneWithClusterState(
mapper.apply(baselineState.getClusterState().clone()));
Map<String, AnnotatedClusterState> clonedDerived = derivedBucketSpaceStates.entrySet().stream()
.collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().cloneWithClusterState(
mapper.apply(e.getValue().getClusterState().clone()))));
return new ClusterStateBundle(clonedBaseline, clonedDerived, distributionConfig, feedBlock, deferredActivation);
} | @Test
void cloning_preserves_feed_block_state() {
var bundle = createTestBundleWithFeedBlock("foo");
var derived = bundle.cloneWithMapper(Function.identity());
assertEquals(bundle, derived);
} |
@GetMapping("/image-path")
public String getImagePath() {
LOGGER.info("Successfully found image path");
return "/product-image.png";
} | @Test
void testGetImagePath() {
var imageController = new ImageController();
var imagePath = imageController.getImagePath();
assertEquals("/product-image.png", imagePath);
} |
public static String formatSql(final AstNode root) {
final StringBuilder builder = new StringBuilder();
new Formatter(builder).process(root, 0);
return StringUtils.stripEnd(builder.toString(), "\n");
} | @Test
public void shouldFormatReplaceSelectQueryCorrectly() {
final String statementString =
"CREATE OR REPLACE STREAM S AS SELECT a.address->city FROM address a;";
final Statement statement = parseSingle(statementString);
assertThat(SqlFormatter.formatSql(statement), equalTo("CREATE OR REPLACE STREAM S AS SELECT A.ADDRESS->CITY\n"
+ "FROM ADDRESS A\nEMIT CHANGES"));
} |
public static List<WeightedHostAddress> prioritize(WeightedHostAddress[] records) {
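        // Orders SRV-style records by ascending priority, then weighted-randomly
        // within each priority group, per the selection algorithm of RFC 2782.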
final List<WeightedHostAddress> result = new LinkedList<>();
// sort by priority (ascending)
SortedMap<Integer, Set<WeightedHostAddress>> byPriority = new TreeMap<>();
for(final WeightedHostAddress record : records) {
if (byPriority.containsKey(record.getPriority())) {
byPriority.get(record.getPriority()).add(record);
} else {
final Set<WeightedHostAddress> set = new HashSet<>();
set.add(record);
byPriority.put(record.getPriority(), set);
}
}
// now, randomize each priority set by weight.
for(Map.Entry<Integer, Set<WeightedHostAddress>> weights : byPriority.entrySet()) {
List<WeightedHostAddress> zeroWeights = new LinkedList<>();
int totalWeight = 0;
final Iterator<WeightedHostAddress> i = weights.getValue().iterator();
while (i.hasNext()) {
final WeightedHostAddress next = i.next();
if (next.weight == 0) {
// set aside, as these should be considered last according to the RFC.
zeroWeights.add(next);
i.remove();
continue;
}
totalWeight += next.getWeight();
}
int iterationWeight = totalWeight;
Iterator<WeightedHostAddress> iter = weights.getValue().iterator();
while (iter.hasNext()) {
int needle = new Random().nextInt(iterationWeight);
while (true) {
final WeightedHostAddress record = iter.next();
needle -= record.getWeight();
if (needle <= 0) {
result.add(record);
iter.remove();
iterationWeight -= record.getWeight();
break;
}
}
iter = weights.getValue().iterator();
}
            // finally, append the hosts with zero weight (shuffled)
Collections.shuffle(zeroWeights);
result.addAll(zeroWeights);
}
return result;
} | @Test
public void testZeroWeights() throws Exception {
// setup
final DNSUtil.WeightedHostAddress hostA = new DNSUtil.WeightedHostAddress("hostA", 5222, false, 1, 0);
final DNSUtil.WeightedHostAddress hostB = new DNSUtil.WeightedHostAddress("hostB", 5222, false, 1, 0);
final DNSUtil.WeightedHostAddress[] hosts = new DNSUtil.WeightedHostAddress[] { hostA, hostB };
// do magic
boolean hostAWasFirst = false;
boolean hostBWasFirst = false;
final int maxTries = Integer.MAX_VALUE;
for (int i=0; i<maxTries; i++) {
final List<DNSUtil.WeightedHostAddress> result = DNSUtil.prioritize(hosts);
if (hostA.equals(result.get(0))) {
hostAWasFirst = true;
}
if (hostB.equals(result.get(0))) {
hostBWasFirst = true;
}
if (hostAWasFirst && hostBWasFirst) {
break;
}
if (i%1000000==0 && i>0) {
System.err.println("The last " + i + " iterations of this test all had the same result, which is very unlikely to occur (there should be an even distribution between two possible outcomes). We'll iterate up to "+ maxTries +" times, but you might want to abort the unit test at this point...");
}
}
// verify
assertTrue(hostAWasFirst);
assertTrue(hostBWasFirst);
} |
public static Set<String> validateScopes(String scopeClaimName, Collection<String> scopes) throws ValidateException {
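        // Validates each scope string and returns an unmodifiable, de-duplicated set;
        // a null collection or a duplicate (after validateString normalization) fails validation.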
if (scopes == null)
throw new ValidateException(String.format("%s value must be non-null", scopeClaimName));
Set<String> copy = new HashSet<>();
for (String scope : scopes) {
scope = validateString(scopeClaimName, scope);
if (copy.contains(scope))
throw new ValidateException(String.format("%s value must not contain duplicates - %s already present", scopeClaimName, scope));
copy.add(scope);
}
return Collections.unmodifiableSet(copy);
} | @Test
public void testValidateScopesDisallowsDuplicates() {
assertThrows(ValidateException.class, () -> ClaimValidationUtils.validateScopes("scope", Arrays.asList("a", "b", "a")));
assertThrows(ValidateException.class, () -> ClaimValidationUtils.validateScopes("scope", Arrays.asList("a", "b", " a ")));
} |
public static <T> T getMapper(Class<T> clazz) {
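        // Resolves the generated implementation of the given mapper interface via the
        // collected class loaders, wrapping lookup failures in a RuntimeException.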
try {
List<ClassLoader> classLoaders = collectClassLoaders( clazz.getClassLoader() );
return getMapper( clazz, classLoaders );
}
catch ( ClassNotFoundException | NoSuchMethodException e ) {
throw new RuntimeException( e );
}
} | @Test
public void findsNestedMapperImpl() {
assertThat( Mappers.getMapper( SomeClass.Foo.class ) ).isNotNull();
assertThat( Mappers.getMapper( SomeClass.NestedClass.Foo.class ) ).isNotNull();
} |
public void deletePolicy(String policyName) {
policies.remove(policyName);
} | @Test
public void testDeletePolicy() throws Exception {
NamespaceIsolationPolicies policies = this.getDefaultTestPolicies();
policies.deletePolicy("non-existing-policy");
assertFalse(policies.getPolicies().isEmpty());
policies.deletePolicy("policy1");
assertTrue(policies.getPolicies().isEmpty());
} |
@CheckForNull
@Override
public CeTaskResult process(CeTask task) {
try (TaskContainer container = new TaskContainerImpl(ceEngineContainer, newContainerPopulator(task))) {
container.bootup();
container.getComponentByType(ComputationStepExecutor.class).execute();
}
return null;
} | @Test
public void processThrowsNPEIfCeTaskIsNull() {
assertThatThrownBy(() -> underTest.process(null))
.isInstanceOf(NullPointerException.class);
} |
@Override
public boolean apply(InputFile inputFile) {
return extension.equals(getExtension(inputFile));
} | @Test
public void should_match_correct_extension_case_insensitively() throws IOException {
FileExtensionPredicate predicate = new FileExtensionPredicate("jAVa");
assertThat(predicate.apply(mockWithName("Program.java"))).isTrue();
assertThat(predicate.apply(mockWithName("Program.JAVA"))).isTrue();
assertThat(predicate.apply(mockWithName("Program.Java"))).isTrue();
assertThat(predicate.apply(mockWithName("Program.JaVa"))).isTrue();
} |
public boolean isNewer(Timestamped<T> other) {
return isNewerThan(checkNotNull(other).timestamp());
} | @Test
public final void testIsNewer() {
Timestamped<String> a = new Timestamped<>("a", TS_1_2);
Timestamped<String> b = new Timestamped<>("b", TS_1_1);
assertTrue(a.isNewer(b));
assertFalse(b.isNewer(a));
} |
public static Collection<Object> accumulateValues(DataIterator it, Collection<Object> accumulator)
{
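        // Drains the iterator, collecting each element's value into the supplied accumulator.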
        for (DataElement element = it.next(); element != null; element = it.next())
{
accumulator.add(element.getValue());
}
return accumulator;
} | @Test
public void testAccumulateByPath() throws Exception
{
SimpleTestData data = IteratorTestData.createSimpleTestData();
List<Object> ids = new LinkedList<>();
Builder.create(data.getDataElement(), IterationOrder.PRE_ORDER)
.filterBy(Predicates.and(Predicates.pathMatchesPattern("foo", Wildcard.ANY_ONE, "id")))
.accumulateValues(ids);
assertEquals(3, ids.size());
assertTrue(ids.contains(1));
assertTrue(ids.contains(2));
assertTrue(ids.contains(3));
} |
@Description("cube root")
@ScalarFunction
@SqlType(StandardTypes.DOUBLE)
public static double cbrt(@SqlType(StandardTypes.DOUBLE) double num)
{
return Math.cbrt(num);
} | @Test
public void testCbrt()
{
for (double doubleValue : DOUBLE_VALUES) {
assertFunction("cbrt(" + doubleValue + ")", DOUBLE, Math.cbrt(doubleValue));
assertFunction("cbrt(REAL '" + (float) doubleValue + "')", DOUBLE, Math.cbrt((float) doubleValue));
}
assertFunction("cbrt(NULL)", DOUBLE, null);
} |
@Override
public void write(Cache.Entry<? extends K, ? extends V> entry) throws CacheWriterException {
long startNanos = Timer.nanos();
try {
delegate.get().write(entry);
} finally {
writeProbe.recordValue(Timer.nanosElapsed(startNanos));
}
} | @Test
public void write() {
Cache.Entry<Integer, String> entry = new CacheEntry<>(1, "peter");
cacheWriter.write(entry);
verify(delegate).write(entry);
assertProbeCalledOnce("write");
} |
@Override
public MastershipRole getRole(DeviceId deviceId) {
checkNotNull(deviceId, DEVICE_NULL);
// TODO hard coded to master for now.
return MastershipRole.MASTER;
} | @Test
public void testGetRole() {
manager.registerTenantId(TenantId.tenantId(tenantIdValue1));
VirtualNetwork virtualNetwork = manager.createVirtualNetwork(TenantId.tenantId(tenantIdValue1));
DeviceService deviceService = manager.get(virtualNetwork.id(), DeviceService.class);
// test the getRole() method
assertEquals("The expect device role did not match.", MastershipRole.MASTER,
deviceService.getRole(DID1));
} |
@Override
public void requestInitialized(final ServletRequestEvent sre) {
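        // Record the caller's remote address on both the request and its session
        // under CLIENT_IP_NAME so downstream handlers can retrieve it.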
try {
HttpServletRequest request = (HttpServletRequest) sre.getServletRequest();
if (Objects.nonNull(request) && Objects.nonNull(request.getSession())) {
HttpSession session = request.getSession();
request.setAttribute(CLIENT_IP_NAME, sre.getServletRequest().getRemoteAddr());
session.setAttribute(CLIENT_IP_NAME, sre.getServletRequest().getRemoteAddr());
}
} catch (Exception e) {
LOG.error("request initialized error", e);
}
} | @Test
public void testRequestInitialized() {
ServletRequestEvent sre = mock(ServletRequestEvent.class);
HttpServletRequest request = mock(HttpServletRequest.class);
HttpSession session = mock(HttpSession.class);
when(sre.getServletRequest()).thenReturn(request);
when(request.getSession()).thenReturn(session);
websocketListener.requestInitialized(sre);
// For session will invoke request one more time.
verify(request, times(2)).setAttribute(CLIENT_IP_NAME, sre.getServletRequest().getRemoteAddr());
verify(session).setAttribute(CLIENT_IP_NAME, sre.getServletRequest().getRemoteAddr());
} |
public static String concatUfsPath(String base, String path) {
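    // Joins a UFS base and a path; bare scheme roots such as "s3://" are appended
    // to directly so the double slash is preserved, otherwise concatPath is used.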
    Preconditions.checkArgument(base != null, "Failed to concatPath: base is null");
    Preconditions.checkArgument(path != null, "Failed to concatPath: a null set of paths");
    Pattern basePattern = Pattern.compile("(...)\\/\\/");
    // use concatPath to join the path if base is not a bare scheme root such as "s3://"
    if (!basePattern.matcher(base).matches()) {
      return concatPath(base, path);
    } else {
String trimmedPath = SEPARATOR_MATCHER.trimFrom(path);
StringBuilder output = new StringBuilder(base.length() + trimmedPath.length());
output.append(base);
output.append(trimmedPath);
return output.toString();
}
} | @Test
public void concatUfsPath() {
assertEquals("s3://", PathUtils.concatUfsPath("s3://", ""));
assertEquals("s3://bar", PathUtils.concatUfsPath("s3://", "bar"));
assertEquals("hdfs://localhost:9010",
PathUtils.concatUfsPath("hdfs://localhost:9010/", ""));
assertEquals("hdfs://localhost:9010/bar",
PathUtils.concatUfsPath("hdfs://localhost:9010/", "bar"));
assertEquals("s3://foo", PathUtils.concatUfsPath("s3://foo", ""));
assertEquals("hdfs://localhost:9010/foo",
PathUtils.concatUfsPath("hdfs://localhost:9010/foo", ""));
// Join base without trailing "/"
assertEquals("s3://foo/bar", PathUtils.concatUfsPath("s3://foo", "bar"));
assertEquals("s3://foo/bar", PathUtils.concatUfsPath("s3://foo", "bar/"));
assertEquals("s3://foo/bar", PathUtils.concatUfsPath("s3://foo", "/bar"));
assertEquals("s3://foo/bar", PathUtils.concatUfsPath("s3://foo", "/bar/"));
assertEquals("hdfs://localhost:9010/foo/bar",
PathUtils.concatUfsPath("hdfs://localhost:9010/foo", "bar"));
assertEquals("hdfs://localhost:9010/foo/bar",
PathUtils.concatUfsPath("hdfs://localhost:9010/foo", "bar/"));
assertEquals("hdfs://localhost:9010/foo/bar",
PathUtils.concatUfsPath("hdfs://localhost:9010/foo", "/bar"));
assertEquals("hdfs://localhost:9010/foo/bar",
PathUtils.concatUfsPath("hdfs://localhost:9010/foo", "/bar/"));
// Join base with trailing "/"
assertEquals("s3://foo/bar", PathUtils.concatUfsPath("s3://foo/", "bar"));
assertEquals("s3://foo/bar", PathUtils.concatUfsPath("s3://foo/", "bar/"));
assertEquals("s3://foo/bar", PathUtils.concatUfsPath("s3://foo/", "/bar"));
assertEquals("s3://foo/bar", PathUtils.concatUfsPath("s3://foo/", "/bar/"));
assertEquals("hdfs://localhost:9010/foo/bar",
PathUtils.concatUfsPath("hdfs://localhost:9010/foo/", "bar"));
assertEquals("hdfs://localhost:9010/foo/bar",
PathUtils.concatUfsPath("hdfs://localhost:9010/foo/", "bar/"));
assertEquals("hdfs://localhost:9010/foo/bar",
PathUtils.concatUfsPath("hdfs://localhost:9010/foo/", "/bar"));
assertEquals("hdfs://localhost:9010/foo/bar",
PathUtils.concatUfsPath("hdfs://localhost:9010/foo/", "/bar/"));
// Redundant separator must be trimmed.
assertEquals("s3://foo/bar", PathUtils.concatUfsPath("s3://foo/", "bar//"));
assertEquals("hdfs://localhost:9010/foo/bar",
PathUtils.concatUfsPath("hdfs://localhost:9010/foo/", "bar//"));
} |
public static Validator parses(final Function<String, ?> parser) {
return (name, val) -> {
if (val != null && !(val instanceof String)) {
throw new IllegalArgumentException("validator should only be used with STRING defs");
}
try {
parser.apply((String)val);
} catch (final Exception e) {
throw new ConfigException("Configuration " + name + " is invalid: " + e.getMessage());
}
};
} | @Test
public void shouldThrowIfParserThrows() {
// Given:
final Validator validator = ConfigValidators.parses(parser);
when(parser.apply(any())).thenThrow(new IllegalArgumentException("some error"));
// When:
final Exception e = assertThrows(
ConfigException.class,
() -> validator.ensureValid("propName", "value")
);
// Then:
assertThat(e.getMessage(), containsString("Configuration propName is invalid: some error"));
} |
Object[] findValues(int ordinal) {
return getAllValues(ordinal, type, 0);
} | @Test
public void testMapObjectValueReference() throws Exception {
MapValue val1 = new MapValue();
val1.val = "one";
MapValue val2 = new MapValue();
val2.val = "two";
Map<Integer, MapValue> map = new HashMap<>();
map.put(1, val1);
map.put(2, val2);
MapObjectReference mapObjectReference = new MapObjectReference();
mapObjectReference.mapValues = map;
objectMapper.add(mapObjectReference);
StateEngineRoundTripper.roundTripSnapshot(writeStateEngine, readStateEngine);
// for values
FieldPath fieldPath = new FieldPath(readStateEngine, "MapObjectReference", "mapValues.value");
Object[] values = fieldPath.findValues(0);
Assert.assertEquals(2, values.length);
Set<String> valuesAsSet = new HashSet<>();
for (Object v : values) valuesAsSet.add((String) v);
Assert.assertTrue(valuesAsSet.contains("one"));
Assert.assertTrue(valuesAsSet.contains("two"));
// for keys
fieldPath = new FieldPath(readStateEngine, "MapObjectReference", "mapValues.key");
values = fieldPath.findValues(0);
Assert.assertEquals(2, values.length);
Set<Integer> keysAsSet = new HashSet<>();
for (Object v : values) keysAsSet.add((int) v);
Assert.assertTrue(keysAsSet.contains(1));
Assert.assertTrue(keysAsSet.contains(2));
} |
public static int[] getCutIndices(String s, String splitChar, int index) {
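        // Returns the [begin, end) cut indices of the index-th token (0 = first token)
        // delimited by the first character of splitChar; {0, 0} when it does not exist.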
int found = 0;
char target = splitChar.charAt(0);
for (int i = 0; i < s.length(); i++) {
if (s.charAt(i) == target) {
found++;
}
if (found == index) {
int begin = i;
if (begin != 0) {
begin += 1;
}
int end = s.indexOf(target, i + 1);
                // End will be -1 if this is the last token in the string and there is no further occurrence.
if (end == -1) {
end = s.length();
}
return new int[]{begin, end};
}
}
return new int[]{0, 0};
} | @Test
public void testCutIndicesWithFirstToken() throws Exception {
String s = "<10> 07 Aug 2013 somesubsystem";
int[] result = SplitAndIndexExtractor.getCutIndices(s, " ", 0);
assertEquals(0, result[0]);
assertEquals(4, result[1]);
} |
@SuppressWarnings("unchecked")
private SpscChannelConsumer<E> newConsumer(Object... args) {
return mapper.newFlyweight(SpscChannelConsumer.class, "ChannelConsumerTemplate.java",
Template.fromFile(Channel.class, "ChannelConsumerTemplate.java"), args);
} | @Test
public void shouldReadAnObject() {
ChannelConsumer consumer = newConsumer();
shouldWriteAnObject();
assertTrue(consumer.read());
assertEmpty();
} |
@Override
public void run() {
try (DbSession dbSession = dbClient.openSession(false)) {
List<CeActivityDto> recentSuccessfulTasks = getRecentSuccessfulTasks(dbSession);
Collection<String> entityUuids = recentSuccessfulTasks.stream()
.map(CeActivityDto::getEntityUuid)
.toList();
List<EntityDto> entities = dbClient.entityDao().selectByUuids(dbSession, entityUuids);
Map<String, String> entityUuidAndKeys = entities.stream()
.collect(Collectors.toMap(EntityDto::getUuid, EntityDto::getKey));
reportObservedDurationForTasks(recentSuccessfulTasks, entityUuidAndKeys);
}
lastUpdatedTimestamp = system.now();
} | @Test
public void run_givenNullExecutionTime_dontReportMetricData() {
RecentTasksDurationTask task = new RecentTasksDurationTask(dbClient, metrics, config, system);
List<CeActivityDto> recentTasks = createTasks(1, 0);
recentTasks.get(0).setExecutionTimeMs(null);
when(entityDao.selectByUuids(any(), any())).thenReturn(createEntityDtos(1));
when(ceActivityDao.selectNewerThan(any(), anyLong())).thenReturn(recentTasks);
task.run();
verify(metrics, times(0)).observeComputeEngineTaskDuration(anyLong(), any(), any());
} |
public ZookeeperPrefixChildFilter(ZookeeperEphemeralPrefixGenerator prefixGenerator)
{
_prefixGenerator = prefixGenerator;
} | @Test(dataProvider = "prefixFilterDataProvider")
public void testZookeeperPrefixChildFilter(String hostName, List<String> children, List<String> expectedFilteredChildren)
{
ZookeeperPrefixChildFilter filter = new ZookeeperPrefixChildFilter(new AnnouncerHostPrefixGenerator(hostName));
List<String> actualFilteredChildren = filter.filter(children);
Assert.assertEquals(actualFilteredChildren, expectedFilteredChildren);
} |
public static final FluentKieModuleDeploymentHelper newFluentInstance() {
return new KieModuleDeploymentHelperImpl();
} | @Test
public void testFluentDeploymentHelper() throws Exception {
int numFiles = 0;
int numDirs = 0;
FluentKieModuleDeploymentHelper deploymentHelper = KieModuleDeploymentHelper.newFluentInstance();
String groupId = "org.kie.api.builder.fluent";
String artifactId = "test-kjar";
String version = "0.1-SNAPSHOT";
deploymentHelper = deploymentHelper.setGroupId(groupId)
.setArtifactId(artifactId)
.setVersion(version)
.addResourceFilePath("builder/test/", "builder/simple_query_test.drl")
.addResourceFilePath("/META-INF/WorkDefinitions.conf") // from the drools-core jar
.addClass(KieModuleDeploymentHelperTest.class)
.addClass(KieModule.class)
.addClass(Cheese.class);
// class dirs
numDirs += 5; // org.kie.api.builder.helper
numDirs += 2; // (org.)drools.compiler
// pom.xml, pom.properties
numFiles += 3;
// kmodule.xml, kmodule.info
numFiles += 2;
// kbase.cache x 2
numFiles += 2;
// drl files
numFiles += 2;
// WorkDefinitions
++numFiles;
// classes
numFiles += 3;
// META-INF/maven/org.kie.api.builder/test-kjar
numDirs += 4;
// defaultKiebase, META-INF/defaultKieBase
numDirs += 2;
KieBaseModel kbaseModel = deploymentHelper.getKieModuleModel().newKieBaseModel("otherKieBase");
kbaseModel.setEqualsBehavior(EqualityBehaviorOption.EQUALITY).setEventProcessingMode(EventProcessingOption.STREAM);
kbaseModel.newKieSessionModel("otherKieSession").setClockType(ClockTypeOption.REALTIME);
// META-INF/otherKieBase
++numDirs;
deploymentHelper.getKieModuleModel().getKieBaseModels().get("defaultKieBase").newKieSessionModel("secondKieSession");
deploymentHelper.createKieJarAndDeployToMaven();
File artifactFile = MavenRepository.getMavenRepository().resolveArtifact(groupId + ":" + artifactId + ":" + version).getFile();
zip = new ZipInputStream(new FileInputStream(artifactFile));
Set<String> jarFiles = new HashSet<String>();
Set<String> jarDirs = new HashSet<String>();
ZipEntry ze = zip.getNextEntry();
logger.debug("Getting files form deployed jar: ");
while( ze != null ) {
String fileName = ze.getName();
if( fileName.endsWith("drl")
|| fileName.endsWith("class")
|| fileName.endsWith("tst")
|| fileName.endsWith("conf")
|| fileName.endsWith("xml")
|| fileName.endsWith("info")
|| fileName.endsWith("properties")
|| fileName.endsWith("cache") ) {
jarFiles.add(fileName);
logger.debug("> " + fileName);
} else {
jarDirs.add(fileName);
logger.debug("] " + fileName);
}
ze = zip.getNextEntry();
}
assertThat(jarFiles.size()).as("Num files in kjar").isEqualTo(numFiles);
} |
public static ThrowableType getThrowableType(Throwable cause) {
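        // Classifies a throwable by its ThrowableAnnotation; classes without the
        // annotation are treated as recoverable errors.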
final ThrowableAnnotation annotation =
cause.getClass().getAnnotation(ThrowableAnnotation.class);
return annotation == null ? ThrowableType.RecoverableError : annotation.value();
} | @Test
void testThrowableType_Recoverable() {
assertThat(ThrowableClassifier.getThrowableType(new Exception("")))
.isEqualTo(ThrowableType.RecoverableError);
assertThat(ThrowableClassifier.getThrowableType(new TestRecoverableErrorException()))
.isEqualTo(ThrowableType.RecoverableError);
} |
public static void setInvocationTime(Operation op, long invocationTime) {
op.setInvocationTime(invocationTime);
} | @Test
public void testSetInvocationTime() {
Operation operation = new DummyOperation();
setInvocationTime(operation, 10);
assertEquals(10, operation.getInvocationTime());
} |
@Nonnull
public static <T> BatchSource<T> batchFromProcessor(
@Nonnull String sourceName,
@Nonnull ProcessorMetaSupplier metaSupplier
) {
checkSerializable(metaSupplier, "metaSupplier");
return new BatchSourceTransform<>(sourceName, metaSupplier);
} | @Test
public void fromProcessor() {
// Given
List<Integer> input = sequence(itemCount);
putToBatchSrcMap(input);
// When
BatchSource<Integer> source = Sources.batchFromProcessor("test",
readMapP(srcName, truePredicate(), Entry::getValue));
// Then
p.readFrom(source).writeTo(sink);
execute();
assertEquals(toBag(input), sinkToBag());
} |
@Override
public void updateNetwork(Network osNet) {
checkNotNull(osNet, ERR_NULL_NETWORK);
checkArgument(!Strings.isNullOrEmpty(osNet.getId()), ERR_NULL_NETWORK_ID);
osNetworkStore.updateNetwork(osNet);
OpenstackNetwork finalAugmentedNetwork = buildAugmentedNetworkFromType(osNet);
augmentedNetworkMap.compute(osNet.getId(), (id, existing) -> {
final String error = osNet.getId() + ERR_NOT_FOUND;
checkArgument(existing != null, error);
return finalAugmentedNetwork;
});
log.info(String.format(MSG_NETWORK, osNet.getId(), MSG_UPDATED));
} | @Test(expected = IllegalArgumentException.class)
public void testUpdateNetworkWithNullName() {
final Network updated = NeutronNetwork.builder()
.name(null)
.build();
updated.setId(NETWORK_ID);
target.updateNetwork(updated);
} |
public static String getValidFilePath(String inputPath) {
return getValidFilePath(inputPath, false);
} | @Test
public void getValidFilePath_nullOrEmpty_returnsNull() {
assertNull(SecurityUtils.getValidFilePath(""));
assertNull(SecurityUtils.getValidFilePath(null));
} |
@PostMapping
@Secured(resource = AuthConstants.CONSOLE_RESOURCE_NAME_PREFIX + "namespaces", action = ActionTypes.WRITE)
public Boolean createNamespace(@RequestParam("customNamespaceId") String namespaceId,
@RequestParam("namespaceName") String namespaceName,
@RequestParam(value = "namespaceDesc", required = false) String namespaceDesc) {
if (StringUtils.isBlank(namespaceId)) {
namespaceId = UUID.randomUUID().toString();
} else {
namespaceId = namespaceId.trim();
if (!namespaceIdCheckPattern.matcher(namespaceId).matches()) {
return false;
}
if (namespaceId.length() > NAMESPACE_ID_MAX_LENGTH) {
return false;
}
// check unique
if (namespacePersistService.tenantInfoCountByTenantId(namespaceId) > 0) {
return false;
}
}
// contains illegal chars
if (!namespaceNameCheckPattern.matcher(namespaceName).matches()) {
return false;
}
try {
return namespaceOperationService.createNamespace(namespaceId, namespaceName, namespaceDesc);
} catch (NacosException e) {
return false;
}
} | @Test
void testCreateNamespaceWithIllegalName() {
assertFalse(namespaceController.createNamespace(null, "test@Name", "testDesc"));
assertFalse(namespaceController.createNamespace(null, "test#Name", "testDesc"));
assertFalse(namespaceController.createNamespace(null, "test$Name", "testDesc"));
assertFalse(namespaceController.createNamespace(null, "test%Name", "testDesc"));
assertFalse(namespaceController.createNamespace(null, "test^Name", "testDesc"));
assertFalse(namespaceController.createNamespace(null, "test&Name", "testDesc"));
assertFalse(namespaceController.createNamespace(null, "test*Name", "testDesc"));
} |
protected InstrumentedHttpClientConnectionManager createConnectionManager(Registry<ConnectionSocketFactory> registry,
String name) {
final Duration ttl = configuration.getTimeToLive();
final InstrumentedHttpClientConnectionManager manager = InstrumentedHttpClientConnectionManager.builder(metricRegistry)
.socketFactoryRegistry(registry)
.dnsResolver(resolver)
.timeToLive(TimeValue.of(ttl.getQuantity(), ttl.getUnit()))
.name(name)
.build();
return configureConnectionManager(manager);
} | @Test
void usesASystemDnsResolverByDefault() {
final InstrumentedHttpClientConnectionManager manager = builder.createConnectionManager(registry, "test");
assertThat(manager)
.extracting("connectionOperator")
.extracting("dnsResolver")
.isInstanceOf(SystemDefaultDnsResolver.class);
} |
public static Map<TopicPartition, Long> parseSinkConnectorOffsets(Map<Map<String, ?>, Map<String, ?>> partitionOffsets) {
Map<TopicPartition, Long> parsedOffsetMap = new HashMap<>();
for (Map.Entry<Map<String, ?>, Map<String, ?>> partitionOffset : partitionOffsets.entrySet()) {
Map<String, ?> partitionMap = partitionOffset.getKey();
if (partitionMap == null) {
throw new BadRequestException("The partition for a sink connector offset cannot be null or missing");
}
if (!partitionMap.containsKey(KAFKA_TOPIC_KEY) || !partitionMap.containsKey(KAFKA_PARTITION_KEY)) {
throw new BadRequestException(String.format("The partition for a sink connector offset must contain the keys '%s' and '%s'",
KAFKA_TOPIC_KEY, KAFKA_PARTITION_KEY));
}
if (partitionMap.get(KAFKA_TOPIC_KEY) == null) {
throw new BadRequestException("Kafka topic names must be valid strings and may not be null");
}
if (partitionMap.get(KAFKA_PARTITION_KEY) == null) {
throw new BadRequestException("Kafka partitions must be valid numbers and may not be null");
}
String topic = String.valueOf(partitionMap.get(KAFKA_TOPIC_KEY));
int partition;
try {
// We parse it this way because both "10" and 10 should be accepted as valid partition values in the REST API's
// JSON request payload. If it throws an exception, we should propagate it since it's indicative of a badly formatted value.
partition = Integer.parseInt(String.valueOf(partitionMap.get(KAFKA_PARTITION_KEY)));
} catch (Exception e) {
throw new BadRequestException("Failed to parse the following Kafka partition value in the provided offsets: '" +
partitionMap.get(KAFKA_PARTITION_KEY) + "'. Partition values for sink connectors need " +
"to be integers.", e);
}
TopicPartition tp = new TopicPartition(topic, partition);
Map<String, ?> offsetMap = partitionOffset.getValue();
if (offsetMap == null) {
// represents an offset reset
parsedOffsetMap.put(tp, null);
} else {
if (!offsetMap.containsKey(KAFKA_OFFSET_KEY)) {
throw new BadRequestException(String.format("The offset for a sink connector should either be null or contain " +
"the key '%s'", KAFKA_OFFSET_KEY));
}
long offset;
try {
// We parse it this way because both "1000" and 1000 should be accepted as valid offset values in the REST API's
// JSON request payload. If it throws an exception, we should propagate it since it's indicative of a badly formatted value.
offset = Long.parseLong(String.valueOf(offsetMap.get(KAFKA_OFFSET_KEY)));
} catch (Exception e) {
throw new BadRequestException("Failed to parse the following Kafka offset value in the provided offsets: '" +
offsetMap.get(KAFKA_OFFSET_KEY) + "'. Offset values for sink connectors need " +
"to be integers.", e);
}
parsedOffsetMap.put(tp, offset);
}
}
return parsedOffsetMap;
} | @Test
public void testNullOffset() {
Map<String, Object> partitionMap = new HashMap<>();
partitionMap.put(SinkUtils.KAFKA_TOPIC_KEY, "topic");
partitionMap.put(SinkUtils.KAFKA_PARTITION_KEY, 10);
Map<Map<String, ?>, Map<String, ?>> partitionOffsets = new HashMap<>();
partitionOffsets.put(partitionMap, null);
Map<TopicPartition, Long> parsedOffsets = SinkUtils.parseSinkConnectorOffsets(partitionOffsets);
assertEquals(1, parsedOffsets.size());
assertNull(parsedOffsets.values().iterator().next());
} |
@Override
public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object o) {
Object span = request.getAttribute(SpanCustomizer.class.getName());
if (span instanceof SpanCustomizer) {
setHttpRouteAttribute(request);
handlerParser.preHandle(request, o, (SpanCustomizer) span);
}
return true;
} | @Test void preHandle_parsesAndAddsHttpRouteAttribute_coercesNullToEmpty() {
when(request.getAttribute("brave.SpanCustomizer")).thenReturn(span);
interceptor.preHandle(request, response, controller);
verify(request).getAttribute("brave.SpanCustomizer");
verify(request).getAttribute(BEST_MATCHING_PATTERN_ATTRIBUTE);
verify(request).setAttribute("http.route", "");
verify(parser).preHandle(request, controller, span);
verifyNoMoreInteractions(request, response, parser, span);
} |
public final void isEmpty() {
if (!Iterables.isEmpty(checkNotNull(actual))) {
failWithActual(simpleFact("expected to be empty"));
}
} | @Test
public void iterableIsEmpty() {
assertThat(asList()).isEmpty();
} |
@VisibleForTesting
void validateNameUnique(List<MemberLevelDO> list, Long id, String name) {
for (MemberLevelDO levelDO : list) {
if (ObjUtil.notEqual(levelDO.getName(), name)) {
continue;
}
if (id == null || !id.equals(levelDO.getId())) {
throw exception(LEVEL_NAME_EXISTS, levelDO.getName());
}
}
} | @Test
public void testUpdateLevel_nameUnique() {
        // Prepare parameters
Long id = randomLongId();
String name = randomString();
        // Mock data
memberlevelMapper.insert(randomLevelDO(o -> o.setName(name)));
        // Invoke and assert the expected exception
List<MemberLevelDO> list = memberlevelMapper.selectList();
assertServiceException(() -> levelService.validateNameUnique(list, id, name), LEVEL_NAME_EXISTS, name);
} |
@Override
protected void route(List<SendingMailbox> sendingMailboxes, TransferableBlock block)
throws Exception {
sendBlock(sendingMailboxes.get(0), block);
} | @Test(expectedExceptions = IllegalArgumentException.class)
public void shouldThrowWhenSingletonWithNonLocalMailbox()
throws Exception {
// Given:
ImmutableList<SendingMailbox> destinations = ImmutableList.of(_mailbox2);
// When:
new SingletonExchange(destinations, TransferableBlockUtils::splitBlock).route(destinations, _block);
} |
@Override
public final void getSize(@NonNull SizeReadyCallback cb) {
sizeDeterminer.getSize(cb);
} | @Test
public void testDecreasesDimensionsByViewPadding() {
activity.visible();
view.setLayoutParams(new FrameLayout.LayoutParams(100, 100));
view.setPadding(25, 25, 25, 25);
view.requestLayout();
target.getSize(cb);
verify(cb).onSizeReady(50, 50);
} |
public static ColumnName parse(final String text) {
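    // Parses the text as a bare or qualified column reference (case-insensitively)
    // and returns the resolved column name; any other expression fails to parse.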
final SqlBaseLexer lexer = new SqlBaseLexer(
new CaseInsensitiveStream(CharStreams.fromString(text))
);
final CommonTokenStream tokStream = new CommonTokenStream(lexer);
final SqlBaseParser parser = new SqlBaseParser(tokStream);
final PrimaryExpressionContext primaryExpression = parser.primaryExpression();
if (primaryExpression instanceof ColumnReferenceContext) {
return resolve((ColumnReferenceContext) primaryExpression).getColumnName();
}
if (primaryExpression instanceof QualifiedColumnReferenceContext) {
return resolve((QualifiedColumnReferenceContext) primaryExpression).getColumnName();
}
throw new ParseFailedException(
"Cannot parse text that is not column reference.",
"Cannot parse text that is not column reference: " + text,
text
);
} | @Test
public void shouldParseUnquotedIdentifier() {
// When:
final ColumnName result = ColumnReferenceParser.parse("foo");
// Then:
assertThat(result, is(ColumnName.of("FOO")));
} |
@Override
public void trash(final Local file) throws LocalAccessDeniedException {
if(log.isDebugEnabled()) {
log.debug(String.format("Move %s to Trash", file));
}
final ObjCObjectByReference error = new ObjCObjectByReference();
if(!NSFileManager.defaultManager().trashItemAtURL_resultingItemURL_error(
NSURL.fileURLWithPath(file.getAbsolute()), null, error)) {
final NSError f = error.getValueAs(NSError.class);
if(null == f) {
throw new LocalAccessDeniedException(file.getAbsolute());
}
throw new LocalAccessDeniedException(String.format("%s", f.localizedDescription()));
}
} | @Test
public void testTrashOpenDirectoryEnumeration() throws Exception {
final Trash trash = new FileManagerTrashFeature();
final SupportDirectoryFinder finder = new TemporarySupportDirectoryFinder();
final Local temp = finder.find();
final Local directory = LocalFactory.get(temp, UUID.randomUUID().toString());
directory.mkdir();
final Local sub = LocalFactory.get(directory, UUID.randomUUID().toString());
sub.mkdir();
final Local file = LocalFactory.get(sub, UUID.randomUUID().toString());
final Touch touch = LocalTouchFactory.get();
touch.touch(file);
try (final DirectoryStream<Path> stream = Files.newDirectoryStream(Paths.get(sub.getAbsolute()))) {
trash.trash(directory);
}
} |
static Result coerceUserList(
final Collection<Expression> expressions,
final ExpressionTypeManager typeManager
) {
return coerceUserList(expressions, typeManager, Collections.emptyMap());
} | @Test
public void shouldCoerceArrayOfCompatibleLiterals() {
// Given:
final ImmutableList<Expression> expressions = ImmutableList.of(
new CreateArrayExpression(
ImmutableList.of(
new IntegerLiteral(10),
new IntegerLiteral(289476)
)
),
new CreateArrayExpression(
ImmutableList.of(
new StringLiteral("123456789000"),
new StringLiteral("22"),
new StringLiteral("\t -100 \t")
)
)
);
// When:
final Result result = CoercionUtil.coerceUserList(expressions, typeManager);
// Then:
assertThat(result.commonType(), is(Optional.of(SqlTypes.array(SqlTypes.BIGINT))));
assertThat(result.expressions(), is(ImmutableList.of(
cast(new CreateArrayExpression(
ImmutableList.of(
new IntegerLiteral(10),
new IntegerLiteral(289476)
)
), SqlTypes.array(SqlTypes.BIGINT)),
cast(new CreateArrayExpression(
ImmutableList.of(
new StringLiteral("123456789000"),
new StringLiteral("22"),
new StringLiteral("\t -100 \t")
)
), SqlTypes.array(SqlTypes.BIGINT))
)));
} |