focal_method | test_case |
---|---|
@Override
public ColumnStatisticsObj aggregate(List<ColStatsObjWithSourceInfo> colStatsWithSourceInfo,
List<String> partNames, boolean areAllPartsFound) throws MetaException {
checkStatisticsList(colStatsWithSourceInfo);
ColumnStatisticsObj statsObj = null;
String colType;
String colName = null;
// check whether every ColumnStatisticsObj contains stats and every NDV
// estimator is a mergeable bit vector
boolean doAllPartitionContainStats = partNames.size() == colStatsWithSourceInfo.size();
NumDistinctValueEstimator ndvEstimator = null;
boolean areAllNDVEstimatorsMergeable = true;
for (ColStatsObjWithSourceInfo csp : colStatsWithSourceInfo) {
ColumnStatisticsObj cso = csp.getColStatsObj();
if (statsObj == null) {
colName = cso.getColName();
colType = cso.getColType();
statsObj = ColumnStatsAggregatorFactory.newColumnStaticsObj(colName, colType,
cso.getStatsData().getSetField());
LOG.trace("doAllPartitionContainStats for column: {} is: {}", colName,
doAllPartitionContainStats);
}
DecimalColumnStatsDataInspector columnStatsData = decimalInspectorFromStats(cso);
// check if we can merge NDV estimators
if (columnStatsData.getNdvEstimator() == null) {
areAllNDVEstimatorsMergeable = false;
break;
} else {
NumDistinctValueEstimator estimator = columnStatsData.getNdvEstimator();
if (ndvEstimator == null) {
ndvEstimator = estimator;
} else {
if (!ndvEstimator.canMerge(estimator)) {
areAllNDVEstimatorsMergeable = false;
break;
}
}
}
}
if (areAllNDVEstimatorsMergeable && ndvEstimator != null) {
ndvEstimator = NumDistinctValueEstimatorFactory.getEmptyNumDistinctValueEstimator(ndvEstimator);
}
LOG.debug("all of the bit vectors can merge for {} is {}", colName, areAllNDVEstimatorsMergeable);
ColumnStatisticsData columnStatisticsData = initColumnStatisticsData();
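// Case 1: every requested partition has stats (or there is at most one stats
// object), so aggregate directly; otherwise fall through to extrapolation.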
if (doAllPartitionContainStats || colStatsWithSourceInfo.size() < 2) {
DecimalColumnStatsDataInspector aggregateData = null;
long lowerBound = 0;
long higherBound = 0;
double densityAvgSum = 0.0;
DecimalColumnStatsMerger merger = new DecimalColumnStatsMerger();
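// lowerBound is the largest NDV seen in any single partition (the aggregate cannot
// have fewer); higherBound is the sum of all NDVs (the maximum if partitions are disjoint).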
for (ColStatsObjWithSourceInfo csp : colStatsWithSourceInfo) {
ColumnStatisticsObj cso = csp.getColStatsObj();
DecimalColumnStatsDataInspector newData = decimalInspectorFromStats(cso);
lowerBound = Math.max(lowerBound, newData.getNumDVs());
higherBound += newData.getNumDVs();
if (newData.isSetLowValue() && newData.isSetHighValue()) {
densityAvgSum += (MetaStoreServerUtils.decimalToDouble(newData.getHighValue()) - MetaStoreServerUtils
.decimalToDouble(newData.getLowValue())) / newData.getNumDVs();
}
if (areAllNDVEstimatorsMergeable && ndvEstimator != null) {
ndvEstimator.mergeEstimators(newData.getNdvEstimator());
}
if (aggregateData == null) {
aggregateData = newData.deepCopy();
} else {
aggregateData.setLowValue(merger.mergeLowValue(
merger.getLowValue(aggregateData), merger.getLowValue(newData)));
aggregateData.setHighValue(merger.mergeHighValue(
merger.getHighValue(aggregateData), merger.getHighValue(newData)));
aggregateData.setNumNulls(merger.mergeNumNulls(aggregateData.getNumNulls(), newData.getNumNulls()));
aggregateData.setNumDVs(merger.mergeNumDVs(aggregateData.getNumDVs(), newData.getNumDVs()));
}
}
if (areAllNDVEstimatorsMergeable && ndvEstimator != null) {
// if all the ColumnStatisticsObjs contain bitvectors, we do not need to
// use uniform distribution assumption because we can merge bitvectors
// to get a good estimation.
aggregateData.setNumDVs(ndvEstimator.estimateNumDistinctValues());
} else {
long estimation;
if (useDensityFunctionForNDVEstimation && aggregateData != null
&& aggregateData.isSetLowValue() && aggregateData.isSetHighValue()) {
// We have estimation, lowerbound and higherbound. We use estimation
// if it is between lowerbound and higherbound.
double densityAvg = densityAvgSum / partNames.size();
estimation = (long) ((MetaStoreServerUtils.decimalToDouble(aggregateData.getHighValue()) - MetaStoreServerUtils
.decimalToDouble(aggregateData.getLowValue())) / densityAvg);
if (estimation < lowerBound) {
estimation = lowerBound;
} else if (estimation > higherBound) {
estimation = higherBound;
}
} else {
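// Density estimation unavailable or disabled: interpolate between the bounds;
// ndvTuner = 0 keeps the largest per-partition NDV, 1 assumes fully disjoint partitions.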
estimation = (long) (lowerBound + (higherBound - lowerBound) * ndvTuner);
}
aggregateData.setNumDVs(estimation);
}
columnStatisticsData.setDecimalStats(aggregateData);
} else {
// TODO: bail out if missing stats are over a certain threshold
// we need extrapolation
LOG.debug("start extrapolation for {}", colName);
Map<String, Integer> indexMap = new HashMap<>();
for (int index = 0; index < partNames.size(); index++) {
indexMap.put(partNames.get(index), index);
}
Map<String, Double> adjustedIndexMap = new HashMap<>();
Map<String, ColumnStatisticsData> adjustedStatsMap = new HashMap<>();
// while we scan the css, we also get the densityAvg, lowerbound and
// higherbound when useDensityFunctionForNDVEstimation is true.
double densityAvgSum = 0.0;
if (!areAllNDVEstimatorsMergeable) {
// if not every partition uses bitvector for ndv, we just fall back to
// the traditional extrapolation methods.
for (ColStatsObjWithSourceInfo csp : colStatsWithSourceInfo) {
ColumnStatisticsObj cso = csp.getColStatsObj();
String partName = csp.getPartName();
DecimalColumnStatsData newData = cso.getStatsData().getDecimalStats();
if (useDensityFunctionForNDVEstimation && newData.isSetLowValue() && newData.isSetHighValue()) {
densityAvgSum += (MetaStoreServerUtils.decimalToDouble(newData.getHighValue()) - MetaStoreServerUtils
.decimalToDouble(newData.getLowValue())) / newData.getNumDVs();
}
adjustedIndexMap.put(partName, (double) indexMap.get(partName));
adjustedStatsMap.put(partName, cso.getStatsData());
}
} else {
// we first merge all the adjacent bitvectors that we could merge and
// derive new partition names and index.
StringBuilder pseudoPartName = new StringBuilder();
double pseudoIndexSum = 0;
int length = 0;
int curIndex = -1;
DecimalColumnStatsDataInspector aggregateData = null;
for (ColStatsObjWithSourceInfo csp : colStatsWithSourceInfo) {
ColumnStatisticsObj cso = csp.getColStatsObj();
String partName = csp.getPartName();
DecimalColumnStatsDataInspector newData = decimalInspectorFromStats(cso);
// newData.isSetBitVectors() should be true for sure because we
// already checked it before.
if (indexMap.get(partName) != curIndex) {
// There is bitvector, but it is not adjacent to the previous ones.
if (length > 0) {
// we have to set ndv
adjustedIndexMap.put(pseudoPartName.toString(), pseudoIndexSum / length);
aggregateData.setNumDVs(ndvEstimator.estimateNumDistinctValues());
ColumnStatisticsData csd = new ColumnStatisticsData();
csd.setDecimalStats(aggregateData);
adjustedStatsMap.put(pseudoPartName.toString(), csd);
if (useDensityFunctionForNDVEstimation) {
densityAvgSum += (MetaStoreServerUtils.decimalToDouble(aggregateData.getHighValue()) - MetaStoreServerUtils
.decimalToDouble(aggregateData.getLowValue())) / aggregateData.getNumDVs();
}
// reset everything
pseudoPartName = new StringBuilder();
pseudoIndexSum = 0;
length = 0;
ndvEstimator = NumDistinctValueEstimatorFactory.getEmptyNumDistinctValueEstimator(ndvEstimator);
}
aggregateData = null;
}
curIndex = indexMap.get(partName);
pseudoPartName.append(partName);
pseudoIndexSum += curIndex;
length++;
curIndex++;
if (aggregateData == null) {
aggregateData = newData.deepCopy();
} else {
if (MetaStoreServerUtils.decimalToDouble(aggregateData.getLowValue()) < MetaStoreServerUtils
.decimalToDouble(newData.getLowValue())) {
aggregateData.setLowValue(aggregateData.getLowValue());
} else {
aggregateData.setLowValue(newData.getLowValue());
}
if (MetaStoreServerUtils.decimalToDouble(aggregateData.getHighValue()) > MetaStoreServerUtils
.decimalToDouble(newData.getHighValue())) {
aggregateData.setHighValue(aggregateData.getHighValue());
} else {
aggregateData.setHighValue(newData.getHighValue());
}
aggregateData.setNumNulls(aggregateData.getNumNulls() + newData.getNumNulls());
}
ndvEstimator.mergeEstimators(newData.getNdvEstimator());
}
if (length > 0) {
// we have to set ndv
adjustedIndexMap.put(pseudoPartName.toString(), pseudoIndexSum / length);
aggregateData.setNumDVs(ndvEstimator.estimateNumDistinctValues());
ColumnStatisticsData csd = new ColumnStatisticsData();
csd.setDecimalStats(aggregateData);
adjustedStatsMap.put(pseudoPartName.toString(), csd);
if (useDensityFunctionForNDVEstimation) {
densityAvgSum += (MetaStoreServerUtils.decimalToDouble(aggregateData.getHighValue()) - MetaStoreServerUtils
.decimalToDouble(aggregateData.getLowValue())) / aggregateData.getNumDVs();
}
}
}
extrapolate(columnStatisticsData, partNames.size(), colStatsWithSourceInfo.size(),
adjustedIndexMap, adjustedStatsMap, densityAvgSum / adjustedStatsMap.size());
}
LOG.debug(
"Ndv estimation for {} is {}. # of partitions requested: {}. # of partitions found: {}",
colName, columnStatisticsData.getDecimalStats().getNumDVs(), partNames.size(),
colStatsWithSourceInfo.size());
KllHistogramEstimator mergedKllHistogramEstimator = mergeHistograms(colStatsWithSourceInfo);
if (mergedKllHistogramEstimator != null) {
columnStatisticsData.getDecimalStats().setHistogram(mergedKllHistogramEstimator.serialize());
}
statsObj.setStatsData(columnStatisticsData);
return statsObj;
} | @Test
public void testAggregateMultipleStatsWhenSomeNullValues() throws MetaException {
List<String> partitions = Arrays.asList("part1", "part2");
ColumnStatisticsData data1 = new ColStatsBuilder<>(Decimal.class).numNulls(1).numDVs(2)
.low(ONE).high(TWO).hll(1, 2).kll(1, 2).build();
ColumnStatisticsData data2 = new ColStatsBuilder<>(Decimal.class).numNulls(2).numDVs(3).build();
List<ColStatsObjWithSourceInfo> statsList = Arrays.asList(
createStatsWithInfo(data1, TABLE, COL, partitions.get(0)),
createStatsWithInfo(data2, TABLE, COL, partitions.get(1)));
DecimalColumnStatsAggregator aggregator = new DecimalColumnStatsAggregator();
ColumnStatisticsObj computedStatsObj = aggregator.aggregate(statsList, partitions, true);
ColumnStatisticsData expectedStats = new ColStatsBuilder<>(Decimal.class).numNulls(3).numDVs(3)
.low(ONE).high(TWO).hll(1, 2).kll(1, 2).build();
assertEqualStatistics(expectedStats, computedStatsObj.getStatsData());
aggregator.useDensityFunctionForNDVEstimation = true;
computedStatsObj = aggregator.aggregate(statsList, partitions, true);
expectedStats = new ColStatsBuilder<>(Decimal.class).numNulls(3).numDVs(4)
.low(ONE).high(TWO).hll(1, 2).kll(1, 2).build();
assertEqualStatistics(expectedStats, computedStatsObj.getStatsData());
aggregator.useDensityFunctionForNDVEstimation = false;
aggregator.ndvTuner = 1;
computedStatsObj = aggregator.aggregate(statsList, partitions, true);
expectedStats = new ColStatsBuilder<>(Decimal.class).numNulls(3).numDVs(5)
.low(ONE).high(TWO).hll(1, 2).kll(1, 2).build();
assertEqualStatistics(expectedStats, computedStatsObj.getStatsData());
} |
@Override
public Integer getInteger(final int columnIndex) {
return values.getInteger(columnIndex - 1);
} | @Test
public void shouldGetInt() {
assertThat(row.getInteger("f_int"), is(2));
} |
@Override
public String toString() {
StringBuilder b = new StringBuilder();
if (StringUtils.isNotBlank(protocol)) {
b.append(protocol);
b.append("://");
}
if (StringUtils.isNotBlank(host)) {
b.append(host);
}
if (!isPortDefault() && port != -1) {
b.append(':');
b.append(port);
}
if (StringUtils.isNotBlank(path)) {
// If no scheme/host/port, leave the path as is
if (b.length() > 0 && !path.startsWith("/")) {
b.append('/');
}
b.append(encodePath(path));
}
if (queryString != null && !queryString.isEmpty()) {
b.append(queryString.toString());
}
if (fragment != null) {
b.append("#");
b.append(encodePath(fragment));
}
return b.toString();
} | @Test
public void testNonHttpProtocolWithPort() {
s = "ftp://ftp.example.com:20/dir";
t = "ftp://ftp.example.com:20/dir";
assertEquals(t, new HttpURL(s).toString());
} |
public static <T> T copyProperties(Object source, Class<T> tClass, String... ignoreProperties) {
if (null == source) {
return null;
}
T target = ReflectUtil.newInstanceIfPossible(tClass);
copyProperties(source, target, CopyOptions.create().setIgnoreProperties(ignoreProperties));
return target;
} | @Test
public void copyBeanPropertiesFunctionFilterTest() {
//https://gitee.com/dromara/hutool/pulls/590
final Person o = new Person();
o.setName("asd");
o.setAge(123);
o.setOpenid("asd");
@SuppressWarnings("unchecked") final CopyOptions copyOptions = CopyOptions.create().setIgnoreProperties(Person::getAge, Person::getOpenid);
final Person n = new Person();
BeanUtil.copyProperties(o, n, copyOptions);
// verify that the ignored properties were not copied
assertNotEquals(o.getAge(), n.getAge());
assertNotEquals(o.getOpenid(), n.getOpenid());
} |
public TypeSerializer<T> getElementSerializer() {
// call getSerializer() here to get the initialization check and proper error message
final TypeSerializer<List<T>> rawSerializer = getSerializer();
if (!(rawSerializer instanceof ListSerializer)) {
throw new IllegalStateException();
}
return ((ListSerializer<T>) rawSerializer).getElementSerializer();
} | @Test
void testSerializerDuplication() {
// we need a serializer that actually duplicates for testing (a stateful one)
// we use Kryo here, because it meets these conditions
TypeSerializer<String> statefulSerializer =
new KryoSerializer<>(String.class, new SerializerConfigImpl());
ListStateDescriptor<String> descr = new ListStateDescriptor<>("foobar", statefulSerializer);
TypeSerializer<String> serializerA = descr.getElementSerializer();
TypeSerializer<String> serializerB = descr.getElementSerializer();
// check that the retrieved serializers are not the same
assertThat(serializerB).isNotSameAs(serializerA);
TypeSerializer<List<String>> listSerializerA = descr.getSerializer();
TypeSerializer<List<String>> listSerializerB = descr.getSerializer();
assertThat(listSerializerB).isNotSameAs(listSerializerA);
} |
public static <T> Iterator<T> skipFirst(Iterator<T> iterator, @Nonnull Predicate<? super T> predicate) {
checkNotNull(iterator, "iterator cannot be null.");
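// Drop leading elements that fail the predicate, then re-attach the first
// matching element to the front of the remaining iterator.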
while (iterator.hasNext()) {
T object = iterator.next();
if (!predicate.test(object)) {
continue;
}
return prepend(object, iterator);
}
return iterator;
} | @Test
public void skipFirstAll() {
var list = List.of(1, 2, 3, 4, 5, 6);
var actual = IterableUtil.skipFirst(list.iterator(), v -> v > 0);
assertIteratorsEquals(list, actual);
} |
void concatBlocks(INodeFile[] inodes, BlockManager bm) {
int size = this.blocks.length;
int totalAddedBlocks = 0;
for(INodeFile f : inodes) {
Preconditions.checkState(f.isStriped() == this.isStriped());
totalAddedBlocks += f.blocks.length;
}
BlockInfo[] newlist =
new BlockInfo[size + totalAddedBlocks];
System.arraycopy(this.blocks, 0, newlist, 0, size);
for(INodeFile in: inodes) {
System.arraycopy(in.blocks, 0, newlist, size, in.blocks.length);
size += in.blocks.length;
}
setBlocks(newlist);
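// Re-point every block (original and appended) at this file and update
// replication in the BlockManager where it changed.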
for(BlockInfo b : blocks) {
b.setBlockCollectionId(getId());
short oldRepl = b.getReplication();
short repl = getPreferredBlockReplication();
if (oldRepl != repl) {
bm.setReplication(oldRepl, repl, b);
}
}
} | @Test
public void testConcatBlocks() {
INodeFile origFile = createINodeFiles(1, "origfile")[0];
assertEquals("Number of blocks didn't match", origFile.numBlocks(), 1L);
INodeFile[] appendFiles = createINodeFiles(4, "appendfile");
BlockManager bm = Mockito.mock(BlockManager.class);
origFile.concatBlocks(appendFiles, bm);
assertEquals("Number of blocks didn't match", origFile.numBlocks(), 5L);
} |
@Override
public void check(Thread currentThread) throws CeTaskInterruptedException {
super.check(currentThread);
computeTimeOutOf(taskOf(currentThread))
.ifPresent(timeout -> {
throw new CeTaskTimeoutException(format("Execution of task timed out after %s ms", timeout));
});
} | @Test
public void check_fails_with_ISE_if_thread_is_not_running_a_CeWorker_with_no_current_CeTask() {
Thread t = newThreadWithRandomName();
mockWorkerOnThread(t, ceWorker);
assertThatThrownBy(() -> underTest.check(t))
.isInstanceOf(IllegalStateException.class)
.hasMessage("Could not find the CeTask being executed in thread '" + t.getName() + "'");
} |
@VisibleForTesting
static void instantiateHeapMemoryMetrics(final MetricGroup metricGroup) {
instantiateMemoryUsageMetrics(
metricGroup, () -> ManagementFactory.getMemoryMXBean().getHeapMemoryUsage());
} | @Test
void testHeapMetricsCompleteness() {
final InterceptingOperatorMetricGroup heapMetrics = new InterceptingOperatorMetricGroup();
MetricUtils.instantiateHeapMemoryMetrics(heapMetrics);
assertThat(heapMetrics.get(MetricNames.MEMORY_USED)).isNotNull();
assertThat(heapMetrics.get(MetricNames.MEMORY_COMMITTED)).isNotNull();
assertThat(heapMetrics.get(MetricNames.MEMORY_MAX)).isNotNull();
} |
public static SchemaAndValue parseString(String value) {
if (value == null) {
return NULL_SCHEMA_AND_VALUE;
}
if (value.isEmpty()) {
return new SchemaAndValue(Schema.STRING_SCHEMA, value);
}
ValueParser parser = new ValueParser(new Parser(value));
return parser.parse(false);
} | @Test
public void shouldParseTimeStringAsDate() throws Exception {
String str = "14:34:54.346Z";
SchemaAndValue result = Values.parseString(str);
assertEquals(Type.INT32, result.schema().type());
assertEquals(Time.LOGICAL_NAME, result.schema().name());
java.util.Date expected = new SimpleDateFormat(Values.ISO_8601_TIME_FORMAT_PATTERN).parse(str);
assertEquals(expected, result.value());
} |
@Override
public Object getObject(final int columnIndex) throws SQLException {
return mergeResultSet.getValue(columnIndex, Object.class);
} | @Test
void assertGetObjectWithByteArray() throws SQLException {
byte[] result = new byte[0];
when(mergeResultSet.getValue(1, byte[].class)).thenReturn(result);
assertThat(shardingSphereResultSet.getObject(1, byte[].class), is(result));
} |
static Builder builder() {
return new AutoValue_CsvIOParseError.Builder();
} | @Test
public void usableInMultiOutput() {
List<CsvIOParseError> want =
Arrays.asList(
CsvIOParseError.builder()
.setMessage("error message")
.setObservedTimestamp(Instant.now())
.setStackTrace("stack trace")
.build(),
CsvIOParseError.builder()
.setMessage("error message")
.setObservedTimestamp(Instant.now())
.setStackTrace("stack trace")
.setFilename("filename")
.setCsvRecord("csv record")
.build());
TupleTag<CsvIOParseError> errorTag = new TupleTag<CsvIOParseError>() {};
TupleTag<String> anotherTag = new TupleTag<String>() {};
PCollection<CsvIOParseError> errors = pipeline.apply("createWant", Create.of(want));
PCollection<String> anotherPCol = pipeline.apply("createAnother", Create.of("a", "b", "c"));
PCollectionTuple pct = PCollectionTuple.of(errorTag, errors).and(anotherTag, anotherPCol);
PAssert.that(pct.get(errorTag)).containsInAnyOrder(want);
pipeline.run();
} |
@Override
public <T extends Recorder> RetryContext<T> context() {
return new RetryContextImpl<>();
} | @Test
public void contextResultTest() {
final Retry retry = Retry.create(build("context", true));
final RetryContext<Recorder> context = retry.context();
final Recorder recorder = Mockito.mock(Recorder.class);
final boolean result = context.onResult(recorder, new Object(), 100L);
Assert.assertTrue(result);
} |
public Map<String, TableMeta> loadTableMeta(List<String> tables) throws DataXException {
Map<String, TableMeta> tableMetas = new HashMap<>();
try (Statement stmt = conn.createStatement()) {
ResultSet rs = stmt.executeQuery("show stables");
while (rs.next()) {
TableMeta tableMeta = buildSupTableMeta(rs);
if (!tables.contains(tableMeta.tbname))
continue;
tableMetas.put(tableMeta.tbname, tableMeta);
}
rs = stmt.executeQuery("show tables");
while (rs.next()) {
TableMeta tableMeta = buildSubTableMeta(rs);
if (!tables.contains(tableMeta.tbname))
continue;
tableMetas.put(tableMeta.tbname, tableMeta);
}
for (String tbname : tables) {
if (!tableMetas.containsKey(tbname)) {
throw DataXException.asDataXException(TDengineWriterErrorCode.RUNTIME_EXCEPTION, "table metadata of " + tbname + " is empty!");
}
}
} catch (SQLException e) {
throw DataXException.asDataXException(TDengineWriterErrorCode.RUNTIME_EXCEPTION, e.getMessage());
}
return tableMetas;
} | @Test
public void loadTableMeta() throws SQLException {
// given
SchemaManager schemaManager = new SchemaManager(conn);
List<String> tables = Arrays.asList("stb1", "stb2", "tb1", "tb3", "weather");
// when
Map<String, TableMeta> tableMetaMap = schemaManager.loadTableMeta(tables);
// then
TableMeta stb1 = tableMetaMap.get("stb1");
Assert.assertEquals(TableType.SUP_TABLE, stb1.tableType);
Assert.assertEquals("stb1", stb1.tbname);
Assert.assertEquals(3, stb1.columns);
Assert.assertEquals(1, stb1.tags);
Assert.assertEquals(2, stb1.tables);
TableMeta tb3 = tableMetaMap.get("tb3");
Assert.assertEquals(TableType.SUB_TABLE, tb3.tableType);
Assert.assertEquals("tb3", tb3.tbname);
Assert.assertEquals(4, tb3.columns);
Assert.assertEquals("stb2", tb3.stable_name);
TableMeta weather = tableMetaMap.get("weather");
Assert.assertEquals(TableType.NML_TABLE, weather.tableType);
Assert.assertEquals("weather", weather.tbname);
Assert.assertEquals(6, weather.columns);
Assert.assertNull(weather.stable_name);
} |
public DdlCommandResult execute(
final String sql,
final DdlCommand ddlCommand,
final boolean withQuery,
final Set<SourceName> withQuerySources
) {
return execute(sql, ddlCommand, withQuery, withQuerySources, false);
} | @Test
public void shouldAddNormalTableWhenNoTypeIsSpecified() {
// Given:
final CreateTableCommand cmd = buildCreateTable(
SourceName.of("t1"),
false,
null
);
// When:
cmdExec.execute(SQL_TEXT, cmd, true, NO_QUERY_SOURCES);
// Then:
final KsqlTable ksqlTable = (KsqlTable) metaStore.getSource(SourceName.of("t1"));
assertThat(ksqlTable.isSource(), is(false));
} |
public static IndicesBlockStatus parseBlockSettings(final GetSettingsResponse settingsResponse) {
IndicesBlockStatus result = new IndicesBlockStatus();
final var indexToSettingsMap = settingsResponse.getIndexToSettings();
final String[] indicesInResponse = indexToSettingsMap.keySet().toArray(new String[0]);
for (String index : indicesInResponse) {
final Settings blockSettings = indexToSettingsMap.get(index).getByPrefix(BLOCK_SETTINGS_PREFIX);
if (!blockSettings.isEmpty()) {
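// Keep only the block settings explicitly set to true, restoring the full setting-name prefix.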
final Set<String> blockSettingsNames = blockSettings.names();
final Set<String> blockSettingsSetToTrue = blockSettingsNames.stream()
.filter(s -> blockSettings.getAsBoolean(s, false))
.map(s -> BLOCK_SETTINGS_PREFIX + s)
.collect(Collectors.toSet());
if (!blockSettingsSetToTrue.isEmpty()) {
result.addIndexBlocks(index, blockSettingsSetToTrue);
}
}
}
return result;
} | @Test
public void noBlockedIndicesIdentifiedIfEmptySettingsPresent() {
var settingsBuilder = Map.of("index_0", Settings.builder().build());
GetSettingsResponse emptySettingsResponse = new GetSettingsResponse(settingsBuilder, Map.of());
final IndicesBlockStatus indicesBlockStatus = BlockSettingsParser.parseBlockSettings(emptySettingsResponse);
assertNotNull(indicesBlockStatus);
assertEquals(0, indicesBlockStatus.countBlockedIndices());
} |
public static SQLParser newInstance(final String sql, final Class<? extends SQLLexer> lexerClass, final Class<? extends SQLParser> parserClass) {
return createSQLParser(createTokenStream(sql, lexerClass), parserClass);
} | @Test
void assertNewInstance() {
assertThat(SQLParserFactory.newInstance(SQL, mock(LexerFixture.class).getClass(), mock(ParserFixture.class).getClass()), instanceOf(ParserFixture.class));
} |
static CastExpr getCategoricalPredictorExpression(final String categoricalPredictorMapName) {
final String lambdaExpressionMethodName = "evaluateCategoricalPredictor";
final String parameterName = "input";
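// Builds: (SerializableFunction<String, Double>) input -> KiePMMLRegressionTable.evaluateCategoricalPredictor(input, <categoricalPredictorMapName>)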
final MethodCallExpr lambdaMethodCallExpr = new MethodCallExpr();
lambdaMethodCallExpr.setName(lambdaExpressionMethodName);
final NodeList<Expression> arguments = new NodeList<>();
arguments.add(0, new NameExpr(parameterName));
arguments.add(1, new NameExpr(categoricalPredictorMapName));
lambdaMethodCallExpr.setArguments(arguments);
final ExpressionStmt lambdaExpressionStmt = new ExpressionStmt(lambdaMethodCallExpr);
final LambdaExpr lambdaExpr = new LambdaExpr();
final Parameter lambdaParameter = new Parameter(new UnknownType(), parameterName);
lambdaExpr.setParameters(NodeList.nodeList(lambdaParameter));
lambdaExpr.setBody(lambdaExpressionStmt);
lambdaMethodCallExpr.setScope(new NameExpr(KiePMMLRegressionTable.class.getSimpleName()));
final ClassOrInterfaceType serializableFunctionType =
getTypedClassOrInterfaceTypeByTypeNames(SerializableFunction.class.getCanonicalName(),
Arrays.asList(String.class.getSimpleName(),
Double.class.getSimpleName()));
final CastExpr toReturn = new CastExpr();
toReturn.setType(serializableFunctionType);
toReturn.setExpression(lambdaExpr);
return toReturn;
} | @Test
void getCategoricalPredictorExpression() throws IOException {
final String categoricalPredictorMapName = "categoricalPredictorMapName";
CastExpr retrieved =
KiePMMLRegressionTableFactory.getCategoricalPredictorExpression(categoricalPredictorMapName);
String text = getFileContent(TEST_05_SOURCE);
Expression expected = JavaParserUtils.parseExpression(String.format(text, categoricalPredictorMapName));
assertThat(retrieved).isEqualTo(expected);
} |
public String getLegacyColumnName( DatabaseMetaData dbMetaData, ResultSetMetaData rsMetaData, int index ) throws KettleDatabaseException {
if ( dbMetaData == null ) {
throw new KettleDatabaseException( BaseMessages.getString( PKG, "MySQLDatabaseMeta.Exception.LegacyColumnNameNoDBMetaDataException" ) );
}
if ( rsMetaData == null ) {
throw new KettleDatabaseException( BaseMessages.getString( PKG, "MySQLDatabaseMeta.Exception.LegacyColumnNameNoRSMetaDataException" ) );
}
try {
return dbMetaData.getDriverMajorVersion() > 3 ? rsMetaData.getColumnLabel( index ) : rsMetaData.getColumnName( index );
} catch ( Exception e ) {
throw new KettleDatabaseException( String.format( "%s: %s", BaseMessages.getString( PKG, "MySQLDatabaseMeta.Exception.LegacyColumnNameException" ), e.getMessage() ), e );
}
} | @Test( expected = KettleDatabaseException.class )
public void testGetLegacyColumnNameDriverGreaterThanThreeException() throws Exception {
DatabaseMetaData databaseMetaData = mock( DatabaseMetaData.class );
doReturn( 5 ).when( databaseMetaData ).getDriverMajorVersion();
new MySQLDatabaseMeta().getLegacyColumnName( databaseMetaData, getResultSetMetaDataException(), 1 );
} |
public final StringSubject hasMessageThat() {
StandardSubjectBuilder check = check("getMessage()");
if (actual instanceof ErrorWithFacts && ((ErrorWithFacts) actual).facts().size() > 1) {
check =
check.withMessage(
"(Note from Truth: When possible, instead of asserting on the full message, assert"
+ " about individual facts by using ExpectFailure.assertThat.)");
}
return check.that(checkNotNull(actual).getMessage());
} | @Test
public void hasMessageThat_failure() {
NullPointerException actual = new NullPointerException("message");
expectFailureWhenTestingThat(actual).hasMessageThat().isEqualTo("foobar");
assertFailureValue("value of", "throwable.getMessage()");
assertErrorHasActualAsCause(actual, expectFailure.getFailure());
} |
public static String get(@NonNull SymbolRequest request) {
String name = request.getName();
String title = request.getTitle();
String tooltip = request.getTooltip();
String htmlTooltip = request.getHtmlTooltip();
String classes = request.getClasses();
String pluginName = request.getPluginName();
String id = request.getId();
String identifier = (pluginName == null || pluginName.isBlank()) ? "core" : pluginName;
String symbol = SYMBOLS
.computeIfAbsent(identifier, key -> new ConcurrentHashMap<>())
.computeIfAbsent(name, key -> loadSymbol(identifier, key));
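// A plain tooltip is applied only when no HTML tooltip is present; an HTML tooltip always wins.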
if ((tooltip != null && !tooltip.isBlank()) && (htmlTooltip == null || htmlTooltip.isBlank())) {
symbol = symbol.replaceAll("<svg", Matcher.quoteReplacement("<svg tooltip=\"" + Functions.htmlAttributeEscape(tooltip) + "\""));
}
if (htmlTooltip != null && !htmlTooltip.isBlank()) {
symbol = symbol.replaceAll("<svg", Matcher.quoteReplacement("<svg data-html-tooltip=\"" + Functions.htmlAttributeEscape(htmlTooltip) + "\""));
}
if (id != null && !id.isBlank()) {
symbol = symbol.replaceAll("<svg", Matcher.quoteReplacement("<svg id=\"" + Functions.htmlAttributeEscape(id) + "\""));
}
if (classes != null && !classes.isBlank()) {
symbol = symbol.replaceAll("<svg", "<svg class=\"" + Functions.htmlAttributeEscape(classes) + "\"");
}
if (title != null && !title.isBlank()) {
symbol = "<span class=\"jenkins-visually-hidden\">" + Util.xmlEscape(title) + "</span>" + symbol;
}
return symbol;
} | @Test
@DisplayName("HTML tooltip overrides tooltip")
void htmlTooltipOverridesTooltip() {
String symbol = Symbol.get(new SymbolRequest.Builder()
.withName("science")
.withTooltip("Tooltip")
.withHtmlTooltip("<p>Some HTML Tooltip</p>")
.build()
);
assertThat(symbol, containsString(SCIENCE_PATH));
assertThat(symbol, not(containsString("tooltip=\"Tooltip\"")));
assertThat(symbol, containsString("data-html-tooltip=\"<p>Some HTML Tooltip</p>\""));
} |
public static String getTypeName(final int type) {
switch (type) {
case START_EVENT_V3:
return "Start_v3";
case STOP_EVENT:
return "Stop";
case QUERY_EVENT:
return "Query";
case ROTATE_EVENT:
return "Rotate";
case INTVAR_EVENT:
return "Intvar";
case LOAD_EVENT:
return "Load";
case NEW_LOAD_EVENT:
return "New_load";
case SLAVE_EVENT:
return "Slave";
case CREATE_FILE_EVENT:
return "Create_file";
case APPEND_BLOCK_EVENT:
return "Append_block";
case DELETE_FILE_EVENT:
return "Delete_file";
case EXEC_LOAD_EVENT:
return "Exec_load";
case RAND_EVENT:
return "RAND";
case XID_EVENT:
return "Xid";
case USER_VAR_EVENT:
return "User var";
case FORMAT_DESCRIPTION_EVENT:
return "Format_desc";
case TABLE_MAP_EVENT:
return "Table_map";
case PRE_GA_WRITE_ROWS_EVENT:
return "Write_rows_event_old";
case PRE_GA_UPDATE_ROWS_EVENT:
return "Update_rows_event_old";
case PRE_GA_DELETE_ROWS_EVENT:
return "Delete_rows_event_old";
case WRITE_ROWS_EVENT_V1:
return "Write_rows_v1";
case UPDATE_ROWS_EVENT_V1:
return "Update_rows_v1";
case DELETE_ROWS_EVENT_V1:
return "Delete_rows_v1";
case BEGIN_LOAD_QUERY_EVENT:
return "Begin_load_query";
case EXECUTE_LOAD_QUERY_EVENT:
return "Execute_load_query";
case INCIDENT_EVENT:
return "Incident";
case HEARTBEAT_LOG_EVENT:
case HEARTBEAT_LOG_EVENT_V2:
return "Heartbeat";
case IGNORABLE_LOG_EVENT:
return "Ignorable";
case ROWS_QUERY_LOG_EVENT:
return "Rows_query";
case WRITE_ROWS_EVENT:
return "Write_rows";
case UPDATE_ROWS_EVENT:
return "Update_rows";
case DELETE_ROWS_EVENT:
return "Delete_rows";
case GTID_LOG_EVENT:
return "Gtid";
case ANONYMOUS_GTID_LOG_EVENT:
return "Anonymous_Gtid";
case PREVIOUS_GTIDS_LOG_EVENT:
return "Previous_gtids";
case PARTIAL_UPDATE_ROWS_EVENT:
return "Update_rows_partial";
case TRANSACTION_CONTEXT_EVENT :
return "Transaction_context";
case VIEW_CHANGE_EVENT :
return "view_change";
case XA_PREPARE_LOG_EVENT :
return "Xa_prepare";
case TRANSACTION_PAYLOAD_EVENT :
return "transaction_payload";
default:
return "Unknown type:" + type;
}
} | @Test
public void getTypeNameInputPositiveOutputNotNull34() {
// Arrange
final int type = 9;
// Act
final String actual = LogEvent.getTypeName(type);
// Assert result
Assert.assertEquals("Append_block", actual);
} |
@CanIgnoreReturnValue
public final Ordered containsAtLeast(
@Nullable Object firstExpected,
@Nullable Object secondExpected,
@Nullable Object @Nullable ... restOfExpected) {
return containsAtLeastElementsIn(accumulate(firstExpected, secondExpected, restOfExpected));
} | @Test
public void iterableContainsAtLeastFailsWithSameToStringAndHomogeneousListWithNull() {
expectFailureWhenTestingThat(asList("null", "abc")).containsAtLeast("abc", null);
assertFailureValue("missing (1)", "null (null type)");
assertFailureValue("though it did contain (1)", "null (java.lang.String)");
} |
public static <T> T parseObject(String text, Class<T> clazz) {
if (StringUtil.isBlank(text)) {
return null;
}
return JSON_FACADE.parseObject(text, clazz);
} | @Test
public void assertParseObjectTypeReference() {
Assert.assertNull(JSONUtil.parseObject(null, new TypeReference<List<Foo>>() {
}));
Assert.assertNull(JSONUtil.parseObject(" ", new TypeReference<List<Foo>>() {
}));
Assert.assertEquals(
EXPECTED_FOO_ARRAY,
JSONUtil.parseObject(EXPECTED_FOO_JSON_ARRAY, new TypeReference<List<Foo>>() {
}));
} |
@Override
@MethodNotAvailable
public LocalMapStats getLocalMapStats() {
throw new MethodNotAvailableException();
} | @Test(expected = MethodNotAvailableException.class)
public void testGetLocalMapStats() {
adapter.getLocalMapStats();
} |
public static int nextPowerOfTwo(final int value) {
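// Smallest power of two >= value; Java shifts are mod 32, so 1 << 32 wraps to 1
// and any value <= 1 (including negatives) yields 1.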
return 1 << (32 - Integer.numberOfLeadingZeros(value - 1));
} | @Test
public void testNextLongPowerOfTwo() {
assertEquals(1L, QuickMath.nextPowerOfTwo(-9999999L));
assertEquals(1L, QuickMath.nextPowerOfTwo(-1L));
assertEquals(1L, QuickMath.nextPowerOfTwo(0L));
assertEquals(1L, QuickMath.nextPowerOfTwo(1L));
assertEquals(2L, QuickMath.nextPowerOfTwo(2L));
assertEquals(4L, QuickMath.nextPowerOfTwo(3L));
assertEquals(1L << 62, QuickMath.nextPowerOfTwo((1L << 61) + 1));
} |
@VisibleForTesting
public static JobGraph createJobGraph(StreamGraph streamGraph) {
return new StreamingJobGraphGenerator(
Thread.currentThread().getContextClassLoader(),
streamGraph,
null,
Runnable::run)
.createJobGraph();
} | @Test
void testDefaultJobType() {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
StreamGraph streamGraph =
new StreamGraphGenerator(
Collections.emptyList(), env.getConfig(), env.getCheckpointConfig())
.generate();
JobGraph jobGraph = StreamingJobGraphGenerator.createJobGraph(streamGraph);
assertThat(jobGraph.getJobType()).isEqualTo(JobType.STREAMING);
} |
@Override
public MergedResult decorate(final QueryResult queryResult, final SQLStatementContext sqlStatementContext, final EncryptRule rule) {
SQLStatement sqlStatement = sqlStatementContext.getSqlStatement();
if (sqlStatement instanceof MySQLExplainStatement || sqlStatement instanceof MySQLShowColumnsStatement) {
return new MergedEncryptShowColumnsMergedResult(queryResult, sqlStatementContext, rule);
}
if (sqlStatement instanceof MySQLShowCreateTableStatement) {
return new MergedEncryptShowCreateTableMergedResult(globalRuleMetaData, queryResult, sqlStatementContext, rule);
}
return new TransparentMergedResult(queryResult);
} | @Test
void assertMergedResultWithShowCreateTableStatement() {
sqlStatementContext = getShowCreateTableStatementContext();
RuleMetaData ruleMetaData = mock(RuleMetaData.class);
when(ruleMetaData.getSingleRule(SQLParserRule.class)).thenReturn(mock(SQLParserRule.class));
EncryptDALResultDecorator encryptDALResultDecorator = new EncryptDALResultDecorator(ruleMetaData);
assertThat(encryptDALResultDecorator.decorate(mock(QueryResult.class), sqlStatementContext, rule), instanceOf(MergedEncryptShowCreateTableMergedResult.class));
assertThat(encryptDALResultDecorator.decorate(mock(MergedResult.class), sqlStatementContext, rule), instanceOf(DecoratedEncryptShowCreateTableMergedResult.class));
} |
@Override
public DdlCommand create(
final String sqlExpression,
final DdlStatement ddlStatement,
final SessionConfig config
) {
return FACTORIES
.getOrDefault(ddlStatement.getClass(), (statement, cf, ci) -> {
throw new KsqlException(
"Unable to find ddl command factory for statement:"
+ statement.getClass()
+ " valid statements:"
+ FACTORIES.keySet()
);
})
.handle(
this,
new CallInfo(sqlExpression, config),
ddlStatement);
} | @Test
public void shouldCreateCommandForAlterSource() {
// Given:
final AlterSource ddlStatement = new AlterSource(SOME_NAME, DataSourceType.KSTREAM, new ArrayList<>());
// When:
final DdlCommand result = commandFactories
.create(sqlExpression, ddlStatement, SessionConfig.of(ksqlConfig, emptyMap()));
// Then:
assertThat(result, is(alterSourceCommand));
verify(alterSourceFactory).create(ddlStatement);
} |
@Override
public boolean test(Pickle pickle) {
URI picklePath = pickle.getUri();
if (!lineFilters.containsKey(picklePath)) {
return true;
}
for (Integer line : lineFilters.get(picklePath)) {
if (Objects.equals(line, pickle.getLocation().getLine())
|| Objects.equals(line, pickle.getScenarioLocation().getLine())
|| pickle.getExamplesLocation().map(Location::getLine).map(line::equals).orElse(false)
|| pickle.getRuleLocation().map(Location::getLine).map(line::equals).orElse(false)
|| pickle.getFeatureLocation().map(Location::getLine).map(line::equals).orElse(false)) {
return true;
}
}
return false;
} | @Test
void matches_pickles_from_files_not_in_the_predicate_map() {
// the argument "path/file.feature another_path/file.feature:8"
// results in only line predicates only for another_path/file.feature,
// but all pickles from path/file.feature shall also be executed.
LinePredicate predicate = new LinePredicate(singletonMap(
URI.create("classpath:another_path/file.feature"),
singletonList(8)));
assertTrue(predicate.test(firstPickle));
} |
public static <T extends TypedSPI> T getService(final Class<T> serviceInterface, final Object type) {
return getService(serviceInterface, type, new Properties());
} | @Test
void assertGetServiceWhenTypeIsNotExist() {
assertThrows(ServiceProviderNotFoundException.class, () -> TypedSPILoader.getService(TypedSPIFixture.class, "NOT_EXISTED"));
} |
private HikariDataSource createHikariDataSource() {
HikariConfig config = new HikariConfig();
config.setJdbcUrl(getJdbcUrl());
config.setUsername(properties.get(JDBCResource.USER));
config.setPassword(properties.get(JDBCResource.PASSWORD));
config.setDriverClassName(getDriverName());
config.setMaximumPoolSize(Config.jdbc_connection_pool_size);
config.setMinimumIdle(Config.jdbc_minimum_idle_connections);
config.setIdleTimeout(Config.jdbc_connection_idle_timeout_ms);
return new HikariDataSource(config);
} | @Test
public void testCreateHikariDataSource() {
properties = new HashMap<>();
properties.put(DRIVER_CLASS, "org.mariadb.jdbc.Driver");
properties.put(JDBCResource.URI, "jdbc:mariadb://127.0.0.1:3306");
properties.put(JDBCResource.USER, "root");
properties.put(JDBCResource.PASSWORD, "123456");
properties.put(JDBCResource.CHECK_SUM, "xxxx");
properties.put(JDBCResource.DRIVER_URL, "xxxx");
new JDBCMetadata(properties, "catalog");
} |
public static byte[] inputStream2Bytes(InputStream is) {
if (is == null) {
return null;
}
try {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
int i;
while ((i = is.read()) != -1) {
baos.write(i);
}
return baos.toByteArray();
} catch (Exception e) {
throw new ShouldNeverHappenException(e);
}
} | @Test
void inputStream2Bytes() {
assertNull(StringUtils.inputStream2Bytes(null));
String data = "abc\n"
+ ":\"klsdf\n"
+ "2ks,x:\".,-3sd˚ø≤ø¬≥";
byte[] bs = data.getBytes(Constants.DEFAULT_CHARSET);
ByteArrayInputStream inputStream = new ByteArrayInputStream(data.getBytes(Constants.DEFAULT_CHARSET));
assertThat(StringUtils.inputStream2Bytes(inputStream)).isEqualTo(bs);
} |
public Node parse() throws ScanException {
if (tokenList == null || tokenList.isEmpty())
return null;
return E();
} | @Test
public void literalVariableLiteral() throws ScanException {
Tokenizer tokenizer = new Tokenizer("a${b}c");
Parser parser = new Parser(tokenizer.tokenize());
Node node = parser.parse();
Node witness = new Node(Node.Type.LITERAL, "a");
witness.next = new Node(Node.Type.VARIABLE, new Node(Node.Type.LITERAL, "b"));
witness.next.next = new Node(Node.Type.LITERAL, "c");
assertEquals(witness, node);
} |
@Override
public Num calculate(BarSeries series, Position position) {
if (position == null || position.getEntry() == null || position.getExit() == null) {
return series.zero();
}
CashFlow cashFlow = new CashFlow(series, position);
return calculateMaximumDrawdown(series, null, cashFlow);
} | @Test
public void calculateWithGainsAndLosses() {
MockBarSeries series = new MockBarSeries(numFunction, 1, 2, 3, 6, 5, 20, 3);
AnalysisCriterion mdd = getCriterion();
TradingRecord tradingRecord = new BaseTradingRecord(Trade.buyAt(0, series), Trade.sellAt(1, series),
Trade.buyAt(3, series), Trade.sellAt(4, series), Trade.buyAt(5, series), Trade.sellAt(6, series));
assertNumEquals(.875d, mdd.calculate(series, tradingRecord));
} |
public MetricsBuilder enableRegistry(Boolean enableRegistry) {
this.enableRegistry = enableRegistry;
return getThis();
} | @Test
void enableRegistry() {
MetricsBuilder builder = MetricsBuilder.newBuilder();
builder.enableRegistry(true);
Assertions.assertTrue(builder.build().getEnableRegistry());
} |
public static Table resolveCalciteTable(SchemaPlus schemaPlus, List<String> tablePath) {
Schema subSchema = schemaPlus;
// call subSchema.getSubSchema() for all components except the last
for (int i = 0; i < tablePath.size() - 1; i++) {
subSchema = subSchema.getSubSchema(tablePath.get(i));
if (subSchema == null) {
throw new IllegalStateException(
String.format(
"While resolving table path %s, no sub-schema found for component %s (\"%s\")",
tablePath, i, tablePath.get(i)));
}
}
// for the final one call getTable()
return subSchema.getTable(Iterables.getLast(tablePath));
} | @Test
public void testMissingFlat() {
String tableName = "fake_table";
when(mockSchemaPlus.getTable(tableName)).thenReturn(null);
Table table = TableResolution.resolveCalciteTable(mockSchemaPlus, ImmutableList.of(tableName));
assertThat(table, Matchers.nullValue());
} |
private Mono<ServerResponse> fetchThemeSetting(ServerRequest request) {
return themeNameInPathVariableOrActivated(request)
.flatMap(name -> client.fetch(Theme.class, name))
.mapNotNull(theme -> theme.getSpec().getSettingName())
.flatMap(settingName -> client.fetch(Setting.class, settingName))
.flatMap(setting -> ServerResponse.ok().bodyValue(setting));
} | @Test
void fetchThemeSetting() {
Theme theme = new Theme();
theme.setMetadata(new Metadata());
theme.getMetadata().setName("fake");
theme.setSpec(new Theme.ThemeSpec());
theme.getSpec().setSettingName("fake-setting");
when(client.fetch(eq(Setting.class), eq("fake-setting")))
.thenReturn(Mono.just(new Setting()));
when(client.fetch(eq(Theme.class), eq("fake"))).thenReturn(Mono.just(theme));
webTestClient.get()
.uri("/themes/fake/setting")
.exchange()
.expectStatus().isOk();
verify(client).fetch(eq(Setting.class), eq("fake-setting"));
verify(client).fetch(eq(Theme.class), eq("fake"));
} |
public synchronized void synchronizeClusterSchemas( ClusterSchema clusterSchema ) {
synchronizeClusterSchemas( clusterSchema, clusterSchema.getName() );
} | @Test
public void synchronizeClusterSchemas_use_case_sensitive_name() throws Exception {
TransMeta transformation1 = createTransMeta();
ClusterSchema clusterSchema1 = createClusterSchema( "ClusterSchema", true );
transformation1.setClusterSchemas( Collections.singletonList( clusterSchema1 ) );
spoonDelegates.trans.addTransformation( transformation1 );
TransMeta transformation2 = createTransMeta();
ClusterSchema clusterSchema2 = createClusterSchema( "Clusterschema", true );
transformation2.setClusterSchemas( Collections.singletonList( clusterSchema2 ) );
spoonDelegates.trans.addTransformation( transformation2 );
clusterSchema2.setDynamic( true );
sharedUtil.synchronizeClusterSchemas( clusterSchema2 );
assertThat( clusterSchema1.isDynamic(), equalTo( false ) );
} |
@JsonCreator
public Range(
@JsonProperty("low") Marker low,
@JsonProperty("high") Marker high)
{
this(low, high, () -> {
if (low.compareTo(high) > 0) {
throw new IllegalArgumentException("low must be less than or equal to high");
}
});
} | @Test
public void testRange()
{
Range range = Range.range(BIGINT, 0L, false, 2L, true);
assertEquals(range.getLow(), Marker.above(BIGINT, 0L));
assertEquals(range.getHigh(), Marker.exactly(BIGINT, 2L));
assertFalse(range.isSingleValue());
assertFalse(range.isAll());
assertEquals(range.getType(), BIGINT);
assertFalse(range.includes(Marker.lowerUnbounded(BIGINT)));
assertFalse(range.includes(Marker.exactly(BIGINT, 0L)));
assertTrue(range.includes(Marker.exactly(BIGINT, 1L)));
assertTrue(range.includes(Marker.exactly(BIGINT, 2L)));
assertFalse(range.includes(Marker.exactly(BIGINT, 3L)));
assertFalse(range.includes(Marker.upperUnbounded(BIGINT)));
} |
public static void sortMessages(Message[] messages, final SortTerm[] sortTerm) {
final List<SortTermWithDescending> sortTermsWithDescending = getSortTermsWithDescending(sortTerm);
sortMessages(messages, sortTermsWithDescending);
} | @Test
public void testSortMessages() throws Exception {
Message[] expected = new Message[] { MESSAGES[0], MESSAGES[1], MESSAGES[2] };
// Sort using all the terms. Message order should be the same no matter what term is used
for (SortTerm term : POSSIBLE_TERMS) {
Message[] actual = MESSAGES.clone();
MailSorter.sortMessages(actual, new SortTerm[] { term });
try {
assertArrayEquals(expected, actual);
} catch (Exception ex) {
throw new Exception("Term: " + term.toString(), ex);
}
}
} |
public Protocol forName(final String identifier) {
return this.forName(identifier, null);
} | @Test
public void testFindProtocolProviderMismatch() {
final TestProtocol dav_provider1 = new TestProtocol(Scheme.dav) {
@Override
public String getIdentifier() {
return "dav";
}
@Override
public String getProvider() {
return "default";
}
@Override
public boolean isBundled() {
return true;
}
};
final TestProtocol dav_provider2 = new TestProtocol(Scheme.dav) {
@Override
public String getIdentifier() {
return "dav";
}
@Override
public String getProvider() {
return "provider_2";
}
};
final ProtocolFactory f = new ProtocolFactory(new LinkedHashSet<>(Arrays.asList(dav_provider1, dav_provider2)));
assertEquals(dav_provider1, f.forName("dav"));
assertEquals(dav_provider1, f.forName("dav", "default"));
assertEquals(dav_provider1, f.forName("dav", "g"));
assertEquals(dav_provider2, f.forName("dav", "provider_2"));
} |
@Override
public void addMeasure(String metricKey, int value) {
Metric metric = metricRepository.getByKey(metricKey);
validateAddMeasure(metric);
measureRepository.add(internalComponent, metric, newMeasureBuilder().create(value));
} | @Test
public void add_double_measure_create_measure_of_type_double_with_right_value() {
MeasureComputerContextImpl underTest = newContext(PROJECT_REF, NCLOC_KEY, DOUBLE_METRIC_KEY);
underTest.addMeasure(DOUBLE_METRIC_KEY, 10d);
Optional<Measure> measure = measureRepository.getAddedRawMeasure(PROJECT_REF, DOUBLE_METRIC_KEY);
assertThat(measure).isPresent();
assertThat(measure.get().getDoubleValue()).isEqualTo(10d);
} |
static int toInteger(final JsonNode object) {
if (object instanceof NumericNode) {
return object.intValue();
}
if (object instanceof TextNode) {
try {
return Integer.parseInt(object.textValue());
} catch (final NumberFormatException e) {
throw failedStringCoercionException(SqlBaseType.INTEGER);
}
}
throw invalidConversionException(object, SqlBaseType.INTEGER);
} | @Test(expected = IllegalArgumentException.class)
public void shouldNotConvertIncorrectStringToInt() {
JsonSerdeUtils.toInteger(JsonNodeFactory.instance.textNode("1!"));
} |
Map<String, Object> getOriginals() {
return originalsWithPrefix("");
} | @Test
public void shouldReturnDifferentMapOnEachCallToOriginals() {
// Given:
final KsqlRestConfig config = new KsqlRestConfig(ImmutableMap.<String, Object>builder()
.putAll(MIN_VALID_CONFIGS)
.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, "10")
.build()
);
final Map<String, Object> originals1 = config.getOriginals();
final Map<String, Object> originals2 = config.getOriginals();
// When:
originals1.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, "99");
// Then:
assertThat(originals2.get(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG), is("10"));
} |
@Override
public void reset() {
this.size = 0;
this.val0 = 0;
finishCopy().reset();
} | @Test
public void testReset() {
final IntHashCounter intCounter = new AtomicIntHashCounter();
Assert.assertEquals(intCounter.size(), 0);
final int testTimes = 10240;
for (int i = 0; i < testTimes; i++) {
Assert.assertEquals(1, intCounter.addAndGet(i, 1));
}
Assert.assertEquals(testTimes, intCounter.size());
intCounter.reset();
for (int i = 1; i < testTimes; i++) {
Assert.assertEquals(0, intCounter.get(i));
}
Assert.assertEquals(intCounter.size(), 0);
for (int i = 0; i < testTimes; i++) {
Assert.assertEquals(i + 10, intCounter.addAndGet(i, i + 10));
}
Assert.assertEquals(testTimes, intCounter.size());
for (int i = 0; i < testTimes; i++) {
Assert.assertEquals(i + 10, intCounter.get(i));
}
Assert.assertEquals(testTimes, intCounter.size());
} |
@Override
public RemotingCommand processRequest(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
switch (request.getCode()) {
case RequestCode.UPDATE_AND_CREATE_TOPIC:
return this.updateAndCreateTopic(ctx, request);
case RequestCode.UPDATE_AND_CREATE_TOPIC_LIST:
return this.updateAndCreateTopicList(ctx, request);
case RequestCode.DELETE_TOPIC_IN_BROKER:
return this.deleteTopic(ctx, request);
case RequestCode.GET_ALL_TOPIC_CONFIG:
return this.getAllTopicConfig(ctx, request);
case RequestCode.GET_TIMER_CHECK_POINT:
return this.getTimerCheckPoint(ctx, request);
case RequestCode.GET_TIMER_METRICS:
return this.getTimerMetrics(ctx, request);
case RequestCode.UPDATE_BROKER_CONFIG:
return this.updateBrokerConfig(ctx, request);
case RequestCode.GET_BROKER_CONFIG:
return this.getBrokerConfig(ctx, request);
case RequestCode.UPDATE_COLD_DATA_FLOW_CTR_CONFIG:
return this.updateColdDataFlowCtrGroupConfig(ctx, request);
case RequestCode.REMOVE_COLD_DATA_FLOW_CTR_CONFIG:
return this.removeColdDataFlowCtrGroupConfig(ctx, request);
case RequestCode.GET_COLD_DATA_FLOW_CTR_INFO:
return this.getColdDataFlowCtrInfo(ctx);
case RequestCode.SET_COMMITLOG_READ_MODE:
return this.setCommitLogReadaheadMode(ctx, request);
case RequestCode.SEARCH_OFFSET_BY_TIMESTAMP:
return this.searchOffsetByTimestamp(ctx, request);
case RequestCode.GET_MAX_OFFSET:
return this.getMaxOffset(ctx, request);
case RequestCode.GET_MIN_OFFSET:
return this.getMinOffset(ctx, request);
case RequestCode.GET_EARLIEST_MSG_STORETIME:
return this.getEarliestMsgStoretime(ctx, request);
case RequestCode.GET_BROKER_RUNTIME_INFO:
return this.getBrokerRuntimeInfo(ctx, request);
case RequestCode.LOCK_BATCH_MQ:
return this.lockBatchMQ(ctx, request);
case RequestCode.UNLOCK_BATCH_MQ:
return this.unlockBatchMQ(ctx, request);
case RequestCode.UPDATE_AND_CREATE_SUBSCRIPTIONGROUP:
return this.updateAndCreateSubscriptionGroup(ctx, request);
case RequestCode.GET_ALL_SUBSCRIPTIONGROUP_CONFIG:
return this.getAllSubscriptionGroup(ctx, request);
case RequestCode.DELETE_SUBSCRIPTIONGROUP:
return this.deleteSubscriptionGroup(ctx, request);
case RequestCode.GET_TOPIC_STATS_INFO:
return this.getTopicStatsInfo(ctx, request);
case RequestCode.GET_CONSUMER_CONNECTION_LIST:
return this.getConsumerConnectionList(ctx, request);
case RequestCode.GET_PRODUCER_CONNECTION_LIST:
return this.getProducerConnectionList(ctx, request);
case RequestCode.GET_ALL_PRODUCER_INFO:
return this.getAllProducerInfo(ctx, request);
case RequestCode.GET_CONSUME_STATS:
return this.getConsumeStats(ctx, request);
case RequestCode.GET_ALL_CONSUMER_OFFSET:
return this.getAllConsumerOffset(ctx, request);
case RequestCode.GET_ALL_DELAY_OFFSET:
return this.getAllDelayOffset(ctx, request);
case RequestCode.GET_ALL_MESSAGE_REQUEST_MODE:
return this.getAllMessageRequestMode(ctx, request);
case RequestCode.INVOKE_BROKER_TO_RESET_OFFSET:
return this.resetOffset(ctx, request);
case RequestCode.INVOKE_BROKER_TO_GET_CONSUMER_STATUS:
return this.getConsumerStatus(ctx, request);
case RequestCode.QUERY_TOPIC_CONSUME_BY_WHO:
return this.queryTopicConsumeByWho(ctx, request);
case RequestCode.QUERY_TOPICS_BY_CONSUMER:
return this.queryTopicsByConsumer(ctx, request);
case RequestCode.QUERY_SUBSCRIPTION_BY_CONSUMER:
return this.querySubscriptionByConsumer(ctx, request);
case RequestCode.QUERY_CONSUME_TIME_SPAN:
return this.queryConsumeTimeSpan(ctx, request);
case RequestCode.GET_SYSTEM_TOPIC_LIST_FROM_BROKER:
return this.getSystemTopicListFromBroker(ctx, request);
case RequestCode.CLEAN_EXPIRED_CONSUMEQUEUE:
return this.cleanExpiredConsumeQueue();
case RequestCode.DELETE_EXPIRED_COMMITLOG:
return this.deleteExpiredCommitLog();
case RequestCode.CLEAN_UNUSED_TOPIC:
return this.cleanUnusedTopic();
case RequestCode.GET_CONSUMER_RUNNING_INFO:
return this.getConsumerRunningInfo(ctx, request);
case RequestCode.QUERY_CORRECTION_OFFSET:
return this.queryCorrectionOffset(ctx, request);
case RequestCode.CONSUME_MESSAGE_DIRECTLY:
return this.consumeMessageDirectly(ctx, request);
case RequestCode.CLONE_GROUP_OFFSET:
return this.cloneGroupOffset(ctx, request);
case RequestCode.VIEW_BROKER_STATS_DATA:
return ViewBrokerStatsData(ctx, request);
case RequestCode.GET_BROKER_CONSUME_STATS:
return fetchAllConsumeStatsInBroker(ctx, request);
case RequestCode.QUERY_CONSUME_QUEUE:
return queryConsumeQueue(ctx, request);
case RequestCode.UPDATE_AND_GET_GROUP_FORBIDDEN:
return this.updateAndGetGroupForbidden(ctx, request);
case RequestCode.GET_SUBSCRIPTIONGROUP_CONFIG:
return this.getSubscriptionGroup(ctx, request);
case RequestCode.UPDATE_AND_CREATE_ACL_CONFIG:
return updateAndCreateAccessConfig(ctx, request);
case RequestCode.DELETE_ACL_CONFIG:
return deleteAccessConfig(ctx, request);
case RequestCode.GET_BROKER_CLUSTER_ACL_INFO:
return getBrokerAclConfigVersion(ctx, request);
case RequestCode.UPDATE_GLOBAL_WHITE_ADDRS_CONFIG:
return updateGlobalWhiteAddrsConfig(ctx, request);
case RequestCode.RESUME_CHECK_HALF_MESSAGE:
return resumeCheckHalfMessage(ctx, request);
case RequestCode.GET_TOPIC_CONFIG:
return getTopicConfig(ctx, request);
case RequestCode.UPDATE_AND_CREATE_STATIC_TOPIC:
return this.updateAndCreateStaticTopic(ctx, request);
case RequestCode.NOTIFY_MIN_BROKER_ID_CHANGE:
return this.notifyMinBrokerIdChange(ctx, request);
case RequestCode.EXCHANGE_BROKER_HA_INFO:
return this.updateBrokerHaInfo(ctx, request);
case RequestCode.GET_BROKER_HA_STATUS:
return this.getBrokerHaStatus(ctx, request);
case RequestCode.RESET_MASTER_FLUSH_OFFSET:
return this.resetMasterFlushOffset(ctx, request);
case RequestCode.GET_BROKER_EPOCH_CACHE:
return this.getBrokerEpochCache(ctx, request);
case RequestCode.NOTIFY_BROKER_ROLE_CHANGED:
return this.notifyBrokerRoleChanged(ctx, request);
case RequestCode.AUTH_CREATE_USER:
return this.createUser(ctx, request);
case RequestCode.AUTH_UPDATE_USER:
return this.updateUser(ctx, request);
case RequestCode.AUTH_DELETE_USER:
return this.deleteUser(ctx, request);
case RequestCode.AUTH_GET_USER:
return this.getUser(ctx, request);
case RequestCode.AUTH_LIST_USER:
return this.listUser(ctx, request);
case RequestCode.AUTH_CREATE_ACL:
return this.createAcl(ctx, request);
case RequestCode.AUTH_UPDATE_ACL:
return this.updateAcl(ctx, request);
case RequestCode.AUTH_DELETE_ACL:
return this.deleteAcl(ctx, request);
case RequestCode.AUTH_GET_ACL:
return this.getAcl(ctx, request);
case RequestCode.AUTH_LIST_ACL:
return this.listAcl(ctx, request);
default:
return getUnknownCmdResponse(ctx, request);
}
} | @Test
public void testProcessRequest_success() throws RemotingCommandException, UnknownHostException {
RemotingCommand request = createUpdateBrokerConfigCommand();
RemotingCommand response = adminBrokerProcessor.processRequest(handlerContext, request);
assertThat(response.getCode()).isEqualTo(ResponseCode.SUCCESS);
} |
@VisibleForTesting
static void instantiateGarbageCollectorMetrics(
MetricGroup metrics, List<GarbageCollectorMXBean> garbageCollectors) {
for (final GarbageCollectorMXBean garbageCollector : garbageCollectors) {
MetricGroup gcGroup = metrics.addGroup(garbageCollector.getName());
gcGroup.gauge("Count", garbageCollector::getCollectionCount);
Gauge<Long> timeGauge = gcGroup.gauge("Time", garbageCollector::getCollectionTime);
gcGroup.meter("TimeMsPerSecond", new MeterView(timeGauge));
}
Gauge<Long> totalGcTime =
() ->
garbageCollectors.stream()
.mapToLong(GarbageCollectorMXBean::getCollectionTime)
.sum();
Gauge<Long> totalGcCount =
() ->
garbageCollectors.stream()
.mapToLong(GarbageCollectorMXBean::getCollectionCount)
.sum();
MetricGroup allGroup = metrics.addGroup("All");
allGroup.gauge("Count", totalGcCount);
Gauge<Long> totalTime = allGroup.gauge("Time", totalGcTime);
allGroup.meter("TimeMsPerSecond", new MeterView(totalTime));
} | @Test
public void testGcMetricCompleteness() {
Map<String, InterceptingOperatorMetricGroup> addedGroups = new HashMap<>();
InterceptingOperatorMetricGroup gcGroup =
new InterceptingOperatorMetricGroup() {
@Override
public MetricGroup addGroup(String name) {
return addedGroups.computeIfAbsent(
name, k -> new InterceptingOperatorMetricGroup());
}
};
List<GarbageCollectorMXBean> garbageCollectors = new ArrayList<>();
garbageCollectors.add(new TestGcBean("gc1", 100, 500));
garbageCollectors.add(new TestGcBean("gc2", 50, 250));
MetricUtils.instantiateGarbageCollectorMetrics(gcGroup, garbageCollectors);
assertThat(addedGroups).containsOnlyKeys("gc1", "gc2", "All");
// Make sure individual collector metrics are correct
validateCollectorMetric(addedGroups.get("gc1"), 100, 500L);
validateCollectorMetric(addedGroups.get("gc2"), 50L, 250L);
// Make sure all/total collector metrics are correct
validateCollectorMetric(addedGroups.get("All"), 150L, 750L);
} |
public String toJsonString() {
ObjectMapper objectMapper = new ObjectMapper();
try {
return objectMapper.writeValueAsString(this);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
} | @Test
void toJsonString() throws JsonProcessingException {
ObjectMapper objectMapper = new ObjectMapper();
Instance instance = Instance.getInstance();
Map<String, Object> map = new HashMap<>();
Map<String, Object> mmap = new HashMap<>();
mmap.put("k", "v");
map.put("k", mmap);
instance.setMetadata(map);
instance.setControl(new Node.Endpoint("1.1.1.1", 888));
instance.setTransaction(new Node.Endpoint("2.2.2.2", 999));
assertEquals(objectMapper.writeValueAsString(instance), instance.toJsonString());
} |
public RejectState getRejectState() {
return rejectState;
} | @Test
public void testConstructor() {
RpcDeniedReply reply = new RpcDeniedReply(0, ReplyState.MSG_ACCEPTED,
RejectState.AUTH_ERROR, new VerifierNone());
Assert.assertEquals(0, reply.getXid());
Assert.assertEquals(RpcMessage.Type.RPC_REPLY, reply.getMessageType());
Assert.assertEquals(ReplyState.MSG_ACCEPTED, reply.getState());
Assert.assertEquals(RejectState.AUTH_ERROR, reply.getRejectState());
} |
@Override
public MapperResult getGroupIdList(MapperContext context) {
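// The default namespace id resolves to the empty string, so this selects group ids from the default tenant.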
String sql = "SELECT group_id FROM config_info WHERE tenant_id ='" + NamespaceUtil.getNamespaceDefaultId()
+ "' GROUP BY group_id LIMIT " + context.getStartRow() + "," + context.getPageSize();
return new MapperResult(sql, Collections.emptyList());
} | @Test
void testGetGroupIdList() {
MapperResult mapperResult = configInfoMapperByMySql.getGroupIdList(context);
assertEquals(mapperResult.getSql(),
"SELECT group_id FROM config_info WHERE tenant_id ='' GROUP BY group_id LIMIT " + startRow + "," + pageSize);
assertArrayEquals(mapperResult.getParamList().toArray(), emptyObjs);
} |
@Override
public void process(Exchange exchange) throws Exception {
Object payload = exchange.getMessage().getBody();
if (payload == null) {
return;
}
ProtobufSchema answer = computeIfAbsent(exchange);
if (answer != null) {
exchange.setProperty(SchemaHelper.CONTENT_SCHEMA, answer);
exchange.setProperty(SchemaHelper.CONTENT_SCHEMA_TYPE, SchemaType.PROTOBUF.type());
exchange.setProperty(SchemaHelper.CONTENT_CLASS, SchemaHelper.resolveContentClass(exchange, this.contentClass));
}
} | @Test
void shouldReadSchemaFromClasspathResource() throws Exception {
Exchange exchange = new DefaultExchange(camelContext);
exchange.setProperty(SchemaHelper.CONTENT_CLASS, Person.class.getName());
exchange.getMessage().setBody(person);
ProtobufSchemaResolver schemaResolver = new ProtobufSchemaResolver();
schemaResolver.process(exchange);
Assertions.assertNotNull(exchange.getProperty(SchemaHelper.CONTENT_SCHEMA));
Assertions.assertEquals(ProtobufSchema.class, exchange.getProperty(SchemaHelper.CONTENT_SCHEMA).getClass());
Assertions.assertEquals(SchemaType.PROTOBUF.type(), exchange.getProperty(SchemaHelper.CONTENT_SCHEMA_TYPE));
Assertions.assertEquals(Person.class.getName(), exchange.getProperty(SchemaHelper.CONTENT_CLASS));
} |
@Override
public String toString() {
return toString(true);
} | @Test
public void testToStringHumanNoShowQuota() {
long length = Long.MAX_VALUE;
long fileCount = 222222222;
long directoryCount = 33333;
long quota = 222256578;
long spaceConsumed = 55555;
long spaceQuota = Long.MAX_VALUE;
ContentSummary contentSummary = new ContentSummary.Builder().length(length).
fileCount(fileCount).directoryCount(directoryCount).quota(quota).
spaceConsumed(spaceConsumed).spaceQuota(spaceQuota).build();
String expected = " 32.6 K 211.9 M 8.0 E ";
assertEquals(expected, contentSummary.toString(false, true));
} |
@Override
public String getAuthenticationScheme() {
switch (event.getRequestSource()) {
case API_GATEWAY:
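// Precedence: Cognito user pool claims, then a custom authorizer, then IAM (access key) auth.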
if (event.getRequestContext().getAuthorizer() != null && event.getRequestContext().getAuthorizer().getClaims() != null
&& event.getRequestContext().getAuthorizer().getClaims().getSubject() != null) {
return AUTH_SCHEME_COGNITO_POOL;
} else if (event.getRequestContext().getAuthorizer() != null) {
return AUTH_SCHEME_CUSTOM;
} else if (event.getRequestContext().getIdentity().getAccessKey() != null) {
return AUTH_SCHEME_AWS_IAM;
} else {
return null;
}
case ALB:
if (event.getMultiValueHeaders().containsKey(ALB_ACESS_TOKEN_HEADER)) {
return AUTH_SCHEME_CUSTOM;
}
}
return null;
} | @Test
void authScheme_getAuthenticationScheme_userPool() {
AwsProxySecurityContext context = new AwsProxySecurityContext(null, REQUEST_COGNITO_USER_POOL);
assertNotNull(context.getAuthenticationScheme());
assertEquals(AUTH_SCHEME_COGNITO_POOL, context.getAuthenticationScheme());
} |
public static LinkKey canonicalLinkKey(Link link) {
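// Order endpoints lexicographically by element id so a link and its reverse produce the same key.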
String sn = link.src().elementId().toString();
String dn = link.dst().elementId().toString();
return sn.compareTo(dn) < 0 ?
linkKey(link.src(), link.dst()) : linkKey(link.dst(), link.src());
} | @Test
public void canonLinkKey() {
LinkKey fb = TopoUtils.canonicalLinkKey(LINK_FU_BAH);
LinkKey bf = TopoUtils.canonicalLinkKey(LINK_BAH_FU);
assertEquals("not canonical", fb, bf);
} |
@Deprecated
@Restricted(DoNotUse.class)
public static String resolve(ConfigurationContext context, String toInterpolate) {
return context.getSecretSourceResolver().resolve(toInterpolate);
} | @Test
public void resolve_nothing() {
assertThat(resolve("FOO"), equalTo("FOO"));
} |
public static HostInfo buildFromEndpoint(final String endPoint) {
if (Utils.isBlank(endPoint)) {
return null;
}
final String host = getHost(endPoint);
final Integer port = getPort(endPoint);
if (host == null || port == null) {
throw new ConfigException(
String.format("Error parsing host address %s. Expected format host:port.", endPoint)
);
}
return new HostInfo(host, port);
} | @Test
public void shouldReturnNullHostInfoForNullEndPoint() {
assertNull(HostInfo.buildFromEndpoint(null));
} |
public String getTopicName() {
return topicName == null ? INVALID_TOPIC_NAME : topicName;
} | @Test
public void shouldUseNameIfNoWIthClause() {
// When:
final TopicProperties properties = new TopicProperties.Builder()
.withName("name")
.withWithClause(Optional.empty(), Optional.of(1), Optional.empty(), Optional.of((long) 100))
.build();
// Then:
assertThat(properties.getTopicName(), equalTo("name"));
} |
@Override
public <VAgg> KTable<K, VAgg> aggregate(final Initializer<VAgg> initializer,
final Aggregator<? super K, ? super V, VAgg> adder,
final Aggregator<? super K, ? super V, VAgg> subtractor,
final Materialized<K, VAgg, KeyValueStore<Bytes, byte[]>> materialized) {
return aggregate(initializer, adder, subtractor, NamedInternal.empty(), materialized);
} | @Test
public void shouldNotAllowNullSubtractorOnAggregate() {
assertThrows(NullPointerException.class, () -> groupedTable.aggregate(
MockInitializer.STRING_INIT,
MockAggregator.TOSTRING_ADDER,
null,
Materialized.as("store")));
} |
public int getErrCode() {
return errCode;
} | @Test
void testConstructorWithErrorCodeAndCause() {
NacosRuntimeException exception = new NacosRuntimeException(NacosException.INVALID_PARAM,
new RuntimeException("test"));
assertEquals(NacosException.INVALID_PARAM, exception.getErrCode());
assertEquals("java.lang.RuntimeException: test", exception.getMessage());
assertTrue(exception.getCause() instanceof RuntimeException);
} |
Map<String, File> scanExistingUsers() throws IOException {
Map<String, File> users = new HashMap<>();
File[] userDirectories = listUserDirectories();
if (userDirectories != null) {
for (File directory : userDirectories) {
String userId = idStrategy.idFromFilename(directory.getName());
users.put(userId, directory);
}
}
addEmptyUsernameIfExists(users);
return users;
} | @Test
public void scanExistingUsersCaseSensitive() throws IOException {
File usersDirectory = createTestDirectory(getClass(), name);
UserIdMigrator migrator = new UserIdMigrator(usersDirectory, new IdStrategy.CaseSensitive());
Map<String, File> userMappings = migrator.scanExistingUsers();
assertThat(userMappings.keySet(), hasSize(3));
assertThat(userMappings.keySet(), hasItems("admin", "Fred", "Jane"));
} |
@Override
public Map<String, Set<String>> getAllIndexAliases() {
final Map<String, Set<String>> indexNamesAndAliases = indices.getIndexNamesAndAliases(getIndexWildcard());
// filter out the restored archives from the result set
return indexNamesAndAliases.entrySet().stream()
.filter(e -> isGraylogDeflectorIndex(e.getKey()))
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
} | @Test
public void nullIndexerDoesNotThrow() {
final Map<String, Set<String>> deflectorIndices = mongoIndexSet.getAllIndexAliases();
assertThat(deflectorIndices).isEmpty();
} |
public static Read read() {
return new Read(null, "", new Scan());
} | @Test
public void testReadingSDF() throws Exception {
final String table = tmpTable.getName();
final int numRows = 1001;
createAndWriteData(table, numRows);
runReadTestLength(HBaseIO.read().withConfiguration(conf).withTableId(table), true, numRows);
} |
@Nullable
public Float getFloatValue(@FloatFormat final int formatType,
@IntRange(from = 0) final int offset) {
if ((offset + getTypeLen(formatType)) > size()) return null;
switch (formatType) {
case FORMAT_SFLOAT -> {
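// IEEE 11073 SFLOAT reserved values: 0x07FE = +INFINITY, 0x07FF/0x0800/0x0801 = NaN/NRes/reserved, 0x0802 = -INFINITY.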
if (mValue[offset + 1] == 0x07 && mValue[offset] == (byte) 0xFE)
return Float.POSITIVE_INFINITY;
if ((mValue[offset + 1] == 0x07 && mValue[offset] == (byte) 0xFF) ||
(mValue[offset + 1] == 0x08 && mValue[offset] == 0x00) ||
(mValue[offset + 1] == 0x08 && mValue[offset] == 0x01))
return Float.NaN;
if (mValue[offset + 1] == 0x08 && mValue[offset] == 0x02)
return Float.NEGATIVE_INFINITY;
return bytesToFloat(mValue[offset], mValue[offset + 1]);
}
case FORMAT_FLOAT -> {
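// IEEE 11073 FLOAT reserved values (exponent byte 0x00): 0x007FFFFE = +INFINITY, 0x007FFFFF/0x00800000/0x00800001 = NaN/NRes/reserved, 0x00800002 = -INFINITY.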
if (mValue[offset + 3] == 0x00) {
if (mValue[offset + 2] == 0x7F && mValue[offset + 1] == (byte) 0xFF) {
if (mValue[offset] == (byte) 0xFE)
return Float.POSITIVE_INFINITY;
if (mValue[offset] == (byte) 0xFF)
return Float.NaN;
} else if (mValue[offset + 2] == (byte) 0x80 && mValue[offset + 1] == 0x00) {
if (mValue[offset] == 0x00 || mValue[offset] == 0x01)
return Float.NaN;
if (mValue[offset] == 0x02)
return Float.NEGATIVE_INFINITY;
}
}
return bytesToFloat(mValue[offset], mValue[offset + 1],
mValue[offset + 2], mValue[offset + 3]);
}
}
return null;
} | @Test
public void setValue_FLOAT_positiveInfinity() {
final MutableData data = new MutableData(new byte[4]);
data.setValue(Float.POSITIVE_INFINITY, Data.FORMAT_FLOAT, 0);
final float value = data.getFloatValue(Data.FORMAT_FLOAT, 0);
assertEquals(Float.POSITIVE_INFINITY, value, 0.00);
} |
@Override
public KTable<Windowed<K>, V> aggregate(final Initializer<V> initializer) {
return aggregate(initializer, Materialized.with(null, null));
} | @Test
public void slidingWindowAggregateStreamsTest() {
final KTable<Windowed<String>, String> customers = windowedCogroupedStream.aggregate(
MockInitializer.STRING_INIT, Materialized.with(Serdes.String(), Serdes.String()));
customers.toStream().to(OUTPUT);
try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
final TestInputTopic<String, String> testInputTopic = driver.createInputTopic(
TOPIC, new StringSerializer(), new StringSerializer());
final TestOutputTopic<Windowed<String>, String> testOutputTopic = driver.createOutputTopic(
OUTPUT, new TimeWindowedDeserializer<>(new StringDeserializer(), WINDOW_SIZE_MS), new StringDeserializer());
testInputTopic.pipeInput("k1", "A", 500);
testInputTopic.pipeInput("k2", "A", 500);
testInputTopic.pipeInput("k2", "A", 501);
testInputTopic.pipeInput("k1", "A", 502);
testInputTopic.pipeInput("k1", "B", 503);
testInputTopic.pipeInput("k2", "B", 503);
testInputTopic.pipeInput("k2", "B", 504);
testInputTopic.pipeInput("k1", "B", 504);
final List<TestRecord<Windowed<String>, String>> results = testOutputTopic.readRecordsToList();
final List<TestRecord<Windowed<String>, String>> expected = new LinkedList<>();
// k1-A-500
expected.add(new TestRecord<>(new Windowed<>("k1", new TimeWindow(0L, 500L)), "0+A", null, 500L));
// k2-A-500
expected.add(new TestRecord<>(new Windowed<>("k2", new TimeWindow(0L, 500L)), "0+A", null, 500L));
// k2-A-501
expected.add(new TestRecord<>(new Windowed<>("k2", new TimeWindow(501L, 1001L)), "0+A", null, 501L));
expected.add(new TestRecord<>(new Windowed<>("k2", new TimeWindow(1L, 501L)), "0+A+A", null, 501L));
// k1-A-502
expected.add(new TestRecord<>(new Windowed<>("k1", new TimeWindow(501L, 1001L)), "0+A", null, 502L));
expected.add(new TestRecord<>(new Windowed<>("k1", new TimeWindow(2L, 502L)), "0+A+A", null, 502L));
// k1-B-503
expected.add(new TestRecord<>(new Windowed<>("k1", new TimeWindow(501L, 1001L)), "0+A+B", null, 503L));
expected.add(new TestRecord<>(new Windowed<>("k1", new TimeWindow(503L, 1003L)), "0+B", null, 503L));
expected.add(new TestRecord<>(new Windowed<>("k1", new TimeWindow(3L, 503L)), "0+A+A+B", null, 503L));
// k2-B-503
expected.add(new TestRecord<>(new Windowed<>("k2", new TimeWindow(501L, 1001L)), "0+A+B", null, 503L));
expected.add(new TestRecord<>(new Windowed<>("k2", new TimeWindow(502L, 1002)), "0+B", null, 503L));
expected.add(new TestRecord<>(new Windowed<>("k2", new TimeWindow(3L, 503L)), "0+A+A+B", null, 503L));
// k2-B-504
expected.add(new TestRecord<>(new Windowed<>("k2", new TimeWindow(502L, 1002L)), "0+B+B", null, 504L));
expected.add(new TestRecord<>(new Windowed<>("k2", new TimeWindow(501L, 1001L)), "0+A+B+B", null, 504L));
expected.add(new TestRecord<>(new Windowed<>("k2", new TimeWindow(504L, 1004L)), "0+B", null, 504L));
expected.add(new TestRecord<>(new Windowed<>("k2", new TimeWindow(4L, 504L)), "0+A+A+B+B", null, 504L));
// k1-B-504
expected.add(new TestRecord<>(new Windowed<>("k1", new TimeWindow(503L, 1003L)), "0+B+B", null, 504L));
expected.add(new TestRecord<>(new Windowed<>("k1", new TimeWindow(501L, 1001L)), "0+A+B+B", null, 504L));
expected.add(new TestRecord<>(new Windowed<>("k1", new TimeWindow(504L, 1004L)), "0+B", null, 504L));
expected.add(new TestRecord<>(new Windowed<>("k1", new TimeWindow(4L, 504L)), "0+A+A+B+B", null, 504L));
assertEquals(expected, results);
}
} |
@Override
public void handleTenantMenu(TenantMenuHandler handler) {
// If the tenant feature is disabled, do nothing
if (isTenantDisable()) {
return;
}
// Fetch the tenant, then resolve its menu ids
TenantDO tenant = getTenant(TenantContextHolder.getRequiredTenantId());
Set<Long> menuIds;
if (isSystemTenant(tenant)) { // system tenant: gets the full menu set
menuIds = CollectionUtils.convertSet(menuService.getMenuList(), MenuDO::getId);
} else {
menuIds = tenantPackageService.getTenantPackage(tenant.getPackageId()).getMenuIds();
}
// Invoke the handler
handler.handle(menuIds);
} | @Test
public void testHandleTenantMenu_disable() {
// Prepare arguments
TenantMenuHandler handler = mock(TenantMenuHandler.class);
// Mock the disabled state
when(tenantProperties.getEnable()).thenReturn(false);
// Invoke
tenantService.handleTenantMenu(handler);
// Verify
verify(handler, never()).handle(any());
} |
@Override
public HttpResponseOutputStream<FileEntity> write(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException {
final String uploadUri;
FileUploadPartEntity uploadPartEntity = null;
if(StringUtils.isBlank(status.getUrl())) {
uploadPartEntity = new BrickUploadFeature(session, this).startUpload(file);
uploadUri = uploadPartEntity.getUploadUri();
}
else {
uploadUri = status.getUrl();
}
final HttpResponseOutputStream<FileEntity> stream = this.write(file, status, new DelayedHttpEntityCallable<FileEntity>(file) {
@Override
public FileEntity call(final HttpEntity entity) throws BackgroundException {
try {
final HttpPut request = new HttpPut(uploadUri);
request.setEntity(entity);
request.setHeader(HttpHeaders.CONTENT_TYPE, MimeTypeService.DEFAULT_CONTENT_TYPE);
final HttpResponse response = session.getClient().execute(request);
// Validate response
try {
switch(response.getStatusLine().getStatusCode()) {
case HttpStatus.SC_OK:
if(log.isInfoEnabled()) {
log.info(String.format("Received response %s for part number %d", response, status.getPart()));
}
// Upload complete
if(response.containsHeader("ETag")) {
if(file.getType().contains(Path.Type.encrypted)) {
log.warn(String.format("Skip checksum verification for %s with client side encryption enabled", file));
}
else {
if(HashAlgorithm.md5.equals(status.getChecksum().algorithm)) {
final Checksum etag = Checksum.parse(StringUtils.remove(response.getFirstHeader("ETag").getValue(), '"'));
if(!status.getChecksum().equals(etag)) {
throw new ChecksumException(MessageFormat.format(LocaleFactory.localizedString("Upload {0} failed", "Error"), file.getName()),
MessageFormat.format("Mismatch between {0} hash {1} of uploaded data and ETag {2} returned by the server",
etag.algorithm.toString(), status.getChecksum().hash, etag.hash));
}
}
}
}
else {
if(log.isDebugEnabled()) {
log.debug("No ETag header in response available");
}
}
return null;
default:
EntityUtils.updateEntity(response, new BufferedHttpEntity(response.getEntity()));
throw new DefaultHttpResponseExceptionMappingService().map("Upload {0} failed",
new HttpResponseException(response.getStatusLine().getStatusCode(), response.getStatusLine().getReasonPhrase()), file);
}
}
finally {
EntityUtils.consume(response.getEntity());
}
}
catch(HttpResponseException e) {
throw new DefaultHttpResponseExceptionMappingService().map("Upload {0} failed", e, file);
}
catch(IOException e) {
throw new DefaultIOExceptionMappingService().map("Upload {0} failed", e, file);
}
}
@Override
public long getContentLength() {
return status.getLength();
}
});
if(StringUtils.isBlank(status.getUrl())) {
final String ref = uploadPartEntity.getRef();
return new HttpResponseOutputStream<FileEntity>(new ProxyOutputStream(stream),
new BrickAttributesFinderFeature(session), status) {
private final AtomicBoolean close = new AtomicBoolean();
@Override
public FileEntity getStatus() throws BackgroundException {
return stream.getStatus();
}
@Override
public void close() throws IOException {
if(close.get()) {
log.warn(String.format("Skip double close of stream %s", this));
return;
}
super.close();
try {
new BrickUploadFeature(session, BrickWriteFeature.this)
.completeUpload(file, ref, status, Collections.singletonList(status));
}
catch(BackgroundException e) {
throw new IOException(e.getMessage(), e);
}
finally {
close.set(true);
}
}
};
}
return stream;
} | @Test
public void testWriteSinglePart() throws Exception {
final BrickWriteFeature feature = new BrickWriteFeature(session);
final Path container = new Path("/", EnumSet.of(Path.Type.directory, Path.Type.volume));
final long containerTimestamp = new BrickAttributesFinderFeature(session).find(container).getModificationDate();
final Path file = new Path(container, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
final byte[] content = RandomUtils.nextBytes(4 * 1024 * 1024);
final TransferStatus status = new TransferStatus().withLength(content.length);
final HttpResponseOutputStream<FileEntity> out = feature.write(file, status, new DisabledConnectionCallback());
final ByteArrayInputStream in = new ByteArrayInputStream(content);
final TransferStatus progress = new TransferStatus();
final BytecountStreamListener count = new BytecountStreamListener();
new StreamCopier(new TransferStatus(), progress).withListener(count).transfer(in, out);
assertEquals(content.length, count.getSent());
in.close();
out.close();
assertNull(out.getStatus());
assertTrue(new BrickFindFeature(session).find(file));
final PathAttributes attributes = new BrickAttributesFinderFeature(session).find(file);
assertEquals(containerTimestamp, new BrickAttributesFinderFeature(session).find(container).getModificationDate());
assertEquals(content.length, attributes.getSize());
final byte[] compare = new byte[content.length];
final InputStream stream = new BrickReadFeature(session).read(file, new TransferStatus().withLength(content.length), new DisabledConnectionCallback());
IOUtils.readFully(stream, compare);
stream.close();
assertArrayEquals(content, compare);
new BrickDeleteFeature(session).delete(Collections.singletonList(file), new DisabledLoginCallback(), new Delete.DisabledCallback());
} |
@Override
public List<String> getConfig(RedisClusterNode node, String pattern) {
RedisClient entry = getEntry(node);
RFuture<List<String>> f = executorService.writeAsync(entry, StringCodec.INSTANCE, RedisCommands.CONFIG_GET, pattern);
return syncFuture(f);
} | @Test
public void testGetConfig() {
RedisClusterNode master = getFirstMaster();
List<String> config = connection.getConfig(master, "*");
assertThat(config.size()).isGreaterThan(20);
} |
@Override
public Object getParameter(String key) {
return param.get(key);
} | @Test
void getParameter() {
ConfigResponse configResponse = new ConfigResponse();
String dataId = "id";
String group = "group";
String tenant = "n";
String content = "abc";
String custom = "custom";
configResponse.setContent(content);
configResponse.setDataId(dataId);
configResponse.setGroup(group);
configResponse.setTenant(tenant);
configResponse.putParameter(custom, custom);
assertEquals(dataId, configResponse.getParameter("dataId"));
assertEquals(group, configResponse.getParameter("group"));
assertEquals(tenant, configResponse.getParameter("tenant"));
assertEquals(content, configResponse.getParameter("content"));
assertEquals(custom, configResponse.getParameter("custom"));
} |
@InvokeOnHeader(Web3jConstants.SHH_HAS_IDENTITY)
void shhHasIdentity(Message message) throws IOException {
String identityAddress = message.getHeader(Web3jConstants.ADDRESS, configuration::getAddress, String.class);
Request<?, ShhHasIdentity> request = web3j.shhHasIdentity(identityAddress);
setRequestId(message, request);
ShhHasIdentity response = request.send();
boolean hasError = checkForError(message, response);
if (!hasError) {
message.setBody(response.hasPrivateKeyForIdentity());
}
} | @Test
public void shhHasIdentityTest() throws Exception {
ShhHasIdentity response = Mockito.mock(ShhHasIdentity.class);
Mockito.when(mockWeb3j.shhHasIdentity(any())).thenReturn(request);
Mockito.when(request.send()).thenReturn(response);
Mockito.when(response.hasPrivateKeyForIdentity()).thenReturn(Boolean.TRUE);
Exchange exchange = createExchangeWithBodyAndHeader(null, OPERATION, Web3jConstants.SHH_HAS_IDENTITY);
template.send(exchange);
Boolean body = exchange.getIn().getBody(Boolean.class);
assertTrue(body);
} |
public Bson parseSingleExpression(final String filterExpression, final List<EntityAttribute> attributes) {
final Filter filter = singleFilterParser.parseSingleExpression(filterExpression, attributes);
return filter.toBson();
} | @Test
void parsesFilterExpressionCorrectlyForStringType() {
assertEquals(Filters.eq("owner", "baldwin"),
toTest.parseSingleExpression("owner:baldwin",
List.of(EntityAttribute.builder()
.id("owner")
.title("Owner")
.filterable(true)
.build())
));
} |
public static void analyze(CreateTableStmt statement, ConnectContext context) {
final TableName tableNameObject = statement.getDbTbl();
MetaUtils.normalizationTableName(context, tableNameObject);
final String catalogName = tableNameObject.getCatalog();
MetaUtils.checkCatalogExistAndReport(catalogName);
final String tableName = tableNameObject.getTbl();
FeNameFormat.checkTableName(tableName);
Database db = MetaUtils.getDatabase(catalogName, tableNameObject.getDb());
if (statement instanceof CreateTemporaryTableStmt) {
analyzeTemporaryTable(statement, context, catalogName, db, tableName);
} else {
if (db.getTable(tableName) != null && !statement.isSetIfNotExists()) {
ErrorReport.reportSemanticException(ErrorCode.ERR_TABLE_EXISTS_ERROR, tableName);
}
}
analyzeEngineName(statement, catalogName);
analyzeCharsetName(statement);
preCheckColumnRef(statement);
analyzeKeysDesc(statement);
analyzeSortKeys(statement);
analyzePartitionDesc(statement);
analyzeDistributionDesc(statement);
analyzeColumnRef(statement, catalogName);
if (statement.isHasGeneratedColumn()) {
analyzeGeneratedColumn(statement, context);
}
analyzeIndexDefs(statement);
} | @Test
public void testMaxColumn() throws Exception {
Config.max_column_number_per_table = 1;
String sql = "CREATE TABLE test_create_table_db.starrocks_test_table\n" +
"(\n" +
" `tag_id` bigint not null,\n" +
" `tag_name` string\n" +
") DUPLICATE KEY(`tag_id`)\n" +
"PARTITION BY (`tag_id`)\n" +
"DISTRIBUTED BY HASH(`tag_id`)\n" +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
")\n";
expectedEx.expect(SemanticException.class);
expectedEx.expectMessage("max_column_number_per_table");
CreateTableStmt createTableStmt = (CreateTableStmt) com.starrocks.sql.parser.SqlParser
.parse(sql, connectContext.getSessionVariable().getSqlMode()).get(0);
CreateTableAnalyzer.analyze(createTableStmt, connectContext);
} |
@Override
public void encode(final ChannelHandlerContext context, final DatabasePacket message, final ByteBuf out) {
boolean isIdentifierPacket = message instanceof PostgreSQLIdentifierPacket;
if (isIdentifierPacket) {
prepareMessageHeader(out, ((PostgreSQLIdentifierPacket) message).getIdentifier().getValue());
}
PostgreSQLPacketPayload payload = new PostgreSQLPacketPayload(out, context.channel().attr(CommonConstants.CHARSET_ATTRIBUTE_KEY).get());
try {
message.write(payload);
// CHECKSTYLE:OFF
} catch (final RuntimeException ex) {
// CHECKSTYLE:ON
payload.getByteBuf().resetWriterIndex();
// TODO consider what severity to use
OpenGaussErrorResponsePacket errorResponsePacket = new OpenGaussErrorResponsePacket(
PostgreSQLMessageSeverityLevel.ERROR, PostgreSQLVendorError.SYSTEM_ERROR.getSqlState().getValue(), ex.getMessage());
isIdentifierPacket = true;
prepareMessageHeader(out, errorResponsePacket.getIdentifier().getValue());
errorResponsePacket.write(payload);
} finally {
if (isIdentifierPacket) {
updateMessageLength(out);
}
}
} | @Test
void assertEncodePostgreSQLPacket() {
PostgreSQLPacket packet = mock(PostgreSQLPacket.class);
new OpenGaussPacketCodecEngine().encode(context, packet, byteBuf);
verify(packet).write(any(PostgreSQLPacketPayload.class));
} |
@Override
public boolean isTypeOf(Class<?> type) {
checkNotNull(type);
return id.isTypeOf(type);
} | @Test
public void testTypeOf() {
ContinuousResource continuous = Resources.continuous(D1, P1, Bandwidth.class)
.resource(BW1.bps());
assertThat(continuous.isTypeOf(DeviceId.class), is(false));
assertThat(continuous.isTypeOf(PortNumber.class), is(false));
assertThat(continuous.isTypeOf(Bandwidth.class), is(true));
} |
@Override
public int compare(InetSocketAddress addr1, InetSocketAddress addr2) {
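// Resolved addresses sort before unresolved ones; among resolved addresses, the preferred address type wins.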
if (addr1.equals(addr2)) {
return 0;
}
if (!addr1.isUnresolved() && !addr2.isUnresolved()) {
if (addr1.getAddress().getClass() == addr2.getAddress().getClass()) {
return 0;
}
return preferredAddressType.isAssignableFrom(addr1.getAddress().getClass()) ? -1 : 1;
}
if (addr1.isUnresolved() && addr2.isUnresolved()) {
return 0;
}
return addr1.isUnresolved() ? 1 : -1;
} | @Test
public void testCompareUnresolvedSimple() {
NameServerComparator comparator = new NameServerComparator(Inet4Address.class);
int x = comparator.compare(IPV4ADDRESS1, UNRESOLVED1);
int y = comparator.compare(UNRESOLVED1, IPV4ADDRESS1);
assertEquals(-1, x);
assertEquals(x, -y);
assertEquals(0, comparator.compare(IPV4ADDRESS1, IPV4ADDRESS1));
assertEquals(0, comparator.compare(UNRESOLVED1, UNRESOLVED1));
} |
protected String getNodeId(final Path file, final int chunksize) throws BackgroundException {
if(file.isRoot()) {
return ROOT_NODE_ID;
}
try {
final String type;
if(file.isDirectory()) {
type = "room:folder";
}
else {
type = "file";
}
// Top-level nodes only
int offset = 0;
NodeList nodes;
do {
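// Page through search results chunksize entries at a time until a short page signals the end.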
if(StringUtils.isNoneBlank(file.getParent().attributes().getVersionId())) {
nodes = new NodesApi(session.getClient()).searchNodes(
URIEncoder.encode(normalizer.normalize(file.getName()).toString()),
StringUtils.EMPTY, 0, Long.valueOf(file.getParent().attributes().getVersionId()),
String.format("type:eq:%s", type),
null, offset, chunksize, null);
}
else {
nodes = new NodesApi(session.getClient()).searchNodes(
URIEncoder.encode(normalizer.normalize(file.getName()).toString()),
StringUtils.EMPTY, -1, null,
String.format("type:eq:%s|parentPath:eq:%s/", type, file.getParent().isRoot() ? StringUtils.EMPTY : file.getParent().getAbsolute()),
null, offset, chunksize, null);
}
for(Node node : nodes.getItems()) {
// Case-insensitive
if(node.getName().equalsIgnoreCase(normalizer.normalize(file.getName()).toString())) {
if(log.isInfoEnabled()) {
log.info(String.format("Return node %s for file %s", node.getId(), file));
}
return this.cache(file, node.getId().toString());
}
}
offset += chunksize;
}
while(nodes.getItems().size() == chunksize);
throw new NotfoundException(file.getAbsolute());
}
catch(ApiException e) {
throw new SDSExceptionMappingService(this).map("Failure to read attributes of {0}", e, file);
}
} | @Test
public void getFileIdFile() throws Exception {
final SDSNodeIdProvider nodeid = new SDSNodeIdProvider(session);
final Path room = new SDSDirectoryFeature(session, nodeid).mkdir(new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory, Path.Type.volume)), new TransferStatus());
final String name = String.format("%s%s", new AlphanumericRandomStringService().random(), new AlphanumericRandomStringService().random());
final Path file = new SDSTouchFeature(session, nodeid).touch(new Path(room, name, EnumSet.of(Path.Type.file)), new TransferStatus());
nodeid.clear();
final String nodeId = nodeid.getNodeId(new Path(room, name, EnumSet.of(Path.Type.file)), 1);
assertNotNull(nodeId);
assertEquals(nodeId, nodeid.getNodeId(new Path(room.withAttributes(PathAttributes.EMPTY), name, EnumSet.of(Path.Type.file)), 1));
assertEquals(nodeId, nodeid.getNodeId(new Path(room, StringUtils.upperCase(name), EnumSet.of(Path.Type.file)), 1));
assertEquals(nodeId, nodeid.getNodeId(new Path(room, StringUtils.lowerCase(name), EnumSet.of(Path.Type.file)), 1));
try {
assertNull(nodeid.getNodeId(new Path(room, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), 1));
fail();
}
catch(NotfoundException e) {
// Expected
}
try {
assertNull(nodeid.getNodeId(new Path(room, name, EnumSet.of(Path.Type.directory)), 1));
fail();
}
catch(NotfoundException e) {
// Expected
}
new SDSDeleteFeature(session, nodeid).delete(Arrays.asList(file, room), new DisabledLoginCallback(), new Delete.DisabledCallback());
} |
private void stop(int numOfServicesStarted, boolean stopOnlyStartedServices) {
// stop in reverse order of start
Exception firstException = null;
List<Service> services = getServices();
for (int i = numOfServicesStarted - 1; i >= 0; i--) {
Service service = services.get(i);
if (LOG.isDebugEnabled()) {
LOG.debug("Stopping service #" + i + ": " + service);
}
STATE state = service.getServiceState();
// depending on the stop policy
if (state == STATE.STARTED
|| (!stopOnlyStartedServices && state == STATE.INITED)) {
Exception ex = ServiceOperations.stopQuietly(LOG, service);
if (ex != null && firstException == null) {
firstException = ex;
}
}
}
//after stopping all services, rethrow the first exception raised
if (firstException != null) {
throw ServiceStateException.convert(firstException);
}
} | @Test(timeout = 10000)
public void testAddUninitedChildInStop() throws Throwable {
CompositeService parent = new CompositeService("parent");
BreakableService child = new BreakableService();
parent.init(new Configuration());
parent.start();
parent.stop();
AddSiblingService.addChildToService(parent, child);
assertInState(STATE.NOTINITED, child);
} |
@Override
public LoadBalancer newLoadBalancer(final LoadBalancer.Helper helper) {
return new AbstractLoadBalancer(helper) {
@Override
protected AbstractReadyPicker newPicker(final List<LoadBalancer.Subchannel> list) {
return new RandomPicker(list);
}
};
} | @Test
public void testNewLoadBalancer() {
LoadBalancer.Helper helper = new UnitTestReadHelper();
final LoadBalancer loadBalancer = randomLoadBalancerProvider.newLoadBalancer(helper);
assertNotNull(loadBalancer);
} |
@Override
public ClientDetailsEntity saveNewClient(ClientDetailsEntity client) {
if (client.getId() != null) { // if it's not null, it's already been saved, this is an error
throw new IllegalArgumentException("Tried to save a new client with an existing ID: " + client.getId());
}
if (client.getRegisteredRedirectUri() != null) {
for (String uri : client.getRegisteredRedirectUri()) {
if (blacklistedSiteService.isBlacklisted(uri)) {
throw new IllegalArgumentException("Client URI is blacklisted: " + uri);
}
}
}
// assign a random clientid if it's empty
// NOTE: don't assign a random client secret without asking, since public clients have no secret
if (Strings.isNullOrEmpty(client.getClientId())) {
client = generateClientId(client);
}
// make sure that clients with the "refresh_token" grant type have the "offline_access" scope, and vice versa
ensureRefreshTokenConsistency(client);
// make sure we don't have both a JWKS and a JWKS URI
ensureKeyConsistency(client);
// check consistency when using HEART mode
checkHeartMode(client);
// timestamp this to right now
client.setCreatedAt(new Date());
// check the sector URI
checkSectorIdentifierUri(client);
ensureNoReservedScopes(client);
ClientDetailsEntity c = clientRepository.saveClient(client);
statsService.resetCache();
return c;
} | @Test(expected = IllegalArgumentException.class)
public void heartMode_authcode_invalidGrants() {
Mockito.when(config.isHeartMode()).thenReturn(true);
ClientDetailsEntity client = new ClientDetailsEntity();
Set<String> grantTypes = new LinkedHashSet<>();
grantTypes.add("authorization_code");
grantTypes.add("implicit");
grantTypes.add("client_credentials");
client.setGrantTypes(grantTypes);
client.setTokenEndpointAuthMethod(AuthMethod.PRIVATE_KEY);
client.setRedirectUris(Sets.newHashSet("https://foo.bar/"));
client.setJwksUri("https://foo.bar/jwks");
service.saveNewClient(client);
} |
public void incTopicPutSize(final String topic, final int size) {
this.statsTable.get(Stats.TOPIC_PUT_SIZE).addValue(topic, size, 1);
} | @Test
public void testIncTopicPutSize() {
brokerStatsManager.incTopicPutSize(TOPIC, 2);
assertThat(brokerStatsManager.getStatsItem(TOPIC_PUT_SIZE, TOPIC).getValue().doubleValue()).isEqualTo(2L);
} |
@Bean
public TimerRegistry timerRegistry(
TimerConfigurationProperties timerConfigurationProperties,
EventConsumerRegistry<TimerEvent> timerEventConsumerRegistry,
RegistryEventConsumer<Timer> timerRegistryEventConsumer,
@Qualifier("compositeTimerCustomizer") CompositeCustomizer<TimerConfigCustomizer> compositeTimerCustomizer,
@Autowired(required = false) MeterRegistry registry
) {
TimerRegistry timerRegistry = createTimerRegistry(timerConfigurationProperties, timerRegistryEventConsumer, compositeTimerCustomizer, registry);
registerEventConsumer(timerRegistry, timerEventConsumerRegistry, timerConfigurationProperties);
initTimerRegistry(timerRegistry, timerConfigurationProperties, compositeTimerCustomizer);
return timerRegistry;
} | @Test
public void shouldConfigureInstancesUsingDedicatedConfigs() {
InstanceProperties instanceProperties1 = new InstanceProperties()
.setMetricNames("resilience4j.timer.operations1")
.setOnFailureTagResolver(QualifiedClassNameOnFailureTagResolver.class)
.setEventConsumerBufferSize(1);
InstanceProperties instanceProperties2 = new InstanceProperties()
.setMetricNames("resilience4j.timer.operations2")
.setOnFailureTagResolver(FixedOnFailureTagResolver.class)
.setEventConsumerBufferSize(10);
TimerConfigurationProperties configurationProperties = new TimerConfigurationProperties();
configurationProperties.getInstances().put("backend1", instanceProperties1);
configurationProperties.getInstances().put("backend2", instanceProperties2);
configurationProperties.setTimerAspectOrder(200);
TimerConfiguration configuration = new TimerConfiguration();
DefaultEventConsumerRegistry<TimerEvent> eventConsumerRegistry = new DefaultEventConsumerRegistry<>();
TimerRegistry registry = configuration.timerRegistry(
configurationProperties, eventConsumerRegistry, new CompositeRegistryEventConsumer<>(emptyList()), new CompositeCustomizer<>(emptyList()), new SimpleMeterRegistry()
);
assertThat(configurationProperties.getTimerAspectOrder()).isEqualTo(200);
assertThat(registry.getAllTimers().count()).isEqualTo(2);
Timer timer1 = registry.timer("backend1");
assertThat(timer1).isNotNull();
assertThat(timer1.getTimerConfig().getMetricNames()).isEqualTo("resilience4j.timer.operations1");
assertThat(timer1.getTimerConfig().getOnFailureTagResolver()).isInstanceOf(QualifiedClassNameOnFailureTagResolver.class);
Timer timer2 = registry.timer("backend2");
assertThat(timer2).isNotNull();
assertThat(timer2.getTimerConfig().getMetricNames()).isEqualTo("resilience4j.timer.operations2");
assertThat(timer2.getTimerConfig().getOnFailureTagResolver()).isInstanceOf(FixedOnFailureTagResolver.class);
assertThat(eventConsumerRegistry.getAllEventConsumer()).hasSize(2);
} |
@Override
public void rotate(IndexSet indexSet) {
indexRotator.rotate(indexSet, this::shouldRotate);
} | @Test
public void testRotate() {
when(indices.numberOfMessages("name")).thenReturn(10L);
when(indexSet.getNewestIndex()).thenReturn("name");
when(indexSet.getConfig()).thenReturn(indexSetConfig);
when(indexSetConfig.rotationStrategyConfig()).thenReturn(MessageCountRotationStrategyConfig.create(5));
final MessageCountRotationStrategy strategy = createStrategy();
strategy.rotate(indexSet);
verify(indexSet, times(1)).cycle();
reset(indexSet);
} |
protected static byte rho(long x, int k) {
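// 1-based position of the first set bit of x after dropping the top k bucket-index bits;
// the OR'd sentinel bit caps the result at 65 - k when the remaining bits are all zero.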
return (byte) (Long.numberOfLeadingZeros((x << k) | (1 << (k - 1))) + 1);
} | @Test
public void testRhoL() {
assertEquals(49, AdaptiveCounting.rho(0L, 16));
assertEquals(48, AdaptiveCounting.rho(1L, 16));
assertEquals(47, AdaptiveCounting.rho(2L, 16));
assertEquals(1, AdaptiveCounting.rho(0x80008000L, 32));
assertEquals(55, AdaptiveCounting.rho(0L, 10));
assertEquals(54, AdaptiveCounting.rho(1L, 10));
assertEquals(53, AdaptiveCounting.rho(2L, 10));
assertEquals(1, AdaptiveCounting.rho(0x0020000000000000L, 10));
assertEquals(3, AdaptiveCounting.rho(0xDEA07EEFFEEDCAFEL, 15));
} |
@Override
public InternalCompletableFuture<ClientMessage> exceptionally(@Nonnull Function<Throwable, ? extends ClientMessage> fn) {
return super.exceptionally(new CallIdTrackingFunction(fn));
} | @Test
public void test_exceptionally() throws Exception {
CompletableFuture nextStage = invocationFuture.exceptionally((t) -> response);
invocationFuture.completeExceptionally(new IllegalStateException());
assertEquals(response, nextStage.get(ASSERT_TRUE_EVENTUALLY_TIMEOUT, TimeUnit.SECONDS));
verify(callIdSequence).forceNext();
verify(callIdSequence, times(2)).complete();
} |
@Override
public synchronized void reconnect() throws RemotingException {
client.reconnect();
} | @Test
void testReconnect() {
HeaderExchangeClient headerExchangeClient = new HeaderExchangeClient(Mockito.mock(Client.class), false);
Assertions.assertTrue(headerExchangeClient.shouldReconnect(URL.valueOf("localhost")));
Assertions.assertTrue(headerExchangeClient.shouldReconnect(URL.valueOf("localhost?reconnect=true")));
Assertions.assertTrue(headerExchangeClient.shouldReconnect(URL.valueOf("localhost?reconnect=tRue")));
Assertions.assertTrue(headerExchangeClient.shouldReconnect(URL.valueOf("localhost?reconnect=30000")));
Assertions.assertTrue(headerExchangeClient.shouldReconnect(URL.valueOf("localhost?reconnect=0")));
Assertions.assertTrue(headerExchangeClient.shouldReconnect(URL.valueOf("localhost?reconnect=-1")));
Assertions.assertFalse(headerExchangeClient.shouldReconnect(URL.valueOf("localhost?reconnect=false")));
Assertions.assertFalse(headerExchangeClient.shouldReconnect(URL.valueOf("localhost?reconnect=FALSE")));
} |
@Override
public SortedSet<IndexRange> find(DateTime begin, DateTime end) {
final DBQuery.Query query = DBQuery.or(
DBQuery.and(
DBQuery.notExists("start"), // "start" has been used by the old index ranges in MongoDB
DBQuery.lessThanEquals(IndexRange.FIELD_BEGIN, end.getMillis()),
DBQuery.greaterThanEquals(IndexRange.FIELD_END, begin.getMillis())
),
DBQuery.and(
DBQuery.notExists("start"), // "start" has been used by the old index ranges in MongoDB
DBQuery.lessThanEquals(IndexRange.FIELD_BEGIN, 0L),
DBQuery.greaterThanEquals(IndexRange.FIELD_END, 0L)
)
);
try (DBCursor<MongoIndexRange> indexRanges = collection.find(query)) {
return ImmutableSortedSet.copyOf(IndexRange.COMPARATOR, (Iterator<? extends IndexRange>) indexRanges);
}
} | @Test
@MongoDBFixtures("MongoIndexRangeServiceTest-LegacyIndexRanges.json")
public void findIgnoresLegacyIndexRanges() throws Exception {
when(indices.waitForRecovery("graylog_1")).thenReturn(HealthStatus.Green);
final DateTime begin = new DateTime(2015, 1, 1, 0, 0, DateTimeZone.UTC);
final DateTime end = new DateTime(2015, 2, 1, 0, 0, DateTimeZone.UTC);
final SortedSet<IndexRange> indexRanges = indexRangeService.find(begin, end);
assertThat(indexRanges).containsOnly(
MongoIndexRange.create(new ObjectId("55e0261a0cc6980000000003"), "graylog_1", new DateTime(2015, 1, 1, 0, 0, DateTimeZone.UTC), new DateTime(2015, 1, 2, 0, 0, DateTimeZone.UTC), new DateTime(2015, 1, 2, 0, 0, DateTimeZone.UTC), 42)
);
} |
public final void doesNotContainEntry(@Nullable Object key, @Nullable Object value) {
checkNoNeedToDisplayBothValues("entrySet()")
.that(checkNotNull(actual).entrySet())
.doesNotContain(immutableEntry(key, value));
} | @Test
public void doesNotContainEntry() {
ImmutableMap<String, String> actual = ImmutableMap.of("kurt", "kluever");
assertThat(actual).doesNotContainEntry("greg", "kick");
assertThat(actual).doesNotContainEntry(null, null);
assertThat(actual).doesNotContainEntry("kurt", null);
assertThat(actual).doesNotContainEntry(null, "kluever");
} |
@Override
public String render(String text) {
if (StringUtils.isBlank(text)) {
return "";
}
if (regex.isEmpty() || link.isEmpty()) {
Comment comment = new Comment();
comment.escapeAndAdd(text);
return comment.render();
}
try {
Matcher matcher = Pattern.compile(regex).matcher(text);
int start = 0;
Comment comment = new Comment();
while (hasMatch(matcher)) {
comment.escapeAndAdd(text.substring(start, matcher.start()));
comment.add(dynamicLink(matcher));
start = matcher.end();
}
comment.escapeAndAdd(text.substring(start));
return comment.render();
} catch (PatternSyntaxException e) {
LOGGER.warn("Illegal regular expression: {} - {}", regex, e.getMessage());
}
return text;
} | @Test
public void shouldReturnMatchedStringFromFirstGroupIfMultipleGroupsAreDefined() throws Exception {
String link = "http://mingle05/projects/cce/cards/${ID}";
String regex = "(\\d+)-(evo\\d+)";
trackingTool = new DefaultCommentRenderer(link, regex);
String result = trackingTool.render("1020-evo1: checkin message");
assertThat(result, is("<a href=\"" + "http://mingle05/projects/cce/cards/1020\" "
+ "target=\"story_tracker\">1020-evo1</a>: checkin message"));
} |
@Override
public List<Runnable> shutdownNow() {
return delegate.shutdownNow();
} | @Test
public void shutdownNow_delegates_to_executorService() {
underTest.shutdownNow();
inOrder.verify(executorService).shutdownNow();
inOrder.verifyNoMoreInteractions();
} |
public JmxCollector register() {
return register(PrometheusRegistry.defaultRegistry);
} | @Test
public void testSnakeCaseAttrName() throws Exception {
new JmxCollector(
"\n---\nrules:\n- pattern: `^hadoop<service=DataNode, name=DataNodeActivity-ams-hdd001-50010><>replace_block_op_min_time:`\n name: foo\n attrNameSnakeCase: true"
.replace('`', '"'))
.register(prometheusRegistry);
assertEquals(200, getSampleValue("foo", new String[] {}, new String[] {}), .001);
} |
@Override
public void writeBytes(Slice source)
{
writeBytes(source, 0, source.length());
} | @Test
public void testWriteBytesEmptyBytes()
{
OrcOutputBuffer orcOutputBuffer = createOrcOutputBuffer(new DataSize(256, KILOBYTE));
orcOutputBuffer.writeBytes(new byte[0]); // EMPTY_SLICE has null byte buffer
assertCompressedContent(orcOutputBuffer, new byte[0], ImmutableList.of());
orcOutputBuffer = createOrcOutputBuffer(new DataSize(256, KILOBYTE));
orcOutputBuffer.writeBytes(new byte[0], 0, 0);
assertCompressedContent(orcOutputBuffer, new byte[0], ImmutableList.of());
} |
@Override
public void apply(final Record<KOut, Change<ValueAndTimestamp<VOut>>> record) {
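// Temporarily attribute forwards to this listener's node, restoring the previous node afterwards.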
@SuppressWarnings("rawtypes") final ProcessorNode prev = context.currentNode();
context.setCurrentNode(myNode);
try {
context.forward(
record
.withValue(
new Change<>(
getValueOrNull(record.value().newValue),
getValueOrNull(record.value().oldValue),
record.value().isLatest))
.withTimestamp(
record.value().newValue != null ? record.value().newValue.timestamp()
: record.timestamp())
);
} finally {
context.setCurrentNode(prev);
}
} | @Test
public void shouldForwardParameterTimestampIfNewValueIsNull() {
@SuppressWarnings("unchecked")
final InternalProcessorContext<String, Change<String>> context = mock(InternalProcessorContext.class);
doNothing().when(context).forward(
new Record<>(
"key",
new Change<>(null, "oldValue"),
73L));
new TimestampedCacheFlushListener<>(context).apply(
new Record<>(
"key",
new Change<>(null, ValueAndTimestamp.make("oldValue", 21L)),
73L));
verify(context, times(2)).setCurrentNode(null);
} |
public void removePipelineMetrics(String pipelineId) {
registry.removeMatching(MetricFilter.startsWith(name(
pipelinesPrefix,
requireNonBlank(pipelineId, "pipelineId is blank")
)));
} | @Test
void removePipelineMetrics() {
final var metricRegistry = new MetricRegistry();
final var registry = PipelineMetricRegistry.create(metricRegistry, "PIPELINE", "RULE");
registry.registerPipelineMeter("pipeline-1", "executed");
registry.registerStageMeter("pipeline-1", 5, "executed");
registry.registerLocalRuleMeter("pipeline-1", 5, "rule-1", "executed");
registry.registerGlobalRuleMeter("rule-1", "executed");
assertThat(metricRegistry.getMeters().keySet()).containsExactlyInAnyOrder(
"RULE.rule-1.executed",
"RULE.rule-1.pipeline-1.5.executed",
"PIPELINE.pipeline-1.executed",
"PIPELINE.pipeline-1.stage.5.executed"
);
registry.removePipelineMetrics("pipeline-1");
assertThat(metricRegistry.getMeters().keySet()).containsExactlyInAnyOrder(
"RULE.rule-1.executed",
"RULE.rule-1.pipeline-1.5.executed"
);
registry.removeRuleMetrics("rule-1");
assertThat(metricRegistry.getMeters().keySet()).isEmpty();
} |
static String[] toConfigModelsPluginDir(String configModelsPluginDirString) {
return multiValueParameterStream(configModelsPluginDirString).toArray(String[]::new);
} | @Test
public void string_arrays_are_split_on_spaces() {
String[] parsed = toConfigModelsPluginDir("/home/vespa/foo /home/vespa/bar ");
assertEquals(2, parsed.length);
} |
public static String sha3(String hexInput) {
byte[] bytes = Numeric.hexStringToByteArray(hexInput);
byte[] result = sha3(bytes);
return Numeric.toHexString(result);
} | @Test
public void testSha3HashHex() {
assertEquals(
Hash.sha3(""),
("0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470"));
assertEquals(
Hash.sha3("68656c6c6f20776f726c64"),
("0x47173285a8d7341e5e972fc677286384f802f8ef42a5ec5f03bbfa254cb01fad"));
} |
@Override
protected void decode(final ChannelHandlerContext ctx, final ByteBuf in, final List<Object> out) {
MySQLPacketPayload payload = new MySQLPacketPayload(in, ctx.channel().attr(CommonConstants.CHARSET_ATTRIBUTE_KEY).get());
if (handshakeReceived) {
MySQLPacket responsePacket = decodeResponsePacket(payload);
if (responsePacket instanceof MySQLOKPacket) {
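// Authentication succeeded; this negotiation decoder is no longer needed in the pipeline.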
ctx.channel().pipeline().remove(this);
}
out.add(responsePacket);
} else {
out.add(decodeHandshakePacket(payload));
handshakeReceived = true;
}
} | @Test
void assertDecodeAuthMoreDataPacket() throws ReflectiveOperationException {
MySQLNegotiatePackageDecoder negotiatePackageDecoder = new MySQLNegotiatePackageDecoder();
Plugins.getMemberAccessor().set(MySQLNegotiatePackageDecoder.class.getDeclaredField("handshakeReceived"), negotiatePackageDecoder, true);
List<Object> actual = new LinkedList<>();
negotiatePackageDecoder.decode(channelHandlerContext, authMoreDataPacket(), actual);
assertPacketByType(actual, MySQLAuthMoreDataPacket.class);
} |
@Override
@CacheEvict(value = RedisKeyConstants.ROLE, key = "#updateReqVO.id")
@LogRecord(type = SYSTEM_ROLE_TYPE, subType = SYSTEM_ROLE_UPDATE_SUB_TYPE, bizNo = "{{#updateReqVO.id}}",
success = SYSTEM_ROLE_UPDATE_SUCCESS)
public void updateRole(RoleSaveReqVO updateReqVO) {
// 1.1 Validate that the role can be updated
RoleDO role = validateRoleForUpdate(updateReqVO.getId());
// 1.2 Validate that the role's unique fields (name, code) are not duplicated
validateRoleDuplicate(updateReqVO.getName(), updateReqVO.getCode(), updateReqVO.getId());
// 2. Persist the update to the database
RoleDO updateObj = BeanUtils.toBean(updateReqVO, RoleDO.class);
roleMapper.updateById(updateObj);
// 3. Record the operation-log context
LogRecordContext.putVariable("role", role);
} | @Test
public void testUpdateRole() {
// Mock data
RoleDO roleDO = randomPojo(RoleDO.class, o -> o.setType(RoleTypeEnum.CUSTOM.getType()));
roleMapper.insert(roleDO);
// Prepare arguments
Long id = roleDO.getId();
RoleSaveReqVO reqVO = randomPojo(RoleSaveReqVO.class, o -> o.setId(id));
// Invoke
roleService.updateRole(reqVO);
// Verify
RoleDO newRoleDO = roleMapper.selectById(id);
assertPojoEquals(reqVO, newRoleDO);
} |
public static URI generateSegmentMetadataURI(String segmentTarPath, String segmentName)
throws URISyntaxException {
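// Swap the tarball file name for "<segmentName>.metadata.tar.gz" in the same parent directory, preserving scheme, host and port.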
URI segmentTarURI = URI.create(segmentTarPath);
URI metadataTarGzFilePath = new URI(
segmentTarURI.getScheme(),
segmentTarURI.getUserInfo(),
segmentTarURI.getHost(),
segmentTarURI.getPort(),
new File(segmentTarURI.getPath()).getParentFile() + File.separator + segmentName
+ Constants.METADATA_TAR_GZ_FILE_EXT,
segmentTarURI.getQuery(),
segmentTarURI.getFragment());
return metadataTarGzFilePath;
} | @Test
public void testGenerateSegmentMetadataURI()
throws URISyntaxException {
assertEquals(
SegmentPushUtils.generateSegmentMetadataURI("/a/b/c/my-segment.tar.gz", "my-segment"),
URI.create("/a/b/c/my-segment.metadata.tar.gz"));
assertEquals(
SegmentPushUtils.generateSegmentMetadataURI("s3://a/b/c/my-segment.tar.gz", "my-segment"),
URI.create("s3://a/b/c/my-segment.metadata.tar.gz"));
assertEquals(
SegmentPushUtils.generateSegmentMetadataURI("hdfs://a/b/c/my-segment.tar.gz", "my-segment"),
URI.create("hdfs://a/b/c/my-segment.metadata.tar.gz"));
} |
public static boolean matchesInternalTopicFormat(final String topicName) {
return topicName.endsWith("-changelog") || topicName.endsWith("-repartition")
|| topicName.endsWith("-subscription-registration-topic")
|| topicName.endsWith("-subscription-response-topic")
|| topicName.matches(".+-KTABLE-FK-JOIN-SUBSCRIPTION-REGISTRATION-\\d+-topic")
|| topicName.matches(".+-KTABLE-FK-JOIN-SUBSCRIPTION-RESPONSE-\\d+-topic");
} | @Test
public void shouldDetermineInternalTopicBasedOnTopicName1() {
assertTrue(StreamsResetter.matchesInternalTopicFormat("appId-named-subscription-response-topic"));
assertTrue(StreamsResetter.matchesInternalTopicFormat("appId-named-subscription-registration-topic"));
assertTrue(StreamsResetter.matchesInternalTopicFormat("appId-KTABLE-FK-JOIN-SUBSCRIPTION-RESPONSE-12323232-topic"));
assertTrue(StreamsResetter.matchesInternalTopicFormat("appId-KTABLE-FK-JOIN-SUBSCRIPTION-REGISTRATION-12323232-topic"));
} |
@Override
public void lock() {
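// Acquire with no explicit lease timeout (-1); interruption is surfaced as IllegalStateException.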
try {
lockInterruptibly(-1, null);
} catch (InterruptedException e) {
throw new IllegalStateException();
}
} | @Test
public void testIsLocked() {
RLock lock = redisson.getSpinLock("lock");
Assertions.assertFalse(lock.isLocked());
lock.lock();
Assertions.assertTrue(lock.isLocked());
lock.unlock();
Assertions.assertFalse(lock.isLocked());
} |