focal_method | test_case
---|---
public double[][] test(DataFrame data) {
DataFrame x = formula.x(data);
int n = x.nrow();
int ntrees = trees.length;
double[][] prediction = new double[ntrees][n];
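// prediction[i][j] is the staged prediction for sample j using only the first (i + 1) trees.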
for (int j = 0; j < n; j++) {
Tuple xj = x.get(j);
double base = b;
for (int i = 0; i < ntrees; i++) {
base += shrinkage * trees[i].predict(xj);
prediction[i][j] = base;
}
}
return prediction;
} | @Test
public void testAutoMPGLAD() {
test(Loss.lad(), "autoMPG", AutoMPG.formula, AutoMPG.data, 3.0979);
} |
public static List<UpdateRequirement> forUpdateTable(
TableMetadata base, List<MetadataUpdate> metadataUpdates) {
Preconditions.checkArgument(null != base, "Invalid table metadata: null");
Preconditions.checkArgument(null != metadataUpdates, "Invalid metadata updates: null");
Builder builder = new Builder(base, false);
builder.require(new UpdateRequirement.AssertTableUUID(base.uuid()));
metadataUpdates.forEach(builder::update);
return builder.build();
} | @Test
public void setAndRemoveStatistics() {
List<UpdateRequirement> requirements =
UpdateRequirements.forUpdateTable(
metadata,
ImmutableList.of(new MetadataUpdate.SetStatistics(0L, mock(StatisticsFile.class))));
requirements.forEach(req -> req.validate(metadata));
assertThat(requirements)
.hasSize(1)
.hasOnlyElementsOfTypes(UpdateRequirement.AssertTableUUID.class);
assertTableUUID(requirements);
requirements =
UpdateRequirements.forUpdateTable(
metadata, ImmutableList.of(new MetadataUpdate.RemoveStatistics(0L)));
requirements.forEach(req -> req.validate(metadata));
assertThat(requirements)
.hasSize(1)
.hasOnlyElementsOfTypes(UpdateRequirement.AssertTableUUID.class);
assertTableUUID(requirements);
} |
public static String readFile(String path) throws IOException {
ClassPathResource classPathResource = new ClassPathResource(path);
if (classPathResource.exists() && classPathResource.isReadable()) {
try (InputStream inputStream = classPathResource.getInputStream()) {
return StreamUtils.copyToString(inputStream, StandardCharsets.UTF_8);
}
}
return "";
} | @Test
public void testReadExistedFile() throws IOException {
String content = ResourceFileUtils.readFile("test.txt");
assertThat(content).isEqualTo("just for test");
} |
@Override
public void submitPopConsumeRequest(
final List<MessageExt> msgs,
final PopProcessQueue processQueue,
final MessageQueue messageQueue) {
final int consumeBatchSize = this.defaultMQPushConsumer.getConsumeMessageBatchMaxSize();
if (msgs.size() <= consumeBatchSize) {
ConsumeRequest consumeRequest = new ConsumeRequest(msgs, processQueue, messageQueue);
try {
this.consumeExecutor.submit(consumeRequest);
} catch (RejectedExecutionException e) {
this.submitConsumeRequestLater(consumeRequest);
}
} else {
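// More messages than the batch size: split into batches of consumeBatchSize and submit each batch separately.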
for (int total = 0; total < msgs.size(); ) {
List<MessageExt> msgThis = new ArrayList<>(consumeBatchSize);
for (int i = 0; i < consumeBatchSize; i++, total++) {
if (total < msgs.size()) {
msgThis.add(msgs.get(total));
} else {
break;
}
}
ConsumeRequest consumeRequest = new ConsumeRequest(msgThis, processQueue, messageQueue);
try {
this.consumeExecutor.submit(consumeRequest);
} catch (RejectedExecutionException e) {
for (; total < msgs.size(); total++) {
msgThis.add(msgs.get(total));
}
this.submitConsumeRequestLater(consumeRequest);
}
}
}
} | @Test
public void testSubmitPopConsumeRequestWithMultiMsg() throws IllegalAccessException {
List<MessageExt> msgs = Arrays.asList(createMessageExt(), createMessageExt());
PopProcessQueue processQueue = mock(PopProcessQueue.class);
MessageQueue messageQueue = mock(MessageQueue.class);
ThreadPoolExecutor consumeExecutor = mock(ThreadPoolExecutor.class);
FieldUtils.writeDeclaredField(popService, "consumeExecutor", consumeExecutor, true);
when(defaultMQPushConsumer.getConsumeMessageBatchMaxSize()).thenReturn(1);
popService.submitPopConsumeRequest(msgs, processQueue, messageQueue);
verify(consumeExecutor, times(2)).submit(any(Runnable.class));
} |
@Override
public Health check(Set<NodeHealth> nodeHealths) {
Set<NodeHealth> appNodes = nodeHealths.stream()
.filter(s -> s.getDetails().getType() == NodeDetails.Type.APPLICATION)
.collect(Collectors.toSet());
return Arrays.stream(AppNodeClusterHealthSubChecks.values())
.map(s -> s.check(appNodes))
.reduce(Health.GREEN, HealthReducer::merge);
} | @Test
public void status_YELLOW_when_single_GREEN_application_node() {
Set<NodeHealth> nodeHealths = nodeHealths(GREEN).collect(toSet());
Health check = underTest.check(nodeHealths);
assertThat(check)
.forInput(nodeHealths)
.hasStatus(Health.Status.YELLOW)
.andCauses("There should be at least two application nodes");
} |
public static byte[] signMessage(
final RawPrivateTransaction privateTransaction, final Credentials credentials) {
final byte[] encodedTransaction = encode(privateTransaction);
final Sign.SignatureData signatureData =
Sign.signMessage(encodedTransaction, credentials.getEcKeyPair());
return encode(privateTransaction, signatureData);
} | @Test
public void testSignBesuTransaction() {
final String expected =
"0xf8b1808203e8832dc6c094627306090abab3a6e1400e9345bc60c78a8bef578080820fe7a060c70c3f989ef5459021142959f8fc1ad6e5fe8542cf238484c6d6b8c8a6dbcca075727642ce691c4bf5ae945523cdd172d44b451ddfe11ae67c376f1e5c7069eea0035695b4cc4b0941e60551d7a19cf30603db5bfc23e5ac43a56f57f25f75486aa00f200e885ff29e973e2576b6600181d1b0a2b5294e30d9be4a1981ffb33a0b8c8a72657374726963746564";
final RawPrivateTransaction privateTransactionCreation =
RawPrivateTransaction.createTransaction(
BigInteger.ZERO,
BigInteger.valueOf(1000),
BigInteger.valueOf(3000000),
"0x627306090abab3a6e1400e9345bc60c78a8bef57",
"0x",
MOCK_ENCLAVE_KEY,
MOCK_PRIVACY_GROUP_ID,
RESTRICTED);
final long chainId = 2018;
final String privateKey =
"8f2a55949038a9610f50fb23b5883af3b4ecb3c3bb792cbcefbd1542c692be63";
final Credentials credentials = Credentials.create(privateKey);
final String privateRawTransaction =
Numeric.toHexString(
PrivateTransactionEncoder.signMessage(
privateTransactionCreation, chainId, credentials));
assertEquals(expected, privateRawTransaction);
} |
@Override
public void v(String tag, String message, Object... args) { } | @Test
public void infoNotLogged() {
String expectedTag = "TestTag";
logger.v(expectedTag, "Hello %s", "World");
assertNotLogged();
} |
@Override
public int sizeInBytes() {
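// Total size is the log overhead (offset and length fields) plus the batch length stored at LENGTH_OFFSET.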
return LOG_OVERHEAD + buffer.getInt(LENGTH_OFFSET);
} | @Test
public void testSizeInBytes() {
Header[] headers = new Header[] {
new RecordHeader("foo", "value".getBytes()),
new RecordHeader("bar", null)
};
long timestamp = System.currentTimeMillis();
SimpleRecord[] records = new SimpleRecord[] {
new SimpleRecord(timestamp, "key".getBytes(), "value".getBytes()),
new SimpleRecord(timestamp + 30000, null, "value".getBytes()),
new SimpleRecord(timestamp + 60000, "key".getBytes(), null),
new SimpleRecord(timestamp + 60000, "key".getBytes(), "value".getBytes(), headers)
};
int actualSize = MemoryRecords.withRecords(Compression.NONE, records).sizeInBytes();
assertEquals(actualSize, DefaultRecordBatch.sizeInBytes(Arrays.asList(records)));
} |
@Override
public void run() {
log.info("Starting file watcher to watch for changes: " + file);
try {
while (!shutdown && key.isValid()) {
try {
handleNextWatchNotification();
} catch (Exception e) {
log.error("Watch service caught exception, will continue: " + e);
}
}
} finally {
log.info("Stopped watching for TLS cert changes");
try {
watchService.close();
} catch (IOException e) {
log.info("Error closing watch service", e);
}
}
} | @Test
public void shouldDetectFileCreated() throws Exception {
// Given:
watcher = new FileWatcher(filePath, callback);
watcher.start();
// When:
Files.write(filePath, someBytes, StandardOpenOption.CREATE_NEW);
// Then:
verify(callback, new Timeout(TimeUnit.MINUTES.toMillis(1), atLeastOnce())).run();
} |
static AnnotatedClusterState generatedStateFrom(final Params params) {
final ContentCluster cluster = params.cluster;
final ClusterState workingState = ClusterState.emptyState();
final Map<Node, NodeStateReason> nodeStateReasons = new HashMap<>();
for (final NodeInfo nodeInfo : cluster.getNodeInfos()) {
final NodeState nodeState = computeEffectiveNodeState(nodeInfo, params, nodeStateReasons);
workingState.setNodeState(nodeInfo.getNode(), nodeState);
}
takeDownGroupsWithTooLowAvailability(workingState, nodeStateReasons, params);
final Optional<ClusterStateReason> reasonToBeDown = clusterDownReason(workingState, params);
if (reasonToBeDown.isPresent()) {
workingState.setClusterState(State.DOWN);
}
workingState.setDistributionBits(inferDistributionBitCount(cluster, workingState, params));
return new AnnotatedClusterState(workingState, reasonToBeDown, nodeStateReasons);
} | @Test
void distribution_bits_bounded_by_config_parameter() {
final ClusterFixture fixture = ClusterFixture.forFlatCluster(3).bringEntireClusterUp();
final ClusterStateGenerator.Params params = fixture.generatorParams().idealDistributionBits(12);
final AnnotatedClusterState state = ClusterStateGenerator.generatedStateFrom(params);
assertThat(state.toString(), equalTo("bits:12 distributor:3 storage:3"));
} |
@VisibleForTesting
Path getBuildFile() {
if (buildFileUnprocessed == null) {
return contextRoot.resolve("jib.yaml");
}
return buildFileUnprocessed;
} | @Test
public void testParse_buildFileDefaultForContext() {
Build buildCommand =
CommandLine.populateCommand(
new Build(), "--target", "test-image-ref", "--context", "test-context");
assertThat(buildCommand.buildFileUnprocessed).isNull();
assertThat(buildCommand.getBuildFile()).isEqualTo(Paths.get("test-context/jib.yaml"));
assertThat(buildCommand.contextRoot).isEqualTo(Paths.get("test-context"));
} |
public static char toCloseByNumber(int number) {
if (number < 1 || number > 20) {
throw new IllegalArgumentException("Number must be [1-20]");
}
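// '①' (U+2460) through '⑳' (U+2473) are consecutive code points, so the result is a fixed offset from '①'.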
return (char) ('①' + number - 1);
} | @Test
public void toCloseByNumberTest(){
assertEquals('②', CharUtil.toCloseByNumber(2));
assertEquals('⑫', CharUtil.toCloseByNumber(12));
assertEquals('⑳', CharUtil.toCloseByNumber(20));
} |
@Override
public T next() {
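// Return the current item once and clear it, so repeated calls yield null until a new item is accepted.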
try {
return item;
} finally {
item = null;
}
} | @Test
public void when_acceptNotCalled_thenNextReturnsNull() {
// When
Integer item = trav.next();
// Then
assertNull(item);
} |
public B version(String version) {
this.version = version;
return getThis();
} | @Test
void version() {
ServiceBuilder builder = new ServiceBuilder();
builder.version("version");
Assertions.assertEquals("version", builder.build().getVersion());
} |
@Override
public String createRandomNickname() {
return NicknamePrefix.getPrefix() +
NicknameCandidate.getCandidate() +
"_" +
generateUUID();
} | @Test
void 닉네임이_성공적으로_생성된다() {
// when
String createdNickname = nicknameGenerator.createRandomNickname();
// then
assertThat(createdNickname).isNotBlank();
} |
@Override
public Set<ComponentDerivationRecord> getChangeRecords( GetXMLDataMeta meta )
throws MetaverseAnalyzerException {
Set<ComponentDerivationRecord> changes = new HashSet<>();
boolean isInFields = meta.isInFields();
boolean isAFile = meta.getIsAFile();
boolean isAUrl = meta.isReadUrl();
// if we are getting xml from a field, we need to add the "derives" links from the xml to the output fields
if ( isInFields && !isAFile && !isAUrl ) {
GetXMLDataField[] fields = baseStepMeta.getInputFields();
if ( getInputs() != null ) {
Set<StepField> inputFields = getInputs().getFieldNames();
for ( StepField inputField : inputFields ) {
if ( inputField.getFieldName().equals( meta.getXMLField() ) ) {
// link this to all of the outputs that come from the xml
for ( GetXMLDataField field : fields ) {
ComponentDerivationRecord change = new ComponentDerivationRecord( meta.getXMLField(), field.getName() );
changes.add( change );
}
break;
}
}
}
}
return changes;
} | @Test
public void testGetChangeRecords() throws Exception {
when( meta.isInFields() ).thenReturn( true );
when( meta.getIsAFile() ).thenReturn( false );
when( meta.isReadUrl() ).thenReturn( false );
when( meta.getXMLField() ).thenReturn( "xml" );
analyzer.setBaseStepMeta( meta );
GetXMLDataField[] fields = new GetXMLDataField[2];
GetXMLDataField field1 = new GetXMLDataField( "name" );
GetXMLDataField field2 = new GetXMLDataField( "age" );
field1.setXPath( "field1/xpath" );
field2.setElementType( 1 );
field1.setResultType( 1 );
field2.setRepeated( true );
fields[0] = field1;
fields[1] = field2;
when( meta.getInputFields() ).thenReturn( fields );
StepNodes inputs = new StepNodes();
inputs.addNode( "previousStep", "xml", node );
doReturn( inputs ).when( analyzer ).getInputs();
Set<ComponentDerivationRecord> changeRecords = analyzer.getChangeRecords( meta );
assertNotNull( changeRecords );
assertEquals( 2, changeRecords.size() );
} |
public static void notNull(Object object, String message) {
if (object == null) {
throw new IllegalArgumentException(message);
}
} | @Test(expected = IllegalArgumentException.class)
public void assertNotNullAndMessageIsNull() {
Assert.notNull(null);
} |
public static Map<String, String> resolveAttachments(Object invocation, boolean isApache) {
if (invocation == null) {
return Collections.emptyMap();
}
final Map<String, String> attachments = new HashMap<>();
if (isApache) {
attachments.putAll(getAttachmentsFromContext(APACHE_RPC_CONTEXT));
} else {
attachments.putAll(getAttachmentsFromContext(ALIBABA_RPC_CONTEXT));
}
final Optional<Object> fieldValue = ReflectUtils.getFieldValue(invocation, ATTACHMENTS_FIELD);
if (fieldValue.isPresent() && fieldValue.get() instanceof Map) {
attachments.putAll((Map<String, String>) fieldValue.get());
}
return Collections.unmodifiableMap(attachments);
} | @Test
public void testAlibabaInvocation() {
final TestStringInvocation testStringInvocation = new TestStringInvocation(null);
String key = "a";
String value = "c";
com.alibaba.dubbo.rpc.RpcContext.getContext().getAttachments().put(key, value);
final Map<String, String> map = DubboAttachmentsHelper.resolveAttachments(testStringInvocation, false);
Assert.assertEquals(map.get(key), value);
com.alibaba.dubbo.rpc.RpcContext.getContext().getAttachments().clear();
} |
public void process()
throws Exception {
if (_segmentMetadata.getTotalDocs() == 0) {
LOGGER.info("Skip preprocessing empty segment: {}", _segmentMetadata.getName());
return;
}
// Segment processing has to be done with a local directory.
File indexDir = new File(_indexDirURI);
// This fixes the issue of temporary files not getting deleted after creating new inverted indexes.
removeInvertedIndexTempFiles(indexDir);
try (SegmentDirectory.Writer segmentWriter = _segmentDirectory.createWriter()) {
// Update default columns according to the schema.
if (_schema != null) {
DefaultColumnHandler defaultColumnHandler = DefaultColumnHandlerFactory
.getDefaultColumnHandler(indexDir, _segmentMetadata, _indexLoadingConfig, _schema, segmentWriter);
defaultColumnHandler.updateDefaultColumns();
_segmentMetadata = new SegmentMetadataImpl(indexDir);
_segmentDirectory.reloadMetadata();
} else {
LOGGER.warn("Skip creating default columns for segment: {} without schema", _segmentMetadata.getName());
}
// Update single-column indices, like inverted index, json index etc.
List<IndexHandler> indexHandlers = new ArrayList<>();
// We cannot create all the index handlers in an arbitrary order.
// Specifically, ForwardIndexHandler needs to be executed first, because it modifies the segment metadata
// while rewriting the forward index to create a dictionary. Some other handlers (like the range one) assume
// that the metadata has already been modified by ForwardIndexHandler.
IndexHandler forwardHandler = createHandler(StandardIndexes.forward());
indexHandlers.add(forwardHandler);
forwardHandler.updateIndices(segmentWriter);
// Now that ForwardIndexHandler.updateIndices has run, the remaining index handlers can run in any order.
_segmentMetadata = new SegmentMetadataImpl(indexDir);
_segmentDirectory.reloadMetadata();
for (IndexType<?, ?, ?> type : IndexService.getInstance().getAllIndexes()) {
if (type != StandardIndexes.forward()) {
IndexHandler handler = createHandler(type);
indexHandlers.add(handler);
handler.updateIndices(segmentWriter);
// Other IndexHandler classes may modify the segment metadata while creating a temporary forward
// index to generate their respective indexes from when the forward index is disabled. This new
// metadata is needed to construct other indexes like RangeIndex.
_segmentMetadata = _segmentDirectory.getSegmentMetadata();
}
}
// Perform post-cleanup operations on the index handlers.
for (IndexHandler handler : indexHandlers) {
handler.postUpdateIndicesCleanup(segmentWriter);
}
// Add min/max value to column metadata according to the prune mode.
ColumnMinMaxValueGeneratorMode columnMinMaxValueGeneratorMode =
_indexLoadingConfig.getColumnMinMaxValueGeneratorMode();
if (columnMinMaxValueGeneratorMode != ColumnMinMaxValueGeneratorMode.NONE) {
ColumnMinMaxValueGenerator columnMinMaxValueGenerator =
new ColumnMinMaxValueGenerator(_segmentMetadata, segmentWriter, columnMinMaxValueGeneratorMode);
columnMinMaxValueGenerator.addColumnMinMaxValue();
// NOTE: This step may modify the segment metadata. When adding new steps after this, un-comment the next line.
// _segmentMetadata = new SegmentMetadataImpl(indexDir);
}
segmentWriter.save();
}
// Startree creation will load the segment again, so we need to close and re-open the segment writer to make sure
// that the other required indices (e.g. forward index) are up-to-date.
try (SegmentDirectory.Writer segmentWriter = _segmentDirectory.createWriter()) {
// Create/modify/remove star-trees if required.
processStarTrees(indexDir);
_segmentDirectory.reloadMetadata();
segmentWriter.save();
}
} | @Test
public void testV1CleanupIndices()
throws Exception {
constructV1Segment(Collections.emptyList(), Collections.emptyList(), Collections.emptyList(),
Collections.emptyList());
SegmentMetadataImpl segmentMetadata = new SegmentMetadataImpl(_indexDir);
assertEquals(segmentMetadata.getVersion(), SegmentVersion.v1);
// Need to create two default columns with Bytes and JSON string for H3 and JSON index.
// Other kinds of indices can all be put on column3 with String values.
String strColumn = "column3";
IndexLoadingConfig indexLoadingConfig = getDefaultIndexLoadingConfig();
indexLoadingConfig.setInvertedIndexColumns(new HashSet<>(Collections.singletonList(strColumn)));
indexLoadingConfig.setRangeIndexColumns(new HashSet<>(Collections.singletonList(strColumn)));
indexLoadingConfig.setTextIndexColumns(new HashSet<>(Collections.singletonList(strColumn)));
indexLoadingConfig.setFSTIndexColumns(new HashSet<>(Collections.singletonList(strColumn)));
indexLoadingConfig.setBloomFilterConfigs(ImmutableMap.of(strColumn, new BloomFilterConfig(0.1, 1024, true)));
// V1 use separate file for each column index.
File iiFile = new File(_indexDir, strColumn + V1Constants.Indexes.BITMAP_INVERTED_INDEX_FILE_EXTENSION);
File rgFile = new File(_indexDir, strColumn + V1Constants.Indexes.BITMAP_RANGE_INDEX_FILE_EXTENSION);
File txtFile = new File(_indexDir, strColumn + V1Constants.Indexes.LUCENE_V99_TEXT_INDEX_FILE_EXTENSION);
File fstFile = new File(_indexDir, strColumn + V1Constants.Indexes.LUCENE_V99_FST_INDEX_FILE_EXTENSION);
File bfFile = new File(_indexDir, strColumn + V1Constants.Indexes.BLOOM_FILTER_FILE_EXTENSION);
assertFalse(iiFile.exists());
assertFalse(rgFile.exists());
assertFalse(txtFile.exists());
assertFalse(fstFile.exists());
assertFalse(bfFile.exists());
// Create all kinds of indices.
try (SegmentDirectory segmentDirectory = SegmentDirectoryLoaderRegistry.getDefaultSegmentDirectoryLoader()
.load(_indexDir.toURI(),
new SegmentDirectoryLoaderContext.Builder().setSegmentDirectoryConfigs(_configuration).build());
SegmentPreProcessor processor = new SegmentPreProcessor(segmentDirectory, indexLoadingConfig, null)) {
processor.process();
}
assertTrue(iiFile.exists());
assertTrue(rgFile.exists());
assertTrue(txtFile.exists());
assertTrue(fstFile.exists());
assertTrue(bfFile.exists());
// Remove all kinds of indices.
try (SegmentDirectory segmentDirectory = SegmentDirectoryLoaderRegistry.getDefaultSegmentDirectoryLoader()
.load(_indexDir.toURI(),
new SegmentDirectoryLoaderContext.Builder().setSegmentDirectoryConfigs(_configuration).build());
SegmentPreProcessor processor = new SegmentPreProcessor(segmentDirectory, getDefaultIndexLoadingConfig(),
null)) {
processor.process();
}
assertFalse(iiFile.exists());
assertFalse(rgFile.exists());
assertFalse(txtFile.exists());
assertFalse(fstFile.exists());
assertFalse(bfFile.exists());
} |
@Override
public Object convert(String value) {
if (isNullOrEmpty(value)) {
return value;
}
if (value.contains("=")) {
final Map<String, String> fields = new HashMap<>();
Matcher m = PATTERN.matcher(value);
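// Each match is expected to capture a key group and a value group; surrounding quotes are stripped from both.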
while (m.find()) {
if (m.groupCount() != 2) {
continue;
}
fields.put(removeQuotes(m.group(1)), removeQuotes(m.group(2)));
}
return fields;
} else {
return Collections.emptyMap();
}
} | @Test
public void testFilterRetainsNestedDoubleQuotesInSingleQuotedValues() {
TokenizerConverter f = new TokenizerConverter(new HashMap<String, Object>());
@SuppressWarnings("unchecked")
Map<String, String> result = (Map<String, String>) f.convert("otters in k1= v1 k2=' \"v2\"' k3=' \"v3\" ' more otters");
assertThat(result)
.hasSize(3)
.containsEntry("k1", "v1")
.containsEntry("k2", " \"v2\"")
.containsEntry("k3", " \"v3\" ");
} |
public MediaType detect(InputStream input, Metadata metadata) {
// Look for a resource name in the input metadata
String name = metadata.get(TikaCoreProperties.RESOURCE_NAME_KEY);
if (name != null) {
// If the name is a URL, skip the trailing query
int question = name.indexOf('?');
if (question != -1) {
name = name.substring(0, question);
}
// If the name is a URL or a path, skip all but the last component
int slash = name.lastIndexOf('/');
if (slash != -1) {
name = name.substring(slash + 1);
}
int backslash = name.lastIndexOf('\\');
if (backslash != -1) {
name = name.substring(backslash + 1);
}
// Strip any fragments from the end, but only ones after the extension
int hash = name.lastIndexOf('#');
int dot = name.lastIndexOf('.');
if (hash != -1) {
if (dot == -1 || hash > dot) {
name = name.substring(0, hash);
}
}
// Decode any potential URL encoding
int percent = name.indexOf('%');
if (percent != -1) {
try {
name = URLDecoder.decode(name, UTF_8.name());
} catch (UnsupportedEncodingException e) {
throw new IllegalStateException("UTF-8 not supported", e);
}
}
// Skip any leading or trailing whitespace
name = name.trim();
if (name.length() > 0) {
// Match the name against the registered patterns
for (Map.Entry<Pattern, MediaType> entry : patterns.entrySet()) {
if (entry.getKey().matcher(name).matches()) {
return entry.getValue();
}
}
}
}
return MediaType.OCTET_STREAM;
} | @Test
public void testDetect() {
assertDetect(MediaType.TEXT_PLAIN, "text.txt");
assertDetect(MediaType.TEXT_PLAIN, "text.txt "); // trailing space
assertDetect(MediaType.TEXT_PLAIN, "text.txt\n"); // trailing newline
assertDetect(MediaType.TEXT_PLAIN, "text.txt?a=b"); // URL query
assertDetect(MediaType.TEXT_PLAIN, "text.txt#abc"); // URL fragment
assertDetect(MediaType.TEXT_PLAIN, "text%2Etxt"); // URL encoded
assertDetect(MediaType.TEXT_PLAIN, "text.TXT"); // case insensitive
assertDetect(MediaType.OCTET_STREAM, "text.txt.gz");
assertDetect(MediaType.TEXT_PLAIN, "README");
assertDetect(MediaType.TEXT_PLAIN, " README "); // space around
assertDetect(MediaType.TEXT_PLAIN, "\tREADME\n"); // other whitespace
assertDetect(MediaType.TEXT_PLAIN, "/a/README"); // leading path
assertDetect(MediaType.TEXT_PLAIN, "\\b\\README"); // windows path
assertDetect(MediaType.OCTET_STREAM, "ReadMe"); // case sensitive
assertDetect(MediaType.OCTET_STREAM, "README.NOW");
// TIKA-1928 # in the filename
assertDetect(MediaType.TEXT_PLAIN, "text.txt");
assertDetect(MediaType.TEXT_PLAIN, "text#.txt"); // # before extension
assertDetect(MediaType.TEXT_PLAIN, "text#123.txt");// # before extension
assertDetect(MediaType.TEXT_PLAIN, "text.txt#pdf");// # after extension
// TIKA-3783 # before the final .
assertDetect(MediaType.TEXT_PLAIN, "ABC#192.168.0.1#2.txt");
// Check # as URL fragment too
assertDetect(MediaType.TEXT_PLAIN, "http://foo/test.txt?1=2#pdf");
assertDetect(MediaType.TEXT_PLAIN, "http://foo/test.txt#pdf");
// tough one
assertDetect(MediaType.TEXT_PLAIN, " See http://www.example.com:1234/README.txt?a=b#c \n");
assertDetect(MediaType.TEXT_PLAIN, "See README.txt"); // even this!
assertDetect(MediaType.OCTET_STREAM, "See README"); // but not this
assertDetect(MediaType.application("envi.hdr"), "ang20150420t182050_corr_v1e_img.hdr");
// test also the zero input cases
assertDetect(MediaType.OCTET_STREAM, "");
assertDetect(MediaType.OCTET_STREAM, null);
try {
assertEquals(MediaType.OCTET_STREAM, detector.detect(null, new Metadata()));
} catch (IOException e) {
fail("NameDetector should never throw an IOException");
}
} |
protected void showModels(EpoxyModel<?>... models) {
showModels(Arrays.asList(models));
} | @Test
public void testShowModels() {
TestModel testModel1 = new TestModel();
testModel1.hide();
TestModel testModel2 = new TestModel();
testModel2.hide();
testAdapter.addModels(testModel1, testModel2);
testAdapter.showModels(testAdapter.models);
verify(observer).onItemRangeChanged(0, 1, null);
verify(observer).onItemRangeChanged(1, 1, null);
assertTrue(testModel1.isShown());
assertTrue(testModel2.isShown());
checkDifferState();
} |
@Bean
public PluginDataHandler websocketPluginDataHandler() {
return new WebSocketPluginDataHandler();
} | @Test
public void testWebSocketPluginDataHandler() {
applicationContextRunner.run(context -> {
PluginDataHandler handler = context.getBean("websocketPluginDataHandler", PluginDataHandler.class);
assertNotNull(handler);
}
);
} |
public Statement buildStatement(final ParserRuleContext parseTree) {
return build(Optional.of(getSources(parseTree)), parseTree);
} | @Test
public void shouldHandleAliasedDataSources() {
// Given:
final SingleStatementContext stmt = givenQuery("SELECT * FROM TEST1 t;");
// When:
final Query result = (Query) builder.buildStatement(stmt);
// Then:
assertThat(result.getFrom(), is(new AliasedRelation(TEST1, SourceName.of("T"))));
} |
public static String extractFromURIParams(String paramsRule, String uri) {
Multimap<String, String> criteriaMap = TreeMultimap.create();
if (uri.contains("?") && uri.contains("=")) {
String parameters = uri.substring(uri.indexOf("?") + 1);
for (String parameter : parameters.split("&")) {
String[] pair = parameter.split("=");
if (pair.length > 1) {
String key = URLDecoder.decode(pair[0], StandardCharsets.UTF_8);
String value = URLDecoder.decode(pair[1], StandardCharsets.UTF_8);
criteriaMap.put(key, value);
}
}
// Append the sorted matching entries, prefixing each with '?'.
StringBuilder result = new StringBuilder();
for (Map.Entry<String, String> criteria : criteriaMap.entries()) {
if (paramsRule.contains(criteria.getKey())) {
result.append("?").append(criteria.getKey()).append("=").append(criteria.getValue());
}
}
return result.toString();
}
return "";
} | @Test
void testExtractFromURIParams() {
// Check with parameters in no particular order.
String requestPath = "/v2/pet/findByDate/2017/01/04?user_key=998bac0775b1d5f588e0a6ca7c11b852&status=available";
// Only 1 parameter should be taken into account according to rules.
String dispatchCriteria = DispatchCriteriaHelper.extractFromURIParams("user_key", requestPath);
assertEquals("?user_key=998bac0775b1d5f588e0a6ca7c11b852", dispatchCriteria);
// 2 parameters should be considered and sorted according to rules.
dispatchCriteria = DispatchCriteriaHelper.extractFromURIParams("user_key && status", requestPath);
assertEquals("?status=available?user_key=998bac0775b1d5f588e0a6ca7c11b852", dispatchCriteria);
} |
static Schema getSchema(Class<? extends Message> clazz) {
return getSchema(ProtobufUtil.getDescriptorForClass(clazz));
} | @Test
public void testOneOfSchema() {
assertEquals(
TestProtoSchemas.ONEOF_SCHEMA,
ProtoSchemaTranslator.getSchema(Proto3SchemaMessages.OneOf.class));
} |
@PublicAPI(usage = ACCESS)
public static Set<Location> ofPackage(String pkg) {
ImmutableSet.Builder<Location> result = ImmutableSet.builder();
for (Location location : getLocationsOf(asResourceName(pkg))) {
result.add(location);
}
return result.build();
} | @Test
@SuppressWarnings("EmptyTryBlock")
public void locations_of_packages_within_JAR_URIs_that_do_not_contain_package_folder() throws Exception {
independentClasspathRule.configureClasspath();
Set<Location> locations = Locations.ofPackage(independentClasspathRule.getIndependentTopLevelPackage());
ClassFileSource source = getOnlyElement(locations).asClassFileSource(new ImportOptions());
for (ClassFileLocation classFileLocation : source) {
try (InputStream ignored = classFileLocation.openStream()) {
// we only care that we can open the stream
}
}
assertThat(source)
.as("URIs in " + independentClasspathRule.getIndependentTopLevelPackage())
.hasSize(independentClasspathRule.getNamesOfClasses().size());
} |
@Override
public void run() {
try { // make sure we call afterRun() even on crashes
// and operate countdown latches, else we may hang the parallel runner
if (steps == null) {
beforeRun();
}
if (skipped) {
return;
}
int count = steps.size();
int index = 0;
while ((index = nextStepIndex()) < count) {
currentStep = steps.get(index);
execute(currentStep);
if (currentStepResult != null) { // can be null if debug step-back or hook skip
result.addStepResult(currentStepResult);
}
}
} catch (Exception e) {
if (currentStepResult != null) {
result.addStepResult(currentStepResult);
}
logError("scenario [run] failed\n" + StringUtils.throwableToString(e));
currentStepResult = result.addFakeStepResult("scenario [run] failed", e);
} finally {
if (!skipped) {
afterRun();
if (isFailed() && engine.getConfig().isAbortSuiteOnFailure()) {
featureRuntime.suite.abort();
}
}
if (caller.isNone()) {
logAppender.close(); // reclaim memory
}
}
} | @Test
void testCallJsFunctionSharedJson() {
run(
"def myFn = function(x){ return { myVar: x.foo } }",
"call myFn { foo: 'bar' }"
);
assertEquals(get("myVar"), "bar");
} |
private synchronized boolean validateClientAcknowledgement(long h) {
if (h < 0) {
throw new IllegalArgumentException("Argument 'h' cannot be negative, but was: " + h);
}
if (h > MASK) {
throw new IllegalArgumentException("Argument 'h' cannot be larger than 2^32 -1, but was: " + h);
}
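// Compare the client-reported count against our last known count and the newest unacknowledged stanza.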
final long oldH = clientProcessedStanzas.get();
final Long lastUnackedX = unacknowledgedServerStanzas.isEmpty() ? null : unacknowledgedServerStanzas.getLast().x;
return validateClientAcknowledgement(h, oldH, lastUnackedX);
} | @Test
public void testValidateClientAcknowledgement_clientAcksSentStanza() throws Exception
{
// Setup test fixture.
final long h = 1;
final long oldH = 0;
final Long lastUnackedX = 1L;
// Execute system under test.
final boolean result = StreamManager.validateClientAcknowledgement(h, oldH, lastUnackedX);
// Verify results.
assertTrue(result);
} |
@Deprecated
public static Duration toDuration(Time time) {
return Duration.of(time.getSize(), toChronoUnit(time.getUnit()));
} | @Test
void testToDuration() {
final Time time = Time.of(1337, TimeUnit.MICROSECONDS);
final Duration duration = TimeUtils.toDuration(time);
assertThat(duration.toNanos()).isEqualTo(time.getUnit().toNanos(time.getSize()));
} |
public static String getActualIndexName(final String logicIndexName, final String actualTableName) {
return Strings.isNullOrEmpty(actualTableName) ? logicIndexName : logicIndexName + UNDERLINE + actualTableName;
} | @Test
void assertGetActualIndexNameWithoutActualTableName() {
assertThat(IndexMetaDataUtils.getActualIndexName("order_index", null), is("order_index"));
} |
@Override
public String getColumnName(int columnIndex) {
return _columnsArray.get(columnIndex).asText();
} | @Test
public void testGetColumnName() {
// Run the test
final String result = _selectionResultSetUnderTest.getColumnName(0);
// Verify the results
assertEquals("column1", result);
} |
@Override
public void checkCanDropSchema(Identity identity, AccessControlContext context, CatalogSchemaName schema)
{
if (!isSchemaOwner(identity, schema)) {
denyDropSchema(schema.toString());
}
} | @Test
public void testSchemaRulesForCheckCanDropSchema() throws IOException
{
TransactionManager transactionManager = createTestTransactionManager();
AccessControlManager accessControlManager = newAccessControlManager(transactionManager, "file-based-system-access-schema.json");
transaction(transactionManager, accessControlManager)
.execute(transactionId -> {
accessControlManager.checkCanDropSchema(transactionId, bob, context, new CatalogSchemaName("alice-catalog", "bob"));
accessControlManager.checkCanDropSchema(transactionId, bob, context, new CatalogSchemaName("bob-catalog", "bob"));
accessControlManager.checkCanDropSchema(transactionId, admin, context, new CatalogSchemaName("some-catalog", "bob"));
accessControlManager.checkCanDropSchema(transactionId, admin, context, new CatalogSchemaName("some-catalog", "alice"));
accessControlManager.checkCanDropSchema(transactionId, admin, context, new CatalogSchemaName("some-catalog", "some-schema"));
});
assertThrows(AccessDeniedException.class, () -> transaction(transactionManager, accessControlManager).execute(transactionId -> {
accessControlManager.checkCanDropSchema(transactionId, bob, context, new CatalogSchemaName("alice-catalog", "alice"));
}));
assertThrows(AccessDeniedException.class, () -> transaction(transactionManager, accessControlManager).execute(transactionId -> {
accessControlManager.checkCanDropSchema(transactionId, bob, context, new CatalogSchemaName("bob-catalog", "alice"));
}));
assertThrows(AccessDeniedException.class, () -> transaction(transactionManager, accessControlManager).execute(transactionId -> {
accessControlManager.checkCanDropSchema(transactionId, bob, context, new CatalogSchemaName("secret-catalog", "secret"));
}));
assertThrows(AccessDeniedException.class, () -> transaction(transactionManager, accessControlManager).execute(transactionId -> {
accessControlManager.checkCanDropSchema(transactionId, alice, context, new CatalogSchemaName("secret-catalog", "secret"));
}));
assertThrows(AccessDeniedException.class, () -> transaction(transactionManager, accessControlManager).execute(transactionId -> {
accessControlManager.checkCanDropSchema(transactionId, admin, context, new CatalogSchemaName("secret-catalog", "secret"));
}));
assertThrows(AccessDeniedException.class, () -> transaction(transactionManager, accessControlManager).execute(transactionId -> {
accessControlManager.checkCanDropSchema(transactionId, alice, context, new CatalogSchemaName("alice-catalog", "alice"));
}));
} |
@VisibleForTesting
protected void copyFromHost(MapHost host) throws IOException {
// reset retryStartTime for a new host
retryStartTime = 0;
// Get completed maps on 'host'
List<TaskAttemptID> maps = scheduler.getMapsForHost(host);
// Sanity check to catch hosts with only 'OBSOLETE' maps,
// especially at the tail of large jobs
if (maps.size() == 0) {
return;
}
if (LOG.isDebugEnabled()) {
LOG.debug("Fetcher " + id + " going to fetch from " + host + " for: " + maps);
}
// List of maps to be fetched yet
Set<TaskAttemptID> remaining = new HashSet<TaskAttemptID>(maps);
// Construct the url and connect
URL url = getMapOutputURL(host, maps);
DataInputStream input = null;
try {
input = openShuffleUrl(host, remaining, url);
if (input == null) {
return;
}
// Loop through available map-outputs and fetch them
// On any error, failedTasks is not null and we exit
// after putting back the remaining maps to the
// yet_to_be_fetched list and marking the failed tasks.
TaskAttemptID[] failedTasks = null;
while (!remaining.isEmpty() && failedTasks == null) {
try {
failedTasks = copyMapOutput(host, input, remaining, fetchRetryEnabled);
} catch (IOException e) {
IOUtils.cleanupWithLogger(LOG, input);
// Setup connection again if disconnected by NM
connection.disconnect();
// Get map output from remaining tasks only.
url = getMapOutputURL(host, remaining);
input = openShuffleUrl(host, remaining, url);
if (input == null) {
return;
}
}
}
if(failedTasks != null && failedTasks.length > 0) {
LOG.warn("copyMapOutput failed for tasks "+Arrays.toString(failedTasks));
scheduler.hostFailed(host.getHostName());
for(TaskAttemptID left: failedTasks) {
scheduler.copyFailed(left, host, true, false);
}
}
// Sanity check
if (failedTasks == null && !remaining.isEmpty()) {
throw new IOException("server didn't return all expected map outputs: "
+ remaining.size() + " left.");
}
input.close();
input = null;
} finally {
if (input != null) {
IOUtils.cleanupWithLogger(LOG, input);
input = null;
}
for (TaskAttemptID left : remaining) {
scheduler.putBackKnownMapOutput(host, left);
}
}
} | @SuppressWarnings("unchecked")
@Test(timeout=10000)
public void testCopyFromHostOnAnyException() throws Exception {
InMemoryMapOutput<Text, Text> immo = mock(InMemoryMapOutput.class);
Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
r, metrics, except, key, connection);
String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);
when(connection.getResponseCode()).thenReturn(200);
when(connection.getHeaderField(
SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH)).thenReturn(replyHash);
ShuffleHeader header = new ShuffleHeader(map1ID.toString(), 10, 10, 1);
ByteArrayOutputStream bout = new ByteArrayOutputStream();
header.write(new DataOutputStream(bout));
ByteArrayInputStream in = new ByteArrayInputStream(bout.toByteArray());
when(connection.getInputStream()).thenReturn(in);
when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
.thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
.thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
when(mm.reserve(any(TaskAttemptID.class), anyLong(), anyInt()))
.thenReturn(immo);
doThrow(new ArrayIndexOutOfBoundsException()).when(immo)
.shuffle(any(MapHost.class), any(InputStream.class), anyLong(),
anyLong(), any(ShuffleClientMetrics.class), any(Reporter.class));
underTest.copyFromHost(host);
verify(connection)
.addRequestProperty(SecureShuffleUtils.HTTP_HEADER_URL_HASH,
encHash);
verify(ss, times(1)).copyFailed(map1ID, host, true, false);
} |
public void processRow(GenericRow decodedRow, Result reusedResult)
throws Exception {
reusedResult.reset();
if (_complexTypeTransformer != null) {
// TODO: consolidate complex type transformer into composite type transformer
decodedRow = _complexTypeTransformer.transform(decodedRow);
}
Collection<GenericRow> rows = (Collection<GenericRow>) decodedRow.getValue(GenericRow.MULTIPLE_RECORDS_KEY);
if (rows != null) {
for (GenericRow row : rows) {
processPlainRow(row, reusedResult);
}
} else {
processPlainRow(decodedRow, reusedResult);
}
} | @Test
public void testSingleRow()
throws Exception {
TableConfig config = createTestTableConfig();
Schema schema = Fixtures.createSchema();
TransformPipeline pipeline = new TransformPipeline(config, schema);
GenericRow simpleRow = Fixtures.createSingleRow(9527);
TransformPipeline.Result result = new TransformPipeline.Result();
pipeline.processRow(simpleRow, result);
Assert.assertNotNull(result);
Assert.assertEquals(result.getTransformedRows().size(), 1);
Assert.assertEquals(result.getSkippedRowCount(), 0);
Assert.assertEquals(result.getTransformedRows().get(0), simpleRow);
} |
@Override
public final synchronized V get() throws InterruptedException, ExecutionException {
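// Block until the asynchronous callback supplies a result or exception, or the future is cancelled.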
while (result == null && exception == null && !cancelled) {
futureWait();
}
return getOrThrowExecutionException();
} | @Test(expected = TimeoutException.class)
public void simpleSmackFutureTimeoutTest() throws InterruptedException, ExecutionException, TimeoutException {
InternalProcessStanzaSmackFuture<Boolean, Exception> future = new SimpleInternalProcessStanzaSmackFuture<Boolean, Exception>() {
@Override
protected void handleStanza(Stanza stanza) {
}
};
future.get(5, TimeUnit.SECONDS);
} |
@Override
public Object plugin(final Object target) {
return Plugin.wrap(target, this);
} | @Test
public void pluginTest() {
final PostgreSQLQueryInterceptor postgreSQLQueryInterceptor = new PostgreSQLQueryInterceptor();
Assertions.assertDoesNotThrow(() -> postgreSQLQueryInterceptor.plugin(new Object()));
} |
public static boolean isUri(String potentialUri) {
if (StringUtils.isBlank(potentialUri)) {
return false;
}
try {
URI uri = new URI(potentialUri);
return uri.getScheme() != null && uri.getHost() != null;
} catch (URISyntaxException e) {
return false;
}
} | @Test public void
returns_false_when_uri_doesnt_contain_scheme() {
assertThat(UriValidator.isUri("127.0.0.1"), is(false));
} |
public HollowOrdinalIterator findKeysWithPrefix(String prefix) {
TST current;
HollowOrdinalIterator it;
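// Retry the query if a concurrent update swaps the volatile index reference mid-read.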
do {
current = prefixIndexVolatile;
it = current.findKeysWithPrefix(prefix);
} while (current != this.prefixIndexVolatile);
return it;
} | @Test
public void testSetReference() throws Exception {
MovieSetReference movieSetReference = new MovieSetReference(1, 1999, "The Matrix", new HashSet<String>(Arrays.asList("Keanu Reeves", "Laurence Fishburne", "Carrie-Anne Moss")));
objectMapper.add(movieSetReference);
StateEngineRoundTripper.roundTripSnapshot(writeStateEngine, readStateEngine);
HollowPrefixIndex prefixIndex = new HollowPrefixIndex(readStateEngine, "MovieSetReference", "actors.element");
Set<Integer> ordinals = toSet(prefixIndex.findKeysWithPrefix("kea"));
Assert.assertTrue(ordinals.size() == 1);
} |
@Override
public Properties loadProperties() {
final Properties answer = new Properties();
final Config config = ConfigProvider.getConfig();
for (String name : config.getPropertyNames()) {
try {
if (isValidForActiveProfiles(name)) {
answer.put(name, config.getValue(name, String.class));
}
} catch (NoSuchElementException e) {
if (LOG.isDebugEnabled()) {
LOG.debug("Failed to resolve property {} due to {}", name, e.getMessage());
}
}
}
return answer;
} | @Test
public void testLoadAll() {
PropertiesComponent pc = context.getPropertiesComponent();
Properties properties = pc.loadProperties();
Assertions.assertThat(properties.get("start")).isEqualTo("direct:start");
Assertions.assertThat(properties.get("hi")).isEqualTo("World");
Assertions.assertThat(properties.get("my-mock")).isEqualTo("result");
Assertions.assertThat(properties.get("empty")).isNull();
Assertions.assertThat(properties.get("test-non-active-profile")).isNull();
Assertions.assertThat(properties.get("test-profile-a")).isEqualTo("Profile A");
Assertions.assertThat(properties.get("test-profile-b")).isEqualTo("Profile B");
} |
@SuppressWarnings("unchecked")
@Udf
public <T> List<T> union(
@UdfParameter(description = "First array of values") final List<T> left,
@UdfParameter(description = "Second array of values") final List<T> right) {
if (left == null || right == null) {
return null;
}
final Set<T> combined = Sets.newLinkedHashSet(left);
combined.addAll(right);
return (List<T>) Arrays.asList(combined.toArray());
} | @Test
public void shouldReturnNullForNullRightInput() {
final List<String> input1 = Arrays.asList("foo");
final List<String> result = udf.union(null, input1);
assertThat(result, is(nullValue()));
} |
public final void setData(T data) {
currentData = data;
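// Only allow requestModelBuild() while setData() is executing.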
allowModelBuildRequests = true;
requestModelBuild();
allowModelBuildRequests = false;
} | @Test
public void setData() {
TestTypedController controller = new TestTypedController();
controller.setData("data");
controller.setData("data");
controller.cancelPendingModelBuild();
controller.setData("data");
controller.setData("data");
assertEquals(4, controller.numTimesBuiltModels);
} |
public static BytesInput from(InputStream in, int bytes) {
return new StreamBytesInput(in, bytes);
} | @Test
public void testFromByteArray() throws IOException {
byte[] data = new byte[1000];
RANDOM.nextBytes(data);
byte[] input = new byte[data.length + 20];
RANDOM.nextBytes(input);
System.arraycopy(data, 0, input, 10, data.length);
Supplier<BytesInput> factory = () -> BytesInput.from(input, 10, data.length);
validate(data, factory);
} |
public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) {
String fieldToNullName = field;
if ( isUseExtId ) {
// verify if the field has correct syntax
if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) {
if ( log.isDebug() ) {
log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field,
fieldToNullName ) );
}
return fieldToNullName;
}
String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 );
// working with custom objects and relationship
// cut off _r and then add _c in the end of the name
if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) {
fieldToNullName =
lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() )
+ CUSTOM_OBJECT_SUFFIX;
if ( log.isDebug() ) {
log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) );
}
return fieldToNullName;
}
fieldToNullName = lookupField + "Id";
}
if ( log.isDebug() ) {
log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) );
}
return fieldToNullName;
} | @Test
public void testIncorrectExternalKeySyntaxWarnIsNotLoggedInNotDebugMode() {
when( logMock.isDebug() ).thenReturn( false );
inputFieldName = "AccountId";
verify( logMock, never() ).logDebug( anyString() );
SalesforceUtils.getFieldToNullName( logMock, inputFieldName, true );
verify( logMock, never() ).logDebug(
"The field has incorrect external key syntax: AccountId. Syntax for external key should be : object:externalId/lookupField. Trying to use fieldToNullName=AccountId." );
} |
@Override
protected boolean isStepCompleted(@NonNull Context context) {
return SetupSupport.isThisKeyboardEnabled(context);
} | @Test
public void testClickToEnableReachesSettings() {
WizardPageEnableKeyboardFragment fragment = startFragment();
Assert.assertFalse(fragment.isStepCompleted(getApplicationContext()));
final View linkToClick = fragment.getView().findViewById(R.id.go_to_language_settings_action);
View.OnClickListener linkClickHandler = Shadows.shadowOf(linkToClick).getOnClickListener();
Assert.assertNotNull(linkClickHandler);
linkClickHandler.onClick(linkToClick);
final Intent nextStartedActivity =
Shadows.shadowOf((Application) ApplicationProvider.getApplicationContext())
.getNextStartedActivity();
Assert.assertEquals(Settings.ACTION_INPUT_METHOD_SETTINGS, nextStartedActivity.getAction());
} |
@Override
@SuppressWarnings("unchecked")
public <T> T get(final PluginConfigSpec<T> configSpec) {
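// Coerce the raw setting (or the spec's default) to the declared type, applying the conversions below.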
if (rawSettings.containsKey(configSpec.name())) {
Object o = rawSettings.get(configSpec.name());
if (configSpec.type().isAssignableFrom(o.getClass())) {
return (T) o;
} else if (configSpec.type() == Double.class && o.getClass() == Long.class) {
return configSpec.type().cast(((Long)o).doubleValue());
} else if (configSpec.type() == Boolean.class && o instanceof String) {
return configSpec.type().cast(Boolean.parseBoolean((String) o));
} else if (configSpec.type() == Codec.class && o instanceof String && pluginFactory != null) {
Codec codec = pluginFactory.buildDefaultCodec((String) o);
return configSpec.type().cast(codec);
} else if (configSpec.type() == Codec.class && o instanceof RubyObject && RubyCodecDelegator.isRubyCodecSubclass((RubyObject) o)) {
Codec codec = pluginFactory.buildRubyCodecWrapper((RubyObject) o);
return configSpec.type().cast(codec);
} else if (configSpec.type() == URI.class && o instanceof String) {
try {
URI uri = new URI((String) o);
return configSpec.type().cast(uri);
} catch (URISyntaxException ex) {
throw new IllegalStateException(
String.format("Invalid URI specified for '%s'", configSpec.name()));
}
} else if (configSpec.type() == Password.class && o instanceof String) {
Password p = new Password((String) o);
return configSpec.type().cast(p);
} else {
throw new IllegalStateException(
String.format("Setting value for '%s' of type '%s' incompatible with defined type of '%s'",
configSpec.name(), o.getClass(), configSpec.type()));
}
} else if (configSpec.type() == Codec.class && configSpec.getRawDefaultValue() != null && pluginFactory != null) {
Codec codec = pluginFactory.buildDefaultCodec(configSpec.getRawDefaultValue());
return configSpec.type().cast(codec);
} else if (configSpec.type() == URI.class && configSpec.getRawDefaultValue() != null) {
try {
URI uri = new URI(configSpec.getRawDefaultValue());
return configSpec.type().cast(uri);
} catch (URISyntaxException ex) {
throw new IllegalStateException(
String.format("Invalid default URI specified for '%s'", configSpec.name()));
}
} else if (configSpec.type() == Password.class && configSpec.getRawDefaultValue() != null) {
Password p = new Password(configSpec.getRawDefaultValue());
return configSpec.type().cast(p);
} else {
return configSpec.defaultValue();
}
} | @Test
public void testDefaultValues() {
Configuration unsetConfig = new ConfigurationImpl(new HashMap<>());
String defaultStringValue = "defaultStringValue";
long defaultLongValue = 43L;
boolean defaultBooleanValue = false;
PluginConfigSpec<String> stringConfig = PluginConfigSpec.stringSetting(stringKey, defaultStringValue, false, false);
PluginConfigSpec<Long> numberConfig = PluginConfigSpec.numSetting(numberKey, defaultLongValue, false, false);
PluginConfigSpec<Boolean> booleanConfig = PluginConfigSpec.booleanSetting(booleanKey, defaultBooleanValue, false, false);
Assert.assertEquals(defaultStringValue, unsetConfig.get(stringConfig));
Assert.assertEquals(defaultLongValue, (long) unsetConfig.get(numberConfig));
Assert.assertEquals(defaultBooleanValue, unsetConfig.get(booleanConfig));
Configuration config = getTestConfiguration();
Assert.assertNotEquals(defaultStringValue, config.get(stringConfig));
Assert.assertNotEquals(defaultLongValue, (long) config.get(numberConfig));
Assert.assertNotEquals(defaultBooleanValue, config.get(booleanConfig));
} |
@Override
protected CompletableFuture<LogListInfo> handleRequest(
@Nonnull HandlerRequest<EmptyRequestBody> request, @Nonnull RestfulGateway gateway)
throws RestHandlerException {
if (logDir == null) {
return CompletableFuture.completedFuture(new LogListInfo(Collections.emptyList()));
}
final File[] logFiles = logDir.listFiles();
if (logFiles == null) {
return FutureUtils.completedExceptionally(
new IOException("Could not list files in " + logDir));
}
final List<LogInfo> logs =
Arrays.stream(logFiles)
.filter(File::isFile)
.map(
logFile ->
new LogInfo(
logFile.getName(),
logFile.length(),
logFile.lastModified()))
.collect(Collectors.toList());
return CompletableFuture.completedFuture(new LogListInfo(logs));
} | @Test
void testGetJobManagerLogsListWhenLogDirIsNull() throws Exception {
JobManagerLogListHandler jobManagerLogListHandler = createHandler(null);
LogListInfo logListInfo =
jobManagerLogListHandler.handleRequest(testRequest, dispatcherGateway).get();
assertThat(logListInfo.getLogInfos()).isEmpty();
} |
static String headerLine(CSVFormat csvFormat) {
return String.join(String.valueOf(csvFormat.getDelimiter()), csvFormat.getHeader());
} | @Test
public void givenHeaderComment_isNoop() {
CSVFormat csvFormat = csvFormat().withHeaderComments("abc", "def", "xyz");
PCollection<String> input =
pipeline.apply(Create.of(headerLine(csvFormat), "a,1,1.1", "b,2,2.2", "c,3,3.3"));
CsvIOStringToCsvRecord underTest = new CsvIOStringToCsvRecord(csvFormat);
CsvIOParseResult<List<String>> result = input.apply(underTest);
PAssert.that(result.getOutput())
.containsInAnyOrder(
Arrays.asList(
Arrays.asList("a", "1", "1.1"),
Arrays.asList("b", "2", "2.2"),
Arrays.asList("c", "3", "3.3")));
PAssert.that(result.getErrors()).empty();
pipeline.run();
} |
public static long parseBytes(String text) throws IllegalArgumentException {
Objects.requireNonNull(text, "text cannot be null");
final String trimmed = text.trim();
if (trimmed.isEmpty()) {
throw new IllegalArgumentException("argument is an empty- or whitespace-only string");
}
final int len = trimmed.length();
int pos = 0;
char current;
while (pos < len && (current = trimmed.charAt(pos)) >= '0' && current <= '9') {
pos++;
}
final String number = trimmed.substring(0, pos);
final String unit = trimmed.substring(pos).trim().toLowerCase(Locale.US);
if (number.isEmpty()) {
throw new NumberFormatException("text does not start with a number");
}
final long value;
try {
value = Long.parseLong(number); // this throws a NumberFormatException on overflow
} catch (NumberFormatException e) {
throw new IllegalArgumentException(
"The value '"
+ number
+ "' cannot be represented as a 64-bit number (numeric overflow).");
}
final long multiplier = parseUnit(unit).map(MemoryUnit::getMultiplier).orElse(1L);
final long result = value * multiplier;
// check for overflow
if (result / multiplier != value) {
throw new IllegalArgumentException(
"The value '"
+ text
+ "' cannot be represented as a 64-bit number of bytes (numeric overflow).");
}
return result;
} | @Test
void testParseNumberTimeUnitOverflow() {
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> MemorySize.parseBytes("100000000000000 tb"));
} |
@Override
public KsMaterializedQueryResult<WindowedRow> get(
final GenericKey key,
final int partition,
final Range<Instant> windowStart,
final Range<Instant> windowEnd,
final Optional<Position> position
) {
try {
final WindowRangeQuery<GenericKey, GenericRow> query = WindowRangeQuery.withKey(key);
StateQueryRequest<KeyValueIterator<Windowed<GenericKey>, GenericRow>> request =
inStore(stateStore.getStateStoreName()).withQuery(query);
if (position.isPresent()) {
request = request.withPositionBound(PositionBound.at(position.get()));
}
final StateQueryResult<KeyValueIterator<Windowed<GenericKey>, GenericRow>> result =
stateStore.getKafkaStreams().query(request);
final QueryResult<KeyValueIterator<Windowed<GenericKey>, GenericRow>> queryResult =
result.getPartitionResults().get(partition);
if (queryResult.isFailure()) {
throw failedQueryException(queryResult);
}
try (KeyValueIterator<Windowed<GenericKey>, GenericRow> it =
queryResult.getResult()) {
final Builder<WindowedRow> builder = ImmutableList.builder();
while (it.hasNext()) {
final KeyValue<Windowed<GenericKey>, GenericRow> next = it.next();
final Window wnd = next.key.window();
if (!windowStart.contains(wnd.startTime())) {
continue;
}
if (!windowEnd.contains(wnd.endTime())) {
continue;
}
final long rowTime = wnd.end();
final WindowedRow row = WindowedRow.of(
stateStore.schema(),
next.key,
next.value,
rowTime
);
builder.add(row);
}
return KsMaterializedQueryResult.rowIteratorWithPosition(
builder.build().iterator(), queryResult.getPosition());
}
} catch (final NotUpToBoundException | MaterializationException e) {
throw e;
} catch (final Exception e) {
throw new MaterializationException("Failed to get value from materialized table", e);
}
} | @Test
public void shouldReturnAllSessionsForRangeAll() {
// Given:
givenSingleSession(Instant.now().minusSeconds(1000), Instant.now().plusSeconds(1000));
givenSingleSession(Instant.now().minusSeconds(1000), Instant.now().plusSeconds(1000));
// When:
final KsMaterializedQueryResult<WindowedRow> result =
table.get(A_KEY, PARTITION, Range.all(), Range.all());
// Then:
final Iterator<WindowedRow> rowIterator = result.getRowIterator();
assertThat(rowIterator.hasNext(), is(true));
final List<WindowedRow> resultList = Lists.newArrayList(rowIterator);
assertThat(resultList, hasSize(2));
assertThat(result.getPosition(), not(Optional.empty()));
assertThat(result.getPosition().get(), is(POSITION));
} |
public static Statement sanitize(
final Statement node,
final MetaStore metaStore) {
return sanitize(node, metaStore, true);
} | @Test
public void shouldAddAliasForExpression() {
// Given:
final Statement stmt = givenQuery("SELECT 1 + 2 FROM ORDERS;");
// When:
final Query result = (Query) AstSanitizer.sanitize(stmt, META_STORE);
// Then:
final SingleColumn col = (SingleColumn) result.getSelect().getSelectItems().get(0);
assertThat(col.getAlias(), equalTo(Optional.of(ColumnName.of("KSQL_COL_0"))));
} |
@ApiOperation(value = "Add a function button resource", notes = "Returns the newly created id")
@PostMapping
public ApiResult<Long> add(@Validated @RequestBody ActionForm actionForm){
return ApiResult.<Long>success().data(baseActionService.addAction(actionForm));
} | @Test
void add() {
} |
static String trimFieldsAndRemoveEmptyFields(String str) {
char[] chars = str.toCharArray();
char[] res = new char[chars.length];
/*
* set when reading the first non trimmable char after a separator char (or the beginning of the string)
* unset when reading a separator
*/
boolean inField = false;
boolean inQuotes = false;
int i = 0;
int resI = 0;
for (; i < chars.length; i++) {
boolean isSeparator = chars[i] == ',';
if (!inQuotes && isSeparator) {
// exiting field (may already be unset)
inField = false;
if (resI > 0) {
resI = retroTrim(res, resI);
}
} else {
boolean isTrimmed = !inQuotes && istrimmable(chars[i]);
if (isTrimmed && !inField) {
// no non-trimmable char has been read since the last separator yet
continue;
}
boolean isEscape = isEscapeChar(chars[i]);
if (isEscape) {
inQuotes = !inQuotes;
}
// prepend a separator if at least one field has already been written
if (!inField && resI > 0) {
res[resI] = ',';
resI++;
}
// register in field (may already be set)
inField = true;
// copy current char
res[resI] = chars[i];
resI++;
}
}
// inQuotes can only be true at this point if quotes are unbalanced
if (!inQuotes) {
// trim end of str
resI = retroTrim(res, resI);
}
return new String(res, 0, resI);
} | @Test
@UseDataProvider("plains")
public void trimFieldsAndRemoveEmptyFields_ignores_EmptyFields(String str) {
assertThat(trimFieldsAndRemoveEmptyFields("")).isEmpty();
assertThat(trimFieldsAndRemoveEmptyFields(str)).isEqualTo(str);
assertThat(trimFieldsAndRemoveEmptyFields(',' + str)).isEqualTo(str);
assertThat(trimFieldsAndRemoveEmptyFields(str + ',')).isEqualTo(str);
assertThat(trimFieldsAndRemoveEmptyFields(",,," + str)).isEqualTo(str);
assertThat(trimFieldsAndRemoveEmptyFields(str + ",,,")).isEqualTo(str);
assertThat(trimFieldsAndRemoveEmptyFields(str + ',' + str)).isEqualTo(str + ',' + str);
assertThat(trimFieldsAndRemoveEmptyFields(str + ",,," + str)).isEqualTo(str + ',' + str);
assertThat(trimFieldsAndRemoveEmptyFields(',' + str + ',' + str)).isEqualTo(str + ',' + str);
assertThat(trimFieldsAndRemoveEmptyFields("," + str + ",,," + str)).isEqualTo(str + ',' + str);
assertThat(trimFieldsAndRemoveEmptyFields(",,," + str + ",,," + str)).isEqualTo(str + ',' + str);
assertThat(trimFieldsAndRemoveEmptyFields(str + ',' + str + ',')).isEqualTo(str + ',' + str);
assertThat(trimFieldsAndRemoveEmptyFields(str + ",,," + str + ",")).isEqualTo(str + ',' + str);
assertThat(trimFieldsAndRemoveEmptyFields(str + ",,," + str + ",,")).isEqualTo(str + ',' + str);
assertThat(trimFieldsAndRemoveEmptyFields(',' + str + ',' + str + ',')).isEqualTo(str + ',' + str);
assertThat(trimFieldsAndRemoveEmptyFields(",," + str + ',' + str + ',')).isEqualTo(str + ',' + str);
assertThat(trimFieldsAndRemoveEmptyFields(',' + str + ",," + str + ',')).isEqualTo(str + ',' + str);
assertThat(trimFieldsAndRemoveEmptyFields(',' + str + ',' + str + ",,")).isEqualTo(str + ',' + str);
assertThat(trimFieldsAndRemoveEmptyFields(",,," + str + ",,," + str + ",,")).isEqualTo(str + ',' + str);
assertThat(trimFieldsAndRemoveEmptyFields(str + ',' + str + ',' + str)).isEqualTo(str + ',' + str + ',' + str);
} |
public boolean hasPersistentLocalStore() {
for (final StateStore store : stateStores) {
if (store.persistent()) {
return true;
}
}
return false;
} | @Test
public void inMemoryStoreShouldNotResultInPersistentLocalStore() {
final ProcessorTopology processorTopology = createLocalStoreTopology(Stores.inMemoryKeyValueStore("my-store"));
assertFalse(processorTopology.hasPersistentLocalStore());
} |
@Override
public int run(String[] args) throws Exception {
YarnConfiguration yarnConf =
getConf() == null ? new YarnConfiguration() : new YarnConfiguration(
getConf());
boolean isHAEnabled =
yarnConf.getBoolean(YarnConfiguration.RM_HA_ENABLED,
YarnConfiguration.DEFAULT_RM_HA_ENABLED);
if (args.length < 1) {
printUsage("", isHAEnabled);
return -1;
}
int exitCode = -1;
int i = 0;
String cmd = args[i++];
exitCode = 0;
if ("-help".equals(cmd)) {
if (i < args.length) {
printUsage(args[i], isHAEnabled);
} else {
printHelp("", isHAEnabled);
}
return exitCode;
}
if (USAGE.containsKey(cmd)) {
if (isHAEnabled) {
return super.run(args);
}
System.out.println("Cannot run " + cmd
+ " when ResourceManager HA is not enabled");
return -1;
}
//
// verify that we have enough command line parameters
//
String subClusterId = StringUtils.EMPTY;
if ("-refreshAdminAcls".equals(cmd) || "-refreshQueues".equals(cmd) ||
"-refreshNodesResources".equals(cmd) ||
"-refreshServiceAcl".equals(cmd) ||
"-refreshUserToGroupsMappings".equals(cmd) ||
"-refreshSuperUserGroupsConfiguration".equals(cmd) ||
"-refreshClusterMaxPriority".equals(cmd)) {
subClusterId = parseSubClusterId(args, isHAEnabled);
// If we enable Federation mode, the number of args may be either one or three.
// Example: -refreshQueues or -refreshQueues -subClusterId SC-1
if (isYarnFederationEnabled(getConf()) && args.length != 1 && args.length != 3) {
printUsage(cmd, isHAEnabled);
return exitCode;
} else if (!isYarnFederationEnabled(getConf()) && args.length != 1) {
// If Federation mode is not enabled, then the number of args can only be one.
// Example: -refreshQueues
printUsage(cmd, isHAEnabled);
return exitCode;
}
}
// If it is federation mode, we will print federation mode information
if (isYarnFederationEnabled(getConf())) {
System.out.println("Using YARN Federation mode.");
}
try {
if ("-refreshQueues".equals(cmd)) {
exitCode = refreshQueues(subClusterId);
} else if ("-refreshNodes".equals(cmd)) {
exitCode = handleRefreshNodes(args, cmd, isHAEnabled);
} else if ("-refreshNodesResources".equals(cmd)) {
exitCode = refreshNodesResources(subClusterId);
} else if ("-refreshUserToGroupsMappings".equals(cmd)) {
exitCode = refreshUserToGroupsMappings(subClusterId);
} else if ("-refreshSuperUserGroupsConfiguration".equals(cmd)) {
exitCode = refreshSuperUserGroupsConfiguration(subClusterId);
} else if ("-refreshAdminAcls".equals(cmd)) {
exitCode = refreshAdminAcls(subClusterId);
} else if ("-refreshServiceAcl".equals(cmd)) {
exitCode = refreshServiceAcls(subClusterId);
} else if ("-refreshClusterMaxPriority".equals(cmd)) {
exitCode = refreshClusterMaxPriority(subClusterId);
} else if ("-getGroups".equals(cmd)) {
String[] usernames = Arrays.copyOfRange(args, i, args.length);
exitCode = getGroups(usernames);
} else if ("-updateNodeResource".equals(cmd)) {
exitCode = handleUpdateNodeResource(args, cmd, isHAEnabled, subClusterId);
} else if ("-addToClusterNodeLabels".equals(cmd)) {
exitCode = handleAddToClusterNodeLabels(args, cmd, isHAEnabled);
} else if ("-removeFromClusterNodeLabels".equals(cmd)) {
exitCode = handleRemoveFromClusterNodeLabels(args, cmd, isHAEnabled);
} else if ("-replaceLabelsOnNode".equals(cmd)) {
exitCode = handleReplaceLabelsOnNodes(args, cmd, isHAEnabled);
} else {
exitCode = -1;
System.err.println(cmd.substring(1) + ": Unknown command");
printUsage("", isHAEnabled);
}
} catch (IllegalArgumentException arge) {
exitCode = -1;
System.err.println(cmd.substring(1) + ": " + arge.getLocalizedMessage());
printUsage(cmd, isHAEnabled);
} catch (RemoteException e) {
//
// This is an error returned by the hadoop server. Print
// out the first line of the error message; ignore the stack trace.
exitCode = -1;
try {
String[] content;
content = e.getLocalizedMessage().split("\n");
System.err.println(cmd.substring(1) + ": "
+ content[0]);
} catch (Exception ex) {
System.err.println(cmd.substring(1) + ": "
+ ex.getLocalizedMessage());
}
} catch (Exception e) {
exitCode = -1;
System.err.println(cmd.substring(1) + ": "
+ e.getLocalizedMessage());
}
if (null != localNodeLabelsManager) {
localNodeLabelsManager.stop();
}
return exitCode;
} | @Test
public void testAccessRemoteNodeLabelManager() throws Exception {
String[] args =
{ "-addToClusterNodeLabels", "x,y" };
assertEquals(0, rmAdminCLI.run(args));
// localNodeLabelsManager shouldn't be accessed
assertTrue(dummyNodeLabelsManager.getClusterNodeLabelNames().isEmpty());
// remote node labels manager accessed
assertTrue(remoteAdminServiceAccessed);
} |
@Override
public Boolean parse(final String value) {
return Boolean.valueOf(value);
} | @Test
void assertParse() {
assertTrue(new PostgreSQLBoolValueParser().parse("true"));
} |
static int calcSetBitSeq(int by, int startBit, int bitSize, int val)
{
int mask = ((1 << bitSize) - 1); // bitSize ones in the low bits
int truncatedVal = val & mask; // truncate val to bitSize bits
mask = ~(mask << startBit); // mask that clears the target bit range in 'by'
return (by & mask) | (truncatedVal << startBit);
}
void testCalcSetBitSeq()
{
assertEquals(Integer.parseInt("00000000", 2), calcSetBitSeq(Integer.parseInt("11111111", 2), 0, 8, 0));
assertEquals(Integer.parseInt("00000001", 2), calcSetBitSeq(Integer.parseInt("11111111", 2), 0, 8, 1));
assertEquals(Integer.parseInt("11111111", 2), calcSetBitSeq(Integer.parseInt("11111111", 2), 0, 1, 1));
assertEquals(Integer.parseInt("11111101", 2), calcSetBitSeq(Integer.parseInt("11111111", 2), 0, 2, 1));
assertEquals(Integer.parseInt("11111001", 2), calcSetBitSeq(Integer.parseInt("11111111", 2), 0, 3, 1));
assertEquals(Integer.parseInt("00000001", 2), calcSetBitSeq(Integer.parseInt("00000000", 2), 0, 2, 1));
assertEquals(Integer.parseInt("11110001", 2), calcSetBitSeq(Integer.parseInt("11111111", 2), 0, 4, 1));
assertEquals(Integer.parseInt("11100011", 2), calcSetBitSeq(Integer.parseInt("11111111", 2), 1, 4, 1));
assertEquals(Integer.parseInt("00000010", 2), calcSetBitSeq(Integer.parseInt("00000000", 2), 1, 1, 1));
assertEquals(Integer.parseInt("11111111", 2), calcSetBitSeq(Integer.parseInt("11111111", 2), 7, 1, 1));
assertEquals(Integer.parseInt("01111111", 2), calcSetBitSeq(Integer.parseInt("11111111", 2), 7, 1, 0));
assertEquals(Integer.parseInt("10000000", 2), calcSetBitSeq(Integer.parseInt("00000000", 2), 7, 1, 1));
assertEquals(Integer.parseInt("00000000", 2), calcSetBitSeq(Integer.parseInt("00000000", 2), 7, 1, 0));
assertEquals(Integer.parseInt("01000000", 2), calcSetBitSeq(Integer.parseInt("00000000", 2), 6, 1, 1));
assertEquals(Integer.parseInt("00000000", 2), calcSetBitSeq(Integer.parseInt("00000000", 2), 6, 1, 0));
assertEquals(Integer.parseInt("00110000", 2), calcSetBitSeq(Integer.parseInt("00000000", 2), 3, 3, 6));
assertEquals(Integer.parseInt("01100000", 2), calcSetBitSeq(Integer.parseInt("00000000", 2), 4, 3, 6));
assertEquals(Integer.parseInt("11000000", 2), calcSetBitSeq(Integer.parseInt("00000000", 2), 5, 3, 6));
assertEquals(Integer.parseInt("11111111", 2), calcSetBitSeq(Integer.parseInt("00000000", 2), 0, 8, 0xFF));
assertEquals(Integer.parseInt("11111111", 2), calcSetBitSeq(Integer.parseInt("11111111", 2), 0, 8, 0xFF));
assertEquals(0x7E, calcSetBitSeq(0xA5, 0, 8, 0xD9 + 0xA5));
// check truncation
assertEquals(Integer.parseInt("00000010", 2), calcSetBitSeq(Integer.parseInt("00000000", 2), 1, 1, 3));
} |
@Override
public boolean accept(File dir, String name) {
return name.endsWith(".groovy");
} | @Test
void testGroovyFileFilter() {
GroovyFileFilter filter = new GroovyFileFilter();
assertFalse(filter.accept(new File("/"), "file.mikey"));
assertTrue(filter.accept(new File("/"), "file.groovy"));
} |
public PipelineConfigs getFirstEditablePartOrNull() {
for (PipelineConfigs part : parts) {
if (isEditable(part))
return part;
}
return null;
} | @Test
public void shouldReturnNullWhenFirstEditablePartNotExists() {
PipelineConfig pipe1 = PipelineConfigMother.pipelineConfig("pipeline1");
BasicPipelineConfigs part1 = new BasicPipelineConfigs(pipe1);
MergePipelineConfigs group = new MergePipelineConfigs(
part1, new BasicPipelineConfigs());
assertNull(group.getFirstEditablePartOrNull());
} |
public void checkForUpgradeAndExtraProperties() throws IOException {
if (upgradesEnabled()) {
checkForUpgradeAndExtraProperties(systemEnvironment.getAgentMd5(), systemEnvironment.getGivenAgentLauncherMd5(),
systemEnvironment.getAgentPluginsMd5(), systemEnvironment.getTfsImplMd5());
} else {
LOGGER.debug("[Agent Upgrade] Skipping check as there is no wrapping launcher to relaunch the agent JVM...");
}
} | @Test
void checkForUpgradeShouldKillAgentIfPluginZipMd5doesNotMatch() {
when(systemEnvironment.getAgentMd5()).thenReturn("not-changing");
expectHeaderValue(SystemEnvironment.AGENT_CONTENT_MD5_HEADER, "not-changing");
when(systemEnvironment.getGivenAgentLauncherMd5()).thenReturn("not-changing");
expectHeaderValue(SystemEnvironment.AGENT_LAUNCHER_CONTENT_MD5_HEADER, "not-changing");
when(systemEnvironment.getAgentPluginsMd5()).thenReturn("old-plugins-md5");
expectHeaderValue(SystemEnvironment.AGENT_PLUGINS_ZIP_MD5_HEADER, "new-plugins-md5");
RuntimeException toBeThrown = new RuntimeException("Boo!");
doThrow(toBeThrown).when(jvmExitter).jvmExit(anyString(), anyString(), anyString());
try {
agentUpgradeService.checkForUpgradeAndExtraProperties();
fail("should have done jvm exit");
} catch (Exception e) {
assertThat(toBeThrown).isSameAs(e);
}
verify(jvmExitter).jvmExit("plugins", "old-plugins-md5", "new-plugins-md5");
} |
@Override
public SelType call(String methodName, SelType[] args) {
if (args.length == 0) {
if ("getAsText".equals(methodName)) {
return SelString.of(val.getAsText(Locale.US));
} else if ("withMinimumValue".equals(methodName)) {
return SelJodaDateTime.of(val.withMinimumValue());
} else if ("withMaximumValue".equals(methodName)) {
return SelJodaDateTime.of(val.withMaximumValue());
} else if ("get".equals(methodName)) {
return SelLong.of((long) val.get());
}
}
throw new UnsupportedOperationException(
type()
+ " DO NOT support calling method: "
+ methodName
+ " with args: "
+ Arrays.toString(args));
} | @Test(expected = UnsupportedOperationException.class)
public void testInvalidCallMethod() {
one.call("getAsString", new SelType[] {});
} |
@Override
public void setQueues(List<CSQueue> queues) {
this.queues = queues;
} | @Test
public void testPriorityUtilizationOrdering() {
PriorityUtilizationQueueOrderingPolicy policy =
new PriorityUtilizationQueueOrderingPolicy(true);
// Case 1, one queue
policy.setQueues(mockCSQueues(new String[] { "a" }, new int[] { 1 },
new float[] { 0.1f }, new float[] {0.2f}, ""));
verifyOrder(policy, "", new String[] { "a" });
// Case 2, 2 queues, both under utilized, same priority
policy.setQueues(mockCSQueues(new String[] { "a", "b" }, new int[] { 1, 1 },
new float[] { 0.2f, 0.1f }, new float[] {0.2f, 0.3f}, ""));
verifyOrder(policy, "", new String[] { "b", "a" });
// Case 3, 2 queues, both over utilized, same priority
policy.setQueues(mockCSQueues(new String[] { "a", "b" }, new int[] { 1, 1 },
new float[] { 1.1f, 1.2f },new float[] {0.2f, 0.3f}, ""));
verifyOrder(policy, "", new String[] { "a", "b" });
// Case 4, 2 queues, one under and one over, same priority
policy.setQueues(mockCSQueues(new String[] { "a", "b" }, new int[] { 1, 1 },
new float[] { 0.1f, 1.2f }, new float[] {0.2f, 0.3f}, ""));
verifyOrder(policy, "", new String[] { "a", "b" });
// Case 5, 2 queues, both over utilized, different priority
policy.setQueues(mockCSQueues(new String[] { "a", "b" }, new int[] { 1, 2 },
new float[] { 1.1f, 1.2f }, new float[] {0.2f, 0.3f}, ""));
verifyOrder(policy, "", new String[] { "b", "a" });
// Case 6, 2 queues, both under utilized, different priority
policy.setQueues(mockCSQueues(new String[] { "a", "b" }, new int[] { 1, 2 },
new float[] { 0.1f, 0.2f }, new float[] {0.2f, 0.3f}, ""));
verifyOrder(policy, "", new String[] { "b", "a" });
// Case 7, 2 queues, one under utilized and one over utilized,
// different priority (1)
policy.setQueues(mockCSQueues(new String[] { "a", "b" }, new int[] { 1, 2 },
new float[] { 0.1f, 1.2f }, new float[] {0.2f, 0.3f}, ""));
verifyOrder(policy, "", new String[] { "a", "b" });
// Case 8, 2 queues, one under utilized and one over utilized,
// different priority (2)
policy.setQueues(mockCSQueues(new String[] { "a", "b" }, new int[] { 2, 1 },
new float[] { 0.1f, 1.2f }, new float[] {0.2f, 0.3f}, ""));
verifyOrder(policy, "", new String[] { "a", "b" });
// Case 9, 2 queues, one under utilized and one meet, different priority (1)
policy.setQueues(mockCSQueues(new String[] { "a", "b" }, new int[] { 1, 2 },
new float[] { 0.1f, 1.0f }, new float[] {0.2f, 0.3f}, ""));
verifyOrder(policy, "", new String[] { "a", "b" });
// Case 10, 2 queues, one under utilized and one meet, different priority (2)
policy.setQueues(mockCSQueues(new String[] { "a", "b" }, new int[] { 2, 1 },
new float[] { 0.1f, 1.0f }, new float[] {0.2f, 0.3f}, ""));
verifyOrder(policy, "", new String[] { "a", "b" });
// Case 11, 2 queues, one under utilized and one meet, same priority
policy.setQueues(mockCSQueues(new String[] { "a", "b" }, new int[] { 1, 1 },
new float[] { 0.1f, 1.0f }, new float[] {0.2f, 0.3f}, ""));
verifyOrder(policy, "", new String[] { "a", "b" });
// Case 12, 2 queues, both meet, different priority
policy.setQueues(mockCSQueues(new String[] { "a", "b" }, new int[] { 1, 2 },
new float[] { 1.0f, 1.0f }, new float[] {0.2f, 0.3f}, ""));
verifyOrder(policy, "", new String[] { "b", "a" });
// Case 13, 5 queues, different priority
policy.setQueues(mockCSQueues(new String[] { "a", "b", "c", "d", "e" },
new int[] { 1, 2, 0, 0, 3 },
new float[] { 1.2f, 1.0f, 0.2f, 1.1f, 0.2f },
new float[] { 0.2f, 0.1f, 0.1f, 0.3f, 0.3f }, ""));
verifyOrder(policy, "", new String[] { "e", "c", "b", "a", "d" });
// Case 14, 5 queues, different priority,
// partition default - abs capacity is 0;
policy.setQueues(mockCSQueues(new String[] { "a", "b", "c", "d", "e" },
new int[] { 1, 2, 0, 0, 3 },
new float[] { 1.2f, 1.0f, 0.2f, 1.1f, 0.2f },
new float[] { 0.2f, 0.1f, 0.1f, 0.3f, 0.3f }, "x"));
verifyOrder(policy, "", new String[] { "e", "b", "a", "c", "d" });
// Case 15, 5 queues, different priority, partition x;
policy.setQueues(mockCSQueues(new String[] { "a", "b", "c", "d", "e" },
new int[] { 1, 2, 0, 0, 3 },
new float[] { 1.2f, 1.0f, 0.2f, 1.1f, 0.2f },
new float[] { 0.2f, 0.1f, 0.1f, 0.3f, 0.3f }, "x"));
verifyOrder(policy, "x", new String[] { "e", "c", "b", "a", "d" });
// Case 16, 5 queues, different priority, partition x; and different
// accessibility
List<CSQueue> queues = mockCSQueues(new String[] { "a", "b", "c", "d", "e" },
new int[] { 1, 2, 0, 0, 3 },
new float[] { 1.2f, 1.0f, 0.2f, 1.1f, 0.2f },
new float[] { 0.2f, 0.1f, 0.1f, 0.3f, 0.3f }, "x");
// Only a and d have access to x
when(queues.get(0).getAccessibleNodeLabels()).thenReturn(
ImmutableSet.of("x"));
when(queues.get(3).getAccessibleNodeLabels()).thenReturn(
ImmutableSet.of("x"));
policy.setQueues(queues);
verifyOrder(policy, "x", new String[] { "a", "d", "e", "c", "b" });
// Case 17, 2 queues, one's abs capacity is 0
policy.setQueues(mockCSQueues(new String[] { "a", "b" }, new int[] { 1, 1 },
new float[] { 0.1f, 1.2f }, new float[] {0.0f, 0.3f}, ""));
verifyOrder(policy, "", new String[] { "b", "a" });
// Case 18, 2 queues, one's abs capacity is 0
policy.setQueues(mockCSQueues(new String[] { "a", "b" }, new int[] { 1, 1 },
new float[] { 0.1f, 1.2f }, new float[] {0.3f, 0.0f}, ""));
verifyOrder(policy, "", new String[] { "a", "b" });
// Case 19, 5 queues with 2 having abs capacity 0 are prioritized last
policy.setQueues(mockCSQueues(new String[] { "a", "b", "c", "d", "e" },
new int[] { 1, 2, 0, 0, 3 },
new float[] { 1.2f, 1.0f, 0.2f, 1.1f, 0.2f },
new float[] { 0.0f, 0.0f, 0.1f, 0.3f, 0.3f }, "x"));
verifyOrder(policy, "x", new String[] { "e", "c", "d", "b", "a" });
} |
public static void insert(
final UnsafeBuffer termBuffer, final int termOffset, final UnsafeBuffer packet, final int length)
{
if (0 == termBuffer.getInt(termOffset))
{
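// slot is still empty: copy the payload first, header words last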
termBuffer.putBytes(termOffset + HEADER_LENGTH, packet, HEADER_LENGTH, length - HEADER_LENGTH);
termBuffer.putLong(termOffset + 24, packet.getLong(24));
termBuffer.putLong(termOffset + 16, packet.getLong(16));
termBuffer.putLong(termOffset + 8, packet.getLong(8));
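// ordered write of the first header word publishes the frame to concurrent readers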
termBuffer.putLongOrdered(termOffset, packet.getLong(0));
}
} | @Test
void shouldFillGapButNotMoveTailOrHwm()
{
final int frameLength = 50;
final int alignedFrameLength = BitUtil.align(frameLength, FRAME_ALIGNMENT);
final int srcOffset = 0;
final UnsafeBuffer packet = new UnsafeBuffer(ByteBuffer.allocate(alignedFrameLength));
final int termOffset = alignedFrameLength * 2;
TermRebuilder.insert(termBuffer, termOffset, packet, alignedFrameLength);
verify(termBuffer).putBytes(
(alignedFrameLength * 2) + HEADER_LENGTH,
packet,
srcOffset + HEADER_LENGTH,
alignedFrameLength - HEADER_LENGTH);
} |
public void steal() {
method.steal();
} | @Test
void testSteal() {
final var method = spy(StealingMethod.class);
final var thief = new HalflingThief(method);
thief.steal();
verify(method).steal();
String target = verify(method).pickTarget();
verify(method).confuseTarget(target);
verify(method).stealTheItem(target);
verifyNoMoreInteractions(method);
} |
@Override
public String getFieldDefinition( ValueMetaInterface v, String tk, String pk, boolean useAutoinc,
boolean addFieldName, boolean addCr ) {
String retval = "";
String fieldname = v.getName();
int length = v.getLength();
// Unused in Vertica
// int precision = v.getPrecision();
if ( addFieldName ) {
retval += fieldname + " ";
}
int type = v.getType();
switch ( type ) {
case ValueMetaInterface.TYPE_DATE:
case ValueMetaInterface.TYPE_TIMESTAMP:
retval += "TIMESTAMP";
break;
case ValueMetaInterface.TYPE_BOOLEAN:
retval += "BOOLEAN";
break;
case ValueMetaInterface.TYPE_NUMBER:
case ValueMetaInterface.TYPE_BIGNUMBER:
retval += "FLOAT";
break;
case ValueMetaInterface.TYPE_INTEGER:
retval += "INTEGER";
break;
case ValueMetaInterface.TYPE_STRING:
retval += ( length < 1 ) ? "VARCHAR" : "VARCHAR(" + length + ")";
break;
case ValueMetaInterface.TYPE_BINARY:
retval += ( length < 1 ) ? "VARBINARY" : "VARBINARY(" + length + ")";
break;
default:
retval += " UNKNOWN";
break;
}
if ( addCr ) {
retval += Const.CR;
}
return retval;
} | @Test
public void testGetFieldDefinition() throws Exception {
assertEquals( "FOO TIMESTAMP",
nativeMeta.getFieldDefinition( new ValueMetaDate( "FOO" ), "", "", false, true, false ) );
assertEquals( "TIMESTAMP",
nativeMeta.getFieldDefinition( new ValueMetaTimestamp( "FOO" ), "", "", false, false, false ) );
assertEquals( "BOOLEAN",
nativeMeta.getFieldDefinition( new ValueMetaBoolean( "FOO" ), "", "", false, false, false ) );
assertEquals( "FLOAT",
nativeMeta.getFieldDefinition( new ValueMetaNumber( "FOO" ), "", "", false, false, false ) );
assertEquals( "FLOAT",
nativeMeta.getFieldDefinition( new ValueMetaBigNumber( "FOO" ), "", "", false, false, false ) );
assertEquals( "INTEGER",
nativeMeta.getFieldDefinition( new ValueMetaInteger( "FOO" ), "", "", false, false, false ) );
assertEquals( "VARCHAR",
nativeMeta.getFieldDefinition( new ValueMetaString( "FOO", 0, 0 ), "", "", false, false, false ) );
assertEquals( "VARCHAR(15)",
nativeMeta.getFieldDefinition( new ValueMetaString( "FOO", 15, 0 ), "", "", false, false, false ) );
assertEquals( "VARBINARY",
nativeMeta.getFieldDefinition( new ValueMetaBinary( "FOO", 0, 0 ), "", "", false, false, false ) );
assertEquals( "VARBINARY(50)",
nativeMeta.getFieldDefinition( new ValueMetaBinary( "FOO", 50, 0 ), "", "", false, false, false ) );
assertEquals( " UNKNOWN",
nativeMeta.getFieldDefinition( new ValueMetaInternetAddress( "FOO" ), "", "", false, false, false ) );
assertEquals( " UNKNOWN" + System.getProperty( "line.separator" ),
nativeMeta.getFieldDefinition( new ValueMetaInternetAddress( "FOO" ), "", "", false, false, true ) );
} |
@Override
public void writeChar(final int v) throws IOException {
ensureAvailable(CHAR_SIZE_IN_BYTES);
MEM.putChar(buffer, ARRAY_BYTE_BASE_OFFSET + pos, (char) v);
pos += CHAR_SIZE_IN_BYTES;
} | @Test
public void testWriteCharV() throws Exception {
char expected = 100;
out.writeChar(expected);
char actual = Bits.readChar(out.buffer, 0, ByteOrder.nativeOrder() == ByteOrder.BIG_ENDIAN);
assertEquals(expected, actual);
} |
@Bean
public PluginDataHandler redirectPluginDataHandler() {
return new RedirectPluginDataHandler();
} | @Test
public void testRedirectPluginDataHandler() {
new ApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(RedirectPluginConfiguration.class))
.withBean(RedirectPluginConfigurationTest.class)
.withBean(DispatcherHandler.class)
.withPropertyValues("debug=true")
.run(context -> {
PluginDataHandler handler = context.getBean("redirectPluginDataHandler", PluginDataHandler.class);
assertNotNull(handler);
});
} |
public static long getDateTime(String timestamp) throws ParseException, IllegalArgumentException {
if (timestamp == null) {
throw new IllegalArgumentException("Error parsing timestamp with null value");
}
final String[] timestampParts = timestamp.split("T");
if (timestampParts.length < 2) {
throw new ParseException("Error parsing timestamp. It does not contain a 'T' according to ISO8601 format", timestamp.length());
}
final String secondPart = timestampParts[1];
if (!(secondPart.contains("+") || secondPart.contains("-") || secondPart.contains("Z"))) {
timestamp = timestamp + "Z";
}
SimpleDateFormat simpleDateFormat = new SimpleDateFormat();
// strictly parsing the date/time format
simpleDateFormat.setLenient(false);
try {
simpleDateFormat.applyPattern("yyyy-MM-dd'T'HH:mm:ss.SSSXXX");
final Date date = simpleDateFormat.parse(timestamp);
return date.getTime();
} catch (final ParseException e) {
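// fall back to a shorter zone-offset form (single X, e.g. +08) that the XXX pattern cannot parse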
simpleDateFormat.applyPattern("yyyy-MM-dd'T'HH:mm:ss.SSSX");
final Date date = simpleDateFormat.parse(timestamp);
return date.getTime();
}
} | @Test
public void shouldThrowOnInvalidDateFormatOrNullTimestamp() {
// check some invalid formats
// test null timestamp
assertTrue(assertThrows(IllegalArgumentException.class, () ->
Utils.getDateTime(null)
).getMessage().contains("Error parsing timestamp with null value"));
// test pattern: yyyy-MM-dd'T'HH:mm:ss.X
checkExceptionForGetDateTimeMethod(() ->
invokeGetDateTimeMethod(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.X"))
);
// test pattern: yyyy-MM-dd HH:mm:ss
assertTrue(assertThrows(ParseException.class, () ->
invokeGetDateTimeMethod(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"))
).getMessage().contains("It does not contain a 'T' according to ISO8601 format"));
// KAFKA-10685: use DateTimeFormatter to generate micro/nano-second timestamps
final DateTimeFormatter formatter = new DateTimeFormatterBuilder()
.appendPattern("yyyy-MM-dd'T'HH:mm:ss")
.appendFraction(ChronoField.NANO_OF_SECOND, 0, 9, true)
.toFormatter();
final LocalDateTime timestampWithNanoSeconds = LocalDateTime.of(2020, 11, 9, 12, 34, 56, 123456789);
final LocalDateTime timestampWithMicroSeconds = timestampWithNanoSeconds.truncatedTo(ChronoUnit.MICROS);
final LocalDateTime timestampWithSeconds = timestampWithNanoSeconds.truncatedTo(ChronoUnit.SECONDS);
// test pattern: yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSS
checkExceptionForGetDateTimeMethod(() ->
Utils.getDateTime(formatter.format(timestampWithNanoSeconds))
);
// test pattern: yyyy-MM-dd'T'HH:mm:ss.SSSSSS
checkExceptionForGetDateTimeMethod(() ->
Utils.getDateTime(formatter.format(timestampWithMicroSeconds))
);
// test pattern: yyyy-MM-dd'T'HH:mm:ss
checkExceptionForGetDateTimeMethod(() ->
Utils.getDateTime(formatter.format(timestampWithSeconds))
);
} |
@Override
public <VR> KTable<Windowed<K>, VR> aggregate(final Initializer<VR> initializer,
final Aggregator<? super K, ? super V, VR> aggregator) {
return aggregate(initializer, aggregator, Materialized.with(keySerde, null));
} | @Test
public void shouldThrowIllegalArgumentWhenRetentionIsTooSmall() {
assertThrows(IllegalArgumentException.class, () -> windowedStream
.aggregate(
MockInitializer.STRING_INIT,
MockAggregator.TOSTRING_ADDER,
Materialized
.<String, String, WindowStore<Bytes, byte[]>>as("aggregated")
.withKeySerde(Serdes.String())
.withValueSerde(Serdes.String())
.withRetention(ofMillis(1L))
)
);
} |
public static List<LionBundle> generateBundles(String base, String... tags) {
List<LionBundle> bundles = new ArrayList<>(tags.length);
BundleStitcher stitcher = new BundleStitcher(base);
for (String tag : tags) {
try {
LionBundle lion = stitcher.stitch(tag);
bundles.add(lion);
log.info("Generated LION bundle: {}", lion);
log.debug(" Dumped: {}", lion.dump());
} catch (IllegalArgumentException e) {
log.warn("Unable to generate bundle: {} / {}", base, tag);
log.debug("BOOM!", e);
}
}
return ImmutableList.copyOf(bundles);
} | @Test
public void generateBundles() {
title("generateBundles");
List<LionBundle> bundles =
BundleStitcher.generateBundles(LION_BASE, LION_TAGS);
print(bundles);
assertEquals("missing the bundle", 1, bundles.size());
LionBundle b = bundles.get(0);
assertEquals("wrong id", "CardGame1", b.id());
assertEquals("unexpected item count", 12, b.size());
assertEquals("missing 3oak", "Three of a Kind", b.getValue("three_oak"));
assertEquals("missing queen", "Queen", b.getValue("queen"));
assertEquals("missing clubs", "Clubs", b.getValue("clubs"));
} |
@Override
public void addPermits(int permits) {
get(addPermitsAsync(permits));
} | @Test
public void testAddPermits() throws InterruptedException {
RPermitExpirableSemaphore s = redisson.getPermitExpirableSemaphore("test");
s.trySetPermits(10);
s.addPermits(5);
assertThat(s.availablePermits()).isEqualTo(15);
s.addPermits(-10);
assertThat(s.availablePermits()).isEqualTo(5);
s.addPermits(-10);
assertThat(s.availablePermits()).isEqualTo(-5);
} |
@Override
public void handleWayTags(int edgeId, EdgeIntAccess edgeIntAccess, ReaderWay way, IntsRef relationFlags) {
OSMValueExtractor.extractTons(edgeId, edgeIntAccess, way, maxAxleLoadEncoder, Collections.singletonList("maxaxleload"));
} | @Test
public void testRounding() {
ReaderWay readerWay = new ReaderWay(1);
EdgeIntAccess edgeIntAccess = new ArrayEdgeIntAccess(1);
int edgeId = 0;
readerWay.setTag("maxaxleload", "4.8");
parser.handleWayTags(edgeId, edgeIntAccess, readerWay, relFlags);
assertEquals(5.0, malEnc.getDecimal(false, edgeId, edgeIntAccess), .01);
edgeIntAccess = new ArrayEdgeIntAccess(1);
readerWay.setTag("maxaxleload", "3.6");
parser.handleWayTags(edgeId, edgeIntAccess, readerWay, relFlags);
assertEquals(3.5, malEnc.getDecimal(false, edgeId, edgeIntAccess), .01);
edgeIntAccess = new ArrayEdgeIntAccess(1);
readerWay.setTag("maxaxleload", "2.4");
parser.handleWayTags(edgeId, edgeIntAccess, readerWay, relFlags);
assertEquals(2.5, malEnc.getDecimal(false, edgeId, edgeIntAccess), .01);
} |
public FEELFnResult<Object> invoke(@ParameterName("list") List list) {
if ( list == null || list.isEmpty() ) {
return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "cannot be null or empty"));
} else {
try {
return FEELFnResult.ofResult(Collections.min(list, new InterceptNotComparableComparator()));
} catch (ClassCastException e) {
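// non-comparable items surface as a ClassCastException from the comparator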
return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "contains items that are not comparable"));
}
}
} | @Test
void invokeEmptyList() {
FunctionTestUtil.assertResultError(minFunction.invoke(Collections.emptyList()), InvalidParametersEvent.class);
} |
public CoercedExpressionResult coerce() {
final Class<?> leftClass = left.getRawClass();
final Class<?> nonPrimitiveLeftClass = toNonPrimitiveType(leftClass);
final Class<?> rightClass = right.getRawClass();
final Class<?> nonPrimitiveRightClass = toNonPrimitiveType(rightClass);
boolean sameClass = leftClass == rightClass;
boolean isUnificationExpression = left instanceof UnificationTypedExpression || right instanceof UnificationTypedExpression;
if (sameClass || isUnificationExpression) {
return new CoercedExpressionResult(left, right);
}
if (!canCoerce()) {
throw new CoercedExpressionException(new InvalidExpressionErrorResult("Comparison operation requires compatible types. Found " + leftClass + " and " + rightClass));
}
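// widen an int/long left operand to double when the right operand is a double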
if ((nonPrimitiveLeftClass == Integer.class || nonPrimitiveLeftClass == Long.class) && nonPrimitiveRightClass == Double.class) {
CastExpr castExpression = new CastExpr(PrimitiveType.doubleType(), this.left.getExpression());
return new CoercedExpressionResult(
new TypedExpression(castExpression, double.class, left.getType()),
right,
false);
}
final boolean leftIsPrimitive = leftClass.isPrimitive() || Number.class.isAssignableFrom( leftClass );
final boolean canCoerceLiteralNumberExpr = canCoerceLiteralNumberExpr(leftClass);
boolean rightAsStaticField = false;
final Expression rightExpression = right.getExpression();
final TypedExpression coercedRight;
if (leftIsPrimitive && canCoerceLiteralNumberExpr && rightExpression instanceof LiteralStringValueExpr) {
final Expression coercedLiteralNumberExprToType = coerceLiteralNumberExprToType((LiteralStringValueExpr) right.getExpression(), leftClass);
coercedRight = right.cloneWithNewExpression(coercedLiteralNumberExprToType);
coercedRight.setType( leftClass );
} else if (shouldCoerceBToString(left, right)) {
coercedRight = coerceToString(right);
} else if (isNotBinaryExpression(right) && canBeNarrowed(leftClass, rightClass) && right.isNumberLiteral()) {
coercedRight = castToClass(leftClass);
} else if (leftClass == long.class && rightClass == int.class) {
coercedRight = right.cloneWithNewExpression(new CastExpr(PrimitiveType.longType(), right.getExpression()));
} else if (leftClass == Date.class && rightClass == String.class) {
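// string literals coerced to date/time values are flagged for extraction as static fields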
coercedRight = coerceToDate(right);
rightAsStaticField = true;
} else if (leftClass == LocalDate.class && rightClass == String.class) {
coercedRight = coerceToLocalDate(right);
rightAsStaticField = true;
} else if (leftClass == LocalDateTime.class && rightClass == String.class) {
coercedRight = coerceToLocalDateTime(right);
rightAsStaticField = true;
} else if (shouldCoerceBToMap()) {
coercedRight = castToClass(toNonPrimitiveType(leftClass));
} else if (isBoolean(leftClass) && !isBoolean(rightClass)) {
coercedRight = coerceBoolean(right);
} else {
coercedRight = right;
}
final TypedExpression coercedLeft;
if (nonPrimitiveLeftClass == Character.class && shouldCoerceBToString(right, left)) {
coercedLeft = coerceToString(left);
} else {
coercedLeft = left;
}
return new CoercedExpressionResult(coercedLeft, coercedRight, rightAsStaticField);
} | @Test
public void doNotCastNumberLiteralDouble() {
final TypedExpression left = expr("getValue()", java.lang.Object.class);
final TypedExpression right = expr("20", double.class);
final CoercedExpression.CoercedExpressionResult coerce = new CoercedExpression(left, right, false).coerce();
assertThat(coerce.getCoercedRight()).isEqualTo(expr("20", double.class));
} |
public static URL getResourceUrl(String resource) throws IOException {
if (resource.startsWith(CLASSPATH_PREFIX)) {
String path = resource.substring(CLASSPATH_PREFIX.length());
ClassLoader classLoader = ResourceUtils.class.getClassLoader();
URL url = (classLoader != null ? classLoader.getResource(path) : ClassLoader.getSystemResource(path));
if (url == null) {
throw new FileNotFoundException("Resource [" + resource + "] does not exist");
}
return url;
}
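// not a classpath resource: try it as a URL first, then fall back to a file path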
try {
return new URL(resource);
} catch (MalformedURLException ex) {
return new File(resource).toURI().toURL();
}
} | @Test
void testGetResourceUrlFromLoader() throws IOException {
URL url = ResourceUtils.getResourceUrl(this.getClass().getClassLoader(), "test-tls-cert.pem");
assertNotNull(url);
} |
public void assignStates() {
checkStateMappingCompleteness(allowNonRestoredState, operatorStates, tasks);
Map<OperatorID, OperatorState> localOperators = new HashMap<>(operatorStates);
// find the states of all operators belonging to this task and compute additional
// information in first pass
for (ExecutionJobVertex executionJobVertex : tasks) {
List<OperatorIDPair> operatorIDPairs = executionJobVertex.getOperatorIDs();
Map<OperatorID, OperatorState> operatorStates =
CollectionUtil.newHashMapWithExpectedSize(operatorIDPairs.size());
for (OperatorIDPair operatorIDPair : operatorIDPairs) {
OperatorID operatorID =
operatorIDPair
.getUserDefinedOperatorID()
.filter(localOperators::containsKey)
.orElse(operatorIDPair.getGeneratedOperatorID());
OperatorState operatorState = localOperators.remove(operatorID);
if (operatorState == null) {
operatorState =
new OperatorState(
operatorID,
executionJobVertex.getParallelism(),
executionJobVertex.getMaxParallelism());
}
operatorStates.put(operatorIDPair.getGeneratedOperatorID(), operatorState);
}
final TaskStateAssignment stateAssignment =
new TaskStateAssignment(
executionJobVertex,
operatorStates,
consumerAssignment,
vertexAssignments);
vertexAssignments.put(executionJobVertex, stateAssignment);
for (final IntermediateResult producedDataSet : executionJobVertex.getInputs()) {
consumerAssignment.put(producedDataSet.getId(), stateAssignment);
}
}
// repartition state
for (TaskStateAssignment stateAssignment : vertexAssignments.values()) {
if (stateAssignment.hasNonFinishedState
// FLINK-31963: We need to run repartitioning for stateless operators that have
// upstream output or downstream input states.
|| stateAssignment.hasUpstreamOutputStates()
|| stateAssignment.hasDownstreamInputStates()) {
assignAttemptState(stateAssignment);
}
}
// actually assign the state
for (TaskStateAssignment stateAssignment : vertexAssignments.values()) {
// If upstream has output states or downstream has input states, even the empty task
// state should be assigned for the current task in order to notify this task that the
// old states will send to it which likely should be filtered.
if (stateAssignment.hasNonFinishedState
|| stateAssignment.isFullyFinished
|| stateAssignment.hasUpstreamOutputStates()
|| stateAssignment.hasDownstreamInputStates()) {
assignTaskStateToExecutionJobVertices(stateAssignment);
}
}
} | @Test
void testChannelStateAssignmentNoRescale() throws JobException, JobExecutionException {
List<OperatorID> operatorIds = buildOperatorIds(2);
Map<OperatorID, OperatorState> states = buildOperatorStates(operatorIds, 2);
Map<OperatorID, ExecutionJobVertex> vertices =
buildVertices(operatorIds, 2, RANGE, ROUND_ROBIN);
new StateAssignmentOperation(0, new HashSet<>(vertices.values()), states, false)
.assignStates();
for (OperatorID operatorId : operatorIds) {
// input is range partitioned, so there is an overlap
assertState(
vertices, operatorId, states, 0, OperatorSubtaskState::getInputChannelState, 0);
assertState(
vertices, operatorId, states, 1, OperatorSubtaskState::getInputChannelState, 1);
// output is round robin redistributed
assertState(
vertices,
operatorId,
states,
0,
OperatorSubtaskState::getResultSubpartitionState,
0);
assertState(
vertices,
operatorId,
states,
1,
OperatorSubtaskState::getResultSubpartitionState,
1);
}
assertThat(
getAssignedState(vertices.get(operatorIds.get(0)), operatorIds.get(0), 0)
.getOutputRescalingDescriptor())
.isEqualTo(InflightDataRescalingDescriptor.NO_RESCALE);
assertThat(
getAssignedState(vertices.get(operatorIds.get(0)), operatorIds.get(0), 1)
.getOutputRescalingDescriptor())
.isEqualTo(InflightDataRescalingDescriptor.NO_RESCALE);
assertThat(
getAssignedState(vertices.get(operatorIds.get(1)), operatorIds.get(1), 0)
.getInputRescalingDescriptor())
.isEqualTo(InflightDataRescalingDescriptor.NO_RESCALE);
assertThat(
getAssignedState(vertices.get(operatorIds.get(1)), operatorIds.get(1), 1)
.getInputRescalingDescriptor())
.isEqualTo(InflightDataRescalingDescriptor.NO_RESCALE);
} |
@Override
public Checksum compute(final InputStream in, final TransferStatus status) throws BackgroundException {
return new Checksum(HashAlgorithm.sha512, this.digest("SHA-512",
this.normalize(in, status), status));
} | @Test
public void testCompute() throws Exception {
assertEquals("cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e",
new SHA512ChecksumCompute().compute(new NullInputStream(0), new TransferStatus()).hash);
} |
public static ResourceModel processResource(final Class<?> resourceClass)
{
return processResource(resourceClass, null);
} | @Test(expectedExceptions = ResourceConfigException.class)
public void failsOnSimpleResourceWithCollectionLevelAction() {
@RestLiSimpleResource(name = "simpleResourceWithUnsupportedMethod")
class LocalClass extends SimpleResourceTemplate<EmptyRecord>
{
@Action(name = "badAction", resourceLevel = ResourceLevel.COLLECTION)
public void badAction(@ActionParam("someId") String someId) {
}
}
RestLiAnnotationReader.processResource(LocalClass.class);
Assert.fail("#addActionResourceMethod should fail throwing a ResourceConfigException");
} |
public boolean matches(Evidence evidence) {
return sourceMatches(evidence)
&& confidenceMatches(evidence)
&& name.equalsIgnoreCase(evidence.getName())
&& valueMatches(evidence);
} | @Test
public void testWildcardSourceMatching() throws Exception {
final EvidenceMatcher wildcardSourceMatcher = new EvidenceMatcher(null, "name", "value", false, Confidence.MEDIUM);
assertFalse("wildcard source matcher should not match EVIDENCE_HIGHEST", wildcardSourceMatcher.matches(EVIDENCE_HIGHEST));
assertFalse("wildcard source matcher should not match EVIDENCE_HIGH", wildcardSourceMatcher.matches(EVIDENCE_HIGH));
assertTrue("wildcard source matcher should match EVIDENCE_MEDIUM", wildcardSourceMatcher.matches(EVIDENCE_MEDIUM));
assertTrue("wildcard source matcher should match EVIDENCE_MEDIUM_SECOND_SOURCE", wildcardSourceMatcher.matches(EVIDENCE_MEDIUM_SECOND_SOURCE));
assertFalse("wildcard source matcher should not match EVIDENCE_LOW", wildcardSourceMatcher.matches(EVIDENCE_LOW));
} |
@SuppressWarnings("MethodLength")
static void dissectControlRequest(
final ArchiveEventCode eventCode,
final MutableDirectBuffer buffer,
final int offset,
final StringBuilder builder)
{
int encodedLength = dissectLogHeader(CONTEXT, eventCode, buffer, offset, builder);
HEADER_DECODER.wrap(buffer, offset + encodedLength);
encodedLength += MessageHeaderDecoder.ENCODED_LENGTH;
switch (eventCode)
{
case CMD_IN_CONNECT:
CONNECT_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendConnect(builder);
break;
case CMD_IN_CLOSE_SESSION:
CLOSE_SESSION_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendCloseSession(builder);
break;
case CMD_IN_START_RECORDING:
START_RECORDING_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendStartRecording(builder);
break;
case CMD_IN_STOP_RECORDING:
STOP_RECORDING_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendStopRecording(builder);
break;
case CMD_IN_REPLAY:
REPLAY_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendReplay(builder);
break;
case CMD_IN_STOP_REPLAY:
STOP_REPLAY_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendStopReplay(builder);
break;
case CMD_IN_LIST_RECORDINGS:
LIST_RECORDINGS_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendListRecordings(builder);
break;
case CMD_IN_LIST_RECORDINGS_FOR_URI:
LIST_RECORDINGS_FOR_URI_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendListRecordingsForUri(builder);
break;
case CMD_IN_LIST_RECORDING:
LIST_RECORDING_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendListRecording(builder);
break;
case CMD_IN_EXTEND_RECORDING:
EXTEND_RECORDING_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendExtendRecording(builder);
break;
case CMD_IN_RECORDING_POSITION:
RECORDING_POSITION_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendRecordingPosition(builder);
break;
case CMD_IN_TRUNCATE_RECORDING:
TRUNCATE_RECORDING_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendTruncateRecording(builder);
break;
case CMD_IN_STOP_RECORDING_SUBSCRIPTION:
STOP_RECORDING_SUBSCRIPTION_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendStopRecordingSubscription(builder);
break;
case CMD_IN_STOP_POSITION:
STOP_POSITION_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendStopPosition(builder);
break;
case CMD_IN_FIND_LAST_MATCHING_RECORD:
FIND_LAST_MATCHING_RECORDING_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendFindLastMatchingRecord(builder);
break;
case CMD_IN_LIST_RECORDING_SUBSCRIPTIONS:
LIST_RECORDING_SUBSCRIPTIONS_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendListRecordingSubscriptions(builder);
break;
case CMD_IN_START_BOUNDED_REPLAY:
BOUNDED_REPLAY_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendStartBoundedReplay(builder);
break;
case CMD_IN_STOP_ALL_REPLAYS:
STOP_ALL_REPLAYS_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendStopAllReplays(builder);
break;
case CMD_IN_REPLICATE:
REPLICATE_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendReplicate(builder);
break;
case CMD_IN_STOP_REPLICATION:
STOP_REPLICATION_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendStopReplication(builder);
break;
case CMD_IN_START_POSITION:
START_POSITION_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendStartPosition(builder);
break;
case CMD_IN_DETACH_SEGMENTS:
DETACH_SEGMENTS_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendDetachSegments(builder);
break;
case CMD_IN_DELETE_DETACHED_SEGMENTS:
DELETE_DETACHED_SEGMENTS_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendDeleteDetachedSegments(builder);
break;
case CMD_IN_PURGE_SEGMENTS:
PURGE_SEGMENTS_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendPurgeSegments(builder);
break;
case CMD_IN_ATTACH_SEGMENTS:
ATTACH_SEGMENTS_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendAttachSegments(builder);
break;
case CMD_IN_MIGRATE_SEGMENTS:
MIGRATE_SEGMENTS_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendMigrateSegments(builder);
break;
case CMD_IN_AUTH_CONNECT:
AUTH_CONNECT_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendAuthConnect(builder);
break;
case CMD_IN_KEEP_ALIVE:
KEEP_ALIVE_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendKeepAlive(builder);
break;
case CMD_IN_TAGGED_REPLICATE:
TAGGED_REPLICATE_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendTaggedReplicate(builder);
break;
case CMD_IN_START_RECORDING2:
START_RECORDING_REQUEST2_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendStartRecording2(builder);
break;
case CMD_IN_EXTEND_RECORDING2:
EXTEND_RECORDING_REQUEST2_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendExtendRecording2(builder);
break;
case CMD_IN_STOP_RECORDING_BY_IDENTITY:
STOP_RECORDING_BY_IDENTITY_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendStopRecordingByIdentity(builder);
break;
case CMD_IN_PURGE_RECORDING:
PURGE_RECORDING_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendPurgeRecording(builder);
break;
case CMD_IN_REPLICATE2:
REPLICATE_REQUEST2_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendReplicate2(builder);
break;
case CMD_IN_REQUEST_REPLAY_TOKEN:
REPLAY_TOKEN_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendReplayToken(builder);
break;
default:
builder.append(": unknown command");
}
} | @Test
void controlRequestFindLastMatchingRecording()
{
internalEncodeLogHeader(buffer, 0, 90, 90, () -> 10_325_000_000L);
final FindLastMatchingRecordingRequestEncoder requestEncoder = new FindLastMatchingRecordingRequestEncoder();
requestEncoder.wrapAndApplyHeader(buffer, LOG_HEADER_LENGTH, headerEncoder)
.controlSessionId(1)
.correlationId(2)
.minRecordingId(3)
.sessionId(4)
.streamId(5)
.channel("this is a channel");
dissectControlRequest(CMD_IN_FIND_LAST_MATCHING_RECORD, buffer, 0, builder);
assertEquals("[10.325000000] " + CONTEXT + ": " + CMD_IN_FIND_LAST_MATCHING_RECORD.name() + " [90/90]:" +
" controlSessionId=1" +
" correlationId=2" +
" minRecordingId=3" +
" sessionId=4" +
" streamId=5" +
" channel=this is a channel",
builder.toString());
} |
@Override
public void onMetadataUpdate(
MetadataDelta delta,
MetadataImage newImage,
LoaderManifest manifest
) {
boolean checkBrokerRegistration = false;
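// a registration refresh may be needed when the metadata version changes or this broker's record changes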
if (delta.featuresDelta() != null) {
if (delta.metadataVersionChanged().isPresent()) {
if (log.isTraceEnabled()) {
log.trace("Metadata version change is present: {}",
delta.metadataVersionChanged());
}
checkBrokerRegistration = true;
}
}
if (delta.clusterDelta() != null) {
if (delta.clusterDelta().changedBrokers().get(id) != null) {
if (log.isTraceEnabled()) {
log.trace("Broker change is present: {}",
delta.clusterDelta().changedBrokers().get(id));
}
checkBrokerRegistration = true;
}
}
if (checkBrokerRegistration) {
if (brokerRegistrationNeedsRefresh(newImage.features().metadataVersion(),
delta.clusterDelta().broker(id))) {
refreshRegistrationCallback.run();
}
}
} | @Test
public void testBrokerUpdateWithoutNewMvDoesNothing() {
BrokerRegistrationTrackerTestContext ctx = new BrokerRegistrationTrackerTestContext();
MetadataDelta delta = ctx.newDelta();
delta.replay(new RegisterBrokerRecord().
setBrokerId(1).
setIncarnationId(INCARNATION_ID).
setLogDirs(Arrays.asList(A, B, C)));
ctx.onMetadataUpdate(delta);
assertEquals(0, ctx.numCalls.get());
} |
public ProducerBuilderImpl(PulsarClientImpl client, Schema<T> schema) {
this(client, new ProducerConfigurationData(), schema);
} | @Test
public void testProducerBuilderImpl() throws PulsarClientException {
Map<String, String> properties = new HashMap<>();
properties.put("Test-Key2", "Test-Value2");
producerBuilderImpl = new ProducerBuilderImpl<>(client, Schema.BYTES);
Producer<?> producer = producerBuilderImpl.topic(TOPIC_NAME)
.producerName("Test-Producer")
.maxPendingMessages(2)
.addEncryptionKey("Test-EncryptionKey")
.property("Test-Key", "Test-Value")
.properties(properties)
.create();
assertNotNull(producer);
} |
@Override
public void run() {
try { // make sure we call afterRun() even on crashes
// and count down latches, else we may hang the parallel runner
if (steps == null) {
beforeRun();
}
if (skipped) {
return;
}
int count = steps.size();
int index = 0;
while ((index = nextStepIndex()) < count) {
currentStep = steps.get(index);
execute(currentStep);
if (currentStepResult != null) { // can be null if debug step-back or hook skip
result.addStepResult(currentStepResult);
}
}
} catch (Exception e) {
if (currentStepResult != null) {
result.addStepResult(currentStepResult);
}
logError("scenario [run] failed\n" + StringUtils.throwableToString(e));
currentStepResult = result.addFakeStepResult("scenario [run] failed", e);
} finally {
if (!skipped) {
afterRun();
if (isFailed() && engine.getConfig().isAbortSuiteOnFailure()) {
featureRuntime.suite.abort();
}
}
if (caller.isNone()) {
logAppender.close(); // reclaim memory
}
}
} | @Test
void testStepLog() {
run(
"print 'hello world'"
);
List<StepResult> results = sr.result.getStepResults();
assertEquals(1, results.size());
String log = results.get(0).getStepLog();
assertTrue(log.contains("[print] hello world"));
} |
@Udf
public <T> List<T> remove(
@UdfParameter(description = "Array of values") final List<T> array,
@UdfParameter(description = "Value to remove") final T victim) {
if (array == null) {
return null;
}
return array.stream()
.filter(el -> !Objects.equals(el, victim))
.collect(Collectors.toList());
} | @Test
public void shouldReturnAllElementsIfNoNull() {
final List<String> input1 = Arrays.asList("foo");
final String input2 = null;
final List<String> result = udf.remove(input1, input2);
assertThat(result, contains("foo"));
} |
public SubsetItem getClientsSubset(String serviceName,
int minClusterSubsetSize,
int partitionId,
Map<URI, Double> possibleUris,
long version,
SimpleLoadBalancerState state)
{
SubsettingStrategy<URI> subsettingStrategy = _subsettingStrategyFactory.get(serviceName, minClusterSubsetSize, partitionId);
if (subsettingStrategy == null)
{
return new SubsetItem(false, false, possibleUris, Collections.emptySet());
}
DeterministicSubsettingMetadata metadata = _subsettingMetadataProvider.getSubsettingMetadata(state);
if (metadata == null)
{
return new SubsetItem(false, false, possibleUris, Collections.emptySet());
}
synchronized (_lockMap.computeIfAbsent(serviceName, name -> new Object()))
{
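// per-service lock: subset updates for different services proceed independently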
SubsetCache subsetCache = _subsetCache.get(serviceName);
if (isCacheValid(version, metadata.getPeerClusterVersion(), minClusterSubsetSize, subsetCache))
{
if (subsetCache.getWeightedSubsets().containsKey(partitionId))
{
return new SubsetItem(true, false, subsetCache.getWeightedSubsets().get(partitionId), Collections.emptySet());
}
}
Map<URI, Double> subsetMap = subsettingStrategy.getWeightedSubset(possibleUris, metadata);
if (subsetMap == null)
{
return new SubsetItem(false, false, possibleUris, Collections.emptySet());
}
else
{
LOG.debug("Force updating subset cache for service " + serviceName);
Set<URI> doNotSlowStartUris = new HashSet<>();
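// hosts already known from the previous subset should not go through slow start again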
if (subsetCache != null)
{
Set<URI> oldPossibleUris = subsetCache.getPossibleUris().getOrDefault(partitionId, Collections.emptySet());
for (URI uri : subsetMap.keySet())
{
if (oldPossibleUris.contains(uri))
{
doNotSlowStartUris.add(uri);
}
}
subsetCache.setVersion(version);
subsetCache.setPeerClusterVersion(metadata.getPeerClusterVersion());
subsetCache.setMinClusterSubsetSize(minClusterSubsetSize);
subsetCache.getPossibleUris().put(partitionId, possibleUris.keySet());
subsetCache.getWeightedSubsets().put(partitionId, subsetMap);
}
else
{
LOG.info("Cluster subsetting enabled for service: " + serviceName);
Map<Integer, Set<URI>> servicePossibleUris = new HashMap<>();
Map<Integer, Map<URI, Double>> serviceWeightedSubset = new HashMap<>();
servicePossibleUris.put(partitionId, possibleUris.keySet());
serviceWeightedSubset.put(partitionId, subsetMap);
subsetCache = new SubsetCache(version, metadata.getPeerClusterVersion(),
minClusterSubsetSize, servicePossibleUris, serviceWeightedSubset);
_subsetCache.put(serviceName, subsetCache);
}
LOG.debug("Subset cache updated for service " + serviceName + ": " + subsetCache);
return new SubsetItem(true, true, subsetMap, doNotSlowStartUris);
}
}
} | @Test
public void testSingleThreadCase()
{
Mockito.when(_subsettingMetadataProvider.getSubsettingMetadata(_state))
.thenReturn(new DeterministicSubsettingMetadata(0, 5, 0));
SubsettingState.SubsetItem subsetItem = _subsettingState.getClientsSubset(SERVICE_NAME, 4, 0,
createUris(30), 0, _state);
assertEquals(subsetItem.getWeightedUriSubset().size(), 6);
assertTrue(subsetItem.shouldForceUpdate());
Mockito.when(_subsettingMetadataProvider.getSubsettingMetadata(_state))
.thenReturn(new DeterministicSubsettingMetadata(0, 4, 1));
SubsettingState.SubsetItem subsetItem1 = _subsettingState.getClientsSubset(SERVICE_NAME, 4, 0,
createUris(30), 0, _state);
assertEquals(subsetItem1.getWeightedUriSubset().size(), 8);
assertTrue(subsetItem1.shouldForceUpdate());
SubsettingState.SubsetItem subsetItem2 = _subsettingState.getClientsSubset(SERVICE_NAME, 4, 0,
createUris(28), 2, _state);
assertEquals(subsetItem2.getWeightedUriSubset().size(), 7);
assertTrue(subsetItem2.shouldForceUpdate());
SubsettingState.SubsetItem subsetItem3 = _subsettingState.getClientsSubset(SERVICE_NAME, 8, 0,
createUris(28), 2, _state);
assertEquals(subsetItem3.getWeightedUriSubset().size(), 14);
assertTrue(subsetItem3.shouldForceUpdate());
SubsettingState.SubsetItem subsetItem4 = _subsettingState.getClientsSubset(SERVICE_NAME, 8, 0,
createUris(28), 2, _state);
assertEquals(subsetItem4.getWeightedUriSubset().size(), 14);
assertFalse(subsetItem4.shouldForceUpdate());
} |
@Operation(summary = "queryProcessInstanceListByTrigger", description = "QUERY_PROCESS_INSTANCE_BY_TRIGGER_NOTES")
@Parameters({
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true, schema = @Schema(implementation = Long.class)),
@Parameter(name = "triggerCode", description = "TRIGGER_CODE", required = true, schema = @Schema(implementation = Long.class))
})
@GetMapping("/trigger")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR)
public Result queryProcessInstancesByTriggerCode(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@PathVariable long projectCode,
@RequestParam(value = "triggerCode") Long triggerCode) {
Map<String, Object> result = processInstanceService.queryByTriggerCode(loginUser, projectCode, triggerCode);
return returnDataList(result);
} | @Test
public void queryProcessInstancesByTriggerCode() throws Exception {
Map<String, Object> mockResult = new HashMap<>();
mockResult.put(Constants.STATUS, Status.SUCCESS);
Mockito.when(processInstanceService
.queryByTriggerCode(Mockito.any(), Mockito.anyLong(), Mockito.anyLong()))
.thenReturn(mockResult);
MvcResult mvcResult = mockMvc.perform(get("/projects/1113/process-instances/trigger")
.header("sessionId", sessionId)
.param("triggerCode", "12051206"))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assertions.assertNotNull(result);
Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
} |
@CanIgnoreReturnValue
public final Ordered containsAtLeast(
@Nullable Object k0, @Nullable Object v0, @Nullable Object... rest) {
return containsAtLeastEntriesIn(accumulateMap("containsAtLeast", k0, v0, rest));
} | @Test
public void containsAtLeastExtraKeyAndMissingKey_failsWithSameToStringForKeys() {
expectFailureWhenTestingThat(ImmutableMap.of(1L, "jan", 2, "feb"))
.containsAtLeast(1, "jan", 2, "feb");
assertFailureKeys(
"missing keys",
"for key",
"expected value",
"---",
"expected to contain at least",
"but was");
assertFailureValue("for key", "1 (java.lang.Integer)");
assertFailureValue("expected value", "jan");
} |
@Override
public List<Class<? extends ComputationStep>> orderedStepClasses() {
return STEPS_CLASSES;
} | @Test
public void count_step_classes() {
assertThat(copyOf(underTest.orderedStepClasses())).hasSize(19);
} |
public Map<Integer, List<ProducerBatch>> drain(MetadataSnapshot metadataSnapshot, Set<Node> nodes, int maxSize, long now) {
if (nodes.isEmpty())
return Collections.emptyMap();
Map<Integer, List<ProducerBatch>> batches = new HashMap<>();
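// Collect the drainable batches for each ready node, keyed by node id.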
for (Node node : nodes) {
List<ProducerBatch> ready = drainBatchesForOneNode(metadataSnapshot, node, maxSize, now);
batches.put(node.id(), ready);
}
return batches;
} | @Test
public void testDrainWithANodeThatDoesntHostAnyPartitions() {
int batchSize = 10;
int lingerMs = 10;
long totalSize = 10 * 1024;
RecordAccumulator accum = createTestRecordAccumulator(batchSize, totalSize, Compression.NONE, lingerMs);
// Create cluster metadata; node2 doesn't host any partitions.
PartitionMetadata part1Metadata = new PartitionMetadata(Errors.NONE, tp1, Optional.of(node1.id()), Optional.empty(), null, null, null);
MetadataSnapshot metadataCache = new MetadataSnapshot(null, nodes, Collections.singletonList(part1Metadata), Collections.emptySet(), Collections.emptySet(), Collections.emptySet(), null, Collections.emptyMap());
// Drain for node2; it should return 0 batches.
Map<Integer, List<ProducerBatch>> batches = accum.drain(metadataCache,
new HashSet<>(Collections.singletonList(node2)), 999999 /* maxSize */, time.milliseconds());
assertTrue(batches.get(node2.id()).isEmpty());
} |
@VisibleForTesting
HiveConf hiveConf() {
return hiveConf;
} | @Test
public void testConf() {
HiveConf conf = createHiveConf();
conf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, "file:/mywarehouse/");
HiveClientPool clientPool = new HiveClientPool(10, conf);
HiveConf clientConf = clientPool.hiveConf();
assertThat(clientConf.get(HiveConf.ConfVars.METASTOREWAREHOUSE.varname))
.isEqualTo(conf.get(HiveConf.ConfVars.METASTOREWAREHOUSE.varname));
assertThat(clientPool.poolSize()).isEqualTo(10);
// 'hive.metastore.sasl.enabled' should be 'true' as defined in xml
assertThat(clientConf.get(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL.varname))
.isEqualTo(conf.get(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL.varname));
assertThat(clientConf.getBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL)).isTrue();
} |
public static Event[] fromJson(final String json) throws IOException {
return fromJson(json, BasicEventFactory.INSTANCE);
} | @Test(expected=ClassCastException.class)
public void testFromJsonWithInvalidJsonArray1() throws Exception {
Event.fromJson("[1,2]");
} |
protected SuppressionRules rules() {
return rules;
} | @Test
public void updateAnnotationRule() {
final String key1 = "key1", key2 = "key2";
final String value1 = "value1", value2 = "value2";
Map<String, String> annotation = new HashMap<>();
annotation.put(key1, value1);
cfg.annotation(annotation);
configEvent(NetworkConfigEvent.Type.CONFIG_ADDED);
assertAfter(EVENT_MS, () -> {
assertTrue(provider.rules().getSuppressedAnnotation().containsKey(key1));
assertEquals(value1, provider.rules().getSuppressedAnnotation().get(key1));
assertFalse(provider.rules().getSuppressedAnnotation().containsKey(key2));
});
annotation.put(key2, value2);
cfg.annotation(annotation);
configEvent(NetworkConfigEvent.Type.CONFIG_UPDATED);
assertAfter(EVENT_MS, () -> {
assertTrue(provider.rules().getSuppressedAnnotation().containsKey(key1));
assertEquals(value1, provider.rules().getSuppressedAnnotation().get(key1));
assertTrue(provider.rules().getSuppressedAnnotation().containsKey(key2));
assertEquals(value2, provider.rules().getSuppressedAnnotation().get(key2));
});
} |
@Override
public boolean canReadView(ViewLike view) {
final String viewId = view.id();
// If a resolved view id is provided, delegate the permissions check to the resolver.
final ViewResolverDecoder decoder = new ViewResolverDecoder(viewId);
if (decoder.isResolverViewId()) {
final ViewResolver viewResolver = viewResolvers.get(decoder.getResolverName());
if (viewResolver != null) {
return viewResolver.canReadView(viewId, isPermitted, isPermittedEntity);
} else {
// Resolved view could not be found, so permissions cannot be checked.
LOG.error("View resolver [{}] could not be found.", decoder.getResolverName());
return false;
}
}
// Proceed to standard views permission check.
return isPermitted(ViewsRestPermissions.VIEW_READ, viewId)
|| (view.type().equals(ViewDTO.Type.DASHBOARD) && isPermitted(RestPermissions.DASHBOARDS_READ, viewId));
} | @Test
void testResolvedViewReadAccess() {
// Test that the resolver permission check allows and disallows appropriately.
assertThat(searchUserResolvedRequiringPermission("missing-permission")
.canReadView(new TestView("resolver" + ViewResolverDecoder.SEPARATOR + "resolved-id"))).isFalse();
assertThat(searchUserResolvedRequiringPermission("allowed-permission")
.canReadView(new TestView("resolver" + ViewResolverDecoder.SEPARATOR + "resolved-id"))).isTrue();
// Test that the resolver permission and entity id check allows and disallows appropriately.
assertThat(searchUserResolvedRequiringPermissionEntity("bad-permission", "resolved-id")
.canReadView(new TestView("resolver" + ViewResolverDecoder.SEPARATOR + "resolved-id"))).isFalse();
assertThat(searchUserResolvedRequiringPermissionEntity("allowed-permission", "bad-id")
.canReadView(new TestView("resolver" + ViewResolverDecoder.SEPARATOR + "resolved-id"))).isFalse();
assertThat(searchUserResolvedRequiringPermissionEntity("missing-permission", "bad-id")
.canReadView(new TestView("resolver" + ViewResolverDecoder.SEPARATOR + "resolved-id"))).isFalse();
assertThat(searchUserResolvedRequiringPermissionEntity("allowed-permission", "resolved-id")
.canReadView(new TestView("resolver" + ViewResolverDecoder.SEPARATOR + "resolved-id"))).isTrue();
} |
@Override
public void write(int b) throws IOException {
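// When the current buffer reaches the configured part size, roll over to a new buffer and upload the completed parts.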
if (stream.getCount() >= multiPartSize) {
newStream();
uploadParts();
}
stream.write(b);
pos += 1;
writeBytes.increment();
writeOperations.increment();
// switch to multipart upload
if (multipartUploadId == null && pos >= multiPartThresholdSize) {
initializeMultiPartUpload();
uploadParts();
}
} | @Test
public void testWrite() {
writeTest();
} |
@Override
public Future<Void> closeAsync() {
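// Perform the close on the pool's event loop; if called from another thread, schedule it there and bridge the result through a promise.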
if (executor.inEventLoop()) {
return close0();
} else {
final Promise<Void> closeComplete = executor.newPromise();
executor.execute(new Runnable() {
@Override
public void run() {
close0().addListener(new FutureListener<Void>() {
@Override
public void operationComplete(Future<Void> f) throws Exception {
if (f.isSuccess()) {
closeComplete.setSuccess(null);
} else {
closeComplete.setFailure(f.cause());
}
}
});
}
});
return closeComplete;
}
} | @Test
public void testCloseAsync() throws ExecutionException, InterruptedException {
LocalAddress addr = new LocalAddress(getLocalAddrId());
Bootstrap cb = new Bootstrap();
cb.remoteAddress(addr);
cb.group(group).channel(LocalChannel.class);
ServerBootstrap sb = new ServerBootstrap();
sb.group(group)
.channel(LocalServerChannel.class)
.childHandler(new ChannelInitializer<LocalChannel>() {
@Override
public void initChannel(LocalChannel ch) throws Exception {
ch.pipeline().addLast(new ChannelInboundHandlerAdapter());
}
});
// Start server
final Channel sc = sb.bind(addr).syncUninterruptibly().channel();
final FixedChannelPool pool = new FixedChannelPool(cb, new TestChannelPoolHandler(), 2);
pool.acquire().get();
pool.acquire().get();
final ChannelPromise closePromise = sc.newPromise();
pool.closeAsync().addListener(new GenericFutureListener<Future<? super Void>>() {
@Override
public void operationComplete(Future<? super Void> future) throws Exception {
assertEquals(0, pool.acquiredChannelCount());
sc.close(closePromise).syncUninterruptibly();
}
}).awaitUninterruptibly();
closePromise.awaitUninterruptibly();
} |
@Override
public int size() {
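// The head node of the put stack records its own length; add the separately tracked take-stack size.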
Node h = putStack.get();
int putStackSize = h == null ? 0 : h.size;
return putStackSize + takeStackSize.get();
} | @Test
public void size_whenEmpty() {
assertEquals(0, queue.size());
} |
public static AscendingLongIterator or(AscendingLongIterator[] iterators) {
return new OrIterator(iterators);
} | @Test
public void testOr() {
long seed = System.nanoTime();
System.out.println(getClass().getSimpleName() + ".testOr seed: " + seed);
actual.add(new SparseBitSet());
expected.add(new TreeSet<>());
verifyOr();
actual.clear();
expected.clear();
generate(0, 75000, 1);
verifyOr();
generate(100, 40000, 2);
verifyOr();
generate(200, 30000, 3);
verifyOr();
actual.add(new SparseBitSet());
expected.add(new TreeSet<>());
verifyOr();
actual.remove(actual.size() - 1);
expected.remove(expected.size() - 1);
generate(2000000, 30000, 3);
verifyOr();
actual.remove(actual.size() - 1);
expected.remove(expected.size() - 1);
generateRandom(seed, 10000, 50000);
verifyOr();
generateRandom(seed, 20000, -1);
verifyOr();
actual.clear();
expected.clear();
actual.add(new SparseBitSet());
expected.add(new TreeSet<>());
verifyOr();
generateRandom(seed, 40000, 100000);
verifyOr();
generateRandom(seed, 40000, 100000);
verifyOr();
generateRandom(seed, 500000, -1);
verifyOr();
} |
public static ViewMetadata fromJson(String metadataLocation, String json) {
return JsonUtil.parse(json, node -> ViewMetadataParser.fromJson(metadataLocation, node));
} | @Test
public void replaceViewMetadataWithMultipleSQLsForDialect() throws Exception {
String json =
readViewMetadataInputFile(
"org/apache/iceberg/view/ViewMetadataMultipleSQLsForDialect.json");
// reading view metadata with multiple SQLs for the same dialect shouldn't fail
ViewMetadata invalid = ViewMetadataParser.fromJson(json);
// replace metadata with a new view version that fixes the SQL representations
ViewVersion viewVersion =
ImmutableViewVersion.builder()
.versionId(2)
.schemaId(0)
.timestampMillis(5555L)
.summary(ImmutableMap.of("user", "some-user"))
.defaultCatalog("some-catalog")
.defaultNamespace(Namespace.empty())
.addRepresentations(
ImmutableSQLViewRepresentation.builder()
.sql("select * from foo")
.dialect("spark-sql")
.build())
.build();
ViewMetadata replaced =
ViewMetadata.buildFrom(invalid).addVersion(viewVersion).setCurrentVersionId(2).build();
assertThat(replaced.currentVersion()).isEqualTo(viewVersion);
} |
public Cookie decode(String header) {
final int headerLen = checkNotNull(header, "header").length();
if (headerLen == 0) {
return null;
}
CookieBuilder cookieBuilder = null;
loop: for (int i = 0;;) {
// Skip spaces and separators.
for (;;) {
if (i == headerLen) {
break loop;
}
char c = header.charAt(i);
if (c == ',') {
// Having multiple cookies in a single Set-Cookie header is
// deprecated; modern browsers only parse the first one
break loop;
} else if (c == '\t' || c == '\n' || c == 0x0b || c == '\f'
|| c == '\r' || c == ' ' || c == ';') {
i++;
continue;
}
break;
}
int nameBegin = i;
int nameEnd;
int valueBegin;
int valueEnd;
for (;;) {
char curChar = header.charAt(i);
if (curChar == ';') {
// NAME; (no value till ';')
nameEnd = i;
valueBegin = valueEnd = -1;
break;
} else if (curChar == '=') {
// NAME=VALUE
nameEnd = i;
i++;
if (i == headerLen) {
// NAME= (empty value, i.e. nothing after '=')
valueBegin = valueEnd = 0;
break;
}
valueBegin = i;
// NAME=VALUE;
int semiPos = header.indexOf(';', i);
valueEnd = i = semiPos > 0 ? semiPos : headerLen;
break;
} else {
i++;
}
if (i == headerLen) {
// NAME (no value till the end of string)
nameEnd = headerLen;
valueBegin = valueEnd = -1;
break;
}
}
if (valueEnd > 0 && header.charAt(valueEnd - 1) == ',') {
// old multiple-cookie separator; skip it
valueEnd--;
}
if (cookieBuilder == null) {
// cookie name-value pair
DefaultCookie cookie = initCookie(header, nameBegin, nameEnd, valueBegin, valueEnd);
if (cookie == null) {
return null;
}
cookieBuilder = new CookieBuilder(cookie, header);
} else {
// cookie attribute
cookieBuilder.appendAttribute(nameBegin, nameEnd, valueBegin, valueEnd);
}
}
return cookieBuilder != null ? cookieBuilder.cookie() : null;
} | @Test
public void testDecodingQuotedCookie() {
Collection<String> sources = new ArrayList<String>();
sources.add("a=\"\",");
sources.add("b=\"1\",");
Collection<Cookie> cookies = new ArrayList<Cookie>();
for (String source : sources) {
cookies.add(ClientCookieDecoder.STRICT.decode(source));
}
Iterator<Cookie> it = cookies.iterator();
Cookie c;
c = it.next();
assertEquals("a", c.name());
assertEquals("", c.value());
c = it.next();
assertEquals("b", c.name());
assertEquals("1", c.value());
assertFalse(it.hasNext());
} |
@Subscribe
public void onGameTick(GameTick event)
{
final Player local = client.getLocalPlayer();
final Duration waitDuration = Duration.ofMillis(config.getIdleNotificationDelay());
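// Count down the post-combat grace period, clamping at zero.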
lastCombatCountdown = Math.max(lastCombatCountdown - 1, 0);
if (client.getGameState() != GameState.LOGGED_IN
|| local == null
// If the user has clicked within the last second, they're not idle, so don't send an idle notification
|| System.currentTimeMillis() - client.getMouseLastPressedMillis() < 1000
|| client.getKeyboardIdleTicks() < 10)
{
resetTimers();
return;
}
if (checkIdleLogout())
{
notifier.notify(config.logoutIdle(), "You are about to log out from idling too long!");
}
if (check6hrLogout())
{
notifier.notify("You are about to log out from being online for 6 hours!");
}
if (checkAnimationIdle(waitDuration, local))
{
notifier.notify(config.animationIdle(), "You are now idle!");
}
if (checkMovementIdle(waitDuration, local))
{
notifier.notify(config.movementIdle(), "You have stopped moving!");
}
if (checkInteractionIdle(waitDuration, local))
{
if (lastInteractWasCombat)
{
notifier.notify(config.interactionIdle(), "You are now out of combat!");
}
else
{
notifier.notify(config.interactionIdle(), "You are now idle!");
}
}
if (checkLowHitpoints())
{
notifier.notify(config.getHitpointsNotification(), "You have low hitpoints!");
}
if (checkLowPrayer())
{
notifier.notify(config.getPrayerNotification(), "You have low prayer!");
}
if (checkLowEnergy())
{
notifier.notify(config.getLowEnergyNotification(), "You have low run energy!");
}
if (checkHighEnergy())
{
notifier.notify(config.getHighEnergyNotification(), "You have restored run energy!");
}
if (checkLowOxygen())
{
notifier.notify(config.getOxygenNotification(), "You have low oxygen!");
}
if (checkFullSpecEnergy())
{
notifier.notify(config.getSpecNotification(), "You have restored spec energy!");
}
} | @Test
public void testMovementIdle()
{
when(config.movementIdle()).thenReturn(Notification.ON);
when(player.getWorldLocation()).thenReturn(new WorldPoint(0, 0, 0));
plugin.onGameTick(new GameTick());
when(player.getWorldLocation()).thenReturn(new WorldPoint(1, 0, 0));
plugin.onGameTick(new GameTick());
// No movement here
plugin.onGameTick(new GameTick());
verify(notifier).notify(Notification.ON, "You have stopped moving!");
} |