focal_method | test_case
---|---
public static FullyQualifiedKotlinType convert(FullyQualifiedJavaType javaType) {
FullyQualifiedKotlinType kotlinType = convertBaseType(javaType);
for (FullyQualifiedJavaType argument : javaType.getTypeArguments()) {
kotlinType.addTypeArgument(convert(argument));
}
return kotlinType;
} | @Test
void testPrimitiveByte() {
FullyQualifiedJavaType jt = new FullyQualifiedJavaType("byte");
FullyQualifiedKotlinType kt = JavaToKotlinTypeConverter.convert(jt);
assertThat(kt.getShortNameWithTypeArguments()).isEqualTo("Byte");
assertThat(kt.getImportList()).isEmpty();
} |
public void clear() throws Exception {
lock.lock();
try {
// Clear caches
blockCache.clear();
notFoundCache.clear();
// Clear file content
((Buffer) buffer).position(0);
long fileLength = randomAccessFile.length();
for (int i = 0; i < fileLength; i++) {
buffer.put((byte)0);
}
// Initialize store again
initNewStore(params.getGenesisBlock());
} finally { lock.unlock(); }
} | @Test
public void clear() throws Exception {
Context.propagate(new Context(100, Transaction.DEFAULT_TX_FEE, false, true));
SPVBlockStore store = new SPVBlockStore(TESTNET, blockStoreFile);
// Build a new block.
Address to = new ECKey().toAddress(ScriptType.P2PKH, BitcoinNetwork.TESTNET);
StoredBlock genesis = store.getChainHead();
StoredBlock b1 = genesis.build(genesis.getHeader().createNextBlock(to).cloneAsHeader());
store.put(b1);
store.setChainHead(b1);
assertEquals(b1.getHeader().getHash(), store.getChainHead().getHeader().getHash());
store.clear();
assertNull(store.get(b1.getHeader().getHash()));
assertEquals(TESTNET.getGenesisBlock().getHash(), store.getChainHead().getHeader().getHash());
store.close();
} |
@SuppressWarnings("unused") // Part of required API.
public void execute(
final ConfiguredStatement<InsertValues> statement,
final SessionProperties sessionProperties,
final KsqlExecutionContext executionContext,
final ServiceContext serviceContext
) {
final InsertValues insertValues = statement.getStatement();
final MetaStore metaStore = executionContext.getMetaStore();
final KsqlConfig config = statement.getSessionConfig().getConfig(true);
final DataSource dataSource = getDataSource(config, metaStore, insertValues);
validateInsert(insertValues.getColumns(), dataSource);
final ProducerRecord<byte[], byte[]> record =
buildRecord(statement, metaStore, dataSource, serviceContext);
try {
producer.sendRecord(record, serviceContext, config.getProducerClientConfigProps());
} catch (final TopicAuthorizationException e) {
// TopicAuthorizationException does not give much detailed information about why it failed,
// except which topics are denied. Here we just add the ACL to make the error message
// consistent with other authorization error messages.
final Exception rootCause = new KsqlTopicAuthorizationException(
AclOperation.WRITE,
e.unauthorizedTopics()
);
throw new KsqlException(createInsertFailedExceptionMessage(insertValues), rootCause);
} catch (final ClusterAuthorizationException e) {
// ClusterAuthorizationException is thrown when using idempotent producers
// and either a topic write permission or a cluster-level idempotent write
// permission (only applicable for broker versions no later than 2.8) is
// missing. In this case, we include additional context to help the user
// distinguish this type of failure from other permissions exceptions
// such as the ones thrown above when TopicAuthorizationException is caught.
throw new KsqlException(
createInsertFailedExceptionMessage(insertValues),
createClusterAuthorizationExceptionRootCause(dataSource)
);
} catch (final KafkaException e) {
if (e.getCause() != null && e.getCause() instanceof ClusterAuthorizationException) {
// The error message thrown when an idempotent producer is missing permissions
// is (nondeterministically) inconsistent: it is either a raw ClusterAuthorizationException,
// as checked for above, or a ClusterAuthorizationException wrapped inside a KafkaException.
// ksqlDB handles these two the same way, accordingly.
// See https://issues.apache.org/jira/browse/KAFKA-14138 for more.
throw new KsqlException(
createInsertFailedExceptionMessage(insertValues),
createClusterAuthorizationExceptionRootCause(dataSource)
);
} else {
throw new KsqlException(createInsertFailedExceptionMessage(insertValues), e);
}
} catch (final Exception e) {
throw new KsqlException(createInsertFailedExceptionMessage(insertValues), e);
}
} | @Test
public void shouldHandleTablesWithNoKeyField() {
// Given:
givenSourceTableWithSchema(SerdeFeatures.of(), SerdeFeatures.of());
final ConfiguredStatement<InsertValues> statement = givenInsertValues(
ImmutableList.of(K0, COL0, COL1),
ImmutableList.of(
new StringLiteral("key"),
new StringLiteral("str"),
new LongLiteral(2L))
);
// When:
executor.execute(statement, mock(SessionProperties.class), engine, serviceContext);
// Then:
verify(keySerializer).serialize(TOPIC_NAME, genericKey("key"));
verify(valueSerializer).serialize(TOPIC_NAME, genericRow("str", 2L));
verify(producer).send(new ProducerRecord<>(TOPIC_NAME, null, 1L, KEY, VALUE));
} |
protected String decideSource(MappedMessage cef, RawMessage raw) {
// Try getting the host name from the CEF extension "deviceAddress"/"dvc"
final Map<String, Object> fields = cef.mappedExtensions();
if (fields != null && !fields.isEmpty()) {
final String deviceAddress = (String) fields.getOrDefault(CEFMapping.dvc.getFullName(), fields.get(CEFMapping.dvc.getKeyName()));
if (!isNullOrEmpty(deviceAddress)) {
return deviceAddress;
}
}
// Try getting the hostname from the CEF message metadata (e.g. syslog)
if (!isNullOrEmpty(cef.host())) {
return cef.host();
}
// Use raw message source information if we were not able to parse a source from the CEF extensions.
final ResolvableInetSocketAddress address = raw.getRemoteAddress();
final InetSocketAddress remoteAddress;
if (address == null) {
remoteAddress = null;
} else {
remoteAddress = address.getInetSocketAddress();
}
return remoteAddress == null ? "unknown" : remoteAddress.getAddress().toString();
} | @Test
public void decideSourceWithShortDeviceAddressReturnsExtensionValue() throws Exception {
final MappedMessage cefMessage = mock(MappedMessage.class);
when(cefMessage.mappedExtensions()).thenReturn(Collections.singletonMap("dvc", "128.66.23.42"));
final RawMessage rawMessage = new RawMessage(new byte[0], new InetSocketAddress("example.com", 12345));
assertEquals("128.66.23.42", codec.decideSource(cefMessage, rawMessage));
} |
private URI rebuildUri(String url, URI uri) {
final Optional<URI> optionalUri = formatUri(url, uri);
if (optionalUri.isPresent()) {
return optionalUri.get();
}
throw new IllegalArgumentException("Invalid url: " + url);
} | @Test
public void rebuildUriTest() {
Optional<Method> method = ReflectUtils.findMethod(RestTemplateInterceptor.class, "rebuildUri",
new Class[]{String.class, URI.class});
URI uri = createURI(url);
if (method.isPresent()) {
Optional<Object> uriNew = ReflectUtils
.invokeMethod(interceptor, method.get(), new Object[]{convertUrl, uri});
Assert.assertEquals(convertUrl, uriNew.get().toString());
}
} |
@Override
public boolean canRescaleMaxParallelism(int desiredMaxParallelism) {
// Technically a valid parallelism value, but one that cannot be rescaled to
if (desiredMaxParallelism == JobVertex.MAX_PARALLELISM_DEFAULT) {
return false;
}
return !rescaleMaxValidator
.apply(normalizeAndCheckMaxParallelism(desiredMaxParallelism))
.isPresent();
} | @Test
void canRescaleMaxOutOfBounds() {
DefaultVertexParallelismInfo info = new DefaultVertexParallelismInfo(1, 1, ALWAYS_VALID);
assertThatThrownBy(() -> info.canRescaleMaxParallelism(-4))
.withFailMessage("not in valid bounds")
.isInstanceOf(IllegalArgumentException.class);
} |
public static List<Type> decode(String rawInput, List<TypeReference<Type>> outputParameters) {
return decoder.decodeFunctionResult(rawInput, outputParameters);
} | @Test
public void testBuildDynamicArrayOfStaticStruct() throws ClassNotFoundException {
// This is a version of testDecodeStaticStructDynamicArray() that builds
// the decoding TypeReferences using inner types.
String rawInput =
"0x0000000000000000000000000000000000000000000000000000000000000020"
+ "0000000000000000000000000000000000000000000000000000000000000002"
+ "000000000000000000000000000000000000000000000000000000000000007b"
+ "000000000000000000000000000000000000000000000000000000000000007b"
+ "000000000000000000000000000000000000000000000000000000000000007b"
+ "000000000000000000000000000000000000000000000000000000000000007b";
// (uint256, uint256) static struct.
TypeReference<StaticStruct> staticStructTr =
new TypeReference<StaticStruct>(
false,
Arrays.asList(
TypeReference.makeTypeReference("uint256"),
TypeReference.makeTypeReference("uint256"))) {};
// (uint256, uint256)[] dynamic array of static struct.
TypeReference<DynamicArray> dynamicArray =
new TypeReference<DynamicArray>(false) {
@Override
public TypeReference getSubTypeReference() {
return staticStructTr;
}
@Override
public java.lang.reflect.Type getType() {
return new java.lang.reflect.ParameterizedType() {
@Override
public java.lang.reflect.Type[] getActualTypeArguments() {
return new java.lang.reflect.Type[] {staticStructTr.getType()};
}
@Override
public java.lang.reflect.Type getRawType() {
return DynamicArray.class;
}
@Override
public java.lang.reflect.Type getOwnerType() {
return Class.class;
}
};
}
};
List<Type> decodedData =
FunctionReturnDecoder.decode(rawInput, Utils.convert(Arrays.asList(dynamicArray)));
List<Type> decodedArray = ((DynamicArray) decodedData.get(0)).getValue();
List<Type> decodedStaticStruct0 = ((StaticStruct) decodedArray.get(0)).getValue();
assertEquals(decodedStaticStruct0.get(0).getValue(), BigInteger.valueOf(123));
assertEquals(decodedStaticStruct0.get(1).getValue(), BigInteger.valueOf(123));
List<Type> decodedStaticStruct1 = ((StaticStruct) decodedArray.get(1)).getValue();
assertEquals(decodedStaticStruct1.get(0).getValue(), BigInteger.valueOf(123));
assertEquals(decodedStaticStruct1.get(1).getValue(), BigInteger.valueOf(123));
} |
public String toJson() {
JsonObject details = new JsonObject();
details.addProperty(FIELD_LEVEL, level.toString());
JsonArray conditionResults = new JsonArray();
for (EvaluatedCondition condition : this.conditions) {
conditionResults.add(toJson(condition));
}
details.add("conditions", conditionResults);
details.addProperty(FIELD_IGNORED_CONDITIONS, ignoredConditions);
return details.toString();
} | @Test
public void verify_json_when_there_is_no_condition() {
String actualJson = new QualityGateDetailsData(Measure.Level.OK, Collections.emptyList(), false).toJson();
JsonAssert.assertJson(actualJson).isSimilarTo("{" +
"\"level\":\"OK\"," +
"\"conditions\":[]" +
"}");
} |
public void awaitSuccessfulCompletion() throws InterruptedException, ExecutionException {
awaitUninterruptibly();
for (final Future<?> f : futures) {
f.get();
}
} | @Test
public void failsWhenAnyCallableThrowsException() throws Exception {
StatusEnsuringCallable firstTask = new StatusEnsuringCallable(false);
StatusEnsuringCallable secondTask = new StatusEnsuringCallable(false);
subject.submit(firstTask);
subject.submit(secondTask);
try {
subject.awaitSuccessfulCompletion();
fail("Should fail");
} catch (final Exception e) {
}
} |
@Override
public QuoteCharacter getQuoteCharacter() {
return QuoteCharacter.QUOTE;
} | @Test
void assertGetQuoteCharacter() {
assertThat(dialectDatabaseMetaData.getQuoteCharacter(), is(QuoteCharacter.QUOTE));
} |
public LoginContext login() throws LoginException {
LoginContext tmpLoginContext = loginContextFactory.createLoginContext(this);
tmpLoginContext.login();
log.info("Successfully logged in.");
loginContext = tmpLoginContext;
subject = loginContext.getSubject();
expiringCredential = expiringCredential();
hasExpiringCredential = expiringCredential != null;
if (!hasExpiringCredential) {
// do not bother with re-logins.
log.debug("No Expiring Credential");
principalName = null;
refresherThread = null;
return loginContext;
}
principalName = expiringCredential.principalName();
// Check for a clock skew problem
long expireTimeMs = expiringCredential.expireTimeMs();
long nowMs = currentMs();
if (nowMs > expireTimeMs) {
log.error(
"[Principal={}]: Current clock: {} is later than expiry {}. This may indicate a clock skew problem."
+ " Check that this host's and remote host's clocks are in sync. Not starting refresh thread."
+ " This process is likely unable to authenticate SASL connections (for example, it is unlikely"
+ " to be able to authenticate a connection with a Kafka Broker).",
principalLogText(), new Date(nowMs), new Date(expireTimeMs));
return loginContext;
}
if (log.isDebugEnabled())
log.debug("[Principal={}]: It is an expiring credential", principalLogText());
/*
* Re-login periodically. How often is determined by the expiration date of the
* credential and refresh-related configuration values.
*/
refresherThread = KafkaThread.daemon(String.format("kafka-expiring-relogin-thread-%s", principalName),
new Refresher());
refresherThread.start();
loginContextFactory.refresherThreadStarted();
return loginContext;
} | @Test
public void testRefreshWithMinPeriodIntrusion() throws Exception {
int numExpectedRefreshes = 1;
boolean clientReloginAllowedBeforeLogout = true;
Subject subject = new Subject();
final LoginContext mockLoginContext = mock(LoginContext.class);
when(mockLoginContext.getSubject()).thenReturn(subject);
MockTime mockTime = new MockTime();
long startMs = mockTime.milliseconds();
/*
* Identify the lifetime of each expiring credential
*/
long lifetimeMinutes = 10L;
/*
* Identify the point at which refresh will occur in that lifetime
*/
long refreshEveryMinutes = 8L;
/*
* Set an absolute last refresh time that will cause the login thread to exit
* after a certain number of re-logins (by adding an extra half of a refresh
* interval).
*/
long absoluteLastRefreshMs = startMs + (1 + numExpectedRefreshes) * 1000 * 60 * refreshEveryMinutes
- 1000 * 60 * refreshEveryMinutes / 2;
/*
* Identify a minimum period that will cause the refresh time to be delayed a
* bit.
*/
int bufferIntrusionSeconds = 1;
short minPeriodSeconds = (short) (refreshEveryMinutes * 60 + bufferIntrusionSeconds);
short bufferSeconds = (short) 0;
/*
* Define some listeners so we can keep track of who gets done and when. All
* added listeners should end up done except the last, extra one, which should
* not.
*/
MockScheduler mockScheduler = new MockScheduler(mockTime);
List<KafkaFutureImpl<Long>> waiters = addWaiters(mockScheduler,
1000 * (60 * refreshEveryMinutes + bufferIntrusionSeconds), numExpectedRefreshes + 1);
// Create the ExpiringCredentialRefreshingLogin instance under test
TestLoginContextFactory testLoginContextFactory = new TestLoginContextFactory();
TestExpiringCredentialRefreshingLogin testExpiringCredentialRefreshingLogin = new TestExpiringCredentialRefreshingLogin(
refreshConfigThatPerformsReloginEveryGivenPercentageOfLifetime(
1.0 * refreshEveryMinutes / lifetimeMinutes, minPeriodSeconds, bufferSeconds,
clientReloginAllowedBeforeLogout),
testLoginContextFactory, mockTime, 1000 * 60 * lifetimeMinutes, absoluteLastRefreshMs,
clientReloginAllowedBeforeLogout);
testLoginContextFactory.configure(mockLoginContext, testExpiringCredentialRefreshingLogin);
/*
* Perform the login, wait up to a certain amount of time for the refresher
* thread to exit, and make sure the correct calls happened at the correct times
*/
long expectedFinalMs = startMs
+ numExpectedRefreshes * 1000 * (60 * refreshEveryMinutes + bufferIntrusionSeconds);
assertFalse(testLoginContextFactory.refresherThreadStartedFuture().isDone());
assertFalse(testLoginContextFactory.refresherThreadDoneFuture().isDone());
testExpiringCredentialRefreshingLogin.login();
assertTrue(testLoginContextFactory.refresherThreadStartedFuture().isDone());
testLoginContextFactory.refresherThreadDoneFuture().get(1L, TimeUnit.SECONDS);
assertEquals(expectedFinalMs, mockTime.milliseconds());
for (int i = 0; i < numExpectedRefreshes; ++i) {
KafkaFutureImpl<Long> waiter = waiters.get(i);
assertTrue(waiter.isDone());
assertEquals((i + 1) * 1000 * (60 * refreshEveryMinutes + bufferIntrusionSeconds),
waiter.get() - startMs);
}
assertFalse(waiters.get(numExpectedRefreshes).isDone());
InOrder inOrder = inOrder(mockLoginContext);
inOrder.verify(mockLoginContext).login();
for (int i = 0; i < numExpectedRefreshes; ++i) {
inOrder.verify(mockLoginContext).login();
inOrder.verify(mockLoginContext).logout();
}
} |
@Override
public List<PurgeableAnalysisDto> filter(List<PurgeableAnalysisDto> history) {
List<PurgeableAnalysisDto> result = new ArrayList<>();
for (PurgeableAnalysisDto snapshot : history) {
if (snapshot.getDate().before(before)) {
result.add(snapshot);
}
}
return result;
} | @Test
void shouldDeleteAllSnapshotsPriorToDate() {
Filter filter = new DeleteAllFilter(DateUtils.parseDate("2011-12-25"));
List<PurgeableAnalysisDto> toDelete = filter.filter(Arrays.asList(
DbCleanerTestUtils.createAnalysisWithDate("u1", "2010-01-01"),
DbCleanerTestUtils.createAnalysisWithDate("u2", "2010-12-25"),
DbCleanerTestUtils.createAnalysisWithDate("u3", "2012-01-01")));
assertThat(toDelete).extracting("analysisUuid").containsOnly("u1", "u2");
} |
@ApiOperation(value = "Get a model", tags = { "Models" })
@ApiResponses(value = {
@ApiResponse(code = 200, message = "Indicates the model was found and returned."),
@ApiResponse(code = 404, message = "Indicates the requested model was not found.")
})
@GetMapping(value = "/repository/models/{modelId}", produces = "application/json")
public ModelResponse getModel(@ApiParam(name = "modelId") @PathVariable String modelId) {
Model model = getModelFromRequest(modelId);
return restResponseFactory.createModelResponse(model);
} | @Test
@Deployment(resources = { "org/flowable/rest/service/api/repository/oneTaskProcess.bpmn20.xml" })
public void testGetModel() throws Exception {
Model model = null;
try {
Calendar now = Calendar.getInstance();
now.set(Calendar.MILLISECOND, 0);
processEngineConfiguration.getClock().setCurrentTime(now.getTime());
model = repositoryService.newModel();
model.setCategory("Model category");
model.setKey("Model key");
model.setMetaInfo("Model metainfo");
model.setName("Model name");
model.setVersion(2);
model.setDeploymentId(deploymentId);
model.setTenantId("myTenant");
repositoryService.saveModel(model);
repositoryService.addModelEditorSource(model.getId(), "This is the editor source".getBytes());
repositoryService.addModelEditorSourceExtra(model.getId(), "This is the extra editor source".getBytes());
HttpGet httpGet = new HttpGet(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_MODEL, model.getId()));
CloseableHttpResponse response = executeRequest(httpGet, HttpStatus.SC_OK);
JsonNode responseNode = objectMapper.readTree(response.getEntity().getContent());
closeResponse(response);
assertThat(responseNode).isNotNull();
assertThatJson(responseNode)
.isEqualTo("{"
+ "name: 'Model name',"
+ "key: 'Model key',"
+ "category: 'Model category',"
+ "version: 2,"
+ "metaInfo: 'Model metainfo',"
+ "deploymentId: '" + deploymentId + "',"
+ "id: '" + model.getId() + "',"
+ "tenantId: 'myTenant',"
+ "url: '" + SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_MODEL, model.getId()) + "',"
+ "deploymentUrl: '" + SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_DEPLOYMENT, deploymentId) + "',"
+ "sourceUrl: '" + SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_MODEL_SOURCE, model.getId()) + "',"
+ "sourceExtraUrl: '" + SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_MODEL_SOURCE_EXTRA, model.getId())
+ "',"
+ "createTime: " + new TextNode(getISODateStringWithTZ(now.getTime())) + ","
+ "lastUpdateTime: " + new TextNode(getISODateStringWithTZ(now.getTime()))
+ "}");
} finally {
try {
repositoryService.deleteModel(model.getId());
} catch (Throwable ignore) {
// Ignore, model might not be created
}
}
} |
public boolean hasViewPermissionDefined() {
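// A default-constructed ViewConfig carries no permissions, so any difference from it means a view permission was configured.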
return !viewConfig.equals(new ViewConfig());
} | @Test
public void shouldReturnTrueIfViewPermissionDefined() {
Authorization authorization = new Authorization(new ViewConfig(new AdminUser(new CaseInsensitiveString("baby"))));
assertThat(authorization.hasViewPermissionDefined(), is(true));
} |
public static FormValidation errorWithMarkup(String message) {
return _errorWithMarkup(message, Kind.ERROR);
} | @Test
public void testMessage() {
assertEquals("test msg", FormValidation.errorWithMarkup("test msg").getMessage());
} |
@Override
public boolean resize(int newSize) throws IOException {
lock.lock();
try {
if (super.resize(newSize)) {
this.lastEntry = lastEntryFromIndexFile();
return true;
} else
return false;
} finally {
lock.unlock();
}
} | @Test
public void testResize() throws IOException {
boolean result = idx.resize(maxEntries * idx.entrySize());
assertFalse(result);
result = idx.resize(maxEntries / 2 * idx.entrySize());
assertTrue(result);
result = idx.resize(maxEntries * 2 * idx.entrySize());
assertTrue(result);
} |
public Node parse() throws ScanException {
return E();
} | @Test
public void testFormattingInfo() throws Exception {
{
Parser<Object> p = new Parser("%45x");
Node t = p.parse();
FormattingNode witness = new SimpleKeywordNode("x");
witness.setFormatInfo(new FormatInfo(45, Integer.MAX_VALUE));
assertEquals(witness, t);
}
{
Parser<Object> p = new Parser("%4.5x");
Node t = p.parse();
FormattingNode witness = new SimpleKeywordNode("x");
witness.setFormatInfo(new FormatInfo(4, 5));
assertEquals(witness, t);
}
{
Parser<Object> p = new Parser("%-4.5x");
Node t = p.parse();
FormattingNode witness = new SimpleKeywordNode("x");
witness.setFormatInfo(new FormatInfo(4, 5, false, true));
assertEquals(witness, t);
}
{
Parser<Object> p = new Parser("%-4.-5x");
Node t = p.parse();
FormattingNode witness = new SimpleKeywordNode("x");
witness.setFormatInfo(new FormatInfo(4, 5, false, false));
assertEquals(witness, t);
}
{
Parser<Object> p = new Parser("%-4.5x %12y");
Node t = p.parse();
FormattingNode witness = new SimpleKeywordNode("x");
witness.setFormatInfo(new FormatInfo(4, 5, false, true));
Node n = witness.next = new Node(Node.LITERAL, " ");
n = n.next = new SimpleKeywordNode("y");
((FormattingNode) n).setFormatInfo(new FormatInfo(12, Integer.MAX_VALUE));
assertEquals(witness, t);
}
} |
public MyNewIssuesNotification newMyNewIssuesNotification(Map<String, UserDto> assigneesByUuid) {
verifyAssigneesByUuid(assigneesByUuid);
return new MyNewIssuesNotification(new DetailsSupplierImpl(assigneesByUuid));
} | @Test
public void newMyNewIssuesNotification_DetailsSupplier_getUserNameByUuid_fails_with_NPE_if_uuid_is_null() {
MyNewIssuesNotification underTest = this.underTest.newMyNewIssuesNotification(emptyMap());
DetailsSupplier detailsSupplier = readDetailsSupplier(underTest);
assertThatThrownBy(() -> detailsSupplier.getUserNameByUuid(null))
.isInstanceOf(NullPointerException.class)
.hasMessage("uuid can't be null");
} |
@Override
public void acceptPolicy(ApplicationId appId) {
} | @Test
public void testAcceptPolicy() {
assertEquals(SECURED, store.getState(appId));
store.acceptPolicy(appId, getMaximumPermissions(appId));
assertEquals(POLICY_VIOLATED, store.getState(appId));
} |
public IssueQuery create(SearchRequest request) {
try (DbSession dbSession = dbClient.openSession(false)) {
final ZoneId timeZone = parseTimeZone(request.getTimeZone()).orElse(clock.getZone());
Collection<RuleDto> ruleDtos = ruleKeysToRuleId(dbSession, request.getRules());
Collection<String> ruleUuids = ruleDtos.stream().map(RuleDto::getUuid).collect(Collectors.toSet());
Collection<String> issueKeys = collectIssueKeys(dbSession, request);
if (request.getRules() != null && request.getRules().stream().collect(Collectors.toSet()).size() != ruleDtos.size()) {
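// Some requested rule keys did not resolve to known rules; the placeholder uuid makes them match no issues rather than silently widening the filter.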
ruleUuids.add("non-existing-uuid");
}
IssueQuery.Builder builder = IssueQuery.builder()
.issueKeys(issueKeys)
.severities(request.getSeverities())
.cleanCodeAttributesCategories(request.getCleanCodeAttributesCategories())
.impactSoftwareQualities(request.getImpactSoftwareQualities())
.impactSeverities(request.getImpactSeverities())
.statuses(request.getStatuses())
.resolutions(request.getResolutions())
.issueStatuses(request.getIssueStatuses())
.resolved(request.getResolved())
.prioritizedRule(request.getPrioritizedRule())
.rules(ruleDtos)
.ruleUuids(ruleUuids)
.assigneeUuids(request.getAssigneeUuids())
.authors(request.getAuthors())
.scopes(request.getScopes())
.languages(request.getLanguages())
.tags(request.getTags())
.types(request.getTypes())
.pciDss32(request.getPciDss32())
.pciDss40(request.getPciDss40())
.owaspAsvs40(request.getOwaspAsvs40())
.owaspAsvsLevel(request.getOwaspAsvsLevel())
.owaspTop10(request.getOwaspTop10())
.owaspTop10For2021(request.getOwaspTop10For2021())
.stigAsdR5V3(request.getStigAsdV5R3())
.casa(request.getCasa())
.sansTop25(request.getSansTop25())
.cwe(request.getCwe())
.sonarsourceSecurity(request.getSonarsourceSecurity())
.assigned(request.getAssigned())
.createdAt(parseStartingDateOrDateTime(request.getCreatedAt(), timeZone))
.createdBefore(parseEndingDateOrDateTime(request.getCreatedBefore(), timeZone))
.facetMode(request.getFacetMode())
.timeZone(timeZone)
.codeVariants(request.getCodeVariants());
List<ComponentDto> allComponents = new ArrayList<>();
boolean effectiveOnComponentOnly = mergeDeprecatedComponentParameters(dbSession, request, allComponents);
addComponentParameters(builder, dbSession, effectiveOnComponentOnly, allComponents, request);
setCreatedAfterFromRequest(dbSession, builder, request, allComponents, timeZone);
String sort = request.getSort();
if (!isNullOrEmpty(sort)) {
builder.sort(sort);
builder.asc(request.getAsc());
}
return builder.build();
}
} | @Test
public void new_code_period_does_not_rely_on_date_for_reference_branch_with_analysis_after_sonarqube_94() {
ComponentDto project = db.components().insertPublicProject().getMainBranchComponent();
ComponentDto file = db.components().insertComponent(newFileDto(project));
db.components().insertSnapshot(project, s -> s.setPeriodMode(REFERENCE_BRANCH.name())
.setPeriodParam("master"));
MetricDto analysisMetric = db.measures().insertMetric(m -> m.setKey(ANALYSIS_FROM_SONARQUBE_9_4_KEY));
db.measures().insertLiveMeasure(project, analysisMetric, measure -> measure.setData("true"));
SearchRequest request = new SearchRequest()
.setComponentUuids(Collections.singletonList(file.uuid()))
.setOnComponentOnly(true)
.setInNewCodePeriod(true);
IssueQuery query = underTest.create(request);
assertThat(query.componentUuids()).containsOnly(file.uuid());
assertThat(query.newCodeOnReference()).isTrue();
assertThat(query.createdAfter()).isNull();
} |
@VisibleForTesting
static void verifyImageMetadata(ImageMetadataTemplate metadata, Path metadataCacheDirectory)
throws CacheCorruptedException {
List<ManifestAndConfigTemplate> manifestsAndConfigs = metadata.getManifestsAndConfigs();
if (manifestsAndConfigs.isEmpty()) {
throw new CacheCorruptedException(metadataCacheDirectory, "Manifest cache empty");
}
if (manifestsAndConfigs.stream().anyMatch(entry -> entry.getManifest() == null)) {
throw new CacheCorruptedException(metadataCacheDirectory, "Manifest(s) missing");
}
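// Without a manifest list, the cache must hold exactly one manifest.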
if (metadata.getManifestList() == null && manifestsAndConfigs.size() != 1) {
throw new CacheCorruptedException(metadataCacheDirectory, "Manifest list missing");
}
ManifestTemplate firstManifest = manifestsAndConfigs.get(0).getManifest();
if (firstManifest instanceof V21ManifestTemplate) {
if (metadata.getManifestList() != null
|| manifestsAndConfigs.stream().anyMatch(entry -> entry.getConfig() != null)) {
throw new CacheCorruptedException(metadataCacheDirectory, "Schema 1 manifests corrupted");
}
} else if (firstManifest instanceof BuildableManifestTemplate) {
if (manifestsAndConfigs.stream().anyMatch(entry -> entry.getConfig() == null)) {
throw new CacheCorruptedException(metadataCacheDirectory, "Schema 2 manifests corrupted");
}
if (metadata.getManifestList() != null
&& manifestsAndConfigs.stream().anyMatch(entry -> entry.getManifestDigest() == null)) {
throw new CacheCorruptedException(metadataCacheDirectory, "Schema 2 manifests corrupted");
}
} else {
throw new CacheCorruptedException(
metadataCacheDirectory, "Unknown manifest type: " + firstManifest);
}
} | @Test
public void testVerifyImageMetadata_unknownManifestType() {
ManifestAndConfigTemplate manifestAndConfig =
new ManifestAndConfigTemplate(
Mockito.mock(ManifestTemplate.class), new ContainerConfigurationTemplate());
ImageMetadataTemplate metadata =
new ImageMetadataTemplate(null, Arrays.asList(manifestAndConfig));
try {
CacheStorageReader.verifyImageMetadata(metadata, Paths.get("/cache/dir"));
Assert.fail();
} catch (CacheCorruptedException ex) {
MatcherAssert.assertThat(ex.getMessage(), CoreMatchers.startsWith("Unknown manifest type:"));
}
} |
public static RestSettingBuilder head() {
return all(HttpMethod.HEAD);
} | @Test
public void should_head_with_matcher() throws Exception {
server.resource("targets",
head("1").request(eq(query("name"), "foo")).response(header("ETag", "Moco"))
);
running(server, () -> {
HttpResponse httpResponse = helper.headForResponse(remoteUrl("/targets/1?name=foo"));
assertThat(httpResponse.getCode(), is(200));
});
} |
static String readFileContents(String fileName) {
try {
File file = new File(fileName);
return Files.readString(file.toPath(), StandardCharsets.UTF_8);
} catch (IOException e) {
throw new RuntimeException("Could not get " + fileName, e);
}
} | @Test
public void readFileContents()
throws IOException {
// given
String expectedContents = "Hello, world!\nThis is a test with Unicode ✓.";
String testFile = createTestFile(expectedContents);
// when
String actualContents = GcpDiscoveryStrategyFactory.readFileContents(testFile);
// then
assertEquals(expectedContents, actualContents);
} |
public static ProxyBackendHandler newInstance(final DatabaseType databaseType, final String sql, final SQLStatement sqlStatement,
final ConnectionSession connectionSession, final HintValueContext hintValueContext) throws SQLException {
if (sqlStatement instanceof EmptyStatement) {
return new SkipBackendHandler(sqlStatement);
}
SQLStatementContext sqlStatementContext = sqlStatement instanceof DistSQLStatement ? new DistSQLStatementContext((DistSQLStatement) sqlStatement)
: new SQLBindEngine(ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData(), connectionSession.getCurrentDatabaseName(), hintValueContext).bind(sqlStatement,
Collections.emptyList());
QueryContext queryContext = new QueryContext(sqlStatementContext, sql, Collections.emptyList(), hintValueContext, connectionSession.getConnectionContext(),
ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData());
connectionSession.setQueryContext(queryContext);
return newInstance(databaseType, queryContext, connectionSession, false);
} | @Test
void assertNewInstanceWithUnsupportedNonQueryDistSQLInTransaction() {
when(connectionSession.getTransactionStatus().isInTransaction()).thenReturn(true);
String sql = "CREATE SHARDING TABLE RULE t_order (STORAGE_UNITS(ms_group_0,ms_group_1), SHARDING_COLUMN=order_id, TYPE(NAME='hash_mod', PROPERTIES('sharding-count'='4')));";
SQLStatement sqlStatement = ProxySQLComQueryParser.parse(sql, databaseType, connectionSession);
assertThrows(UnsupportedSQLOperationException.class, () -> ProxyBackendHandlerFactory.newInstance(databaseType, sql, sqlStatement, connectionSession, new HintValueContext()));
} |
public static UserAgent parse(String userAgentString) {
return UserAgentParser.parse(userAgentString);
} | @Test
public void parseDesktopTest() {
final String uaStr = "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/14.0.835.163 Safari/535.1";
final UserAgent ua = UserAgentUtil.parse(uaStr);
assertEquals("Chrome", ua.getBrowser().toString());
assertEquals("14.0.835.163", ua.getVersion());
assertEquals("Webkit", ua.getEngine().toString());
assertEquals("535.1", ua.getEngineVersion());
assertEquals("Windows 7 or Windows Server 2008R2", ua.getOs().toString());
assertEquals("6.1", ua.getOsVersion());
assertEquals("Windows", ua.getPlatform().toString());
assertFalse(ua.isMobile());
} |
@CanIgnoreReturnValue
public <K1 extends K, V1 extends V> Caffeine<K1, V1> removalListener(
RemovalListener<? super K1, ? super V1> removalListener) {
requireState(this.removalListener == null,
"removal listener was already set to %s", this.removalListener);
@SuppressWarnings("unchecked")
Caffeine<K1, V1> self = (Caffeine<K1, V1>) this;
self.removalListener = requireNonNull(removalListener);
return self;
} | @Test
public void removalListener() {
RemovalListener<Object, Object> removalListener = (k, v, c) -> {};
var builder = Caffeine.newBuilder().removalListener(removalListener);
assertThat(builder.getRemovalListener(false)).isSameInstanceAs(removalListener);
assertThat(builder.build()).isNotNull();
} |
public static Map<String, String> loadProperties(final File propertiesFile) {
return loadProperties(ImmutableList.of(propertiesFile));
} | @Test
public void shouldThrowIfPropsFileContainsBlackListedProps() {
// Given:
givenPropsFileContains(
"java.some.disallowed.setting=something" + System.lineSeparator()
+ "java.not.another.one=v"
);
// When
final KsqlException e = assertThrows(
KsqlException.class,
() -> PropertiesUtil.loadProperties(propsFile)
);
// Then:
assertThat(e.getMessage(), containsString(
"Property file contains the following blacklisted properties"));
assertThat(e.getMessage(), containsString(
"java.some.disallowed.setting"));
assertThat(e.getMessage(), containsString(
"java.not.another.one"));
} |
public static ProxyProvider.TypeSpec builder() {
return new ProxyProvider.Build();
} | @Test
void shouldNotCreateProxyProviderWithMissingRemoteHostInfo() {
ProxyProvider.Build builder = (ProxyProvider.Build) ProxyProvider.builder().type(ProxyProvider.Proxy.HTTP);
assertThatIllegalArgumentException()
.isThrownBy(builder::build)
.withMessage("Neither address nor host is specified");
} |
long getTotalFileLength(DataSplit split) {
return split.dataFiles().stream().map(DataFileMeta::fileSize).reduce(0L, Long::sum);
} | @Test
public void testTotalFileLength(@Mocked PaimonTable table) {
BinaryRow row1 = new BinaryRow(2);
BinaryRowWriter writer = new BinaryRowWriter(row1, 10);
writer.writeInt(0, 2000);
writer.writeInt(1, 4444);
writer.complete();
List<DataFileMeta> meta1 = new ArrayList<>();
meta1.add(new DataFileMeta("file1", 100, 200, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_KEY_STATS, null,
1, 1, 1, DUMMY_LEVEL, 0L, null));
meta1.add(new DataFileMeta("file2", 100, 300, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_KEY_STATS, null,
1, 1, 1, DUMMY_LEVEL, 0L, null));
DataSplit split = DataSplit.builder().withSnapshot(1L).withPartition(row1).withBucket(1)
.withBucketPath("not used").withDataFiles(meta1).isStreaming(false).build();
TupleDescriptor desc = new TupleDescriptor(new TupleId(0));
desc.setTable(table);
PaimonScanNode scanNode = new PaimonScanNode(new PlanNodeId(0), desc, "XXX");
long totalFileLength = scanNode.getTotalFileLength(split);
Assert.assertEquals(200, totalFileLength);
} |
static CounterResult fromJson(String json) {
return JsonUtil.parse(json, CounterResultParser::fromJson);
} | @Test
public void invalidValue() {
assertThatThrownBy(
() -> CounterResultParser.fromJson("{\"unit\":\"count\",\"value\":\"illegal\"}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse to a long value: value: \"illegal\"");
} |
@Deprecated
public B local(String local) {
this.local = local;
return getThis();
} | @Test
void local() {
InterfaceBuilder builder = new InterfaceBuilder();
builder.local("GreetingMock");
Assertions.assertEquals("GreetingMock", builder.build().getLocal());
} |
public static <T> T toObj(byte[] json, Class<T> cls) {
try {
return mapper.readValue(json, cls);
} catch (Exception e) {
throw new NacosDeserializationException(cls, e);
}
} | @Test
void testToObject12() {
assertThrows(Exception.class, () -> {
JacksonUtils.toObj(new ByteArrayInputStream("{not_A}Json:String}".getBytes()),
TypeUtils.parameterize(Map.class, String.class, String.class));
});
} |
public static void removeMatching(Collection<String> values,
String... patterns) {
removeMatching(values, Arrays.asList(patterns));
} | @Test
public void testRemoveMatchingWithNoPatterns() throws Exception {
Collection<String> values = stringToList("A");
StringCollectionUtil.removeMatching(values);
assertTrue(values.contains("A"));
} |
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + (this.ip == null ? 0 : this.ip.hashCode());
result = prime * result + this.port;
return result;
} | @Test
public void testEqualsHashCode() {
final Endpoint ep1 = new Endpoint("192.168.1.1", 8080);
final Endpoint ep2 = new Endpoint("192.168.1.1", 8080);
assertEquals(ep1, ep2);
assertEquals(ep1.hashCode(), ep2.hashCode());
} |
KettleValidatorException assertNumeric( ValueMetaInterface valueMeta,
Object valueData,
Validation field ) throws KettleValueException {
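// Returns the exception instead of throwing it; a null result means the value passed validation.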
if ( valueMeta.isNumeric() || containsOnlyDigits( valueMeta.getString( valueData ) ) ) {
return null;
}
return new KettleValidatorException( this, field, KettleValidatorException.ERROR_NON_NUMERIC_DATA,
BaseMessages.getString( PKG, "Validator.Exception.NonNumericDataNotAllowed", field.getFieldName(),
valueMeta.toStringMeta(), valueMeta.getString( valueData ) ), field.getFieldName() );
} | @Test
public void assertNumeric_StringWithDigits() throws Exception {
ValueMetaString metaString = new ValueMetaString( "string-with-digits" );
assertNull( "Strings with digits are allowed", validator.assertNumeric( metaString, "123", new Validation() ) );
} |
public static String findAddress(List<NodeAddress> addresses, NodeAddressType preferredAddressType) {
if (addresses == null) {
return null;
}
Map<String, String> addressMap = addresses.stream()
.collect(Collectors.toMap(NodeAddress::getType, NodeAddress::getAddress, (address1, address2) -> {
LOGGER.warnOp("Found multiple addresses with the same type. Only the first address '{}' will be used.", address1);
return address1;
}));
// If user set preferred address type, we should check it first
if (preferredAddressType != null && addressMap.containsKey(preferredAddressType.toValue())) {
return addressMap.get(preferredAddressType.toValue());
}
if (addressMap.containsKey("ExternalDNS")) {
return addressMap.get("ExternalDNS");
} else if (addressMap.containsKey("ExternalIP")) {
return addressMap.get("ExternalIP");
} else if (addressMap.containsKey("InternalDNS")) {
return addressMap.get("InternalDNS");
} else if (addressMap.containsKey("InternalIP")) {
return addressMap.get("InternalIP");
} else if (addressMap.containsKey("Hostname")) {
return addressMap.get("Hostname");
}
return null;
} | @Test
public void testFindAddressReturnsExternalAddress() {
String address = NodeUtils.findAddress(ADDRESSES, null);
assertThat(address, is("my.external.address"));
} |
@Override
public StatusOutputStream<Void> write(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException {
try {
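// Transfer in binary mode so ASCII line-ending translation cannot corrupt the upload.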
if(!session.getClient().setFileType(FTPClient.BINARY_FILE_TYPE)) {
throw new FTPException(session.getClient().getReplyCode(), session.getClient().getReplyString());
}
final OutputStream out = new DataConnectionActionExecutor(session).data(new DataConnectionAction<OutputStream>() {
@Override
public OutputStream execute() throws BackgroundException {
try {
if(status.isAppend()) {
if(!status.isExists()) {
log.warn(String.format("Allocate %d bytes for file %s", status.getOffset(), file));
session.getClient().allocate((int) status.getOffset());
}
return session.getClient().appendFileStream(file.getAbsolute());
}
else {
return session.getClient().storeFileStream(file.getAbsolute());
}
}
catch(IOException e) {
throw new FTPExceptionMappingService().map(e);
}
}
});
return new ReadReplyOutputStream(out, status);
}
catch(IOException e) {
throw new FTPExceptionMappingService().map("Upload {0} failed", e, file);
}
} | @Test
@Ignore
public void testWriteRangeEndFirst() throws Exception {
final FTPWriteFeature feature = new FTPWriteFeature(session);
final Path test = new Path(new FTPWorkdirService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
final byte[] content = RandomUtils.nextBytes(2048);
{
// Write end of file first
final TransferStatus status = new TransferStatus();
status.setLength(1024L);
status.setOffset(1024L);
status.setAppend(true);
final OutputStream out = feature.write(test, status, new DisabledConnectionCallback());
new StreamCopier(status, status).withOffset(status.getOffset()).withLimit(status.getLength()).transfer(new ByteArrayInputStream(content), out);
out.close();
}
assertTrue(new DefaultFindFeature(session).find(test));
assertEquals(content.length, new DefaultAttributesFinderFeature(session).find(test).getSize());
{
// Write beginning of file up to the last chunk
final TransferStatus status = new TransferStatus();
status.setExists(true);
status.setOffset(0L);
status.setLength(1024L);
status.setAppend(true);
final OutputStream out = feature.write(test, status, new DisabledConnectionCallback());
new StreamCopier(status, status).withOffset(status.getOffset()).withLimit(status.getLength()).transfer(new ByteArrayInputStream(content), out);
out.close();
}
final ByteArrayOutputStream out = new ByteArrayOutputStream(content.length);
IOUtils.copy(new FTPReadFeature(session).read(test, new TransferStatus().withLength(content.length), new DisabledConnectionCallback()), out);
assertArrayEquals(content, out.toByteArray());
assertTrue(new DefaultFindFeature(session).find(test));
assertEquals(content.length, new DefaultAttributesFinderFeature(session).find(test).getSize());
new FTPDeleteFeature(session).delete(Collections.singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback());
} |
@Override
protected int compareFirst(final Path p1, final Path p2) {
// Version with no duplicate flag first
final int duplicateComparison = Boolean.compare(!p1.attributes().isDuplicate(), !p2.attributes().isDuplicate());
if(0 == duplicateComparison) {
final int timestampComparison = super.compareFirst(p1, p2);
if(0 == timestampComparison) {
return StringUtils.compare(p1.attributes().getVersionId(), p2.attributes().getVersionId());
}
return timestampComparison;
}
return duplicateComparison;
} | @Test
public void testCompareFirst() {
final Path p1 = new Path("/a", EnumSet.of(Path.Type.file));
final Path p2 = new Path("/b", EnumSet.of(Path.Type.file));
assertEquals(0, new VersionsComparator(true).compareFirst(p1, p2));
p1.attributes().setDuplicate(true);
assertEquals(-1, new VersionsComparator(true).compareFirst(p1, p2));
final long ts = System.currentTimeMillis();
p1.attributes().setModificationDate(ts);
p2.attributes().setModificationDate(ts);
assertEquals(-1, new VersionsComparator(true).compareFirst(p1, p2));
p2.attributes().setModificationDate(ts - 1000);
assertEquals(-1, new VersionsComparator(true).compareFirst(p1, p2));
p1.attributes().setDuplicate(false);
assertEquals(1, new VersionsComparator(true).compareFirst(p1, p2));
} |
public static Instant toInstant(Date date) {
return null == date ? null : date.toInstant();
} | @Test
public void toInstantTest() {
final LocalDateTime localDateTime = LocalDateTime.parse("2017-05-06T08:30:00", DateTimeFormatter.ISO_DATE_TIME);
Instant instant = DateUtil.toInstant(localDateTime);
assertEquals("2017-05-06T00:30:00Z", instant.toString());
final LocalDate localDate = localDateTime.toLocalDate();
instant = DateUtil.toInstant(localDate);
assertNotNull(instant);
final LocalTime localTime = localDateTime.toLocalTime();
instant = DateUtil.toInstant(localTime);
assertNotNull(instant);
} |
public static <InputT> Builder<InputT> withoutHold(AppliedPTransform<?, ?, ?> transform) {
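// A hold at TIMESTAMP_MAX_VALUE is the sentinel for "no output watermark hold".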
return new Builder(transform, BoundedWindow.TIMESTAMP_MAX_VALUE);
} | @Test
public void withAdditionalOutputProducedOutputs() {
TransformResult<Integer> result =
StepTransformResult.<Integer>withoutHold(transform)
.withAdditionalOutput(OutputType.PCOLLECTION_VIEW)
.build();
assertThat(result.getOutputTypes(), containsInAnyOrder(OutputType.PCOLLECTION_VIEW));
} |
public Type parse(final String schema) {
try {
final TypeContext typeContext = parseTypeContext(schema);
return getType(typeContext);
} catch (final ParsingException e) {
throw new KsqlStatementException(
"Failed to parse schema",
"Failed to parse: " + schema,
schema,
KsqlStatementException.Problem.STATEMENT,
e
);
}
} | @Test
public void shouldGetTypeFromDecimal() {
// Given:
final String schemaString = "DECIMAL(2, 1)";
// When:
final Type type = parser.parse(schemaString);
// Then:
assertThat(type, is(new Type(SqlTypes.decimal(2, 1))));
} |
public static void writeStringToFile(File file, String data, String encoding) throws IOException {
OutputStream os = null;
try {
os = new FileOutputStream(file);
os.write(data.getBytes(encoding));
} finally {
if (null != os) {
os.close();
}
}
} | @Test
public void testWriteStringToFile() throws Exception {
File file = new File(testRootDir, "testWriteStringToFile");
assertTrue(!file.exists());
IOTinyUtils.writeStringToFile(file, "testWriteStringToFile", StandardCharsets.UTF_8.name());
assertTrue(file.exists());
} |
public static <T extends ScanTask> List<ScanTaskGroup<T>> planTaskGroups(
List<T> tasks, long splitSize, int lookback, long openFileCost) {
return Lists.newArrayList(
planTaskGroups(CloseableIterable.withNoopClose(tasks), splitSize, lookback, openFileCost));
} | @Test
public void testTaskGroupPlanningCorruptedOffset() {
DataFile dataFile =
DataFiles.builder(TestBase.SPEC)
.withPath("/path/to/data-a.parquet")
.withFileSizeInBytes(10)
.withPartitionPath("data_bucket=0")
.withRecordCount(1)
.withSplitOffsets(
ImmutableList.of(2L, 12L)) // the last offset is beyond the end of the file
.build();
ResidualEvaluator residualEvaluator =
ResidualEvaluator.of(TestBase.SPEC, Expressions.equal("id", 1), false);
BaseFileScanTask baseFileScanTask =
new BaseFileScanTask(
dataFile,
null,
SchemaParser.toJson(TestBase.SCHEMA),
PartitionSpecParser.toJson(TestBase.SPEC),
residualEvaluator);
List<BaseFileScanTask> baseFileScanTasks = ImmutableList.of(baseFileScanTask);
int taskCount = 0;
for (ScanTaskGroup<BaseFileScanTask> task :
TableScanUtil.planTaskGroups(CloseableIterable.withNoopClose(baseFileScanTasks), 1, 1, 0)) {
for (FileScanTask fileScanTask : task.tasks()) {
DataFile taskDataFile = fileScanTask.file();
assertThat(taskDataFile.splitOffsets()).isNull();
taskCount++;
}
}
// 10 tasks since the split offsets are ignored and there are 1-byte splits for a 10-byte file
assertThat(taskCount).isEqualTo(10);
} |
@Override
public String[] getCompressionProviderNames() {
ArrayList<String> providerNames = new ArrayList<String>();
List<PluginInterface> providers = getPlugins();
if ( providers != null ) {
for ( PluginInterface plugin : providers ) {
try {
CompressionProvider provider = PluginRegistry.getInstance().loadClass( plugin, CompressionProvider.class );
if ( provider != null ) {
providerNames.add( provider.getName() );
}
} catch ( Exception e ) {
// Do nothing here; if we can't load the provider, don't add it to the list
}
}
}
return providerNames.toArray( new String[providerNames.size()] );
} | @Test
public void getCoreProviderNames() {
@SuppressWarnings( "serial" )
final HashMap<String, Boolean> foundProvider = new HashMap<String, Boolean>() {
{
put( "None", false );
put( "Zip", false );
put( "GZip", false );
put( "Snappy", false );
put( "Hadoop-snappy", false );
}
};
String[] providers = factory.getCompressionProviderNames();
assertNotNull( providers );
for ( String provider : providers ) {
assertNotNull( foundProvider.get( provider ) );
foundProvider.put( provider, true );
}
boolean foundAllProviders = true;
for ( Boolean b : foundProvider.values() ) {
foundAllProviders = foundAllProviders && b;
}
assertTrue( foundAllProviders );
} |
@Override
public Optional<DatabaseAdminExecutor> create(final SQLStatementContext sqlStatementContext) {
SQLStatement sqlStatement = sqlStatementContext.getSqlStatement();
if (sqlStatement instanceof ShowFunctionStatusStatement) {
return Optional.of(new ShowFunctionStatusExecutor((ShowFunctionStatusStatement) sqlStatement));
}
if (sqlStatement instanceof ShowProcedureStatusStatement) {
return Optional.of(new ShowProcedureStatusExecutor((ShowProcedureStatusStatement) sqlStatement));
}
if (sqlStatement instanceof ShowTablesStatement) {
return Optional.of(new ShowTablesExecutor((ShowTablesStatement) sqlStatement, sqlStatementContext.getDatabaseType()));
}
return Optional.empty();
} | @Test
void assertCreateWithSelectStatementForCurrentUser() {
MySQLSelectStatement selectStatement = mock(MySQLSelectStatement.class);
when(selectStatement.getFrom()).thenReturn(Optional.empty());
ProjectionsSegment projectionsSegment = mock(ProjectionsSegment.class);
when(projectionsSegment.getProjections()).thenReturn(Collections.singletonList(new ExpressionProjectionSegment(0, 10, "CURRENT_USER()")));
when(selectStatement.getProjections()).thenReturn(projectionsSegment);
when(sqlStatementContext.getSqlStatement()).thenReturn(selectStatement);
Optional<DatabaseAdminExecutor> actual = new MySQLAdminExecutorCreator().create(sqlStatementContext, "select CURRENT_USER()", "", Collections.emptyList());
assertTrue(actual.isPresent());
assertThat(actual.get(), instanceOf(ShowCurrentUserExecutor.class));
} |
public static int hashToIndex(int hash, int length) {
checkPositive("length", length);
if (hash == Integer.MIN_VALUE) {
return 0;
}
return abs(hash) % length;
} | @Test(expected = IllegalArgumentException.class)
public void hashToIndex_whenItemCountZero() {
hashToIndex(Integer.MIN_VALUE, 0);
} |
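The Integer.MIN_VALUE guard in hashToIndex above exists because of a JDK arithmetic quirk: Math.abs overflows on exactly that input, so abs(hash) % length could return a negative index. A minimal standalone sketch of the failure the guard prevents (the demo class name is illustrative):
public class AbsOverflowDemo {
    public static void main(String[] args) {
        int hash = Integer.MIN_VALUE;
        // +2^31 is not representable in a 32-bit int, so abs overflows back to MIN_VALUE.
        System.out.println(Math.abs(hash));     // -2147483648
        // Java's % keeps the dividend's sign, yielding an invalid negative index.
        System.out.println(Math.abs(hash) % 7); // -2
    }
}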
public static void mergeParams(
Map<String, ParamDefinition> params,
Map<String, ParamDefinition> paramsToMerge,
MergeContext context) {
if (paramsToMerge == null) {
return;
}
Stream.concat(params.keySet().stream(), paramsToMerge.keySet().stream())
.forEach(
name -> {
ParamDefinition paramToMerge = paramsToMerge.get(name);
if (paramToMerge == null) {
return;
}
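// Literal MAP params merge recursively, literal STRING_MAP entries merge shallowly, and every other type is overwritten by the incoming definition.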
if (paramToMerge.getType() == ParamType.MAP && paramToMerge.isLiteral()) {
Map<String, ParamDefinition> baseMap = mapValueOrEmpty(params, name);
Map<String, ParamDefinition> toMergeMap = mapValueOrEmpty(paramsToMerge, name);
mergeParams(
baseMap,
toMergeMap,
MergeContext.copyWithParentMode(
context, params.getOrDefault(name, paramToMerge).getMode()));
params.put(
name,
buildMergedParamDefinition(
name, paramToMerge, params.get(name), context, baseMap));
} else if (paramToMerge.getType() == ParamType.STRING_MAP
&& paramToMerge.isLiteral()) {
Map<String, String> baseMap = stringMapValueOrEmpty(params, name);
Map<String, String> toMergeMap = stringMapValueOrEmpty(paramsToMerge, name);
baseMap.putAll(toMergeMap);
params.put(
name,
buildMergedParamDefinition(
name, paramToMerge, params.get(name), context, baseMap));
} else {
params.put(
name,
buildMergedParamDefinition(
name, paramToMerge, params.get(name), context, paramToMerge.getValue()));
}
});
} | @Test
public void testMergeDisallowLessRestrictiveMode() throws JsonProcessingException {
Map<String, ParamDefinition> allParams =
parseParamDefMap(
"{'tomerge': {'type': 'STRING','value': 'hello', 'mode': 'MUTABLE_ON_START'}}");
Map<String, ParamDefinition> paramsToMerge =
parseParamDefMap("{'tomerge': {'type': 'STRING', 'value': 'goodbye', 'mode': 'MUTABLE'}}");
AssertHelper.assertThrows(
"Should not allow setting to less strict mode",
MaestroValidationException.class,
"Cannot modify param mode to be less strict for parameter [tomerge] from [MUTABLE_ON_START] to [MUTABLE]",
() -> ParamsMergeHelper.mergeParams(allParams, paramsToMerge, definitionContext));
} |
protected void validateImpl(OtaPackageInfo otaPackageInfo) {
validateString("OtaPackage title", otaPackageInfo.getTitle());
if (otaPackageInfo.getTenantId() == null) {
throw new DataValidationException("OtaPackage should be assigned to tenant!");
} else {
if (!getTenantService().tenantExists(otaPackageInfo.getTenantId())) {
throw new DataValidationException("OtaPackage is referencing to non-existent tenant!");
}
}
if (otaPackageInfo.getDeviceProfileId() != null) {
DeviceProfile deviceProfile = getDeviceProfileDao().findById(otaPackageInfo.getTenantId(), otaPackageInfo.getDeviceProfileId().getId());
if (deviceProfile == null) {
throw new DataValidationException("OtaPackage is referencing to non-existent device profile!");
}
}
if (otaPackageInfo.getType() == null) {
throw new DataValidationException("Type should be specified!");
}
if (StringUtils.isEmpty(otaPackageInfo.getVersion())) {
throw new DataValidationException("OtaPackage version should be specified!");
}
if (otaPackageInfo.getTitle().length() > 255) {
throw new DataValidationException("The length of title should be equal or shorter than 255");
}
if (otaPackageInfo.getVersion().length() > 255) {
throw new DataValidationException("The length of version should be equal or shorter than 255");
}
} | @Test
void testValidateNameInvocation() {
OtaPackageInfo otaPackageInfo = new OtaPackageInfo();
otaPackageInfo.setTitle("fw");
otaPackageInfo.setVersion("1.0");
otaPackageInfo.setType(OtaPackageType.FIRMWARE);
otaPackageInfo.setTenantId(tenantId);
validator.validateImpl(otaPackageInfo);
verify(validator).validateString("OtaPackage title", otaPackageInfo.getTitle());
} |
public int doWork()
{
final long nowNs = nanoClock.nanoTime();
trackTime(nowNs);
int workCount = 0;
workCount += processTimers(nowNs);
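// Skip polling for new client commands while an asynchronous command is still in flight.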
if (!asyncClientCommandInFlight)
{
workCount += clientCommandAdapter.receive();
}
workCount += drainCommandQueue();
workCount += trackStreamPositions(workCount, nowNs);
workCount += nameResolver.doWork(cachedEpochClock.time());
workCount += freeEndOfLifeResources(ctx.resourceFreeLimit());
return workCount;
} | @Test
void shouldUseExistingChannelEndpointOnAddSubscriptionWithSameTagId()
{
final long id1 = driverProxy.addSubscription(CHANNEL_4000_TAG_ID_1, STREAM_ID_1);
final long id2 = driverProxy.addSubscription(CHANNEL_TAG_ID_1, STREAM_ID_1);
driverConductor.doWork();
driverConductor.doWork();
verify(mockErrorHandler, never()).onError(any());
verify(receiverProxy).registerReceiveChannelEndpoint(any());
driverProxy.removeSubscription(id1);
driverProxy.removeSubscription(id2);
driverConductor.doWork();
driverConductor.doWork();
verify(receiverProxy).closeReceiveChannelEndpoint(any());
} |
@PostMapping("/admin")
public Object createAdminUser(@RequestParam(required = false) String password) {
if (AuthSystemTypes.NACOS.name().equalsIgnoreCase(authConfigs.getNacosAuthSystemType())) {
if (iAuthenticationManager.hasGlobalAdminRole()) {
return RestResultUtils.failed(HttpStatus.CONFLICT.value(), "have admin user cannot use it");
}
if (StringUtils.isBlank(password)) {
password = PasswordGeneratorUtil.generateRandomPassword();
}
String username = AuthConstants.DEFAULT_USER;
userDetailsService.createUser(username, PasswordEncoderUtil.encode(password));
roleService.addAdminRole(username);
ObjectNode result = JacksonUtils.createEmptyJsonNode();
result.put(AuthConstants.PARAM_USERNAME, username);
result.put(AuthConstants.PARAM_PASSWORD, password);
return result;
} else {
return RestResultUtils.failed(HttpStatus.NOT_IMPLEMENTED.value(), "not support");
}
} | @Test
void testCreateAdminUser1() {
when(authConfigs.getNacosAuthSystemType()).thenReturn(AuthSystemTypes.NACOS.name());
when(authenticationManager.hasGlobalAdminRole()).thenReturn(true);
RestResult<String> result = (RestResult<String>) userController.createAdminUser("test");
assertEquals(HttpStatus.CONFLICT.value(), result.getCode());
} |
public static String getTypeStrFromProto(Descriptors.FieldDescriptor desc) {
switch (desc.getJavaType()) {
case INT:
return "Integer";
case LONG:
return "Long";
case STRING:
return "String";
case FLOAT:
return "Float";
case DOUBLE:
return "Double";
case BYTE_STRING:
return "ByteString";
case BOOLEAN:
return "Boolean";
case ENUM:
return getFullJavaNameForEnum(desc.getEnumType());
case MESSAGE:
if (desc.isMapField()) {
// map
final Descriptors.FieldDescriptor key = desc.getMessageType().findFieldByName("key");
final Descriptors.FieldDescriptor value = desc.getMessageType().findFieldByName("value");
// key and value cannot be repeated
String keyTypeStr = getTypeStrFromProto(key);
String valueTypeStr = getTypeStrFromProto(value);
return "Map<" + keyTypeStr + "," + valueTypeStr + ">";
} else {
// simple message
return getFullJavaName(desc.getMessageType());
}
default:
throw new RuntimeException("do not support field type: " + desc.getJavaType());
}
} | @Test
public void testGetTypeStrFromProto() throws Exception {
URL jarFile = getClass().getClassLoader().getResource("complex_types.jar");
ClassLoader clsLoader = ProtoBufCodeGenMessageDecoder.loadClass(jarFile.getPath());
Descriptors.Descriptor desc = ProtoBufCodeGenMessageDecoder.getDescriptorForProtoClass(clsLoader,
"org.apache.pinot.plugin.inputformat.protobuf.ComplexTypes$TestMessage");
Assert.assertEquals(ProtoBufUtils.getFullJavaName(desc),
"org.apache.pinot.plugin.inputformat.protobuf.ComplexTypes.TestMessage");
} |
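A worked illustration of the MESSAGE/map branch above; the proto field here is hypothetical and is not part of the ComplexTypes test resource:

// Hypothetical proto definition: message TestMessage { map<string, int32> tags = 1; }
Descriptors.FieldDescriptor tagsField = desc.findFieldByName("tags"); // assumed field name
// The synthetic entry type has key JavaType STRING -> "String" and value
// JavaType INT -> "Integer", so the map branch would return:
String typeStr = ProtoBufUtils.getTypeStrFromProto(tagsField); // "Map<String,Integer>"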
public static ResourceModel processResource(final Class<?> resourceClass)
{
return processResource(resourceClass, null);
} | @Test(expectedExceptions = ResourceConfigException.class)
public void failsOnInvalidBatchFinderMethodBatchParamParameterType() {
@RestLiCollection(name = "batchFinderWithInvalidBatchParamType")
class LocalClass extends CollectionResourceTemplate<Long, EmptyRecord>
{
@BatchFinder(value = "batchFinderWithInvalidBatchParamType", batchParam = "criteria")
public List<EmptyRecord> batchFinderWithInvalidBatchParamType(@QueryParam("criteria") String[] criteria) {
return Collections.emptyList();
}
}
RestLiAnnotationReader.processResource(LocalClass.class);
Assert.fail("#validateBatchFinderMethod should fail throwing a ResourceConfigException");
} |
@Override
public UnderFileSystem create(String path, UnderFileSystemConfiguration conf) {
Preconditions.checkNotNull(path, "Unable to create UnderFileSystem instance:"
+ " URI path should not be null");
if (checkCOSCredentials(conf)) {
try {
return COSUnderFileSystem.createInstance(new AlluxioURI(path), conf);
} catch (Exception e) {
throw Throwables.propagate(e);
}
}
String err = "COS Credentials not available, cannot create COS Under File System.";
throw Throwables.propagate(new IOException(err));
} | @Test
public void createInstanceWithoutCredentials() {
Configuration.unset(PropertyKey.COS_ACCESS_KEY);
Configuration.unset(PropertyKey.COS_SECRET_KEY);
Configuration.unset(PropertyKey.COS_REGION);
Configuration.unset(PropertyKey.COS_APP_ID);
mAlluxioConf = Configuration.global();
mConf = UnderFileSystemConfiguration.defaults(mAlluxioConf);
    try {
      mFactory.create(mCosPath, mConf);
      Assert.fail("Expected creation to fail without COS credentials");
    } catch (Exception e) {
      Assert.assertTrue(e instanceof RuntimeException);
      Assert.assertTrue(e.getMessage().contains("COS Credentials not available, "
          + "cannot create COS Under File System."));
    }
Exception e = Assert.assertThrows(RuntimeException.class, () -> mFactory.create(
mCosPath, mConf));
Assert.assertTrue(e.getMessage().contains("COS Credentials not available, "
+ "cannot create COS Under File System."));
} |
@Override
public CiConfiguration loadConfiguration() {
String pr = system.envVariable("TRAVIS_PULL_REQUEST");
String revision;
if (isBlank(pr) || "false".equals(pr)) {
revision = system.envVariable("TRAVIS_COMMIT");
} else {
revision = system.envVariable("TRAVIS_PULL_REQUEST_SHA");
}
return new CiConfigurationImpl(revision, getName());
} | @Test
public void loadConfiguration_of_branch() {
setEnvVariable("CI", "true");
setEnvVariable("TRAVIS", "true");
setEnvVariable("TRAVIS_COMMIT", "abd12fc");
setEnvVariable("TRAVIS_PULL_REQUEST", "false");
setEnvVariable("TRAVIS_PULL_REQUEST_SHA", "");
assertThat(underTest.loadConfiguration().getScmRevision()).hasValue("abd12fc");
} |
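For contrast, a sketch of the pull-request branch (the PR number and SHA values below are made up): when TRAVIS_PULL_REQUEST holds a PR number, the revision comes from TRAVIS_PULL_REQUEST_SHA instead of TRAVIS_COMMIT.

@Test
public void loadConfiguration_of_pull_request_sketch() {
    setEnvVariable("CI", "true");
    setEnvVariable("TRAVIS", "true");
    setEnvVariable("TRAVIS_COMMIT", "abd12fc");
    setEnvVariable("TRAVIS_PULL_REQUEST", "1234");
    setEnvVariable("TRAVIS_PULL_REQUEST_SHA", "pr5678a");
    assertThat(underTest.loadConfiguration().getScmRevision()).hasValue("pr5678a");
}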
@Override
public PageData<WidgetTypeInfo> findSystemWidgetTypes(WidgetTypeFilter widgetTypeFilter, PageLink pageLink) {
boolean deprecatedFilterEnabled = !DeprecatedFilter.ALL.equals(widgetTypeFilter.getDeprecatedFilter());
boolean deprecatedFilterBool = DeprecatedFilter.DEPRECATED.equals(widgetTypeFilter.getDeprecatedFilter());
boolean widgetTypesEmpty = widgetTypeFilter.getWidgetTypes() == null || widgetTypeFilter.getWidgetTypes().isEmpty();
return DaoUtil.toPageData(
widgetTypeInfoRepository
.findSystemWidgetTypes(
NULL_UUID,
pageLink.getTextSearch(),
widgetTypeFilter.isFullSearch(),
deprecatedFilterEnabled,
deprecatedFilterBool,
widgetTypesEmpty,
widgetTypeFilter.getWidgetTypes() == null ? Collections.emptyList() : widgetTypeFilter.getWidgetTypes(),
widgetTypeFilter.isScadaFirst(),
DaoUtil.toPageable(pageLink, WidgetTypeInfoEntity.SEARCH_COLUMNS_MAP)));
} | @Test
public void testFindSystemWidgetTypesForSameName() throws InterruptedException {
List<WidgetTypeDetails> sameNameList = new ArrayList<>();
for (int i = 0; i < 20; i++) {
Thread.sleep(2);
var widgetType = saveWidgetType(TenantId.SYS_TENANT_ID, "widgetName");
sameNameList.add(widgetType);
widgetTypeList.add(widgetType);
}
sameNameList.sort(Comparator.comparing(BaseWidgetType::getName).thenComparing((BaseWidgetType baseWidgetType) -> baseWidgetType.getId().getId()));
List<WidgetTypeInfo> expected = sameNameList.stream().map(WidgetTypeInfo::new).collect(Collectors.toList());
PageData<WidgetTypeInfo> widgetTypesFirstPage = widgetTypeDao.findSystemWidgetTypes(
WidgetTypeFilter.builder()
.tenantId(TenantId.SYS_TENANT_ID)
.fullSearch(true)
.deprecatedFilter(DeprecatedFilter.ALL)
.widgetTypes(Collections.singletonList("static")).build(),
new PageLink(10, 0, null, new SortOrder("name")));
assertEquals(10, widgetTypesFirstPage.getData().size());
assertThat(widgetTypesFirstPage.getData()).containsExactlyElementsOf(expected.subList(0, 10));
PageData<WidgetTypeInfo> widgetTypesSecondPage = widgetTypeDao.findSystemWidgetTypes(WidgetTypeFilter.builder()
.tenantId(TenantId.SYS_TENANT_ID)
.fullSearch(true)
.deprecatedFilter(DeprecatedFilter.ALL)
.widgetTypes(Collections.singletonList("static")).build(),
new PageLink(10, 1, null, new SortOrder("name")));
assertEquals(10, widgetTypesSecondPage.getData().size());
assertThat(widgetTypesSecondPage.getData()).containsExactlyElementsOf(expected.subList(10, 20));
} |
public ExportMessagesCommand buildWithSearchOnly(Search search, ResultFormat resultFormat) {
Query query = queryFrom(search);
return builderFrom(resultFormat)
.timeRange(resultFormat.timerange().orElse(toAbsolute(query.timerange())))
.queryString(queryStringFrom(search, query))
.usedSearchFilters(search.queries().stream()
.map(Query::filters)
.flatMap(List::stream)
.collect(Collectors.toList())
)
.streams(query.usedStreamIds())
.build();
} | @Test
void searchWithMultipleQueriesLeadsToExceptionIfNoSearchTypeProvided() {
Search s = searchWithQueries(org.graylog.plugins.views.search.TestData.validQueryBuilder().build(), org.graylog.plugins.views.search.TestData.validQueryBuilder().build());
assertThatExceptionOfType(ExportException.class)
.isThrownBy(() -> sut.buildWithSearchOnly(s, ResultFormat.builder().build()))
.withMessageContaining("multiple queries");
} |
public boolean isIp6() {
return address.isIp6();
} | @Test
public void testIsIp6() {
IpPrefix ipPrefix;
// IPv4
ipPrefix = IpPrefix.valueOf("0.0.0.0/0");
assertFalse(ipPrefix.isIp6());
// IPv6
ipPrefix = IpPrefix.valueOf("::/0");
assertTrue(ipPrefix.isIp6());
} |
@Override
public String attemptRequest() throws RemoteServiceException {
evaluateState();
if (state == State.OPEN) {
// return cached response if the circuit is in OPEN state
return this.lastFailureResponse;
} else {
// Make the API request if the circuit is not OPEN
try {
//In a real application, this would be run in a thread and the timeout
//parameter of the circuit breaker would be utilized to know if service
//is working. Here, we simulate that based on server response itself
var response = service.call();
// Yay!! the API responded fine. Let's reset everything.
recordSuccess();
return response;
} catch (RemoteServiceException ex) {
recordFailure(ex.getMessage());
throw ex;
}
}
} | @Test
void testApiResponses() throws RemoteServiceException {
RemoteService mockService = new RemoteService() {
@Override
public String call() throws RemoteServiceException {
return "Remote Success";
}
};
var circuitBreaker = new DefaultCircuitBreaker(mockService, 1, 1, 100);
    //Call with the parameter start_time set far enough in the past that the service
    //replies with "Ok". Also, state is CLOSED at the start.
    var serviceStartTime = System.nanoTime() - 60L * 1000 * 1000 * 1000;
    var response = circuitBreaker.attemptRequest();
    assertEquals("Remote Success", response);
} |
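A minimal sketch of the failure path using the same types (the constructor's parameter order and thresholds are assumed from the test above):

RemoteService failingService = () -> {
    throw new RemoteServiceException("Remote service is down");
};
var breaker = new DefaultCircuitBreaker(failingService, 1, 1, 100);
try {
    breaker.attemptRequest(); // first call reaches the service and records the failure
} catch (RemoteServiceException expected) {
    // once enough failures accumulate and the breaker evaluates to OPEN,
    // attemptRequest() returns the cached failure response without calling the service
}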
@Override
public String doSharding(final Collection<String> availableTargetNames, final PreciseShardingValue<Comparable<?>> shardingValue) {
ShardingSpherePreconditions.checkNotNull(shardingValue.getValue(), NullShardingValueException::new);
String tableNameSuffix = String.valueOf(doSharding(parseDate(shardingValue.getValue())));
return ShardingAutoTableAlgorithmUtils.findMatchedTargetName(availableTargetNames, tableNameSuffix, shardingValue.getDataNodeInfo()).orElse(null);
} | @Test
void assertPreciseDoShardingBeyondTheLastOne() {
List<String> availableTargetNames = Arrays.asList("t_order_0", "t_order_1", "t_order_2", "t_order_3", "t_order_4", "t_order_5");
assertThat(shardingAlgorithm.doSharding(availableTargetNames,
new PreciseShardingValue<>("t_order", "create_time", DATA_NODE_INFO, "2021-01-01 00:00:02")), is("t_order_5"));
} |
public static Optional<ExplicitConstructorInvocationStmt> getExplicitConstructorInvocationStmt(final BlockStmt body) {
return body.getStatements().stream()
.filter(ExplicitConstructorInvocationStmt.class::isInstance)
.map(ExplicitConstructorInvocationStmt.class::cast)
.findFirst();
} | @Test
void getExplicitConstructorInvocationStmt() {
BlockStmt body = new BlockStmt();
Optional<ExplicitConstructorInvocationStmt> retrieved = CommonCodegenUtils.getExplicitConstructorInvocationStmt(body);
assertThat(retrieved).isNotNull();
assertThat(retrieved).isNotPresent();
ExplicitConstructorInvocationStmt explicitConstructorInvocationStmt = new ExplicitConstructorInvocationStmt();
body.addStatement(explicitConstructorInvocationStmt);
retrieved = CommonCodegenUtils.getExplicitConstructorInvocationStmt(body);
assertThat(retrieved).isNotNull();
assertThat(retrieved).isPresent();
ExplicitConstructorInvocationStmt retrievedExplicitConstructorInvocationStmt = retrieved.get();
assertThat(retrievedExplicitConstructorInvocationStmt).isEqualTo(explicitConstructorInvocationStmt);
} |
@Override
public ExportResult<ContactsModelWrapper> export(
UUID jobId, TokensAndUrlAuthData authData, Optional<ExportInformation> exportInformation) {
if (exportInformation.isPresent()) {
StringPaginationToken stringPaginationToken = (StringPaginationToken)
exportInformation.get().getPaginationData();
return exportContacts(authData, Optional.ofNullable(stringPaginationToken));
} else {
return exportContacts(authData, Optional.empty());
}
} | @Test
public void exportFirstPage() throws IOException {
setUpSinglePersonResponse();
// Looking at first page, with at least one page after it
listConnectionsResponse.setNextPageToken(NEXT_PAGE_TOKEN);
ExportResult<ContactsModelWrapper> result = contactsService.export(UUID.randomUUID(), null, Optional.empty());
// Check that correct methods were called
verify(connections).list(SELF_RESOURCE);
InOrder inOrder = Mockito.inOrder(getBatchGet);
inOrder.verify(getBatchGet).setResourceNames(Collections.singletonList(RESOURCE_NAME));
inOrder.verify(getBatchGet).setPersonFields(PERSON_FIELDS);
inOrder.verify(getBatchGet).execute();
// Check continuation data
ContinuationData continuationData = (ContinuationData) result.getContinuationData();
assertThat(continuationData.getContainerResources()).isEmpty();
StringPaginationToken paginationToken =
(StringPaginationToken)
((ContinuationData) result.getContinuationData()).getPaginationData();
assertThat(paginationToken.getToken()).isEqualTo(NEXT_PAGE_TOKEN);
// Check that the right number of VCards was returned
JCardReader reader = new JCardReader(result.getExportedData().getVCards());
List<VCard> vCardList = reader.readAll();
assertThat(vCardList.size()).isEqualTo(connectionsList.size());
} |
public Analysis analyze(Statement statement)
{
return analyze(statement, false);
} | @Test
public void testJoinUnnest()
{
analyze("SELECT * FROM (VALUES array[2, 2]) a(x) CROSS JOIN UNNEST(x)");
analyze("SELECT * FROM (VALUES array[2, 2]) a(x) LEFT OUTER JOIN UNNEST(x) ON true");
analyze("SELECT * FROM (VALUES array[2, 2]) a(x) RIGHT OUTER JOIN UNNEST(x) ON true");
analyze("SELECT * FROM (VALUES array[2, 2]) a(x) FULL OUTER JOIN UNNEST(x) ON true");
} |
@Override
@SuppressWarnings("rawtypes")
public void report(SortedMap<String, Gauge> gauges,
SortedMap<String, Counter> counters,
SortedMap<String, Histogram> histograms,
SortedMap<String, Meter> meters,
SortedMap<String, Timer> timers) {
if (loggerProxy.isEnabled(marker)) {
StringBuilder b = new StringBuilder();
for (Entry<String, Gauge> entry : gauges.entrySet()) {
logGauge(b, entry.getKey(), entry.getValue());
}
for (Entry<String, Counter> entry : counters.entrySet()) {
logCounter(b, entry.getKey(), entry.getValue());
}
for (Entry<String, Histogram> entry : histograms.entrySet()) {
logHistogram(b, entry.getKey(), entry.getValue());
}
for (Entry<String, Meter> entry : meters.entrySet()) {
logMeter(b, entry.getKey(), entry.getValue());
}
for (Entry<String, Timer> entry : timers.entrySet()) {
logTimer(b, entry.getKey(), entry.getValue());
}
}
} | @Test
public void reportsTimerValuesDefault() {
final Timer timer = timer();
when(logger.isInfoEnabled(marker)).thenReturn(true);
infoReporter().report(map(),
map(),
map(),
map(),
map("test.another.timer", timer));
verify(logger).info(marker, "type=TIMER, name=prefix.test.another.timer, count=1, min=300.0, max=100.0, " +
"mean=200.0, stddev=400.0, p50=500.0, p75=600.0, p95=700.0, p98=800.0, p99=900.0, p999=1000.0," +
" m1_rate=3.0, m5_rate=4.0, m15_rate=5.0, mean_rate=2.0, rate_unit=events/second, duration_unit=milliseconds");
} |
public static String getWebServiceName(NetworkService networkService) {
if (isWebService(networkService)
&& networkService.getServiceContext().getWebServiceContext().hasSoftware()) {
return Ascii.toLowerCase(
networkService.getServiceContext().getWebServiceContext().getSoftware().getName());
}
return Ascii.toLowerCase(networkService.getServiceName());
} | @Test
public void getWebServiceName_whenWebServiceWithSoftware_returnsWebServiceName() {
assertThat(
NetworkServiceUtils.getWebServiceName(
NetworkService.newBuilder()
.setNetworkEndpoint(forIpAndPort("127.0.0.1", 8080))
.setServiceName("http")
.setServiceContext(
ServiceContext.newBuilder()
.setWebServiceContext(
WebServiceContext.newBuilder()
.setSoftware(Software.newBuilder().setName("jenkins"))))
.build()))
.isEqualTo("jenkins");
} |
@Override
public void configure(ResourceGroup group, SelectionContext<VariableMap> context)
{
Map.Entry<ResourceGroupIdTemplate, ResourceGroupSpec> entry = getMatchingSpec(group, context);
configureGroup(group, entry.getValue());
} | @SuppressWarnings("SimplifiedTestNGAssertion")
@Test
public void testConfiguration() throws IOException
{
ResourceGroupConfigurationManager<VariableMap> manager = parse("resource_groups_config.json");
ResourceGroupId globalId = new ResourceGroupId("global");
ResourceGroup global = new TestingResourceGroup(globalId);
manager.configure(global, new SelectionContext<>(globalId, new VariableMap(ImmutableMap.of("USER", "user"))));
assertEquals(global.getPerQueryLimits().getExecutionTimeLimit(), Optional.of(new Duration(1, HOURS)));
assertEquals(global.getPerQueryLimits().getTotalMemoryLimit(), Optional.of(new DataSize(1, MEGABYTE)));
assertEquals(global.getPerQueryLimits().getCpuTimeLimit(), Optional.of(new Duration(1, HOURS)));
assertEquals(global.getCpuQuotaGenerationMillisPerSecond(), 1000 * 24);
assertEquals(global.getMaxQueuedQueries(), 1000);
assertEquals(global.getHardConcurrencyLimit(), 100);
assertEquals(global.getSchedulingPolicy(), WEIGHTED);
assertEquals(global.getSchedulingWeight(), 0);
assertEquals(global.getJmxExport(), true);
ResourceGroupId subId = new ResourceGroupId(globalId, "sub");
ResourceGroup sub = new TestingResourceGroup(subId);
manager.configure(sub, new SelectionContext<>(subId, new VariableMap(ImmutableMap.of("USER", "user"))));
assertEquals(sub.getSoftMemoryLimit(), new DataSize(2, MEGABYTE));
assertEquals(sub.getHardConcurrencyLimit(), 3);
assertEquals(sub.getMaxQueuedQueries(), 4);
assertNull(sub.getSchedulingPolicy());
assertEquals(sub.getSchedulingWeight(), 5);
assertFalse(sub.getJmxExport());
assertEquals(sub.getPerQueryLimits().getExecutionTimeLimit(), Optional.empty());
assertEquals(sub.getPerQueryLimits().getTotalMemoryLimit(), Optional.empty());
assertEquals(sub.getPerQueryLimits().getCpuTimeLimit(), Optional.empty());
} |
protected List<E> list(CriteriaQuery<E> criteria) throws HibernateException {
return currentSession().createQuery(requireNonNull(criteria)).getResultList();
} | @Test
void returnsUniqueListsFromJpaCriteriaQueries() throws Exception {
when(session.createQuery(criteriaQuery)).thenReturn(query);
when(query.getResultList()).thenReturn(Collections.singletonList("woo"));
assertThat(dao.list(criteriaQuery))
.containsOnly("woo");
} |
static Map<String, String> fromSystemProperties()
{
final HashMap<String, String> result = new HashMap<>();
final Properties properties = System.getProperties();
for (final Map.Entry<Object, Object> entry : properties.entrySet())
{
result.put((String)entry.getKey(), (String)entry.getValue());
}
return result;
} | @Test
void shouldReturnAllSystemProperties()
{
final Map<String, String> values = fromSystemProperties();
assertNotEquals(Collections.emptyMap(), values);
} |
@Override
public List<Catalogue> sort(List<Catalogue> catalogueTree, SortTypeEnum sortTypeEnum) {
log.debug(
"sort catalogue tree based on first letter. catalogueTree: {}, sortTypeEnum: {}",
catalogueTree,
sortTypeEnum);
Collator collator = Collator.getInstance(Locale.CHINA);
return recursionSortCatalogues(catalogueTree, sortTypeEnum, collator);
} | @Test
public void sortDescTest() {
SortTypeEnum sortTypeEnum = SortTypeEnum.DESC;
List<Catalogue> catalogueTree = Lists.newArrayList();
Catalogue catalogue = new Catalogue();
catalogue.setId(1);
catalogue.setName("测试目录");
catalogue.setCreateTime(LocalDateTime.of(2024, 4, 28, 19, 22, 0));
Catalogue catalogue11 = new Catalogue();
catalogue11.setId(2);
catalogue11.setName("bbb");
catalogue11.setCreateTime(LocalDateTime.of(2024, 4, 28, 20, 22, 0));
Catalogue catalogue12 = new Catalogue();
catalogue12.setId(3);
catalogue12.setName("aaa");
catalogue12.setCreateTime(LocalDateTime.of(2024, 4, 28, 21, 22, 0));
catalogue.setChildren(Lists.newArrayList(catalogue12, catalogue11));
Catalogue catalogue2 = new Catalogue();
catalogue2.setId(4);
catalogue2.setName("test");
catalogue2.setCreateTime(LocalDateTime.of(2024, 4, 29, 19, 22, 0));
Catalogue catalogue21 = new Catalogue();
catalogue21.setId(7);
catalogue21.setName("测试目录2");
catalogue21.setCreateTime(LocalDateTime.of(2024, 4, 29, 21, 22, 0));
Catalogue catalogue22 = new Catalogue();
catalogue22.setId(6);
catalogue22.setName("生产任务");
catalogue22.setCreateTime(LocalDateTime.of(2024, 4, 29, 20, 22, 0));
catalogue2.setChildren(Lists.newArrayList(catalogue21, catalogue22));
catalogueTree.add(catalogue2);
catalogueTree.add(catalogue);
/*
input:
-- 4 (2024-04-29 19:22:00) (test)
   -- 7 (2024-04-29 21:22:00) (测试目录2)
   -- 6 (2024-04-29 20:22:00) (生产任务)
-- 1 (2024-04-28 19:22:00) (测试目录)
   -- 3 (2024-04-28 21:22:00) (aaa)
   -- 2 (2024-04-28 20:22:00) (bbb)
output:
-- 1 (2024-04-28 19:22:00) (测试目录)
   -- 2 (2024-04-28 20:22:00) (bbb)
   -- 3 (2024-04-28 21:22:00) (aaa)
-- 4 (2024-04-29 19:22:00) (test)
   -- 6 (2024-04-29 20:22:00) (生产任务)
   -- 7 (2024-04-29 21:22:00) (测试目录2)
*/
List<Catalogue> resultList = catalogueTreeSortFirstLetterStrategyTest.sort(catalogueTree, sortTypeEnum);
List<Integer> resultIdList = CategoryTreeSortStrategyTestUtils.breadthTraverse(resultList);
assertEquals(Lists.newArrayList(1, 4, 2, 3, 6, 7), resultIdList);
} |
private List<Instance> getAllInstancesFromIndex(Service service) {
Set<Instance> result = new HashSet<>();
Set<String> clusters = new HashSet<>();
for (String each : serviceIndexesManager.getAllClientsRegisteredService(service)) {
Optional<InstancePublishInfo> instancePublishInfo = getInstanceInfo(each, service);
if (instancePublishInfo.isPresent()) {
InstancePublishInfo publishInfo = instancePublishInfo.get();
//If it is a BatchInstancePublishInfo type, it will be processed manually and added to the instance list
if (publishInfo instanceof BatchInstancePublishInfo) {
BatchInstancePublishInfo batchInstancePublishInfo = (BatchInstancePublishInfo) publishInfo;
List<Instance> batchInstance = parseBatchInstance(service, batchInstancePublishInfo, clusters);
result.addAll(batchInstance);
} else {
Instance instance = parseInstance(service, instancePublishInfo.get());
result.add(instance);
clusters.add(instance.getClusterName());
}
}
}
// cache clusters of this service
serviceClusterIndex.put(service, clusters);
return new LinkedList<>(result);
} | @Test
void testGetAllInstancesFromIndex() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
Class<ServiceStorage> serviceStorageClass = ServiceStorage.class;
Method getAllInstancesFromIndex = serviceStorageClass.getDeclaredMethod("getAllInstancesFromIndex", Service.class);
getAllInstancesFromIndex.setAccessible(true);
List<Instance> list = (List<Instance>) getAllInstancesFromIndex.invoke(serviceStorage, SERVICE);
assertNotNull(list);
} |
public Optional<String> getUserIdAttribute() {
final Map<Object, Object> attributes = getAttributes();
if (attributes == null) {
return Optional.empty();
}
final Object userId;
// A subject can have more than one principal. If that's the case, the user ID is required to be the first one.
final Object principals = attributes.get(DefaultSubjectContext.PRINCIPALS_SESSION_KEY);
if (principals instanceof Iterable) {
    userId = Iterables.getFirst((Iterable<?>) principals, null);
} else {
    userId = principals;
}
return Optional.ofNullable(userId).map(String::valueOf);
} | @Test
public void noPrincipal() {
assertThat(new MongoDbSession(fields).getUserIdAttribute()).isEmpty();
} |
@Override
public void returnLogicalSlot(LogicalSlot logicalSlot) {
LOG.debug("Returning logical slot to shared slot ({})", physicalSlotRequestId);
Preconditions.checkState(
state != State.RELEASED, "The shared slot has already been released.");
Preconditions.checkState(!logicalSlot.isAlive(), "Returned logical slot must not be alive.");
Preconditions.checkState(
allocatedLogicalSlots.remove(logicalSlot.getSlotRequestId()) != null,
"Trying to remove a logical slot request which has been either already removed or never created.");
tryReleaseExternally();
} | @Test
void testReturnLogicalSlotRejectsUnknownSlot() {
assertThatThrownBy(
() -> {
final TestingPhysicalSlot physicalSlot =
TestingPhysicalSlot.builder().build();
final SharedSlot sharedSlot =
new SharedSlot(
new SlotRequestId(), physicalSlot, false, () -> {});
final LogicalSlot logicalSlot =
new TestingLogicalSlotBuilder().createTestingLogicalSlot();
logicalSlot.releaseSlot(new Exception("test"));
sharedSlot.returnLogicalSlot(logicalSlot);
})
.isInstanceOf(IllegalStateException.class);
} |
@SuppressWarnings("unchecked")
public static Object getMockObject(ExtensionDirector extensionDirector, String mockService, Class serviceType) {
boolean isDefault = ConfigUtils.isDefault(mockService);
if (isDefault) {
mockService = serviceType.getName() + "Mock";
}
Class<?> mockClass;
try {
mockClass = ReflectUtils.forName(mockService);
} catch (Exception e) {
if (!isDefault) { // does not check Spring bean if it is default config.
ExtensionInjector extensionFactory = extensionDirector
.getExtensionLoader(ExtensionInjector.class)
.getAdaptiveExtension();
Object obj = extensionFactory.getInstance(serviceType, mockService);
if (obj != null) {
return obj;
}
}
throw new IllegalStateException(
"Did not find mock class or instance "
+ mockService
+ ", please check if there's mock class or instance implementing interface "
+ serviceType.getName(),
e);
}
if (mockClass == null || !serviceType.isAssignableFrom(mockClass)) {
throw new IllegalStateException(
"The mock class " + mockClass.getName() + " not implement interface " + serviceType.getName());
}
try {
return mockClass.newInstance();
} catch (InstantiationException e) {
throw new IllegalStateException("No default constructor from mock class " + mockClass.getName(), e);
} catch (IllegalAccessException e) {
throw new IllegalStateException(e);
}
} | @Test
void testGetMockObject() {
Assertions.assertEquals(
"",
MockInvoker.getMockObject(
ApplicationModel.defaultModel().getExtensionDirector(), "java.lang.String", String.class));
Assertions.assertThrows(
IllegalStateException.class,
() -> MockInvoker.getMockObject(
ApplicationModel.defaultModel().getExtensionDirector(), "true", String.class));
Assertions.assertThrows(
IllegalStateException.class,
() -> MockInvoker.getMockObject(
ApplicationModel.defaultModel().getExtensionDirector(), "default", String.class));
Assertions.assertThrows(
IllegalStateException.class,
() -> MockInvoker.getMockObject(
ApplicationModel.defaultModel().getExtensionDirector(), "java.lang.String", Integer.class));
Assertions.assertThrows(
IllegalStateException.class,
() -> MockInvoker.getMockObject(
ApplicationModel.defaultModel().getExtensionDirector(),
"java.io.Serializable",
Serializable.class));
} |
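A hedged sketch of the "default" resolution (all names below are hypothetical): the mock class is found by appending "Mock" to the service interface's fully-qualified name.

interface GreetingService {
    String greet();
}

// Picked up for mock="default" because its fully-qualified name is the
// interface name + "Mock" and it has an accessible no-arg constructor:
class GreetingServiceMock implements GreetingService {
    public String greet() {
        return "mock greeting";
    }
}

// MockInvoker.getMockObject(director, "default", GreetingService.class)
// would resolve GreetingServiceMock reflectively and instantiate it.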
@Override
public String getResourceId() {
if (resourceId == null) {
initResourceId();
}
return resourceId;
} | @Test
public void getResourceIdTest() throws SQLException, NoSuchFieldException, IllegalAccessException {
// Disable 'DataSourceProxy.tableMetaExecutor' to prevent unit tests from being affected
Field enableField = TableMetaCacheFactory.class.getDeclaredField("ENABLE_TABLE_META_CHECKER_ENABLE");
enableField.setAccessible(true);
enableField.set(null, false);
final MockDriver mockDriver = new MockDriver();
final String username = "username";
final String jdbcUrl = "jdbc:mock:xxx";
// create data source
final DruidDataSource dataSource = new DruidDataSource();
dataSource.setUrl(jdbcUrl);
dataSource.setDriver(mockDriver);
dataSource.setUsername(username);
dataSource.setPassword("password");
// create data source proxy
final DataSourceProxy proxy = getDataSourceProxy(dataSource);
// get fields
Field resourceIdField = proxy.getClass().getDeclaredField("resourceId");
resourceIdField.setAccessible(true);
Field dbTypeField = proxy.getClass().getDeclaredField("dbType");
dbTypeField.setAccessible(true);
Field userNameField = proxy.getClass().getDeclaredField("userName");
userNameField.setAccessible(true);
Field jdbcUrlField = proxy.getClass().getDeclaredField("jdbcUrl");
jdbcUrlField.setAccessible(true);
// set userName
String userNameFromMetaData = dataSource.getConnection().getMetaData().getUserName();
Assertions.assertEquals(userNameFromMetaData, username);
userNameField.set(proxy, username);
// case: dbType = oracle
{
resourceIdField.set(proxy, null);
dbTypeField.set(proxy, org.apache.seata.sqlparser.util.JdbcConstants.ORACLE);
Assertions.assertEquals("jdbc:mock:xxx/username", proxy.getResourceId(), "dbType=" + dbTypeField.get(proxy));
}
// case: dbType = postgresql
{
resourceIdField.set(proxy, null);
dbTypeField.set(proxy, org.apache.seata.sqlparser.util.JdbcConstants.POSTGRESQL);
Assertions.assertEquals(jdbcUrl, proxy.getResourceId(), "dbType=" + dbTypeField.get(proxy));
resourceIdField.set(proxy, null);
jdbcUrlField.set(proxy, "jdbc:postgresql://mock/postgresql?xxx=1111¤tSchema=schema1,schema2&yyy=1");
Assertions.assertEquals("jdbc:postgresql://mock/postgresql?currentSchema=schema1!schema2", proxy.getResourceId(), "dbType=" + dbTypeField.get(proxy));
resourceIdField.set(proxy, null);
jdbcUrlField.set(proxy, "jdbc:postgresql://192.168.1.123:30100,192.168.1.124:30100?xxx=1111¤tSchema=schema1,schema2&yyy=1");
Assertions.assertEquals("jdbc:postgresql://192.168.1.123:30100|192.168.1.124:30100?currentSchema=schema1!schema2", proxy.getResourceId(), "dbType=" + dbTypeField.get(proxy));
jdbcUrlField.set(proxy, jdbcUrl);
}
// case: dbType = dm
{
resourceIdField.set(proxy, null);
dbTypeField.set(proxy, org.apache.seata.sqlparser.util.JdbcConstants.DM);
Assertions.assertEquals(jdbcUrl, proxy.getResourceId(), "dbType=" + dbTypeField.get(proxy));
resourceIdField.set(proxy, null);
jdbcUrlField.set(proxy, "jdbc:dm://mock/dm?xxx=1111&schema=schema1");
Assertions.assertEquals("jdbc:dm://mock/dm?schema=schema1", proxy.getResourceId(), "dbType=" + dbTypeField.get(proxy));
jdbcUrlField.set(proxy, jdbcUrl);
}
// case: dbType = mysql
{
resourceIdField.set(proxy, null);
dbTypeField.set(proxy, org.apache.seata.sqlparser.util.JdbcConstants.MYSQL);
Assertions.assertEquals(jdbcUrl, proxy.getResourceId(), "dbType=" + dbTypeField.get(proxy));
resourceIdField.set(proxy, null);
jdbcUrlField.set(proxy, "jdbc:mysql:loadbalance://192.168.100.2:3306,192.168.100.3:3306,192.168.100.1:3306/seata");
Assertions.assertEquals("jdbc:mysql:loadbalance://192.168.100.2:3306|192.168.100.3:3306|192.168.100.1:3306/seata", proxy.getResourceId(), "dbType=" + dbTypeField.get(proxy));
jdbcUrlField.set(proxy, jdbcUrl);
}
// case: dbType = sqlserver
{
resourceIdField.set(proxy, null);
dbTypeField.set(proxy, org.apache.seata.sqlparser.util.JdbcConstants.SQLSERVER);
Assertions.assertEquals(jdbcUrl, proxy.getResourceId(), "dbType=" + dbTypeField.get(proxy));
resourceIdField.set(proxy, null);
jdbcUrlField.set(proxy, "jdbc:mock:xxx;database=test");
Assertions.assertEquals("jdbc:mock:xxx;database=test", proxy.getResourceId(), "dbType=" + dbTypeField.get(proxy));
jdbcUrlField.set(proxy, jdbcUrl);
}
} |
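The PostgreSQL and MySQL cases reduce to two character substitutions; a standalone sketch of the same normalisation (rules inferred from the assertions above, not copied from DataSourceProxy):

// host separators ',' become '|', schema separators ',' become '!'
String hosts = "192.168.1.123:30100,192.168.1.124:30100".replace(',', '|');
String schemas = "schema1,schema2".replace(',', '!');
String normalized = "jdbc:postgresql://" + hosts + "?currentSchema=" + schemas;
// -> "jdbc:postgresql://192.168.1.123:30100|192.168.1.124:30100?currentSchema=schema1!schema2"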
public void seek(long position) throws IOException {
final int block = MathUtils.checkedDownCast(position / segmentSize);
final int positionInBlock = (int) (position % segmentSize);
if (position < 0
|| block >= numBlocksTotal
|| (block == numBlocksTotal - 1 && positionInBlock > sizeOfLastBlock)) {
throw new IllegalArgumentException("Position is out of range");
}
clear();
if (reader != null) {
reader.close();
}
reader = ioManager.createBlockChannelReader(channelId);
if (block > 0) {
reader.seekToPosition(((long) block) * segmentSize);
}
this.numBlocksRemaining = this.numBlocksTotal - block;
this.numRequestsRemaining = numBlocksRemaining;
for (int i = 0; i < memory.size(); i++) {
sendReadRequest(memory.get(i));
}
numBlocksRemaining--;
seekInput(
reader.getNextReturnedBlock(),
positionInBlock,
numBlocksRemaining == 0 ? sizeOfLastBlock : segmentSize);
} | @Test
void testSeek() throws Exception {
final int PAGE_SIZE = 16 * 1024;
final int NUM_RECORDS = 120000;
// integers across ~7.3 pages (7 pages = 114,688 bytes, 8 pages = 131,072 bytes)
try (IOManager ioManager = new IOManagerAsync()) {
MemoryManager memMan =
MemoryManagerBuilder.newBuilder()
.setMemorySize(4 * PAGE_SIZE)
.setPageSize(PAGE_SIZE)
.build();
List<MemorySegment> memory = new ArrayList<MemorySegment>();
memMan.allocatePages(new DummyInvokable(), memory, 4);
FileIOChannel.ID channel = ioManager.createChannel();
BlockChannelWriter<MemorySegment> writer = ioManager.createBlockChannelWriter(channel);
FileChannelOutputView out =
new FileChannelOutputView(writer, memMan, memory, memMan.getPageSize());
// write some integers across ~7.3 pages (7 pages = 114,688 bytes, 8 pages = 131,072 bytes)
for (int i = 0; i < NUM_RECORDS; i += 4) {
out.writeInt(i);
}
// close for the first time, make sure all memory returns
out.close();
assertThat(memMan.verifyEmpty()).isTrue();
memMan.allocatePages(new DummyInvokable(), memory, 4);
SeekableFileChannelInputView in =
new SeekableFileChannelInputView(
ioManager, channel, memMan, memory, out.getBytesInLatestSegment());
// read first, complete
for (int i = 0; i < NUM_RECORDS; i += 4) {
assertThat(in.readInt()).isEqualTo(i);
}
assertThatThrownBy(in::readInt)
.withFailMessage("should throw EOF exception")
.isInstanceOf(EOFException.class);
// seek to a quarter of the way into the 3rd page
int i = 2 * PAGE_SIZE + PAGE_SIZE / 4;
in.seek(i);
for (; i < NUM_RECORDS; i += 4) {
assertThat(in.readInt()).isEqualTo(i);
}
assertThatThrownBy(in::readInt)
.withFailMessage("should throw EOF exception")
.isInstanceOf(EOFException.class);
// seek to the last record
i = NUM_RECORDS - 4;
in.seek(i);
for (; i < NUM_RECORDS; i += 4) {
assertThat(in.readInt()).isEqualTo(i);
}
assertThatThrownBy(in::readInt)
.withFailMessage("should throw EOF exception")
.isInstanceOf(EOFException.class);
// seek to the beginning
i = 0;
in.seek(i);
for (; i < NUM_RECORDS; i += 4) {
assertThat(in.readInt()).isEqualTo(i);
}
assertThatThrownBy(in::readInt)
.withFailMessage("should throw EOF exception")
.isInstanceOf(EOFException.class);
// seek to the start of the 2nd page
i = PAGE_SIZE;
in.seek(i);
for (; i < NUM_RECORDS; i += 4) {
assertThat(in.readInt()).isEqualTo(i);
}
assertThatThrownBy(in::readInt)
.withFailMessage("should throw EOF exception")
.isInstanceOf(EOFException.class);
// seek to the start of the 4th page
i = 3 * PAGE_SIZE;
in.seek(i);
for (; i < NUM_RECORDS; i += 4) {
assertThat(in.readInt()).isEqualTo(i);
}
assertThatThrownBy(in::readInt)
.withFailMessage("should throw EOF exception")
.isInstanceOf(EOFException.class);
// seek to the end
i = NUM_RECORDS;
in.seek(i);
assertThatThrownBy(in::readInt)
.withFailMessage("should throw EOF exception")
.isInstanceOf(EOFException.class);
// seek out of bounds
assertThatThrownBy(() -> in.seek(-10))
.withFailMessage("should throw an exception")
.isInstanceOf(IllegalArgumentException.class);
assertThatThrownBy(() -> in.seek(NUM_RECORDS + 1))
.withFailMessage("should throw an exception")
.isInstanceOf(IllegalArgumentException.class);
}
} |
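The block/offset arithmetic in seek() can be checked by hand; a worked example with the 16 KiB pages used in the test:

int segmentSize = 16 * 1024;                          // PAGE_SIZE above
long position = 2L * segmentSize + segmentSize / 4;   // 36,864: the 3rd-page seek
int block = (int) (position / segmentSize);           // 2 -> skip the first two pages
int positionInBlock = (int) (position % segmentSize); // 4,096 bytes into the 3rd page
// the reader seeks the channel to block * segmentSize = 32,768 bytes and then
// discards positionInBlock bytes of the first returned block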
@Override
public ListView<String> getServicesOfServer(int pageNo, int pageSize) throws NacosException {
return getServicesOfServer(pageNo, pageSize, Constants.DEFAULT_GROUP);
} | @Test
void testGetServicesOfServer4() throws NacosException {
//given
int pageNo = 1;
int pageSize = 10;
String groupName = "group1";
AbstractSelector selector = new AbstractSelector("aaa") {
@Override
public String getType() {
return super.getType();
}
};
//when
client.getServicesOfServer(pageNo, pageSize, groupName, selector);
//then
verify(proxy, times(1)).getServiceList(pageNo, pageSize, groupName, selector);
} |
public boolean similarTo(ClusterStateBundle other) {
if (!baselineState.getClusterState().similarToIgnoringInitProgress(other.baselineState.getClusterState())) {
return false;
}
if (clusterFeedIsBlocked() != other.clusterFeedIsBlocked()) {
return false;
}
if (clusterFeedIsBlocked() && !feedBlock.similarTo(other.feedBlock)) {
return false;
}
// Distribution configs must match exactly for bundles to be similar.
// It may be the case that they are both null, in which case they are also considered equal.
if (!Objects.equals(distributionConfig, other.distributionConfig)) {
return false;
}
// FIXME we currently treat mismatching bucket space sets as unchanged to avoid breaking some tests
return derivedBucketSpaceStates.entrySet().stream()
.allMatch(entry -> other.derivedBucketSpaceStates.getOrDefault(entry.getKey(), entry.getValue())
.getClusterState().similarToIgnoringInitProgress(entry.getValue().getClusterState()));
} | @Test
void similarity_test_considers_cluster_feed_block_state() {
var nonBlockingBundle = createTestBundle(false);
var blockingBundle = createTestBundleWithFeedBlock("foo");
var blockingBundleWithOtherDesc = createTestBundleWithFeedBlock("bar");
assertFalse(nonBlockingBundle.similarTo(blockingBundle));
assertFalse(blockingBundle.similarTo(nonBlockingBundle));
assertTrue(blockingBundle.similarTo(blockingBundle));
// We currently consider different descriptions with same blocking status to be similar
assertTrue(blockingBundle.similarTo(blockingBundleWithOtherDesc));
} |
public static TableSchema toTableSchema(Schema schema) {
return new TableSchema().setFields(toTableFieldSchema(schema));
} | @Test
public void testToTableSchema_array_row() {
TableSchema schema = toTableSchema(ARRAY_ROW_TYPE);
assertThat(schema.getFields().size(), equalTo(1));
TableFieldSchema field = schema.getFields().get(0);
assertThat(field.getName(), equalTo("rows"));
assertThat(field.getType(), equalTo(StandardSQLTypeName.STRUCT.toString()));
assertThat(field.getMode(), equalTo(Mode.REPEATED.toString()));
assertThat(
field.getFields(),
containsInAnyOrder(
ID,
VALUE,
NAME,
TIMESTAMP_VARIANT1,
TIMESTAMP_VARIANT2,
TIMESTAMP_VARIANT3,
TIMESTAMP_VARIANT4,
TIMESTAMP_VARIANT5,
TIMESTAMP_VARIANT6,
TIMESTAMP_VARIANT7,
TIMESTAMP_VARIANT8,
DATETIME,
DATETIME_0MS,
DATETIME_0S_NS,
DATETIME_0S_0NS,
DATE,
TIME,
TIME_0MS,
TIME_0S_NS,
TIME_0S_0NS,
VALID,
BINARY,
RAW_BYTES,
NUMERIC,
BOOLEAN,
LONG,
DOUBLE));
} |
public void allocate( int nrfields ) {
fieldName = new String[nrfields];
} | @Test
public void testAllocate() {
CheckSumMeta checkSumMeta = new CheckSumMeta();
Random random = new Random();
int maxAllocation = 50;
// Initially the array should exist but be empty
String[] fieldNames = checkSumMeta.getFieldName();
assertNotNull( fieldNames );
assertEquals( 0, fieldNames.length );
// Some random numbers
for ( int i = 0; i < 10; ++i ) {
int n = random.nextInt( maxAllocation );
checkSumMeta.allocate( n );
fieldNames = checkSumMeta.getFieldName();
assertNotNull( fieldNames );
assertEquals( n, fieldNames.length );
}
// Zero
checkSumMeta.allocate( 0 );
fieldNames = checkSumMeta.getFieldName();
assertNotNull( fieldNames );
assertEquals( 0, fieldNames.length );
} |
@Override
public T build(ConfigurationSourceProvider provider, String path) throws IOException, ConfigurationException {
try (InputStream input = provider.open(requireNonNull(path))) {
final JsonNode node = mapper.readTree(createParser(input));
if (node == null) {
throw ConfigurationParsingException
.builder("Configuration at " + path + " must not be empty")
.build(path);
}
return build(node, path);
} catch (JsonParseException e) {
throw ConfigurationParsingException
.builder("Malformed " + formatName)
.setCause(e)
.setLocation(e.getLocation())
.setDetail(e.getMessage())
.build(path);
}
} | @Test
void overridesArrayWithIndicesReverse() throws Exception {
System.setProperty("dw.type[0]", "overridden");
final Example example = factory.build(configurationSourceProvider, validFile);
assertThat(example.getType())
.containsExactly("overridden", "wizard");
} |
@Override
public Long createCouponTemplate(CouponTemplateCreateReqVO createReqVO) {
// validate the product scope
validateProductScope(createReqVO.getProductScope(), createReqVO.getProductScopeValues());
// insert the template
CouponTemplateDO couponTemplate = CouponTemplateConvert.INSTANCE.convert(createReqVO)
.setStatus(CommonStatusEnum.ENABLE.getStatus());
couponTemplateMapper.insert(couponTemplate);
// return the new id
return couponTemplate.getId();
} | @Test
public void testCreateCouponTemplate_success() {
// prepare parameters
CouponTemplateCreateReqVO reqVO = randomPojo(CouponTemplateCreateReqVO.class,
o -> o.setProductScope(randomEle(PromotionProductScopeEnum.values()).getScope())
.setValidityType(randomEle(CouponTemplateValidityTypeEnum.values()).getType())
.setDiscountType(randomEle(PromotionDiscountTypeEnum.values()).getType()));
// invoke
Long couponTemplateId = couponTemplateService.createCouponTemplate(reqVO);
// assert
assertNotNull(couponTemplateId);
// verify that the persisted record has the expected attributes
CouponTemplateDO couponTemplate = couponTemplateMapper.selectById(couponTemplateId);
assertPojoEquals(reqVO, couponTemplate);
} |
public static synchronized void setCache(Map<Pair<String, String>, String> cache) {
CpeEcosystemCache.cache = cache;
CpeEcosystemCache.changed = new HashMap<>();
} | @Test
public void testSetCache() {
Map<Pair<String, String>, String> map = new HashMap<>();
CpeEcosystemCache.setCache(map);
assertTrue(CpeEcosystemCache.isEmpty());
map = new HashMap<>();
Pair<String, String> key = new Pair<>("apache", "zookeeper");
map.put(key, "java");
CpeEcosystemCache.setCache(map);
assertFalse(CpeEcosystemCache.isEmpty());
} |
public boolean compatibleVersion(String acceptableVersionRange, String actualVersion) {
V pluginVersion = parseVersion(actualVersion);
// Treat a single version "1.4" as a left bound, equivalent to "[1.4,)"
if (acceptableVersionRange.matches(VERSION_REGEX)) {
return ge(pluginVersion, parseVersion(acceptableVersionRange));
}
// Otherwise ensure it is a version range with bounds
Matcher matcher = INTERVAL_PATTERN.matcher(acceptableVersionRange);
Preconditions.checkArgument(matcher.matches(), "invalid version range");
String leftBound = matcher.group("left");
String rightBound = matcher.group("right");
Preconditions.checkArgument(
leftBound != null || rightBound != null, "left and right bounds cannot both be empty");
BiPredicate<V, V> leftComparator =
acceptableVersionRange.startsWith("[") ? VersionChecker::ge : VersionChecker::gt;
BiPredicate<V, V> rightComparator =
acceptableVersionRange.endsWith("]") ? VersionChecker::le : VersionChecker::lt;
if (leftBound != null && !leftComparator.test(pluginVersion, parseVersion(leftBound))) {
return false;
}
if (rightBound != null && !rightComparator.test(pluginVersion, parseVersion(rightBound))) {
return false;
}
return true;
} | @Test
public void testMinimumBound_high() {
Assert.assertTrue(checker.compatibleVersion("2.3", "2.4"));
Assert.assertTrue(checker.compatibleVersion("2.3", "4.0"));
} |
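A few more hedged examples of the range grammar (expected results inferred from the bound handling above, not from additional tests):

Assert.assertTrue(checker.compatibleVersion("[1.4,2.0)", "1.4"));  // '[' = inclusive left bound
Assert.assertFalse(checker.compatibleVersion("[1.4,2.0)", "2.0")); // ')' = exclusive right bound
Assert.assertFalse(checker.compatibleVersion("2.3", "2.2"));       // bare version acts as a left bound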
public Map<String, Object> getKsqlStreamConfigProps(final String applicationId) {
final Map<String, Object> map = new HashMap<>(getKsqlStreamConfigProps());
map.put(
MetricCollectors.RESOURCE_LABEL_PREFIX
+ StreamsConfig.APPLICATION_ID_CONFIG,
applicationId
);
// Streams client metrics aren't used in Confluent deployment
possiblyConfigureConfluentTelemetry(map);
return Collections.unmodifiableMap(map);
} | @Test
public void shouldSetStreamsConfigProducerPrefixedProperties() {
final KsqlConfig ksqlConfig = new KsqlConfig(
Collections.singletonMap(
StreamsConfig.PRODUCER_PREFIX + ProducerConfig.BUFFER_MEMORY_CONFIG, "1024"));
assertThat(ksqlConfig.getKsqlStreamConfigProps()
.get(StreamsConfig.PRODUCER_PREFIX + ProducerConfig.BUFFER_MEMORY_CONFIG),
equalTo(1024L));
assertThat(ksqlConfig.getKsqlStreamConfigProps()
.get(ProducerConfig.BUFFER_MEMORY_CONFIG),
is(nullValue()));
} |
public static <K, V> Reshuffle<K, V> of() {
return new Reshuffle<>();
} | @Test
@Category(ValidatesRunner.class)
public void testReshuffleAfterFixedWindows() {
PCollection<KV<String, Integer>> input =
pipeline
.apply(
Create.of(ARBITRARY_KVS)
.withCoder(KvCoder.of(StringUtf8Coder.of(), VarIntCoder.of())))
.apply(Window.into(FixedWindows.of(Duration.standardMinutes(10L))));
PCollection<KV<String, Integer>> output = input.apply(Reshuffle.of());
PAssert.that(output).containsInAnyOrder(ARBITRARY_KVS);
assertEquals(input.getWindowingStrategy(), output.getWindowingStrategy());
pipeline.run();
} |
List<PickleStepDefinitionMatch> getMatches() {
return matches;
} | @Test
void can_report_ambiguous_step_definitions() {
Feature feature = TestFeatureParser.parse("" +
"Feature: Test feature\n" +
" Scenario: Test scenario\n" +
" Given I have 4 cukes in my belly\n");
Step mockPickleStep = feature.getPickles().get(0).getSteps().get(0);
PickleStepDefinitionMatch mockPickleStepDefinitionMatchOne = mock(PickleStepDefinitionMatch.class);
when(mockPickleStepDefinitionMatchOne.getPattern()).thenReturn("PickleStepDefinitionMatchOne_Pattern");
when(mockPickleStepDefinitionMatchOne.getLocation()).thenReturn("PickleStepDefinitionMatchOne_Location");
PickleStepDefinitionMatch mockPickleStepDefinitionMatchTwo = mock(PickleStepDefinitionMatch.class);
when(mockPickleStepDefinitionMatchTwo.getPattern()).thenReturn("PickleStepDefinitionMatchTwo_Pattern");
when(mockPickleStepDefinitionMatchTwo.getLocation()).thenReturn("PickleStepDefinitionMatchTwo_Location");
List<PickleStepDefinitionMatch> matches = asList(mockPickleStepDefinitionMatchOne,
mockPickleStepDefinitionMatchTwo);
AmbiguousStepDefinitionsException expectedThrown = new AmbiguousStepDefinitionsException(mockPickleStep,
matches);
assertAll(
() -> assertThat(expectedThrown.getMessage(), is(equalTo(
"" +
"\"I have 4 cukes in my belly\" matches more than one step definition:\n" +
" \"PickleStepDefinitionMatchOne_Pattern\" in PickleStepDefinitionMatchOne_Location\n" +
" \"PickleStepDefinitionMatchTwo_Pattern\" in PickleStepDefinitionMatchTwo_Location"))),
() -> assertThat(expectedThrown.getCause(), is(nullValue())),
() -> assertThat(expectedThrown.getMatches(), is(equalTo(matches))));
} |
@Override
public ObjectNode encode(FlowInfo info, CodecContext context) {
checkNotNull(info, "FlowInfo cannot be null");
ObjectNode result = context.mapper().createObjectNode()
.put(FLOW_TYPE, info.flowType())
.put(DEVICE_ID, info.deviceId().toString())
.put(INPUT_INTERFACE_ID, info.inputInterfaceId())
.put(OUTPUT_INTERFACE_ID, info.outputInterfaceId())
.put(SRC_IP, info.srcIp().address().toString())
.put(SRC_IP_PREFIX_LEN, info.srcIp().prefixLength())
.put(DST_IP, info.dstIp().address().toString())
.put(DST_IP_PREFIX_LEN, info.dstIp().prefixLength())
.put(SRC_PORT, info.srcPort().toString())
.put(DST_PORT, info.dstPort().toString())
.put(PROTOCOL, info.protocol())
.put(SRC_MAC, info.srcMac().toString())
.put(DST_MAC, info.dstMac().toString());
if (info.vlanId() != null) {
result.put(VLAN_ID, info.vlanId().toString());
} else {
result.put(VXLAN_ID, info.vxlanId());
}
ObjectNode statsInfoJson =
context.codec(StatsInfo.class).encode(info.statsInfo(), context);
result.put(STATS_INFO, statsInfoJson);
return result;
} | @Test
public void testEncode() {
StatsInfo statsInfo = new DefaultStatsInfo.DefaultBuilder()
.withStartupTime(LONG_VALUE)
.withFstPktArrTime(LONG_VALUE)
.withLstPktOffset(INTEGER_VALUE)
.withPrevAccBytes(LONG_VALUE)
.withPrevAccPkts(INTEGER_VALUE)
.withCurrAccBytes(LONG_VALUE)
.withCurrAccPkts(INTEGER_VALUE)
.withErrorPkts(SHORT_VALUE)
.withDropPkts(SHORT_VALUE)
.build();
FlowInfo flowInfo = new DefaultFlowInfo.DefaultBuilder()
.withFlowType((byte) FLOW_TYPE)
.withDeviceId(DeviceId.deviceId(DEVICE_ID))
.withInputInterfaceId(INPUT_INTERFACE_ID)
.withOutputInterfaceId(OUTPUT_INTERFACE_ID)
.withVlanId(VlanId.vlanId((short) VLAN_ID))
.withSrcIp(IpPrefix.valueOf(
IpAddress.valueOf(SRC_IP_ADDRESS), SRC_IP_PREFIX))
.withDstIp(IpPrefix.valueOf(
IpAddress.valueOf(DST_IP_ADDRESS), DST_IP_PREFIX))
.withSrcPort(TpPort.tpPort(SRC_PORT))
.withDstPort(TpPort.tpPort(DST_PORT))
.withProtocol((byte) PROTOCOL)
.withSrcMac(MacAddress.valueOf(SRC_MAC_ADDRESS))
.withDstMac(MacAddress.valueOf(DST_MAC_ADDRESS))
.withStatsInfo(statsInfo)
.build();
ObjectNode nodeJson = flowInfoCodec.encode(flowInfo, context);
assertThat(nodeJson, matchesFlowInfo(flowInfo));
FlowInfo flowInfoDecoded = flowInfoCodec.decode(nodeJson, context);
new EqualsTester().addEqualityGroup(flowInfo, flowInfoDecoded).testEquals();
} |
public void incrementIndex(int task_index) {
moveTask(task_index, INCREMENT_INDEX);
} | @Test
public void shouldIncrementIndexOfGivenTask() {
Tasks tasks = new Tasks();
AntTask task1 = antTask("b1", "t1", "w1");
tasks.add(task1);
AntTask task2 = antTask("b2", "t2", "w2");
tasks.add(task2);
AntTask task3 = antTask("b3", "t3", "w3");
tasks.add(task3);
tasks.incrementIndex(0);
assertThat(tasks.get(0), is(task2));
assertThat(tasks.get(1), is(task1));
assertThat(tasks.get(2), is(task3));
} |
@Override
public RSet<V> get(final K key) {
String keyHash = keyHash(key);
final String setName = getValuesName(keyHash);
return new RedissonSet<V>(codec, commandExecutor, setName, null) {
@Override
public RFuture<Boolean> addAsync(V value) {
return RedissonSetMultimap.this.putAsync(key, value);
}
@Override
public RFuture<Boolean> addAllAsync(Collection<? extends V> c) {
return RedissonSetMultimap.this.putAllAsync(key, c);
}
@Override
public RFuture<Boolean> removeAsync(Object value) {
return RedissonSetMultimap.this.removeAsync(key, value);
}
@Override
public RFuture<Boolean> removeAllAsync(Collection<?> c) {
if (c.isEmpty()) {
return new CompletableFutureWrapper<>(false);
}
List<Object> args = new ArrayList<Object>(c.size() + 1);
args.add(encodeMapKey(key));
encode(args, c);
return commandExecutor.evalWriteAsync(RedissonSetMultimap.this.getRawName(), codec, RedisCommands.EVAL_BOOLEAN_AMOUNT,
"local count = 0;" +
"for i=2, #ARGV, 5000 do " +
"count = count + redis.call('srem', KEYS[2], unpack(ARGV, i, math.min(i+4999, table.getn(ARGV)))) " +
"end; " +
"if count > 0 then " +
"if redis.call('scard', KEYS[2]) == 0 then " +
"redis.call('hdel', KEYS[1], ARGV[1]); " +
"end; " +
"return 1;" +
"end;" +
"return 0; ",
Arrays.<Object>asList(RedissonSetMultimap.this.getRawName(), setName),
args.toArray());
}
@Override
public RFuture<Boolean> deleteAsync() {
ByteBuf keyState = encodeMapKey(key);
return RedissonSetMultimap.this.fastRemoveAsync(Arrays.asList(keyState),
Arrays.asList(RedissonSetMultimap.this.getRawName(), setName), RedisCommands.EVAL_BOOLEAN_AMOUNT);
}
@Override
public RFuture<Boolean> clearExpireAsync() {
throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
}
@Override
public RFuture<Boolean> expireAsync(long timeToLive, TimeUnit timeUnit, String param, String... keys) {
throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
}
@Override
protected RFuture<Boolean> expireAtAsync(long timestamp, String param, String... keys) {
throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
}
@Override
public RFuture<Long> remainTimeToLiveAsync() {
throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
}
@Override
public RFuture<Void> renameAsync(String newName) {
throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
}
@Override
public RFuture<Boolean> renamenxAsync(String newName) {
throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
}
};
} | @Test
public void testGetRemove() {
RSetMultimap<String, Integer> multimap1 = redisson.getSetMultimap("myMultimap1");
Set<Integer> one = multimap1.get("1");
Set<Integer> two = multimap1.get("2");
Set<Integer> four = multimap1.get("4");
one.add(1);
one.add(2);
one.add(3);
two.add(5);
two.add(6);
four.add(7);
assertThat(one.remove(1)).isTrue();
assertThat(one.remove(2)).isTrue();
assertThat(two.remove(5)).isTrue();
assertThat(four.remove(7)).isTrue();
assertThat(multimap1.keySet()).containsOnly("1", "2");
assertThat(multimap1.keySize()).isEqualTo(2);
assertThat(multimap1.get("1")).containsOnly(3);
assertThat(multimap1.get("2")).containsOnly(6);
} |
public double[][] test(DataFrame data) {
DataFrame x = formula.x(data);
int n = x.nrow();
int ntrees = trees.length;
double[][] prediction = new double[ntrees][n];
for (int j = 0; j < n; j++) {
Tuple xj = x.get(j);
double base = b;
for (int i = 0; i < ntrees; i++) {
base += shrinkage * trees[i].predict(xj);
prediction[i][j] = base;
}
}
return prediction;
} | @Test
public void testAbaloneLS() {
test(Loss.ls(), "abalone", Abalone.formula, Abalone.train, 2.2159);
} |
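A hedged sketch of what the staged prediction matrix is typically used for: choosing the tree count that minimises validation error (the validation frame and the y-accessor are assumptions):

double[][] staged = model.test(validation);   // staged[i][j]: prediction for row j using trees 0..i
double[] truth = formula.y(validation).toDoubleArray(); // accessor assumed
int bestTrees = 0;
double bestMse = Double.POSITIVE_INFINITY;
for (int i = 0; i < staged.length; i++) {
    double mse = 0.0;
    for (int j = 0; j < truth.length; j++) {
        double err = staged[i][j] - truth[j];
        mse += err * err;
    }
    mse /= truth.length;
    if (mse < bestMse) {
        bestMse = mse;
        bestTrees = i + 1;
    }
}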
@Override
public org.apache.kafka.streams.kstream.Transformer<KIn, VIn, Iterable<KeyValue<KOut, VOut>>> get() {
return new org.apache.kafka.streams.kstream.Transformer<KIn, VIn, Iterable<KeyValue<KOut, VOut>>>() {
private final org.apache.kafka.streams.kstream.Transformer<KIn, VIn, KeyValue<KOut, VOut>> transformer = transformerSupplier.get();
@Override
public void init(final ProcessorContext context) {
transformer.init(context);
}
@Override
public Iterable<KeyValue<KOut, VOut>> transform(final KIn key, final VIn value) {
final KeyValue<KOut, VOut> pair = transformer.transform(key, value);
if (pair != null) {
return Collections.singletonList(pair);
}
return Collections.emptyList();
}
@Override
public void close() {
transformer.close();
}
};
} | @Test
public void shouldCallTransformOfAdaptedTransformerAndReturnEmptyIterable() {
when(transformerSupplier.get()).thenReturn(transformer);
when(transformer.transform(key, value)).thenReturn(null);
final TransformerSupplierAdapter<String, String, Integer, Integer> adapter =
new TransformerSupplierAdapter<>(transformerSupplier);
final org.apache.kafka.streams.kstream.Transformer<String, String, Iterable<KeyValue<Integer, Integer>>> adaptedTransformer = adapter.get();
final Iterator<KeyValue<Integer, Integer>> iterator = adaptedTransformer.transform(key, value).iterator();
assertThat(iterator.hasNext(), equalTo(false));
} |
@Override
public Object getValue(final int columnIndex, final Class<?> type) throws SQLException {
if (boolean.class == type) {
return resultSet.getBoolean(columnIndex);
}
if (byte.class == type) {
return resultSet.getByte(columnIndex);
}
if (short.class == type) {
return resultSet.getShort(columnIndex);
}
if (int.class == type) {
return resultSet.getInt(columnIndex);
}
if (long.class == type) {
return resultSet.getLong(columnIndex);
}
if (float.class == type) {
return resultSet.getFloat(columnIndex);
}
if (double.class == type) {
return resultSet.getDouble(columnIndex);
}
if (String.class == type) {
return resultSet.getString(columnIndex);
}
if (BigDecimal.class == type) {
return resultSet.getBigDecimal(columnIndex);
}
if (byte[].class == type) {
return resultSet.getBytes(columnIndex);
}
if (Date.class == type) {
return resultSet.getDate(columnIndex);
}
if (Time.class == type) {
return resultSet.getTime(columnIndex);
}
if (Timestamp.class == type) {
return resultSet.getTimestamp(columnIndex);
}
if (Blob.class == type) {
return resultSet.getBlob(columnIndex);
}
if (Clob.class == type) {
return resultSet.getClob(columnIndex);
}
if (Array.class == type) {
return resultSet.getArray(columnIndex);
}
return resultSet.getObject(columnIndex);
} | @Test
void assertGetValueByShort() throws SQLException {
ResultSet resultSet = mock(ResultSet.class);
when(resultSet.getShort(1)).thenReturn((short) 1);
assertThat(new JDBCStreamQueryResult(resultSet).getValue(1, short.class), is((short) 1));
} |
@Override
public boolean find(final Path file, final ListProgressListener listener) throws BackgroundException {
if(file.isRoot()) {
return true;
}
try {
return new BoxAttributesFinderFeature(session, fileid).find(file, listener) != PathAttributes.EMPTY;
}
catch(NotfoundException e) {
return false;
}
} | @Test
public void testFindNotFound() throws Exception {
final BoxFileidProvider fileid = new BoxFileidProvider(session);
assertFalse(new BoxFindFeature(session, fileid).find(new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file))));
} |
public static SqlSelect parseSelect(String statement) {
SqlNode sqlNode = null;
try {
sqlNode = getCalciteParser(statement).parseQuery();
} catch (SqlParseException e) {
LOG.error("Statements cannot be parsed. {} \n {}", statement, e);
throw new ParseException("Statements cannot be parsed.", e);
}
if (sqlNode instanceof SqlSelect) {
return (SqlSelect) sqlNode;
} else {
throw new ParseException("Only select statements can be parsed.");
}
} | @Test
public void testCalciteRelNode() {
SqlSelect parse =
TransformParser.parseSelect(
"select SUBSTR(id, 1) as uniq_id, * from tb where id is not null");
CalciteSchema rootSchema = CalciteSchema.createRootSchema(true);
Map<String, Object> operand = new HashMap<>();
operand.put("tableName", "tb");
operand.put("columns", CUSTOMERS_SCHEMA.getColumns());
org.apache.calcite.schema.Schema schema =
TransformSchemaFactory.INSTANCE.create(
rootSchema.plus(), "default_schema", operand);
rootSchema.add("default_schema", schema);
SqlTypeFactoryImpl factory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
CalciteCatalogReader calciteCatalogReader =
new CalciteCatalogReader(
rootSchema,
rootSchema.path("default_schema"),
factory,
new CalciteConnectionConfigImpl(new Properties()));
TransformSqlOperatorTable transformSqlOperatorTable = TransformSqlOperatorTable.instance();
SqlValidator validator =
SqlValidatorUtil.newValidator(
transformSqlOperatorTable,
calciteCatalogReader,
factory,
SqlValidator.Config.DEFAULT.withIdentifierExpansion(true));
SqlNode validateSqlNode = validator.validate(parse);
RexBuilder rexBuilder = new RexBuilder(factory);
HepProgramBuilder builder = new HepProgramBuilder();
HepPlanner planner = new HepPlanner(builder.build());
RelOptCluster cluster = RelOptCluster.create(planner, rexBuilder);
SqlToRelConverter.Config config = SqlToRelConverter.config().withTrimUnusedFields(false);
SqlToRelConverter sqlToRelConverter =
new SqlToRelConverter(
null,
validator,
calciteCatalogReader,
cluster,
StandardConvertletTable.INSTANCE,
config);
RelRoot relRoot = sqlToRelConverter.convertQuery(validateSqlNode, false, true);
relRoot = relRoot.withRel(sqlToRelConverter.flattenTypes(relRoot.rel, true));
RelBuilder relBuilder = config.getRelBuilderFactory().create(cluster, null);
relRoot = relRoot.withRel(RelDecorrelator.decorrelateQuery(relRoot.rel, relBuilder));
RelNode relNode = relRoot.rel;
Assertions.assertThat(parse.getSelectList().toString())
.isEqualTo("SUBSTR(`tb`.`id`, 1) AS `uniq_id`, `tb`.`id`, `tb`.`order_id`");
Assertions.assertThat(parse.getWhere().toString()).isEqualTo("`tb`.`id` IS NOT NULL");
Assertions.assertThat(validateSqlNode.toString().replaceAll("\r\n", "\n"))
.isEqualTo(
"SELECT SUBSTR(`tb`.`id`, 1) AS `uniq_id`, `tb`.`id`, `tb`.`order_id`\n"
+ "FROM `default_schema`.`tb` AS `tb`\n"
+ "WHERE `tb`.`id` IS NOT NULL");
} |
public DirectoryEntry lookUp(
File workingDirectory, JimfsPath path, Set<? super LinkOption> options) throws IOException {
checkNotNull(path);
checkNotNull(options);
DirectoryEntry result = lookUp(workingDirectory, path, options, 0);
if (result == null) {
// an intermediate file in the path did not exist or was not a directory
throw new NoSuchFileException(path.toString());
}
return result;
} | @Test
public void testLookup_relative_emptyPath() throws IOException {
assertExists(lookup(""), "/", "work");
} |
public boolean stripLastChar() {
return stripLastChar;
} | @Test
public void testStripLastChar() {
String testString = "abc"; // encoded as 1|00000|00, 001|00010, exactly two bytes
MetaStringEncoder encoder = new MetaStringEncoder('_', '$');
MetaString encodedMetaString = encoder.encode(testString);
assertFalse(encodedMetaString.stripLastChar());
testString =
"abcde"; // encoded as 1|00000|00, 001|00010, 00011|001, 00xxxxxx, stripped last char
encodedMetaString = encoder.encode(testString);
assertTrue(encodedMetaString.stripLastChar());
} |
@Override
public List<String> getInsertParamsValue() {
List<SQLInsertStatement.ValuesClause> valuesList = ast.getValuesList();
List<String> list = new ArrayList<>();
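    // each ValuesClause renders as "VALUES (x, y, ...)"; strip the keyword and the surrounding parentheses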
for (SQLInsertStatement.ValuesClause m : valuesList) {
String values = m.toString().replace("VALUES", "").trim();
        // when all params are constants or have default values, the length of values is less than 1
if (values.length() > 1) {
values = values.substring(1, values.length() - 1);
}
list.add(values);
}
return list;
} | @Test
public void testGetInsertParamsValue() {
String sql = "INSERT INTO t(a) VALUES (?)";
SQLStatement ast = getSQLStatement(sql);
SqlServerInsertRecognizer recognizer = new SqlServerInsertRecognizer(sql, ast);
Assertions.assertEquals("?", recognizer.getInsertParamsValue().get(0));
String sql_2 = "INSERT INTO t(a) VALUES ()";
SQLStatement ast_2 = getSQLStatement(sql_2);
SqlServerInsertRecognizer recognizer_2 = new SqlServerInsertRecognizer(sql_2, ast_2);
Assertions.assertEquals("", recognizer_2.getInsertParamsValue().get(0));
String sql_3 = "INSERT INTO T1 DEFAULT VALUES";
SQLStatement ast_3 = getSQLStatement(sql_3);
SqlServerInsertRecognizer recognizer_3 = new SqlServerInsertRecognizer(sql_3, ast_3);
Assertions.assertTrue(recognizer_3.getInsertParamsValue().isEmpty());
} |
public static String getContent(String content) {
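    // return the payload that follows the first separator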
int index = content.indexOf(WORD_SEPARATOR);
if (index == -1) {
throw new IllegalArgumentException("content does not contain separator");
}
return content.substring(index + 1);
} | @Test
void testGetContent() {
String content = "aa" + WORD_SEPARATOR + "bbb";
String content1 = ContentUtils.getContent(content);
assertEquals("bbb", content1);
} |
public static AgentRuntimeInfo fromServer(Agent agent, boolean registeredAlready, String location,
                                            Long freeDiskSpace, String operatingSystem) {
if (isEmpty(location)) {
throw new RuntimeException("Agent should not register without installation path.");
}
AgentStatus status = Pending;
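    // agents connecting from the server's own host, or those already registered, skip the Pending state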
if (isLocalIpAddress(agent.getIpaddress()) || registeredAlready) {
status = AgentStatus.Idle;
}
AgentRuntimeStatus runtimeStatus = status.getRuntimeStatus();
AgentIdentifier identifier = agent.getAgentIdentifier();
AgentRuntimeInfo runtimeInfo = new AgentRuntimeInfo(identifier, runtimeStatus, location, null);
runtimeInfo.setUsableSpace(freeDiskSpace);
runtimeInfo.operatingSystemName = operatingSystem;
return runtimeInfo;
} | @Test
public void shouldThrowOnEmptyLocation() {
assertThatThrownBy(() -> AgentRuntimeInfo.fromServer(new Agent("uuid", "localhost", "127.0.0.1"), false, "", 0L, "linux"))
.isExactlyInstanceOf(RuntimeException.class)
.hasMessageContaining("Agent should not register without installation path");
} |
public TolerantLongComparison isWithin(long tolerance) {
return new TolerantLongComparison() {
@Override
public void of(long expected) {
Long actual = LongSubject.this.actual;
checkNotNull(
actual, "actual value cannot be null. tolerance=%s expected=%s", tolerance, expected);
checkTolerance(tolerance);
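        // fail unless actual is within [expected - tolerance, expected + tolerance]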
if (!equalWithinTolerance(actual, expected, tolerance)) {
failWithoutActual(
fact("expected", Long.toString(expected)),
butWas(),
fact("outside tolerance", Long.toString(tolerance)));
}
}
};
} | @Test
public void isWithinOf() {
assertThat(20000L).isWithin(0L).of(20000L);
assertThat(20000L).isWithin(1L).of(20000L);
assertThat(20000L).isWithin(10000L).of(20000L);
assertThat(20000L).isWithin(10000L).of(30000L);
assertThat(Long.MIN_VALUE).isWithin(1L).of(Long.MIN_VALUE + 1);
assertThat(Long.MAX_VALUE).isWithin(1L).of(Long.MAX_VALUE - 1);
assertThat(Long.MAX_VALUE / 2).isWithin(Long.MAX_VALUE).of(-Long.MAX_VALUE / 2);
assertThat(-Long.MAX_VALUE / 2).isWithin(Long.MAX_VALUE).of(Long.MAX_VALUE / 2);
assertThatIsWithinFails(20000L, 9999L, 30000L);
assertThatIsWithinFails(20000L, 10000L, 30001L);
assertThatIsWithinFails(Long.MIN_VALUE, 0L, Long.MAX_VALUE);
assertThatIsWithinFails(Long.MAX_VALUE, 0L, Long.MIN_VALUE);
assertThatIsWithinFails(Long.MIN_VALUE, 1L, Long.MIN_VALUE + 2);
assertThatIsWithinFails(Long.MAX_VALUE, 1L, Long.MAX_VALUE - 2);
// Don't fall for rollover
assertThatIsWithinFails(Long.MIN_VALUE, 1L, Long.MAX_VALUE);
assertThatIsWithinFails(Long.MAX_VALUE, 1L, Long.MIN_VALUE);
} |
public static boolean transferLeadership(final ThreadId id, final long logIndex) {
final Replicator r = (Replicator) id.lock();
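    // lock() returns null once the replicator has been destroyed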
if (r == null) {
return false;
}
    // the lock on id is released inside transferLeadership
return r.transferLeadership(logIndex);
} | @Test
public void testTransferLeadership() {
final Replicator r = getReplicator();
this.id.unlock();
assertEquals(0, r.getTimeoutNowIndex());
assertTrue(Replicator.transferLeadership(this.id, 11));
assertEquals(11, r.getTimeoutNowIndex());
assertNull(r.getTimeoutNowInFly());
} |