focal_method | test_case |
---|---|
@Override
public void customize(ServiceInstance serviceInstance, ApplicationModel applicationModel) {
MetadataInfo metadataInfo = serviceInstance.getServiceMetadata();
if (metadataInfo == null || CollectionUtils.isEmptyMap(metadataInfo.getServices())) {
return;
}
// try to load instance params that do not appear in service urls
// TODO, duplicate snippet in ApplicationConfig
Map<String, String> extraParameters = Collections.emptyMap();
Set<InfraAdapter> adapters =
applicationModel.getExtensionLoader(InfraAdapter.class).getSupportedExtensionInstances();
if (CollectionUtils.isNotEmpty(adapters)) {
Map<String, String> inputParameters = new HashMap<>();
inputParameters.put(APPLICATION_KEY, applicationModel.getApplicationName());
for (InfraAdapter adapter : adapters) {
extraParameters = adapter.getExtraAttributes(inputParameters);
}
}
serviceInstance.getMetadata().putAll(extraParameters);
if (CollectionUtils.isNotEmptyMap(metadataInfo.getInstanceParams())) {
serviceInstance.getMetadata().putAll(metadataInfo.getInstanceParams());
}
} | @Test
void testCustomizeWithIncludeFilters() {
ApplicationModel applicationModel = spy(ApplicationModel.defaultModel());
ApplicationConfig applicationConfig = new ApplicationConfig("aa");
doReturn(applicationConfig).when(applicationModel).getCurrentConfig();
DefaultServiceInstance serviceInstance1 =
new DefaultServiceInstance("ServiceInstanceMetadataCustomizerTest", applicationModel);
MetadataInfo metadataInfo = new MetadataInfo();
metadataInfo.addService(
URL.valueOf(
"tri://127.1.1.1:50052/org.apache.dubbo.demo.GreetingService?application=ServiceInstanceMetadataCustomizerTest&env=test&side=provider&group=test"));
serviceInstance1.setServiceMetadata(metadataInfo);
serviceInstanceMetadataCustomizer.customize(serviceInstance1, applicationModel);
Assertions.assertEquals(1, serviceInstance1.getMetadata().size());
Assertions.assertEquals("provider", serviceInstance1.getMetadata(SIDE_KEY));
Assertions.assertNull(serviceInstance1.getMetadata("env"));
Assertions.assertNull(serviceInstance1.getMetadata("application"));
} |
@Operation(summary = "queryUiPluginsByType", description = "QUERY_UI_PLUGINS_BY_TYPE")
@Parameters({
@Parameter(name = "pluginType", description = "pluginType", required = true, schema = @Schema(implementation = PluginType.class)),
})
@GetMapping(value = "/query-by-type")
@ResponseStatus(HttpStatus.CREATED)
@ApiException(QUERY_PLUGINS_ERROR)
public Result queryUiPluginsByType(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "pluginType") PluginType pluginType) {
Map<String, Object> result = uiPluginService.queryUiPluginsByType(pluginType);
return returnDataList(result);
} | @Test
public void testQueryUiPluginsByType() throws Exception {
when(uiPluginService.queryUiPluginsByType(any(PluginType.class)))
.thenReturn(uiPluginServiceResult);
final MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("pluginType", String.valueOf(pluginType));
final MvcResult mvcResult = mockMvc.perform(get("/ui-plugins/query-by-type")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isCreated())
.andExpect(content().contentType(MediaType.APPLICATION_JSON))
.andReturn();
final Result actualResponseContent =
JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
assertThat(actualResponseContent.toString()).isEqualTo(expectResponseContent.toString());
} |
@Override
public void pre(SpanAdapter span, Exchange exchange, Endpoint endpoint) {
String name = getComponentName(endpoint);
span.setComponent(CAMEL_COMPONENT + name);
String scheme = getSchemeName(endpoint);
span.setTag(TagConstants.URL_SCHEME, scheme);
// Including the endpoint URI provides access to any options that may
// have been provided, for subsequent analysis
String uri = endpoint.toString(); // toString will sanitize
span.setTag("camel.uri", uri);
span.setTag(TagConstants.URL_PATH, stripSchemeAndOptions(endpoint));
String query = URISupport.extractQuery(uri);
if (query != null) {
span.setTag(TagConstants.URL_QUERY, query);
}
// enrich with server location details
if (endpoint instanceof EndpointServiceLocation ela) {
String adr = ela.getServiceUrl();
if (adr != null) {
span.setTag(TagConstants.SERVER_ADDRESS, adr);
}
String ap = ela.getServiceProtocol();
if (ap != null) {
span.setTag(TagConstants.SERVER_PROTOCOL, ap);
}
Map<String, String> map = ela.getServiceMetadata();
if (map != null) {
String un = map.get("username");
if (un != null) {
span.setTag(TagConstants.USER_NAME, un);
}
String id = map.get("clientId");
if (id != null) {
span.setTag(TagConstants.USER_ID, id);
}
String region = map.get("region");
if (region != null) {
span.setTag(TagConstants.SERVER_REGION, region);
}
}
}
} | @Test
public void testPre() {
Endpoint endpoint = Mockito.mock(Endpoint.class);
Mockito.when(endpoint.getEndpointUri()).thenReturn(TEST_URI);
Mockito.when(endpoint.toString()).thenReturn(TEST_URI);
SpanDecorator decorator = new AbstractSpanDecorator() {
@Override
public String getComponent() {
return null;
}
@Override
public String getComponentClassName() {
return null;
}
};
MockSpanAdapter span = new MockSpanAdapter();
decorator.pre(span, null, endpoint);
assertEquals("camel-test", span.tags().get(TagConstants.COMPONENT));
assertEquals("test", span.tags().get(TagConstants.URL_SCHEME));
assertEquals("uri", span.tags().get(TagConstants.URL_PATH));
assertEquals("query=hello", span.tags().get(TagConstants.URL_QUERY));
} |
@Override
public <ReqT, RespT> ServerCall.Listener<ReqT> interceptCall(
ServerCall<ReqT, RespT> call,
final Metadata requestHeaders,
ServerCallHandler<ReqT, RespT> next) {
// handle header
if (requestHeaders != null) {
extractTrafficTagFromCarrier(requestHeaders);
}
return next.startCall(new SimpleForwardingServerCall<ReqT, RespT>(call) {
}, requestHeaders);
} | @Test
public void testInterceptCall() {
// Configure parameters required by the grpc interceptor
ServerCall call = Mockito.mock(ServerCall.class);
ServerCallHandler handler = Mockito.mock(ServerCallHandler.class);
Metadata metadata;
Map<String, List<String>> expectTag;
Key<String> name = Key.of("name", Metadata.ASCII_STRING_MARSHALLER);
Key<String> id = Key.of("id", Metadata.ASCII_STRING_MARSHALLER);
// metadata is null
interceptor.interceptCall(call, null, handler);
Assert.assertNull(TrafficUtils.getTrafficTag());
// The metadata contains traffic labels that all match the rules, and none of the values are null.
metadata = new Metadata();
metadata.put(id, "001");
metadata.put(name, "test001");
expectTag = new HashMap<>();
expectTag.put("id", Collections.singletonList("001"));
expectTag.put("name", Collections.singletonList("test001"));
interceptor.interceptCall(call, metadata, handler);
Assert.assertEquals(TrafficUtils.getTrafficTag().getTag(), expectTag);
// metadata contains null traffic tag
metadata = new Metadata();
metadata.put(name, "null");
metadata.put(id, "001");
expectTag = new HashMap<>();
expectTag.put("id", Collections.singletonList("001"));
expectTag.put("name", null);
interceptor.interceptCall(call, metadata, handler);
Assert.assertEquals(TrafficUtils.getTrafficTag().getTag(), expectTag);
} |
public int generate(Class<? extends CustomResource> crdClass, Writer out) throws IOException {
ObjectNode node = nf.objectNode();
Crd crd = crdClass.getAnnotation(Crd.class);
if (crd == null) {
err(crdClass + " is not annotated with @Crd");
} else {
node.put("apiVersion", "apiextensions.k8s.io/" + crdApiVersion)
.put("kind", "CustomResourceDefinition")
.putObject("metadata")
.put("name", crd.spec().names().plural() + "." + crd.spec().group());
if (!labels.isEmpty()) {
((ObjectNode) node.get("metadata"))
.putObject("labels")
.setAll(labels.entrySet().stream()
.collect(Collectors.<Map.Entry<String, String>, String, JsonNode, LinkedHashMap<String, JsonNode>>toMap(
Map.Entry::getKey,
e -> new TextNode(
e.getValue()
.replace("%group%", crd.spec().group())
.replace("%plural%", crd.spec().names().plural())
.replace("%singular%", crd.spec().names().singular())),
(x, y) -> x,
LinkedHashMap::new)));
}
node.set("spec", buildSpec(crdApiVersion, crd.spec(), crdClass));
}
mapper.writeValue(out, node);
return numErrors;
} | @Test
void versionedTest() throws IOException {
CrdGenerator crdGenerator = new CrdGenerator(KubeVersion.V1_16_PLUS, ApiVersion.V1, CrdGenerator.YAML_MAPPER,
emptyMap(), crdGeneratorReporter, emptyList(), null, null,
new CrdGenerator.NoneConversionStrategy(), null);
StringWriter w = new StringWriter();
crdGenerator.generate(VersionedExampleCrd.class, w);
String s = w.toString();
assertTrue(errors.isEmpty(), "CrdGenerator should not report any errors: " + errors);
assertEquals(CrdTestUtils.readResource("versionedTest.yaml"), s);
} |
@Override
public Object getInternalProperty(String key) {
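// Direct lookup in the backing metadata map; keys are the fully prefixed property names.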
return metaData.get(key);
} | @Test
void test() {
RegistryConfig registryConfig = new RegistryConfig();
registryConfig.setAddress("127.0.0.1");
registryConfig.setPort(2181);
String prefix = "dubbo.registry";
ConfigConfigurationAdapter configConfigurationAdapter = new ConfigConfigurationAdapter(registryConfig, prefix);
Assertions.assertEquals(configConfigurationAdapter.getInternalProperty(prefix + "." + "address"), "127.0.0.1");
Assertions.assertEquals(configConfigurationAdapter.getInternalProperty(prefix + "." + "port"), "2181");
} |
@Override
public String toString() {
StringBuilder b = new StringBuilder();
if (StringUtils.isNotBlank(protocol)) {
b.append(protocol);
b.append("://");
}
if (StringUtils.isNotBlank(host)) {
b.append(host);
}
if (!isPortDefault() && port != -1) {
b.append(':');
b.append(port);
}
if (StringUtils.isNotBlank(path)) {
// If no scheme/host/port, leave the path as is
if (b.length() > 0 && !path.startsWith("/")) {
b.append('/');
}
b.append(encodePath(path));
}
if (queryString != null && !queryString.isEmpty()) {
b.append(queryString.toString());
}
if (fragment != null) {
b.append("#");
b.append(encodePath(fragment));
}
return b.toString();
} | @Test
public void testHttpProtocolNonDefaultPort() {
s = "http://www.example.com:81/blah";
t = "http://www.example.com:81/blah";
assertEquals(t, new HttpURL(s).toString());
} |
@Udf
public Integer length(@UdfParameter final String jsonArray) {
if (jsonArray == null) {
return null;
}
final JsonNode node = UdfJsonMapper.parseJson(jsonArray);
if (node.isMissingNode() || !node.isArray()) {
return null;
}
return node.size();
} | @Test
public void shouldReturnNullForString() {
// When:
final Integer result = udf.length("\"abc\"");
// Then:
assertNull(result);
} |
@Override
public SignParameters extract(final HttpRequest httpRequest) {
String version = httpRequest.getHeaders().getFirst(Constants.VERSION);
if (Objects.isNull(version)) {
return SignParameters.VERSION_ERROR_PARAMETERS;
}
SignParameterExtractor extractor = VERSION_EXTRACTOR.get(version);
if (Objects.isNull(extractor)) {
return SignParameters.VERSION_ERROR_PARAMETERS;
}
return extractor.extract(httpRequest);
} | @Test
public void testVersionTwoExtract() {
Map<String, String> map = ImmutableMap.of(
"timestamp", "1660659201000",
"appKey", "BD7980F5688A4DE6BCF1B5327FE07F5C",
"sign", "BF485842D2C08A3378308BA9992A309F",
"alg", "MD5");
String parameters = Base64.getEncoder().encodeToString(JsonUtils.toJson(map).getBytes(StandardCharsets.UTF_8));
String token = parameters + ".BF485842D2C08A3378308BA9992A309F";
HttpRequest httpRequest = MockServerHttpRequest
.get("http://localhost:9195/springcloud/class/annotation/get?id=1&id=1")
.header(HttpHeaders.AUTHORIZATION, token)
.header("version", VERSION_2)
.build();
SignParameters signParameters = new SignParameters(VERSION_2, "BD7980F5688A4DE6BCF1B5327FE07F5C", "1660659201000",
"BF485842D2C08A3378308BA9992A309F", httpRequest.getURI(), "MD5");
signParameters.setParameters(parameters);
assertThat(extractor.extract(httpRequest).toString(), is(signParameters.toString()));
} |
@VisibleForTesting
Path getJarArtifact() throws IOException {
Optional<String> classifier = Optional.empty();
Path buildDirectory = Paths.get(project.getBuild().getDirectory());
Path outputDirectory = buildDirectory;
// Read <classifier> and <outputDirectory> from maven-jar-plugin.
Plugin jarPlugin = project.getPlugin("org.apache.maven.plugins:maven-jar-plugin");
if (jarPlugin != null) {
for (PluginExecution execution : jarPlugin.getExecutions()) {
if ("default-jar".equals(execution.getId())) {
Xpp3Dom configuration = (Xpp3Dom) execution.getConfiguration();
classifier = getChildValue(configuration, "classifier");
Optional<String> directoryString = getChildValue(configuration, "outputDirectory");
if (directoryString.isPresent()) {
outputDirectory = project.getBasedir().toPath().resolve(directoryString.get());
}
break;
}
}
}
String finalName = project.getBuild().getFinalName();
String suffix = ".jar";
Optional<Xpp3Dom> bootConfiguration = getSpringBootRepackageConfiguration();
if (bootConfiguration.isPresent()) {
log(LogEvent.lifecycle("Spring Boot repackaging (fat JAR) detected; using the original JAR"));
// Spring renames the original JAR only when replacing it, so check whether the paths clash.
Optional<String> bootFinalName = getChildValue(bootConfiguration.get(), "finalName");
Optional<String> bootClassifier = getChildValue(bootConfiguration.get(), "classifier");
boolean sameDirectory = outputDirectory.equals(buildDirectory);
// If Boot <finalName> is undefined, it uses the default project <finalName>.
boolean sameFinalName = !bootFinalName.isPresent() || finalName.equals(bootFinalName.get());
boolean sameClassifier = classifier.equals(bootClassifier);
if (sameDirectory && sameFinalName && sameClassifier) {
suffix = ".jar.original";
}
}
String noSuffixJarName = finalName + (classifier.isPresent() ? '-' + classifier.get() : "");
Path jarPath = outputDirectory.resolve(noSuffixJarName + suffix);
log(LogEvent.debug("Using JAR: " + jarPath));
if (".jar".equals(suffix)) {
return jarPath;
}
// "*" in "java -cp *" doesn't work if JAR doesn't end with ".jar". Copy the JAR with a new name
// ending with ".jar".
Path tempDirectory = tempDirectoryProvider.newDirectory();
Path newJarPath = tempDirectory.resolve(noSuffixJarName + ".original.jar");
Files.copy(jarPath, newJarPath);
return newJarPath;
} | @Test
public void testGetJarArtifact_executionIdNotMatched() throws IOException {
when(mockBuild.getDirectory()).thenReturn(Paths.get("/foo/bar").toString());
when(mockBuild.getFinalName()).thenReturn("helloworld-1");
when(mockMavenProject.getPlugin("org.apache.maven.plugins:maven-jar-plugin"))
.thenReturn(mockPlugin);
when(mockPlugin.getExecutions()).thenReturn(Arrays.asList(mockPluginExecution));
when(mockPluginExecution.getId()).thenReturn("no-id-match");
Mockito.lenient().when(mockPluginExecution.getConfiguration()).thenReturn(pluginConfiguration);
addXpp3DomChild(pluginConfiguration, "outputDirectory", "/should/ignore");
addXpp3DomChild(pluginConfiguration, "classifier", "a-class");
assertThat(mavenProjectProperties.getJarArtifact())
.isEqualTo(Paths.get("/foo/bar/helloworld-1.jar"));
} |
public boolean remove(final Object value)
{
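// Unboxes the value (assumed to be an Integer) and delegates to the primitive int overload.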
return remove((int)value);
} | @Test
void removingAnElementFromAnEmptyListDoesNothing()
{
assertFalse(testSet.remove(0));
} |
public static <T> T toBean(Object source, Class<T> clazz) {
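// Convenience overload: delegates to the three-arg variant, passing null for the copy options.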
return toBean(source, clazz, null);
} | @Test
public void toBeanTest() {
final SubPerson person = new SubPerson();
person.setAge(14);
person.setOpenid("11213232");
person.setName("测试A11");
person.setSubName("sub名字");
final Map<?, ?> map = BeanUtil.toBean(person, Map.class);
assertEquals("测试A11", map.get("name"));
assertEquals(14, map.get("age"));
assertEquals("11213232", map.get("openid"));
// static fields should be ignored
assertFalse(map.containsKey("SUBNAME"));
} |
public static <T> Either<String, T> resolveImportDMN(Import importElement, Collection<T> dmns, Function<T, QName> idExtractor) {
final String importerDMNNamespace = ((Definitions) importElement.getParent()).getNamespace();
final String importerDMNName = ((Definitions) importElement.getParent()).getName();
final String importNamespace = importElement.getNamespace();
final String importName = importElement.getName();
final String importLocationURI = importElement.getLocationURI(); // This is optional
final String importModelName = importElement.getAdditionalAttributes().get(TImport.MODELNAME_QNAME);
LOGGER.debug("Resolving an Import in DMN Model with name={} and namespace={}. " +
"Importing a DMN model with namespace={} name={} locationURI={}, modelName={}",
importerDMNName, importerDMNNamespace, importNamespace, importName, importLocationURI, importModelName);
List<T> matchingDMNList = dmns.stream()
.filter(m -> idExtractor.apply(m).getNamespaceURI().equals(importNamespace))
.toList();
if (matchingDMNList.size() == 1) {
T located = matchingDMNList.get(0);
// Check whether the DMN model located in the namespace corresponds to the import's `drools:modelName`.
if (importModelName == null || idExtractor.apply(located).getLocalPart().equals(importModelName)) {
LOGGER.debug("DMN Model with name={} and namespace={} successfully imported a DMN " +
"with namespace={} name={} locationURI={}, modelName={}",
importerDMNName, importerDMNNamespace, importNamespace, importName, importLocationURI, importModelName);
return Either.ofRight(located);
} else {
LOGGER.error("DMN Model with name={} and namespace={} can't import a DMN with namespace={}, name={}, modelName={}, " +
"located within namespace only {} but does not match for the actual modelName",
importerDMNName, importerDMNNamespace, importNamespace, importName, importModelName, idExtractor.apply(located));
return Either.ofLeft(String.format(
"DMN Model with name=%s and namespace=%s can't import a DMN with namespace=%s, name=%s, modelName=%s, " +
"located within namespace only %s but does not match for the actual modelName",
importerDMNName, importerDMNNamespace, importNamespace, importName, importModelName, idExtractor.apply(located)));
}
} else {
List<T> usingNSandName = matchingDMNList.stream()
.filter(dmn -> idExtractor.apply(dmn).getLocalPart().equals(importModelName))
.toList();
if (usingNSandName.size() == 1) {
LOGGER.debug("DMN Model with name={} and namespace={} successfully imported a DMN " +
"with namespace={} name={} locationURI={}, modelName={}",
importerDMNName, importerDMNNamespace, importNamespace, importName, importLocationURI, importModelName);
return Either.ofRight(usingNSandName.get(0));
} else if (usingNSandName.isEmpty()) {
LOGGER.error("DMN Model with name={} and namespace={} failed to import a DMN with namespace={} name={} locationURI={}, modelName={}.",
importerDMNName, importerDMNNamespace, importNamespace, importName, importLocationURI, importModelName);
return Either.ofLeft(String.format(
"DMN Model with name=%s and namespace=%s failed to import a DMN with namespace=%s name=%s locationURI=%s, modelName=%s. ",
importerDMNName, importerDMNNamespace, importNamespace, importName, importLocationURI, importModelName));
} else {
LOGGER.error("DMN Model with name={} and namespace={} detected a collision ({} elements) trying to import a DMN with namespace={} name={} locationURI={}, modelName={}",
importerDMNName, importerDMNNamespace, usingNSandName.size(), importNamespace, importName, importLocationURI, importModelName);
return Either.ofLeft(String.format(
"DMN Model with name=%s and namespace=%s detected a collision trying to import a DMN with %s namespace, " +
"%s name and modelName %s. There are %s DMN files with the same namespace in your project. " +
"Please change the DMN namespaces and make them unique to fix this issue.",
importerDMNName, importerDMNNamespace, importNamespace, importName, importModelName, usingNSandName.size()));
}
}
} | @Test
void nSandModelName() {
final Import i = makeImport("ns1", null, "m1");
final List<QName> available = Arrays.asList(new QName("ns1", "m1"),
new QName("ns2", "m2"),
new QName("ns3", "m3"));
final Either<String, QName> result = ImportDMNResolverUtil.resolveImportDMN(i, available, Function.identity());
assertThat(result.isRight()).isTrue();
assertThat(result.getOrElse(null)).isEqualTo(new QName("ns1", "m1"));
} |
public static void mergeMap(boolean decrypt, Map<String, Object> config) {
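// Delegates to the shared merge() implementation, forwarding the decrypt flag.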
merge(decrypt, config);
} | @Test
public void testMap_allowEmptyStringOverwrite() {
Map<String, Object> testMap = new HashMap<>();
testMap.put("key", "${TEST.emptyString: value}");
CentralizedManagement.mergeMap(true, testMap);
Assert.assertEquals("", testMap.get("key"));
} |
public static InetSocketAddress parseAddress(String address, int defaultPort) {
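// Delegates to the three-arg overload with the final boolean flag set to false.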
return parseAddress(address, defaultPort, false);
} | @Test
void shouldParseAddressForIPv6WithPort() {
InetSocketAddress socketAddress = AddressUtils.parseAddress("[1abc:2abc:3abc::5ABC:6abc]:8080", 80);
assertThat(socketAddress.isUnresolved()).isFalse();
assertThat(socketAddress.getAddress().getHostAddress()).isEqualTo("1abc:2abc:3abc:0:0:0:5abc:6abc");
assertThat(socketAddress.getPort()).isEqualTo(8080);
assertThat(socketAddress.getHostString()).isEqualTo("1abc:2abc:3abc:0:0:0:5abc:6abc");
} |
static public Entry buildMenuStructure(String xml) {
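// Wraps the XML string in a StringReader and delegates to the Reader-based overload.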
final Reader reader = new StringReader(xml);
return buildMenuStructure(reader);
} | @Test
public void givenXmlWithChildEntryWithName_createsStructureWithNamedChildEntry() {
String xmlWithoutContent = "<FreeplaneUIEntries><Entry name='entry'/></FreeplaneUIEntries>";
Entry builtMenuStructure = XmlEntryStructureBuilder.buildMenuStructure(xmlWithoutContent);
Entry menuStructureWithChildEntry = new Entry();
final Entry childEntry = new Entry();
childEntry.setName("entry");
menuStructureWithChildEntry.addChild(childEntry);
assertThat(builtMenuStructure, equalTo(menuStructureWithChildEntry));
} |
@Udf(description = "Splits a string into an array of substrings based on a delimiter.")
public List<String> split(
@UdfParameter(
description = "The string to be split. If NULL, then function returns NULL.")
final String string,
@UdfParameter(
description = "The delimiter to split a string by. If NULL, then function returns NULL.")
final String delimiter) {
if (string == null || delimiter == null) {
return null;
}
// Java split() accepts regular expressions as a delimiter, but this UDF accepts only
// literal strings. It therefore uses Guava Splitter, which does not interpret regex
// patterns, to avoid confusing users when splitting by regex special characters such
// as '.' and '|'.
try {
// Guava Splitter does not accept empty delimiters. Use the Java split() method instead.
if (delimiter.isEmpty()) {
return Arrays.asList(EMPTY_DELIMITER.split(string));
} else {
return Splitter.on(delimiter).splitToList(string);
}
} catch (final Exception e) {
throw new KsqlFunctionException(
String.format("Invalid delimiter '%s' in the split() function.", delimiter), e);
}
} | @Test
public void shouldSplitAllBytesByGivenAnEmptyDelimiter() {
final ByteBuffer xBytes = ByteBuffer.wrap(new byte[]{'x'});
final ByteBuffer dashBytes = ByteBuffer.wrap(new byte[]{'-'});
final ByteBuffer yBytes = ByteBuffer.wrap(new byte[]{'y'});
assertThat(splitUdf.split(EMPTY_BYTES, EMPTY_BYTES), contains(EMPTY_BYTES));
assertThat(splitUdf.split(X_DASH_Y_BYTES, EMPTY_BYTES), contains(xBytes, dashBytes, yBytes));
} |
@Override
public void close(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception {
if (decoupleCloseAndGoAway) {
ctx.close(promise);
return;
}
promise = promise.unvoid();
// Avoid NotYetConnectedException and avoid sending before connection preface
if (!ctx.channel().isActive() || !prefaceSent()) {
ctx.close(promise);
return;
}
// If the user has already sent a GO_AWAY frame they may be attempting a graceful shutdown, which requires
// sending multiple GO_AWAY frames. We should only send a GO_AWAY here if one has not already been sent. If
// a GO_AWAY has been sent, we write an empty buffer instead so we can wait until all other data has been
// flushed to the OS before closing.
// https://github.com/netty/netty/issues/5307
ChannelFuture f = connection().goAwaySent() ? ctx.write(EMPTY_BUFFER) : goAway(ctx, null, ctx.newPromise());
ctx.flush();
doGracefulShutdown(ctx, f, promise);
} | @Test
public void clientChannelClosedDoesNotSendGoAwayBeforePreface() throws Exception {
when(connection.isServer()).thenReturn(false);
when(channel.isActive()).thenReturn(false);
handler = newHandler();
when(channel.isActive()).thenReturn(true);
handler.close(ctx, promise);
verifyZeroInteractions(frameWriter);
} |
public static Expression literalExprFrom(Enum<?> input) {
return input == null ? new NullLiteralExpr() :
new NameExpr(input.getClass().getCanonicalName() + "." + input.name());
} | @Test
void literalExprFromDataType() {
Map<DATA_TYPE, String> inputMap = new HashMap<>();
inputMap.put(DATA_TYPE.STRING, "TEST");
inputMap.put(DATA_TYPE.INTEGER, "1");
inputMap.put(DATA_TYPE.FLOAT, "2.0");
inputMap.put(DATA_TYPE.DOUBLE, "3.0");
inputMap.put(DATA_TYPE.BOOLEAN, "true");
inputMap.put(DATA_TYPE.DATE, "2021-06-01");
inputMap.put(DATA_TYPE.TIME, "11:21:31");
inputMap.put(DATA_TYPE.DATE_TIME, "2021-06-01T11:21:31");
inputMap.put(DATA_TYPE.DATE_DAYS_SINCE_0, "10");
inputMap.put(DATA_TYPE.DATE_DAYS_SINCE_1960, "20");
inputMap.put(DATA_TYPE.DATE_DAYS_SINCE_1970, "30");
inputMap.put(DATA_TYPE.DATE_DAYS_SINCE_1980, "40");
inputMap.put(DATA_TYPE.TIME_SECONDS, "50");
inputMap.put(DATA_TYPE.DATE_TIME_SECONDS_SINCE_0, "60");
inputMap.put(DATA_TYPE.DATE_TIME_SECONDS_SINCE_1960, "70");
inputMap.put(DATA_TYPE.DATE_TIME_SECONDS_SINCE_1970, "80");
inputMap.put(DATA_TYPE.DATE_TIME_SECONDS_SINCE_1980, "90");
for (Map.Entry<DATA_TYPE, String> input : inputMap.entrySet()) {
assertThat(literalExprFrom(input.getKey(), null)).isInstanceOf(NullLiteralExpr.class);
Expression output = literalExprFrom(input.getKey(), input.getValue());
switch (input.getKey()) {
case STRING:
assertThat(output).isInstanceOf(StringLiteralExpr.class);
break;
case INTEGER:
assertThat(output).isInstanceOf(IntegerLiteralExpr.class);
break;
case DOUBLE:
case FLOAT:
assertThat(output).isInstanceOf(DoubleLiteralExpr.class);
break;
case BOOLEAN:
assertThat(output).isInstanceOf(BooleanLiteralExpr.class);
break;
case DATE:
case TIME:
case DATE_TIME:
assertThat(output).isInstanceOf(MethodCallExpr.class);
break;
case DATE_DAYS_SINCE_0:
case DATE_DAYS_SINCE_1960:
case DATE_DAYS_SINCE_1970:
case DATE_DAYS_SINCE_1980:
case TIME_SECONDS:
case DATE_TIME_SECONDS_SINCE_0:
case DATE_TIME_SECONDS_SINCE_1960:
case DATE_TIME_SECONDS_SINCE_1970:
case DATE_TIME_SECONDS_SINCE_1980:
assertThat(output).isInstanceOf(LongLiteralExpr.class);
}
}
assertThatIllegalArgumentException().isThrownBy(() -> literalExprFrom(null, null));
assertThatIllegalArgumentException().isThrownBy(() -> literalExprFrom(null, "test"));
} |
@Nullable static String method(Invocation invocation) {
String methodName = invocation.getMethodName();
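// For generic invocations ($invoke/$invokeAsync) the real method name is passed as the first argument.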
if ("$invoke".equals(methodName) || "$invokeAsync".equals(methodName)) {
Object[] arguments = invocation.getArguments();
if (arguments != null && arguments.length > 0 && arguments[0] instanceof String) {
methodName = (String) arguments[0];
} else {
methodName = null;
}
}
return methodName != null && !methodName.isEmpty() ? methodName : null;
} | @Test void method_invoke_nonStringArg() {
when(invocation.getMethodName()).thenReturn("$invoke");
when(invocation.getArguments()).thenReturn(new Object[] {new Object()});
assertThat(DubboParser.method(invocation)).isNull();
} |
@Override
public KsqlSecurityContext provide(final ApiSecurityContext apiSecurityContext) {
final Optional<KsqlPrincipal> principal = apiSecurityContext.getPrincipal();
final Optional<String> authHeader = apiSecurityContext.getAuthHeader();
final List<Entry<String, String>> requestHeaders = apiSecurityContext.getRequestHeaders();
// A user context is not necessary if a user context provider is not present or the user
// principal is missing. If a failed authentication attempt results in a missing principal,
// then the authentication plugin will have already failed the connection before calling
// this method. Therefore, if we've reached this method with a missing principal, then this
// must be a valid connection that does not require authentication.
// For these cases, we create a default service context that the missing user can use.
final boolean requiresUserContext =
securityExtension != null
&& securityExtension.getUserContextProvider().isPresent()
&& principal.isPresent();
if (!requiresUserContext) {
return new KsqlSecurityContext(
principal,
defaultServiceContextFactory.create(
ksqlConfig,
authHeader,
schemaRegistryClientFactory,
connectClientFactory,
sharedClient,
requestHeaders,
principal)
);
}
return securityExtension.getUserContextProvider()
.map(provider -> new KsqlSecurityContext(
principal,
userServiceContextFactory.create(
ksqlConfig,
authHeader,
provider.getKafkaClientSupplier(principal.get()),
provider.getSchemaRegistryClientFactory(principal.get()),
connectClientFactory,
sharedClient,
requestHeaders,
principal)))
.get();
} | @Test
public void shouldCreateUserServiceContextIfUserContextProviderIsEnabled() {
// Given:
when(securityExtension.getUserContextProvider()).thenReturn(Optional.of(userContextProvider));
// When:
final KsqlSecurityContext ksqlSecurityContext =
ksqlSecurityContextProvider.provide(apiSecurityContext);
// Then:
verify(userServiceContextFactory)
.create(eq(ksqlConfig), eq(Optional.empty()), any(), any(), any(), any(), any(), any());
assertThat(ksqlSecurityContext.getUserPrincipal(), is(Optional.of(user1)));
assertThat(ksqlSecurityContext.getServiceContext(), is(userServiceContext));
} |
@VisibleForTesting
public static String parseErrorMessage( String errorMessage, Date now ) {
StringBuilder parsed = new StringBuilder();
try {
String[] splitString = errorMessage.split( "\\n" );
parsed.append( splitString[1] ).append( "\n" );
for ( int i = 2; i < splitString.length; i++ ) {
String dateStr = splitString[i].substring( 0, splitString[i].indexOf( " -" ) );
if ( isDateAfterOrSame( formatDate( now ), dateStr ) ) {
parsed.append( splitString[i] ).append( "\n" );
}
}
} catch ( Exception e ) {
return errorMessage;
}
return parsed.toString();
} | @Test
public void parseErrorMessageUsingBeforeDateTest() {
String result = TransPreviewProgressDialog.parseErrorMessage( ERROR_MSG, parseDate( BEFORE_DATE_STR ) );
assertEquals( FAILED_TO_INIT_MSG + EXPECTED_ERROR_MSG, result );
} |
public static List<Path> listTaskTemporaryPaths(
FileSystem fs, Path basePath, BiPredicate<Integer, Integer> taskAttemptFilter)
throws Exception {
List<Path> taskTmpPaths = new ArrayList<>();
if (fs.exists(basePath)) {
for (FileStatus taskStatus : fs.listStatus(basePath)) {
final String taskDirName = taskStatus.getPath().getName();
final Matcher matcher = TASK_DIR_PATTERN.matcher(taskDirName);
if (matcher.matches()) {
final int subtaskIndex = Integer.parseInt(matcher.group(1));
final int attemptNumber = Integer.parseInt(matcher.group(2));
if (taskAttemptFilter.test(subtaskIndex, attemptNumber)) {
taskTmpPaths.add(taskStatus.getPath());
}
}
}
} else {
LOG.warn(
"The path {} doesn't exist. Maybe no data is generated in the path and the path is not created.",
basePath);
}
return taskTmpPaths;
} | @Test
void testListTaskTemporaryPaths() throws Exception {
// only accept task-0-attempt-1
final BiPredicate<Integer, Integer> taskAttemptFilter =
(subtaskIndex, attemptNumber) -> subtaskIndex == 0 && attemptNumber == 1;
final FileSystem fs = FileSystem.get(tmpPath.toUri());
fs.mkdirs(new Path(tmpPath.toUri() + "/task-0-attempt-0")); // invalid attempt number
fs.mkdirs(new Path(tmpPath.toUri() + "/task-0-attempt-1")); // valid
fs.mkdirs(new Path(tmpPath.toUri() + "/task-1-attempt-0")); // invalid subtask index
fs.mkdirs(new Path(tmpPath.toUri() + "/.task-0-attempt-1")); // invisible dir
fs.mkdirs(new Path(tmpPath.toUri() + "/_SUCCESS")); // not a task dir
final List<Path> taskTmpPaths =
PartitionTempFileManager.listTaskTemporaryPaths(
fs, new Path(tmpPath.toUri()), taskAttemptFilter);
final List<String> taskDirs =
taskTmpPaths.stream().map(Path::getName).collect(Collectors.toList());
assertThat(taskDirs).hasSize(1).containsExactly("task-0-attempt-1");
} |
public static <T> AsIterable<T> asIterable() {
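// Factory for the View.AsIterable transform, which materializes a PCollection as an Iterable side-input view.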
return new AsIterable<>();
} | @Test
@Category(ValidatesRunner.class)
public void testIterableSideInput() {
final PCollectionView<Iterable<Integer>> view =
pipeline.apply("CreateSideInput", Create.of(11, 13, 17, 23)).apply(View.asIterable());
PCollection<Integer> output =
pipeline
.apply("CreateMainInput", Create.of(29, 31))
.apply(
"OutputSideInputs",
ParDo.of(
new DoFn<Integer, Integer>() {
@ProcessElement
public void processElement(ProcessContext c) {
for (Integer i : c.sideInput(view)) {
c.output(i);
}
}
})
.withSideInputs(view));
PAssert.that(output).containsInAnyOrder(11, 13, 17, 23, 11, 13, 17, 23);
pipeline.run();
} |
public void printKsqlEntityList(final List<KsqlEntity> entityList) {
switch (outputFormat) {
case JSON:
printAsJson(entityList);
break;
case TABULAR:
final boolean showStatements = entityList.size() > 1;
for (final KsqlEntity ksqlEntity : entityList) {
writer().println();
if (showStatements) {
writer().println(ksqlEntity.getStatementText());
}
printAsTable(ksqlEntity);
}
break;
default:
throw new RuntimeException(String.format(
"Unexpected output format: '%s'",
outputFormat.name()
));
}
} | @Test
public void shouldPrintStreamsList() {
// Given:
final KsqlEntityList entityList = new KsqlEntityList(ImmutableList.of(
new StreamsList("e", ImmutableList.of(
new SourceInfo.Stream("B", "t2", "KAFKA", "AVRO", false),
new SourceInfo.Stream("A", "t1", "JSON", "JSON", true)
))
));
// When:
console.printKsqlEntityList(entityList);
// Then:
final String output = terminal.getOutputString();
Approvals.verify(output, approvalOptions);
} |
@Override
public void setConfigAttributes(Object attributes) {
this.clear();
if (attributes != null) {
for (Map attributeMap : (List<Map>) attributes) {
String tabName = (String) attributeMap.get(Tab.NAME);
String path = (String) attributeMap.get(Tab.PATH);
if (StringUtils.isBlank(tabName) && StringUtils.isBlank(path)) {
continue;
}
this.add(new Tab(tabName, path));
}
}
} | @Test
public void shouldSetAttributedOfTabs() {
Tabs tabs = new Tabs();
tabs.setConfigAttributes(List.of(Map.of(Tab.NAME, "tab1", Tab.PATH, "path1"), Map.of(Tab.NAME, "tab2", Tab.PATH, "path2")));
assertThat(tabs.get(0).getName(), is("tab1"));
assertThat(tabs.get(0).getPath(), is("path1"));
assertThat(tabs.get(1).getName(), is("tab2"));
assertThat(tabs.get(1).getPath(), is("path2"));
} |
@Override
protected void init() throws ServiceException {
LOG.info("Using FileSystemAccess JARs version [{}]", VersionInfo.getVersion());
String security = getServiceConfig().get(AUTHENTICATION_TYPE, "simple").trim();
if (security.equals("kerberos")) {
String defaultName = getServer().getName();
String keytab = System.getProperty("user.home") + "/" + defaultName + ".keytab";
keytab = getServiceConfig().get(KERBEROS_KEYTAB, keytab).trim();
if (keytab.length() == 0) {
throw new ServiceException(FileSystemAccessException.ERROR.H01, KERBEROS_KEYTAB);
}
String principal = defaultName + "/localhost@LOCALHOST";
principal = getServiceConfig().get(KERBEROS_PRINCIPAL, principal).trim();
if (principal.length() == 0) {
throw new ServiceException(FileSystemAccessException.ERROR.H01, KERBEROS_PRINCIPAL);
}
Configuration conf = new Configuration();
conf.set(HADOOP_SECURITY_AUTHENTICATION, "kerberos");
UserGroupInformation.setConfiguration(conf);
try {
UserGroupInformation.loginUserFromKeytab(principal, keytab);
} catch (IOException ex) {
throw new ServiceException(FileSystemAccessException.ERROR.H02, ex.getMessage(), ex);
}
LOG.info("Using FileSystemAccess Kerberos authentication, principal [{}] keytab [{}]", principal, keytab);
} else if (security.equals("simple")) {
Configuration conf = new Configuration();
conf.set(HADOOP_SECURITY_AUTHENTICATION, "simple");
UserGroupInformation.setConfiguration(conf);
LOG.info("Using FileSystemAccess simple/pseudo authentication, principal [{}]", System.getProperty("user.name"));
} else {
throw new ServiceException(FileSystemAccessException.ERROR.H09, security);
}
String hadoopConfDirProp = getServiceConfig().get(HADOOP_CONF_DIR, getServer().getConfigDir());
File hadoopConfDir = new File(hadoopConfDirProp).getAbsoluteFile();
if (!hadoopConfDir.exists()) {
hadoopConfDir = new File(getServer().getConfigDir()).getAbsoluteFile();
}
if (!hadoopConfDir.exists()) {
throw new ServiceException(FileSystemAccessException.ERROR.H10, hadoopConfDir);
}
try {
serviceHadoopConf = loadHadoopConf(hadoopConfDir);
fileSystemConf = getNewFileSystemConfiguration();
} catch (IOException ex) {
throw new ServiceException(FileSystemAccessException.ERROR.H11, ex.toString(), ex);
}
if (LOG.isDebugEnabled()) {
LOG.debug("FileSystemAccess FileSystem configuration:");
for (Map.Entry entry : serviceHadoopConf) {
LOG.debug(" {} = {}", entry.getKey(), entry.getValue());
}
}
setRequiredServiceHadoopConf(serviceHadoopConf);
nameNodeWhitelist = toLowerCase(getServiceConfig().getTrimmedStringCollection(NAME_NODE_WHITELIST));
} | @Test
@TestException(exception = ServiceException.class, msgRegExp = "H01.*")
@TestDir
public void noKerberosPrincipalProperty() throws Exception {
String dir = TestDirHelper.getTestDir().getAbsolutePath();
String services = StringUtils.join(",",
Arrays.asList(InstrumentationService.class.getName(),
SchedulerService.class.getName(),
FileSystemAccessService.class.getName()));
Configuration conf = new Configuration(false);
conf.set("server.services", services);
conf.set("server.hadoop.authentication.type", "kerberos");
conf.set("server.hadoop.authentication.kerberos.keytab", "/tmp/foo");
conf.set("server.hadoop.authentication.kerberos.principal", " ");
Server server = new Server("server", dir, dir, dir, dir, conf);
server.init();
} |
@SuppressWarnings("MethodLength")
public static ChannelUri parse(final CharSequence cs)
{
int position = 0;
final String prefix;
if (startsWith(cs, 0, SPY_PREFIX))
{
prefix = SPY_QUALIFIER;
position = SPY_PREFIX.length();
}
else
{
prefix = "";
}
if (!startsWith(cs, position, AERON_PREFIX))
{
throw new IllegalArgumentException("Aeron URIs must start with 'aeron:', found: " + cs);
}
else
{
position += AERON_PREFIX.length();
}
final StringBuilder builder = new StringBuilder();
final Object2ObjectHashMap<String, String> params = new Object2ObjectHashMap<>();
String media = null;
String key = null;
State state = State.MEDIA;
for (int i = position, length = cs.length(); i < length; i++)
{
final char c = cs.charAt(i);
switch (state)
{
case MEDIA:
switch (c)
{
case '?':
media = builder.toString();
builder.setLength(0);
state = State.PARAMS_KEY;
break;
case ':':
case '|':
case '=':
throw new IllegalArgumentException(
"encountered '" + c + "' within media definition at index " + i + " in " + cs);
default:
builder.append(c);
}
break;
case PARAMS_KEY:
if (c == '=')
{
if (0 == builder.length())
{
throw new IllegalStateException("empty key not allowed at index " + i + " in " + cs);
}
key = builder.toString();
builder.setLength(0);
state = State.PARAMS_VALUE;
}
else
{
if (c == '|')
{
throw new IllegalStateException("invalid end of key at index " + i + " in " + cs);
}
builder.append(c);
}
break;
case PARAMS_VALUE:
if (c == '|')
{
params.put(key, builder.toString());
builder.setLength(0);
state = State.PARAMS_KEY;
}
else
{
builder.append(c);
}
break;
default:
throw new IllegalStateException("unexpected state=" + state + " in " + cs);
}
}
switch (state)
{
case MEDIA:
media = builder.toString();
validateMedia(media);
break;
case PARAMS_VALUE:
params.put(key, builder.toString());
break;
default:
throw new IllegalStateException("no more input found, state=" + state + " in " + cs);
}
return new ChannelUri(prefix, media, params);
} | @Test
void equalsReturnsFalseIfComparedAnotherClass()
{
final ChannelUri channelUri = ChannelUri.parse(
"aeron:udp?endpoint=224.10.9.8|port=4567|interface=192.168.0.3|ttl=16");
//noinspection AssertBetweenInconvertibleTypes
assertNotEquals(channelUri, 123);
} |
@Override
@Deprecated
public void showUpX5WebView(Object x5WebView, JSONObject properties, boolean isSupportJellyBean, boolean enableVerify) {
} | @Test
public void testShowUpX5WebView1() {
WebView webView = new WebView(mApplication);
mSensorsAPI.showUpX5WebView(webView, false);
} |
Command getCommand(String request) {
var commandClass = getCommandClass(request);
try {
return (Command) commandClass.getDeclaredConstructor().newInstance();
} catch (Exception e) {
throw new ApplicationException(e);
}
} | @Test
void testGetCommandKnown() {
Command command = dispatcher.getCommand("Archer");
assertNotNull(command);
assertTrue(command instanceof ArcherCommand);
} |
public ConfigData get(String path) {
if (allowedPaths == null) {
throw new IllegalStateException("The provider has not been configured yet.");
}
Map<String, String> data = new HashMap<>();
if (path == null || path.isEmpty()) {
return new ConfigData(data);
}
Path filePath = allowedPaths.parseUntrustedPath(path);
if (filePath == null) {
log.warn("The path {} is not allowed to be accessed", path);
return new ConfigData(data);
}
try (Reader reader = reader(filePath)) {
Properties properties = new Properties();
properties.load(reader);
Enumeration<Object> keys = properties.keys();
while (keys.hasMoreElements()) {
String key = keys.nextElement().toString();
String value = properties.getProperty(key);
if (value != null) {
data.put(key, value);
}
}
return new ConfigData(data);
} catch (IOException e) {
log.error("Could not read properties from file {}", path, e);
throw new ConfigException("Could not read properties from file " + path);
}
} | @Test
public void testNonConfiguredProvider() {
FileConfigProvider provider2 = new FileConfigProvider();
IllegalStateException ise = assertThrows(IllegalStateException.class, () -> provider2.get(Paths.get(dirFile).toString()));
assertEquals("The provider has not been configured yet.", ise.getMessage());
} |
public static NativeReader<WindowedValue<?>> create(
final CloudObject spec,
final PipelineOptions options,
DataflowExecutionContext executionContext)
throws Exception {
@SuppressWarnings("unchecked")
final Source<Object> source = (Source<Object>) deserializeFromCloudSource(spec);
if (source instanceof BoundedSource) {
@SuppressWarnings({"unchecked", "rawtypes"})
NativeReader<WindowedValue<?>> reader =
(NativeReader)
new NativeReader<WindowedValue<Object>>() {
@Override
public NativeReaderIterator<WindowedValue<Object>> iterator() throws IOException {
return new BoundedReaderIterator<>(
((BoundedSource<Object>) source).createReader(options));
}
};
return reader;
} else if (source instanceof UnboundedSource) {
@SuppressWarnings({"unchecked", "rawtypes"})
NativeReader<WindowedValue<?>> reader =
(NativeReader)
new UnboundedReader<Object>(
options, spec, (StreamingModeExecutionContext) executionContext);
return reader;
} else {
throw new IllegalArgumentException("Unexpected source kind: " + source.getClass());
}
} | @Test
@SuppressWarnings("unchecked")
public void testProgressAndSourceSplitTranslation() throws Exception {
// Same as previous test, but now using BasicSerializableSourceFormat wrappers.
// We know that the underlying reader behaves correctly (because of the previous test),
// now check that we are wrapping it correctly.
NativeReader<WindowedValue<Integer>> reader =
(NativeReader<WindowedValue<Integer>>)
ReaderRegistry.defaultRegistry()
.create(
translateIOToCloudSource(CountingSource.upTo(10), options),
options,
null, // executionContext
TestOperationContext.create());
try (NativeReader.NativeReaderIterator<WindowedValue<Integer>> iterator = reader.iterator()) {
assertTrue(iterator.start());
assertEquals(valueInGlobalWindow(0L), iterator.getCurrent());
assertEquals(
0.0,
readerProgressToCloudProgress(iterator.getProgress()).getFractionConsumed().doubleValue(),
1e-6);
assertTrue(iterator.advance());
assertEquals(valueInGlobalWindow(1L), iterator.getCurrent());
assertEquals(
0.1,
readerProgressToCloudProgress(iterator.getProgress()).getFractionConsumed().doubleValue(),
1e-6);
assertTrue(iterator.advance());
assertEquals(valueInGlobalWindow(2L), iterator.getCurrent());
assertNull(iterator.requestDynamicSplit(ReaderTestUtils.splitRequestAtFraction(0)));
assertNull(iterator.requestDynamicSplit(ReaderTestUtils.splitRequestAtFraction(0.1f)));
WorkerCustomSources.BoundedSourceSplit<Integer> sourceSplit =
(WorkerCustomSources.BoundedSourceSplit<Integer>)
iterator.requestDynamicSplit(ReaderTestUtils.splitRequestAtFraction(0.5f));
assertNotNull(sourceSplit);
assertThat(readFromSource(sourceSplit.primary, options), contains(0L, 1L, 2L, 3L, 4L));
assertThat(readFromSource(sourceSplit.residual, options), contains(5L, 6L, 7L, 8L, 9L));
sourceSplit =
(WorkerCustomSources.BoundedSourceSplit<Integer>)
iterator.requestDynamicSplit(ReaderTestUtils.splitRequestAtFraction(0.8f));
assertNotNull(sourceSplit);
assertThat(readFromSource(sourceSplit.primary, options), contains(0L, 1L, 2L, 3L));
assertThat(readFromSource(sourceSplit.residual, options), contains(4L));
assertTrue(iterator.advance());
assertEquals(valueInGlobalWindow(3L), iterator.getCurrent());
assertFalse(iterator.advance());
}
} |
@Override
public void remove(String componentName, String key) {
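// No-op for unknown components; otherwise drop the key and notify listeners with a null value.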
if (!data.containsKey(componentName)) {
return;
}
data.get(componentName).remove(key);
notifyListeners(componentName, key, null);
} | @Test
void testRemove() throws Exception {
dataStore.remove("xxx", "yyy");
dataStore.put("name", "key", "1");
dataStore.remove("name", "key");
assertNull(dataStore.get("name", "key"));
} |
public Image getImage() {
for (PluginInfo extensionInfo : this) {
Image image = extensionInfo.getImage();
if (image != null) {
return image;
}
}
return null;
} | @Test
public void shouldFindFirstExtensionWithImageIfPluginImplementsAtleastOneExtensionWithImage() {
Image image1 = new Image("c1", "d1", "hash1");
Image image2 = new Image("c2", "d2", "hash2");
Image image3 = new Image("c3", "d3", "hash3");
ElasticAgentPluginInfo elasticAgentPluginInfo = new ElasticAgentPluginInfo(null, null, null, image1, null, null);
AuthorizationPluginInfo authorizationPluginInfo = new AuthorizationPluginInfo(null, null, null, image2, null);
AnalyticsPluginInfo analyticsPluginInfo = new AnalyticsPluginInfo(null, image3, null, null);
assertThat(new CombinedPluginInfo(elasticAgentPluginInfo).getImage(), is(image1));
assertThat(new CombinedPluginInfo(authorizationPluginInfo).getImage(), is(image2));
assertThat(new CombinedPluginInfo(analyticsPluginInfo).getImage(), is(image3));
assertThat(new CombinedPluginInfo(List.of(elasticAgentPluginInfo, authorizationPluginInfo)).getImage(), anyOf(is(image1), is(image2)));
assertThat(new CombinedPluginInfo(List.of(analyticsPluginInfo, authorizationPluginInfo)).getImage(), anyOf(is(image2), is(image3)));
} |
public static double tileXToLongitude(long tileX, byte zoomLevel) {
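// Scales the tile number to a pixel coordinate at a fixed dummy tile size, then converts it to longitude.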
return pixelXToLongitude(tileX * DUMMY_TILE_SIZE, getMapSize(zoomLevel, DUMMY_TILE_SIZE));
} | @Test
public void tileXToLongitudeTest() {
for (int tileSize : TILE_SIZES) {
for (byte zoomLevel = ZOOM_LEVEL_MIN; zoomLevel <= ZOOM_LEVEL_MAX; ++zoomLevel) {
double longitude = MercatorProjection.tileXToLongitude(0, zoomLevel);
Assert.assertEquals(LatLongUtils.LONGITUDE_MIN, longitude, 0);
longitude = MercatorProjection.tileXToLongitudeWithScaleFactor(0, MercatorProjection.zoomLevelToScaleFactor(zoomLevel));
Assert.assertEquals(LatLongUtils.LONGITUDE_MIN, longitude, 0);
long tileX = MercatorProjection.getMapSize(zoomLevel, tileSize) / tileSize;
longitude = MercatorProjection.tileXToLongitude(tileX, zoomLevel);
Assert.assertEquals(LatLongUtils.LONGITUDE_MAX, longitude, 0);
tileX = MercatorProjection.getMapSizeWithScaleFactor(MercatorProjection.zoomLevelToScaleFactor(zoomLevel), tileSize) / tileSize;
longitude = MercatorProjection.tileXToLongitudeWithScaleFactor(tileX, MercatorProjection.zoomLevelToScaleFactor(zoomLevel));
Assert.assertEquals(LatLongUtils.LONGITUDE_MAX, longitude, 0);
}
}
} |
public static void validate(WindowConfig windowConfig) {
if (windowConfig.getWindowLengthDurationMs() == null && windowConfig.getWindowLengthCount() == null) {
throw new IllegalArgumentException("Window length is not specified");
}
if (windowConfig.getWindowLengthDurationMs() != null && windowConfig.getWindowLengthCount() != null) {
throw new IllegalArgumentException(
"Window length for time and count are set! Please set one or the other.");
}
if (windowConfig.getWindowLengthCount() != null) {
if (windowConfig.getWindowLengthCount() <= 0) {
throw new IllegalArgumentException(
"Window length must be positive [" + windowConfig.getWindowLengthCount() + "]");
}
}
if (windowConfig.getWindowLengthDurationMs() != null) {
if (windowConfig.getWindowLengthDurationMs() <= 0) {
throw new IllegalArgumentException(
"Window length must be positive [" + windowConfig.getWindowLengthDurationMs() + "]");
}
}
if (windowConfig.getSlidingIntervalCount() != null) {
if (windowConfig.getSlidingIntervalCount() <= 0) {
throw new IllegalArgumentException(
"Sliding interval must be positive [" + windowConfig.getSlidingIntervalCount() + "]");
}
}
if (windowConfig.getSlidingIntervalDurationMs() != null) {
if (windowConfig.getSlidingIntervalDurationMs() <= 0) {
throw new IllegalArgumentException(
"Sliding interval must be positive [" + windowConfig.getSlidingIntervalDurationMs() + "]");
}
}
if (windowConfig.getTimestampExtractorClassName() != null) {
if (windowConfig.getMaxLagMs() != null) {
if (windowConfig.getMaxLagMs() < 0) {
throw new IllegalArgumentException(
"Lag duration must be positive [" + windowConfig.getMaxLagMs() + "]");
}
}
if (windowConfig.getWatermarkEmitIntervalMs() != null) {
if (windowConfig.getWatermarkEmitIntervalMs() <= 0) {
throw new IllegalArgumentException(
"Watermark interval must be positive [" + windowConfig.getWatermarkEmitIntervalMs() + "]");
}
}
}
} | @Test
public void testSettingWaterMarkInterval() throws Exception {
final Object[] args = new Object[]{-1L, 0L, 1L, 2L, 5L, 10L, null};
for (Object arg : args) {
Object arg0 = arg;
try {
Long watermarkEmitInterval = null;
if (arg0 != null) {
watermarkEmitInterval = (Long) arg0;
}
WindowConfig windowConfig = new WindowConfig();
windowConfig.setWindowLengthCount(1);
windowConfig.setSlidingIntervalCount(1);
windowConfig.setWatermarkEmitIntervalMs(watermarkEmitInterval);
windowConfig.setTimestampExtractorClassName("SomeClass");
WindowConfigUtils.validate(windowConfig);
if (arg0 != null && (Long) arg0 <= 0) {
fail(String.format("Watermark interval cannot be zero or less -- watermarkInterval: "
+ "%s", arg0));
}
} catch (IllegalArgumentException e) {
if (arg0 != null && (Long) arg0 > 0) {
fail(String.format("Exception: %s thrown on valid input -- watermarkInterval: %s", e
.getMessage(), arg0));
}
}
}
} |
@Override
public boolean isDebugEnabled() {
return logger.isDebugEnabled();
} | @Test
void testIsDebugEnabled() {
jobRunrDashboardLogger.isDebugEnabled();
verify(slfLogger).isDebugEnabled();
} |
@Override
public MetadataNode child(String name) {
String value = image.data().get(name);
if (value == null) return null;
return new MetadataNode() {
@Override
public boolean isDirectory() {
return false;
}
@Override
public void print(MetadataNodePrinter printer) {
if (printer.redactionCriteria().
shouldRedactConfig(image.resource().type(), name)) {
printer.output("[redacted]");
} else {
printer.output(value);
}
}
};
} | @Test
public void testNonSecretChild() {
NodeStringifier stringifier = new NodeStringifier(NORMAL);
NODE.child("non.secret").print(stringifier);
assertEquals("baaz", stringifier.toString());
} |
public static L2ModificationInstruction modMplsLabel(MplsLabel mplsLabel) {
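// Null-checks the label, then wraps it in an L2 modification instruction of subtype MPLS_LABEL.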
checkNotNull(mplsLabel, "MPLS label cannot be null");
return new L2ModificationInstruction.ModMplsLabelInstruction(mplsLabel);
} | @Test
public void testModMplsMethod() {
final MplsLabel mplsLabel = MplsLabel.mplsLabel(33);
final Instruction instruction = Instructions.modMplsLabel(mplsLabel);
final L2ModificationInstruction.ModMplsLabelInstruction modMplsLabelInstruction =
checkAndConvert(instruction,
Instruction.Type.L2MODIFICATION,
L2ModificationInstruction.ModMplsLabelInstruction.class);
assertThat(modMplsLabelInstruction.label(), is(equalTo(mplsLabel)));
assertThat(modMplsLabelInstruction.subtype(),
is(equalTo(L2ModificationInstruction.L2SubType.MPLS_LABEL)));
} |
@Override
public void upgrade() {
final MigrationCompleted migrationCompleted = configService.get(MigrationCompleted.class);
final Set<String> patternNames = patternsToMigrate.stream()
.map(PatternToMigrate::name)
.collect(Collectors.toSet());
if (migrationCompleted != null && migrationCompleted.patterns().containsAll(patternNames)) {
log.debug("Migration already completed.");
return;
}
try {
for (PatternToMigrate patternToMigrate : patternsToMigrate) {
migratePattern(patternToMigrate);
}
configService.write(MigrationCompleted.create(patternNames));
} catch (ValidationException e) {
log.error("Unable to migrate Grok Pattern.", e);
}
} | @Test
public void alreadyMigrated() {
final MigrationCompleted migrationCompleted = MigrationCompleted.create(Collections.singleton(PATTERN_NAME));
when(configService.get(MigrationCompleted.class)).thenReturn(migrationCompleted);
migration.upgrade();
verifyNoMoreInteractions(grokPatternService);
} |
public Object getCell(final int columnIndex) {
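// Column indexes are 1-based; the precondition rejects any index outside [1, size].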
Preconditions.checkArgument(columnIndex > 0 && columnIndex < data.size() + 1);
return data.get(columnIndex - 1);
} | @Test
void assertGetCellWithOptional() {
LocalDataQueryResultRow actual = new LocalDataQueryResultRow(Optional.empty(), Optional.of("foo"), Optional.of(1), Optional.of(PropertiesBuilder.build(new Property("foo", "bar"))));
assertThat(actual.getCell(1), is(""));
assertThat(actual.getCell(2), is("foo"));
assertThat(actual.getCell(3), is("1"));
assertThat(actual.getCell(4), is("{\"foo\":\"bar\"}"));
} |
@Override
public String toString() {
return "NULL";
} | @Test
public void testToString() {
String expected = "NULL";
assertEquals(expected.trim(), Null.get().toString().trim());
} |
@Override
public void ensureConsumedPast(final long seqNum, final Duration timeout)
throws InterruptedException, TimeoutException {
final CompletableFuture<Void> future =
sequenceNumberFutureStore.getFutureForSequenceNumber(seqNum);
try {
future.get(timeout.toMillis(), TimeUnit.MILLISECONDS);
} catch (final ExecutionException e) {
if (e.getCause() instanceof RuntimeException) {
throw (RuntimeException) e.getCause();
}
throw new RuntimeException(
"Error waiting for command sequence number of " + seqNum, e.getCause());
} catch (final TimeoutException e) {
throw new TimeoutException(
String.format(
"Timeout reached while waiting for command sequence number of %d."
+ " Caused by: %s "
+ "(Timeout: %d ms)",
seqNum,
e.getMessage(),
timeout.toMillis()
));
}
} | @Test
public void shouldWaitOnSequenceNumberFuture() throws Exception {
// When:
commandStore.ensureConsumedPast(2, TIMEOUT);
// Then:
verify(future).get(eq(TIMEOUT.toMillis()), eq(TimeUnit.MILLISECONDS));
} |
static void moveAuthTag(byte[] messageKey,
byte[] cipherText,
byte[] messageKeyWithAuthTag,
byte[] cipherTextWithoutAuthTag) {
// Check dimensions of arrays
if (messageKeyWithAuthTag.length != messageKey.length + 16) {
throw new IllegalArgumentException("Length of messageKeyWithAuthTag must be length of messageKey + " +
"length of AuthTag (16)");
}
if (cipherTextWithoutAuthTag.length != cipherText.length - 16) {
throw new IllegalArgumentException("Length of cipherTextWithoutAuthTag must be length of cipherText " +
"- length of AuthTag (16)");
}
// Move auth tag from cipherText to messageKey
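// 1) copy the 16-byte message key; 2) copy the ciphertext minus its trailing 16-byte
// auth tag; 3) append that auth tag to the message key.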
System.arraycopy(messageKey, 0, messageKeyWithAuthTag, 0, 16);
System.arraycopy(cipherText, 0, cipherTextWithoutAuthTag, 0, cipherTextWithoutAuthTag.length);
System.arraycopy(cipherText, cipherText.length - 16, messageKeyWithAuthTag, 16, 16);
} | @Test(expected = IllegalArgumentException.class)
public void testCheckIllegalMessageKeyWithAuthTagLength() {
byte[] illegalMessageKey = new byte[16 + 15]; // too short
byte[] cipherTextWithoutAuthTag = new byte[35]; // ok
OmemoMessageBuilder.moveAuthTag(messageKey, cipherTextWithAuthTag, illegalMessageKey, cipherTextWithoutAuthTag);
} |
public boolean isShortCircuit() {
return shortCircuit;
} | @Test
public void shortCircuit() {
final CorsConfig cors = forOrigin("http://localhost:8080").shortCircuit().build();
assertThat(cors.isShortCircuit(), is(true));
} |
@Override
public Number parse(final String value) {
try {
return Integer.parseInt(value);
} catch (final NumberFormatException ignored) {
}
try {
return Long.parseLong(value);
} catch (final NumberFormatException ignored) {
}
return new BigDecimal(value);
} | @Test
void assertParseWithBigDecimal() {
assertThat(new PostgreSQLNumericValueParser().parse(Long.MAX_VALUE + "0"), is(new BigDecimal(Long.MAX_VALUE + "0")));
} |
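The fallback chain above (Integer, then Long, then BigDecimal) is easy to observe directly; a few illustrative calls:
PostgreSQLNumericValueParser parser = new PostgreSQLNumericValueParser();
parser.parse("42"); // fits in an int -> Integer
parser.parse("9223372036854775807"); // Long.MAX_VALUE -> Long
parser.parse("92233720368547758070"); // overflows long -> BigDecimal
parser.parse("1.5"); // not an integer literal -> BigDecimal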
public static void addContainerEnvsToExistingEnvs(Reconciliation reconciliation, List<EnvVar> existingEnvs, ContainerTemplate template) {
if (template != null && template.getEnv() != null) {
// Create set of env var names to test if any user defined template env vars will conflict with those set above
Set<String> predefinedEnvs = new HashSet<>();
for (EnvVar envVar : existingEnvs) {
predefinedEnvs.add(envVar.getName());
}
// Set custom env vars from the user defined template
for (ContainerEnvVar containerEnvVar : template.getEnv()) {
if (predefinedEnvs.contains(containerEnvVar.getName())) {
AbstractModel.LOGGER.warnCr(reconciliation, "User defined container template environment variable {} is already in use and will be ignored", containerEnvVar.getName());
} else {
existingEnvs.add(createEnvVar(containerEnvVar.getName(), containerEnvVar.getValue()));
}
}
}
} | @Test
public void testAddContainerToEnvVarsWithConflict() {
ContainerTemplate template = new ContainerTemplateBuilder()
.withEnv(new ContainerEnvVarBuilder().withName("VAR_1").withValue("newValue").build(),
new ContainerEnvVarBuilder().withName("VAR_2").withValue("value2").build())
.build();
List<EnvVar> vars = new ArrayList<>();
vars.add(new EnvVarBuilder().withName("VAR_1").withValue("value1").build());
ContainerUtils.addContainerEnvsToExistingEnvs(Reconciliation.DUMMY_RECONCILIATION, vars, template);
assertThat(vars.size(), is(2));
assertThat(vars.get(0).getName(), is("VAR_1"));
assertThat(vars.get(0).getValue(), is("value1"));
assertThat(vars.get(1).getName(), is("VAR_2"));
assertThat(vars.get(1).getValue(), is("value2"));
} |
public CompletableFuture<SendResult> sendMessageAsync(
String brokerAddr,
String brokerName,
Message msg,
SendMessageRequestHeader requestHeader,
long timeoutMillis
) {
SendMessageRequestHeaderV2 requestHeaderV2 = SendMessageRequestHeaderV2.createSendMessageRequestHeaderV2(requestHeader);
RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.SEND_MESSAGE_V2, requestHeaderV2);
request.setBody(msg.getBody());
return this.getRemotingClient().invoke(brokerAddr, request, timeoutMillis).thenCompose(response -> {
CompletableFuture<SendResult> future0 = new CompletableFuture<>();
try {
future0.complete(this.processSendResponse(brokerName, msg, response, brokerAddr));
} catch (Exception e) {
future0.completeExceptionally(e);
}
return future0;
});
} | @Test
public void sendMessageAsync() {
String topic = "test";
Message msg = new Message(topic, "test".getBytes());
SendMessageRequestHeader requestHeader = new SendMessageRequestHeader();
requestHeader.setTopic(topic);
requestHeader.setProducerGroup("test");
requestHeader.setDefaultTopic("test");
requestHeader.setDefaultTopicQueueNums(1);
requestHeader.setQueueId(0);
requestHeader.setSysFlag(0);
requestHeader.setBornTimestamp(0L);
requestHeader.setFlag(0);
requestHeader.setProperties("test");
requestHeader.setReconsumeTimes(0);
requestHeader.setUnitMode(false);
requestHeader.setBatch(false);
CompletableFuture<SendResult> future = mqClientAPIExt.sendMessageAsync("127.0.0.1:10911", "test", msg, requestHeader, 10);
assertThatThrownBy(future::get).getCause().isInstanceOf(RemotingTimeoutException.class);
} |
@Override
protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
if (remaining.split("/").length > 1) {
throw new IllegalArgumentException("Invalid URI: " + URISupport.sanitizeUri(uri));
}
SplunkHECEndpoint answer = new SplunkHECEndpoint(uri, this, new SplunkHECConfiguration());
setProperties(answer, parameters);
answer.setSplunkURL(remaining);
return answer;
} | @Test
public void testInvalidHostname() throws Exception {
Endpoint endpoint = component.createEndpoint(
"splunk-hec:yo,lo:1234?token=11111111-1111-1111-1111-111111111111");
Exception e = assertThrows(IllegalArgumentException.class, endpoint::init);
assertEquals("Invalid hostname: yo,lo", e.getMessage());
} |
protected void declareConstraintIn(final String patternType, final List<Object> values) {
String constraints = getInNotInConstraint(values);
builder.pattern(patternType).constraint(constraints);
} | @Test
void declareConstraintIn() {
List<Object> values = Arrays.asList("-5", "0.5", "1", "10");
String patternType = "INPUT1";
KiePMMLDescrLhsFactory.factory(lhsBuilder).declareConstraintIn(patternType, values);
final List<BaseDescr> descrs = lhsBuilder.getDescr().getDescrs();
assertThat(descrs).isNotNull();
assertThat(descrs).hasSize(1);
assertThat(descrs.get(0)).isInstanceOf(PatternDescr.class);
PatternDescr patternDescr = (PatternDescr) descrs.get(0);
assertThat(patternDescr.getObjectType()).isEqualTo(patternType);
assertThat(patternDescr.getIdentifier()).isNull();
assertThat(patternDescr.getConstraint()).isInstanceOf(AndDescr.class);
AndDescr andDescr = (AndDescr) patternDescr.getConstraint();
assertThat(andDescr.getDescrs()).hasSize(1);
assertThat(andDescr.getDescrs().get(0)).isInstanceOf(ExprConstraintDescr.class);
ExprConstraintDescr exprConstraintDescr = (ExprConstraintDescr) andDescr.getDescrs().get(0);
assertThat(exprConstraintDescr.isNegated()).isFalse();
assertThat(exprConstraintDescr.getType()).isEqualTo(ExprConstraintDescr.Type.NAMED);
String expected = "value in (-5, 0.5, 1, 10)";
assertThat(exprConstraintDescr.getExpression()).isEqualTo(expected);
} |
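getInNotInConstraint is not shown in this excerpt; a hypothetical reconstruction for the IN case, consistent with the expression the test expects ("value in (-5, 0.5, 1, 10)"):
// Hypothetical sketch; requires java.util.List and java.util.StringJoiner.
static String getInNotInConstraint(final List<Object> values) {
    final StringJoiner joiner = new StringJoiner(", ", "value in (", ")");
    values.forEach(value -> joiner.add(value.toString()));
    return joiner.toString(); // e.g. "value in (-5, 0.5, 1, 10)"
}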
@Override
public <VO, VR> KStream<K, VR> join(final KStream<K, VO> otherStream,
final ValueJoiner<? super V, ? super VO, ? extends VR> joiner,
final JoinWindows windows) {
return join(otherStream, toValueJoinerWithKey(joiner), windows);
} | @Test
public void shouldNotAllowNullTableOnTableJoin() {
final NullPointerException exception = assertThrows(
NullPointerException.class,
() -> testStream.join(null, MockValueJoiner.TOSTRING_JOINER));
assertThat(exception.getMessage(), equalTo("table can't be null"));
} |
public String getMessage(String key, String... params) {
if (StringUtils.isBlank(key)) {
return null;
}
StringBuilder sb = new StringBuilder();
sb.append(getFormattedMessage(key));
String msg = parseStringValue(sb.toString(), new HashSet<String>());
if (params == null || params.length == 0) {
return msg;
}
if (StringUtils.isBlank(msg)) {
return msg;
}
return MessageFormat.format(msg, (Object[])params);
} | @Test
void testGetMessage() {
ResourceBundleUtil resourceBundleUtil = ResourceBundleUtil.getInstance();
String emptyKeyMsg = resourceBundleUtil.getMessage("", ErrorCode.ERR_CONFIG.getCode(),
ErrorCode.ERR_CONFIG.getType());
Assertions.assertNull(emptyKeyMsg);
String errorConfigMsg = resourceBundleUtil.getMessage(ErrorCode.ERR_CONFIG.name(),
ErrorCode.ERR_CONFIG.getCode(), ErrorCode.ERR_CONFIG.getType());
Assertions.assertEquals("ERR-CODE: [Seata-1][ERR_CONFIG] config error, {0} More: [https://seata.apache"
+ ".org/docs/next/overview/faq#1]", errorConfigMsg);
String errorConfigMsgWithParams = resourceBundleUtil.getMessage(ErrorCode.ERR_CONFIG.name(),
ErrorCode.ERR_CONFIG.getCode(), ErrorCode.ERR_CONFIG.getType(), "vgroup_mapping_test");
Assertions.assertEquals(
"ERR-CODE: [Seata-1][ERR_CONFIG] config error, vgroup_mapping_test More: [https://seata.apache"
+ ".org/docs/next/overview/faq#1]", errorConfigMsgWithParams);
} |
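The final MessageFormat.format call is what substitutes the {0}, {1}, ... placeholders; a standalone illustration of just that step (not the bundle lookup or the nested-value parsing):
// Uses java.text.MessageFormat, mirroring the last line of getMessage.
String template = "config error, {0}";
String filled = MessageFormat.format(template, "vgroup_mapping_test");
// filled.equals("config error, vgroup_mapping_test")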
public void decode(ByteBuf buffer) {
boolean last;
int statusCode;
while (true) {
switch(state) {
case READ_COMMON_HEADER:
if (buffer.readableBytes() < SPDY_HEADER_SIZE) {
return;
}
int frameOffset = buffer.readerIndex();
int flagsOffset = frameOffset + SPDY_HEADER_FLAGS_OFFSET;
int lengthOffset = frameOffset + SPDY_HEADER_LENGTH_OFFSET;
buffer.skipBytes(SPDY_HEADER_SIZE);
boolean control = (buffer.getByte(frameOffset) & 0x80) != 0;
int version;
int type;
if (control) {
// Decode control frame common header
version = getUnsignedShort(buffer, frameOffset) & 0x7FFF;
type = getUnsignedShort(buffer, frameOffset + SPDY_HEADER_TYPE_OFFSET);
streamId = 0; // Default to session Stream-ID
} else {
// Decode data frame common header
version = spdyVersion; // Default to expected version
type = SPDY_DATA_FRAME;
streamId = getUnsignedInt(buffer, frameOffset);
}
flags = buffer.getByte(flagsOffset);
length = getUnsignedMedium(buffer, lengthOffset);
// Check version first then validity
if (version != spdyVersion) {
state = State.FRAME_ERROR;
delegate.readFrameError("Invalid SPDY Version");
} else if (!isValidFrameHeader(streamId, type, flags, length)) {
state = State.FRAME_ERROR;
delegate.readFrameError("Invalid Frame Error");
} else {
state = getNextState(type, length);
}
break;
case READ_DATA_FRAME:
if (length == 0) {
state = State.READ_COMMON_HEADER;
delegate.readDataFrame(streamId, hasFlag(flags, SPDY_DATA_FLAG_FIN), Unpooled.buffer(0));
break;
}
// Generate data frames that do not exceed maxChunkSize
int dataLength = Math.min(maxChunkSize, length);
// Wait until entire frame is readable
if (buffer.readableBytes() < dataLength) {
return;
}
ByteBuf data = buffer.alloc().buffer(dataLength);
data.writeBytes(buffer, dataLength);
length -= dataLength;
if (length == 0) {
state = State.READ_COMMON_HEADER;
}
last = length == 0 && hasFlag(flags, SPDY_DATA_FLAG_FIN);
delegate.readDataFrame(streamId, last, data);
break;
case READ_SYN_STREAM_FRAME:
if (buffer.readableBytes() < 10) {
return;
}
int offset = buffer.readerIndex();
streamId = getUnsignedInt(buffer, offset);
int associatedToStreamId = getUnsignedInt(buffer, offset + 4);
byte priority = (byte) (buffer.getByte(offset + 8) >> 5 & 0x07);
last = hasFlag(flags, SPDY_FLAG_FIN);
boolean unidirectional = hasFlag(flags, SPDY_FLAG_UNIDIRECTIONAL);
buffer.skipBytes(10);
length -= 10;
if (streamId == 0) {
state = State.FRAME_ERROR;
delegate.readFrameError("Invalid SYN_STREAM Frame");
} else {
state = State.READ_HEADER_BLOCK;
delegate.readSynStreamFrame(streamId, associatedToStreamId, priority, last, unidirectional);
}
break;
case READ_SYN_REPLY_FRAME:
if (buffer.readableBytes() < 4) {
return;
}
streamId = getUnsignedInt(buffer, buffer.readerIndex());
last = hasFlag(flags, SPDY_FLAG_FIN);
buffer.skipBytes(4);
length -= 4;
if (streamId == 0) {
state = State.FRAME_ERROR;
delegate.readFrameError("Invalid SYN_REPLY Frame");
} else {
state = State.READ_HEADER_BLOCK;
delegate.readSynReplyFrame(streamId, last);
}
break;
case READ_RST_STREAM_FRAME:
if (buffer.readableBytes() < 8) {
return;
}
streamId = getUnsignedInt(buffer, buffer.readerIndex());
statusCode = getSignedInt(buffer, buffer.readerIndex() + 4);
buffer.skipBytes(8);
if (streamId == 0 || statusCode == 0) {
state = State.FRAME_ERROR;
delegate.readFrameError("Invalid RST_STREAM Frame");
} else {
state = State.READ_COMMON_HEADER;
delegate.readRstStreamFrame(streamId, statusCode);
}
break;
case READ_SETTINGS_FRAME:
if (buffer.readableBytes() < 4) {
return;
}
boolean clear = hasFlag(flags, SPDY_SETTINGS_CLEAR);
numSettings = getUnsignedInt(buffer, buffer.readerIndex());
buffer.skipBytes(4);
length -= 4;
// Validate frame length against number of entries. Each ID/Value entry is 8 bytes.
if ((length & 0x07) != 0 || length >> 3 != numSettings) {
state = State.FRAME_ERROR;
delegate.readFrameError("Invalid SETTINGS Frame");
} else {
state = State.READ_SETTING;
delegate.readSettingsFrame(clear);
}
break;
case READ_SETTING:
if (numSettings == 0) {
state = State.READ_COMMON_HEADER;
delegate.readSettingsEnd();
break;
}
if (buffer.readableBytes() < 8) {
return;
}
byte settingsFlags = buffer.getByte(buffer.readerIndex());
int id = getUnsignedMedium(buffer, buffer.readerIndex() + 1);
int value = getSignedInt(buffer, buffer.readerIndex() + 4);
boolean persistValue = hasFlag(settingsFlags, SPDY_SETTINGS_PERSIST_VALUE);
boolean persisted = hasFlag(settingsFlags, SPDY_SETTINGS_PERSISTED);
buffer.skipBytes(8);
--numSettings;
delegate.readSetting(id, value, persistValue, persisted);
break;
case READ_PING_FRAME:
if (buffer.readableBytes() < 4) {
return;
}
int pingId = getSignedInt(buffer, buffer.readerIndex());
buffer.skipBytes(4);
state = State.READ_COMMON_HEADER;
delegate.readPingFrame(pingId);
break;
case READ_GOAWAY_FRAME:
if (buffer.readableBytes() < 8) {
return;
}
int lastGoodStreamId = getUnsignedInt(buffer, buffer.readerIndex());
statusCode = getSignedInt(buffer, buffer.readerIndex() + 4);
buffer.skipBytes(8);
state = State.READ_COMMON_HEADER;
delegate.readGoAwayFrame(lastGoodStreamId, statusCode);
break;
case READ_HEADERS_FRAME:
if (buffer.readableBytes() < 4) {
return;
}
streamId = getUnsignedInt(buffer, buffer.readerIndex());
last = hasFlag(flags, SPDY_FLAG_FIN);
buffer.skipBytes(4);
length -= 4;
if (streamId == 0) {
state = State.FRAME_ERROR;
delegate.readFrameError("Invalid HEADERS Frame");
} else {
state = State.READ_HEADER_BLOCK;
delegate.readHeadersFrame(streamId, last);
}
break;
case READ_WINDOW_UPDATE_FRAME:
if (buffer.readableBytes() < 8) {
return;
}
streamId = getUnsignedInt(buffer, buffer.readerIndex());
int deltaWindowSize = getUnsignedInt(buffer, buffer.readerIndex() + 4);
buffer.skipBytes(8);
if (deltaWindowSize == 0) {
state = State.FRAME_ERROR;
delegate.readFrameError("Invalid WINDOW_UPDATE Frame");
} else {
state = State.READ_COMMON_HEADER;
delegate.readWindowUpdateFrame(streamId, deltaWindowSize);
}
break;
case READ_HEADER_BLOCK:
if (length == 0) {
state = State.READ_COMMON_HEADER;
delegate.readHeaderBlockEnd();
break;
}
if (!buffer.isReadable()) {
return;
}
int compressedBytes = Math.min(buffer.readableBytes(), length);
ByteBuf headerBlock = buffer.alloc().buffer(compressedBytes);
headerBlock.writeBytes(buffer, compressedBytes);
length -= compressedBytes;
delegate.readHeaderBlock(headerBlock);
break;
case DISCARD_FRAME:
int numBytes = Math.min(buffer.readableBytes(), length);
buffer.skipBytes(numBytes);
length -= numBytes;
if (length == 0) {
state = State.READ_COMMON_HEADER;
break;
}
return;
case FRAME_ERROR:
buffer.skipBytes(buffer.readableBytes());
return;
default:
throw new Error("Shouldn't reach here.");
}
}
} | @Test
public void testUnknownSpdySettingsFrameFlags() throws Exception {
short type = 4;
byte flags = (byte) 0xFE; // undefined flags
int numSettings = 0;
int length = 8 * numSettings + 4;
ByteBuf buf = Unpooled.buffer(SPDY_HEADER_SIZE + length);
encodeControlFrameHeader(buf, type, flags, length);
buf.writeInt(numSettings);
decoder.decode(buf);
verify(delegate).readSettingsFrame(false);
verify(delegate).readSettingsEnd();
assertFalse(buf.isReadable());
buf.release();
} |
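encodeControlFrameHeader is an elided test helper; a hypothetical sketch that matches the common-header layout decode() parses above (control bit plus 15-bit version, 16-bit type, 8-bit flags, 24-bit length), assuming SPDY version 3:
// Hypothetical helper; mirrors the offsets read in READ_COMMON_HEADER, version 3 assumed.
private static void encodeControlFrameHeader(ByteBuf buffer, short type, byte flags, int length) {
    buffer.writeShort(0x8000 | 3); // control bit set, version 3
    buffer.writeShort(type);
    buffer.writeByte(flags);
    buffer.writeMedium(length); // 24-bit unsigned length
}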
public List<String> toPrefix(String in) {
List<String> tokens = buildTokens(alignINClause(in));
List<String> output = new ArrayList<>();
List<String> stack = new ArrayList<>();
for (String token : tokens) {
if (isOperand(token)) {
if (token.equals(")")) {
while (openParanthesesFound(stack)) {
output.add(stack.remove(stack.size() - 1));
}
if (!stack.isEmpty()) {
// temporary fix for issue #189
stack.remove(stack.size() - 1);
}
} else {
while (openParanthesesFound(stack) && !hasHigherPrecedence(token, stack.get(stack.size() - 1))) {
output.add(stack.remove(stack.size() - 1));
}
stack.add(token);
}
} else {
output.add(token);
}
}
while (!stack.isEmpty()) {
output.add(stack.remove(stack.size() - 1));
}
return output;
} | @Test
public void shouldNotThrowOnRandomInput() {
Random random = new SecureRandom();
StringBuilder stringBuilder = new StringBuilder();
for (int i = 0; i < 1000; i++) {
stringBuilder.setLength(0);
for (int n = 0; n < 1000; n++) {
stringBuilder.append((char) (random.nextInt() & 0xFFFF));
}
parser.toPrefix(stringBuilder.toString());
}
} |
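A small illustrative call for toPrefix above; note that despite its name the loop is a shunting-yard pass, so plain tokens are emitted as they arrive and stacked operators follow when popped (the exact tokenization depends on the elided buildTokens and precedence tables):
List<String> converted = parser.toPrefix("a = 1 AND b = 2"); // illustrative selector expression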
@Override
public void finalizeCheckpoint() {
try {
checkState(committer.isPresent());
committer.get().get().commitOffset(offset);
} catch (Exception e) {
logger.warn("Failed to finalize checkpoint.", e);
}
} | @Test
public void testFinalize() throws Exception {
mark.finalizeCheckpoint();
verify(committer).commitOffset(OFFSET);
} |
public synchronized ApplicationDescription saveApplication(InputStream stream) {
try (InputStream ais = stream) {
byte[] cache = toByteArray(ais);
InputStream bis = new ByteArrayInputStream(cache);
boolean plainXml = isPlainXml(cache);
ApplicationDescription desc = plainXml ?
parsePlainAppDescription(bis) : parseZippedAppDescription(bis);
checkState(!appFile(desc.name(), APP_XML).exists(),
"Application %s already installed", desc.name());
if (plainXml) {
expandPlainApplication(cache, desc);
} else {
bis.reset();
boolean isSelfContainedJar = expandZippedApplication(bis, desc);
if (isSelfContainedJar) {
bis.reset();
stageSelfContainedJar(bis, desc);
}
/*
* Reset the ZIP file and reparse the app description now
* that the ZIP is expanded onto the filesystem. This way any
* file referenced as part of the description (i.e. app.png)
* can be loaded into the app description.
*/
bis.reset();
desc = parseZippedAppDescription(bis);
bis.reset();
saveApplication(bis, desc, isSelfContainedJar);
}
installArtifacts(desc);
return desc;
} catch (IOException e) {
throw new ApplicationException("Unable to save application", e);
}
} | @Test
public void saveSelfContainedApp() throws IOException {
InputStream stream = getClass().getResourceAsStream("app.scj");
ApplicationDescription app = aar.saveApplication(stream);
validate(app);
stream.close();
} |
@SuppressWarnings("unchecked")
public static <S, F> S visit(final SqlType type, final SqlTypeWalker.Visitor<S, F> visitor) {
final BiFunction<SqlTypeWalker.Visitor<?, ?>, SqlType, Object> handler = HANDLER
.get(type.baseType());
if (handler == null) {
throw new UnsupportedOperationException("Unsupported schema type: " + type.baseType());
}
return (S) handler.apply(visitor, type);
} | @Test
public void shouldVisitInt() {
// Given:
final SqlPrimitiveType type = SqlTypes.INTEGER;
when(visitor.visitInt(any())).thenReturn("Expected");
// When:
final String result = SqlTypeWalker.visit(type, visitor);
// Then:
verify(visitor).visitInt(same(type));
assertThat(result, is("Expected"));
} |
@GET
@Produces(MediaType.APPLICATION_JSON)
@Operation(summary = "Get prekey count",
description = "Gets the number of one-time prekeys uploaded for this device and still available")
@ApiResponse(responseCode = "200", description = "Body contains the number of available one-time prekeys for the device.", useReturnTypeSchema = true)
@ApiResponse(responseCode = "401", description = "Account authentication check failed.")
public CompletableFuture<PreKeyCount> getStatus(@ReadOnly @Auth final AuthenticatedDevice auth,
@QueryParam("identity") @DefaultValue("aci") final IdentityType identityType) {
final CompletableFuture<Integer> ecCountFuture =
keysManager.getEcCount(auth.getAccount().getIdentifier(identityType), auth.getAuthenticatedDevice().getId());
final CompletableFuture<Integer> pqCountFuture =
keysManager.getPqCount(auth.getAccount().getIdentifier(identityType), auth.getAuthenticatedDevice().getId());
return ecCountFuture.thenCombine(pqCountFuture, PreKeyCount::new);
} | @Test
void testMalformedUnidentifiedRequest() {
Response response = resources.getJerseyTest()
.target(String.format("/v2/keys/%s/1", EXISTS_UUID))
.request()
.header(HeaderUtils.UNIDENTIFIED_ACCESS_KEY, "$$$$$$$$$")
.get();
assertThat(response.getStatus()).isEqualTo(401);
verifyNoMoreInteractions(KEYS);
} |
@Override
public void executeWithLock(Runnable task, LockConfiguration lockConfig) {
try {
executeWithLock((Task) task::run, lockConfig);
} catch (RuntimeException | Error e) {
throw e;
} catch (Throwable throwable) {
// Should not happen
throw new IllegalStateException(throwable);
}
} | @Test
void lockShouldBeReentrant() {
mockLockFor(lockConfig);
AtomicBoolean called = new AtomicBoolean(false);
executor.executeWithLock(
(Runnable) () -> executor.executeWithLock((Runnable) () -> called.set(true), lockConfig), lockConfig);
assertThat(called.get()).isTrue();
} |
public static Type getUnknownType() {
Type unknownType;
try {
unknownType = Type.valueOf("UNKNOWN");
} catch (IllegalArgumentException e) {
unknownType = Type.valueOf("UNTYPED");
}
return unknownType;
} | @Test
public void getGetUnknownType() {
Assertions.assertDoesNotThrow(() -> {
PrometheusExporter.getUnknownType();
});
} |
@Override
public List<Map<String, String>> taskConfigs(int maxTasks) {
// if heartbeat emission is disabled by setting `emit.heartbeats.enabled` to `false`,
// the heartbeat emission interval will be negative and no `MirrorHeartbeatTask` will be created
if (config.emitHeartbeatsInterval().isNegative()) {
return Collections.emptyList();
}
// just need a single task
return Collections.singletonList(config.originalsStrings());
} | @Test
public void testMirrorHeartbeatConnectorDisabled() {
// disable the heartbeat emission
MirrorHeartbeatConfig config = new MirrorHeartbeatConfig(
makeProps("emit.heartbeats.enabled", "false"));
// a MirrorHeartbeatConnector is the minimum needed to run taskConfigs()
MirrorHeartbeatConnector connector = new MirrorHeartbeatConnector(config);
List<Map<String, String>> output = connector.taskConfigs(1);
// expect no task will be created
assertEquals(0, output.size(), "Expected task to not be created");
} |
public void checkIfComponentNeedIssueSync(DbSession dbSession, String componentKey) {
checkIfAnyComponentsNeedIssueSync(dbSession, Collections.singletonList(componentKey));
} | @Test
public void checkIfComponentNeedIssueSync_single_component() {
ProjectData projectData1 = insertProjectWithBranches(true, 0);
ProjectData projectData2 = insertProjectWithBranches(false, 0);
DbSession session = db.getSession();
// does nothing when needIssueSync is false
underTest.checkIfComponentNeedIssueSync(session, projectData2.getProjectDto().getKey());
// throws when needIssueSync is true
String key = projectData1.getProjectDto().getKey();
assertThatThrownBy(() -> underTest.checkIfComponentNeedIssueSync(session, key))
.isInstanceOf(EsIndexSyncInProgressException.class)
.hasFieldOrPropertyWithValue("httpCode", 503)
.hasMessage("Results are temporarily unavailable. Indexing of issues is in progress.");
} |
@Udf
public List<String> keys(@UdfParameter final String jsonObj) {
if (jsonObj == null) {
return null;
}
final JsonNode node = UdfJsonMapper.parseJson(jsonObj);
if (node.isMissingNode() || !node.isObject()) {
return null;
}
final List<String> ret = new ArrayList<>();
node.fieldNames().forEachRemaining(ret::add);
return ret;
} | @Test
public void shouldReturnNullForArray() {
assertNull(udf.keys("[]"));
} |
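A few illustrative inputs for the keys UDF above:
udf.keys("{\"a\": 1, \"b\": {\"c\": 2}}"); // ["a", "b"] - top-level field names only
udf.keys("null"); // null - valid JSON, but not an object
udf.keys(null); // null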
public static void getSemanticPropsDualFromString(
DualInputSemanticProperties result,
String[] forwardedFirst,
String[] forwardedSecond,
String[] nonForwardedFirst,
String[] nonForwardedSecond,
String[] readFieldsFirst,
String[] readFieldsSecond,
TypeInformation<?> inType1,
TypeInformation<?> inType2,
TypeInformation<?> outType) {
getSemanticPropsDualFromString(
result,
forwardedFirst,
forwardedSecond,
nonForwardedFirst,
nonForwardedSecond,
readFieldsFirst,
readFieldsSecond,
inType1,
inType2,
outType,
false);
} | @Test
void testReadFieldsDual() {
String[] readFieldsFirst = {"f1;f2"};
String[] readFieldsSecond = {"f0"};
DualInputSemanticProperties dsp = new DualInputSemanticProperties();
SemanticPropUtil.getSemanticPropsDualFromString(
dsp,
null,
null,
null,
null,
readFieldsFirst,
readFieldsSecond,
threeIntTupleType,
threeIntTupleType,
threeIntTupleType);
assertThat(dsp.getReadFields(0)).containsExactly(1, 2);
assertThat(dsp.getReadFields(1)).containsExactly(0);
readFieldsFirst[0] = "f0.*; f2";
readFieldsSecond[0] = "int1; string1";
dsp = new DualInputSemanticProperties();
SemanticPropUtil.getSemanticPropsDualFromString(
dsp,
null,
null,
null,
null,
readFieldsFirst,
readFieldsSecond,
nestedTupleType,
pojoType,
threeIntTupleType);
assertThat(dsp.getReadFields(0)).containsExactly(0, 1, 2, 4);
assertThat(dsp.getReadFields(1)).containsExactly(0, 3);
readFieldsFirst[0] = "pojo1.int2; string1";
readFieldsSecond[0] = "f2.int2";
dsp = new DualInputSemanticProperties();
SemanticPropUtil.getSemanticPropsDualFromString(
dsp,
null,
null,
null,
null,
readFieldsFirst,
readFieldsSecond,
nestedPojoType,
pojoInTupleType,
threeIntTupleType);
assertThat(dsp.getReadFields(0)).hasSize(2).contains(2, 5);
assertThat(dsp.getReadFields(1)).containsExactly(3);
String[] readFields = {"f0", "f2", "f4"};
dsp = new DualInputSemanticProperties();
SemanticPropUtil.getSemanticPropsDualFromString(
dsp,
null,
null,
null,
null,
readFields,
readFields,
fiveIntTupleType,
fiveIntTupleType,
threeIntTupleType);
assertThat(dsp.getReadFields(0)).containsExactly(0, 2, 4);
assertThat(dsp.getReadFields(1)).containsExactly(0, 2, 4);
} |
public PrimaryKey getHashKey() {
return hashKey;
} | @Test
public void testEqualsWithKeys() {
{
HollowSetSchema s1 = new HollowSetSchema("Test", "TypeA", "f1");
HollowSetSchema s2 = new HollowSetSchema("Test", "TypeA", "f1");
Assert.assertEquals(s1, s2);
Assert.assertEquals(s1.getHashKey(), s2.getHashKey());
Assert.assertEquals(new PrimaryKey("TypeA", "f1"), s2.getHashKey());
}
{
HollowSetSchema s1 = new HollowSetSchema("Test", "TypeA", "f1", "f2");
HollowSetSchema s2 = new HollowSetSchema("Test", "TypeA", "f1", "f2");
Assert.assertEquals(s1, s2);
Assert.assertEquals(s1.getHashKey(), s2.getHashKey());
Assert.assertEquals(new PrimaryKey("TypeA", "f1", "f2"), s2.getHashKey());
}
{
HollowSetSchema s1 = new HollowSetSchema("Test", "TypeA");
HollowSetSchema s2 = new HollowSetSchema("Test", "TypeA", "f1");
Assert.assertNotEquals(s1, s2);
Assert.assertNotEquals(s1.getHashKey(), s2.getHashKey());
}
{
HollowSetSchema s1 = new HollowSetSchema("Test", "TypeA", "f1");
HollowSetSchema s2 = new HollowSetSchema("Test", "TypeA", "f1", "f2");
Assert.assertNotEquals(s1, s2);
Assert.assertNotEquals(s1.getHashKey(), s2.getHashKey());
}
} |
public static void delete(final File file, final boolean ignoreFailures)
{
if (file.exists())
{
if (file.isDirectory())
{
final File[] files = file.listFiles();
if (null != files)
{
for (final File f : files)
{
delete(f, ignoreFailures);
}
}
}
if (!file.delete() && !ignoreFailures)
{
try
{
Files.delete(file.toPath());
}
catch (final IOException ex)
{
LangUtil.rethrowUnchecked(ex);
}
}
}
} | @Test
void deleteIgnoreFailuresDirectory() throws IOException
{
final Path dir2 = tempDir.resolve("dir1").resolve("dir2");
Files.createDirectories(dir2);
Files.createFile(dir2.resolve("file2.txt"));
Files.createFile(dir2.getParent().resolve("file1.txt"));
final File dir = dir2.getParent().toFile();
IoUtil.delete(dir, false);
assertFalse(dir.exists());
assertFalse(Files.exists(dir2));
} |
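A short usage sketch for the recursive delete above; the flag decides whether a failed delete is silently skipped or retried through Files.delete so the IOException surfaces:
File scratch = new File(System.getProperty("java.io.tmpdir"), "scratch-dir"); // illustrative path
IoUtil.delete(scratch, true); // best effort: failures are ignored
IoUtil.delete(scratch, false); // strict: a failed delete is rethrown unchecked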
@Nullable public String localServiceName() {
return localServiceName;
} | @Test void localServiceNameCoercesEmptyToNull() {
MutableSpan span = new MutableSpan();
span.localServiceName("FavStar");
span.localServiceName("");
assertThat(span.localServiceName()).isNull();
} |
@Override
public Optional<DevOpsProjectCreator> getDevOpsProjectCreator(DbSession dbSession, Map<String, String> characteristics) {
String githubApiUrl = characteristics.get(DEVOPS_PLATFORM_URL);
String githubRepository = characteristics.get(DEVOPS_PLATFORM_PROJECT_IDENTIFIER);
if (githubApiUrl == null || githubRepository == null) {
return Optional.empty();
}
DevOpsProjectDescriptor devOpsProjectDescriptor = new DevOpsProjectDescriptor(ALM.GITHUB, githubApiUrl, githubRepository, null);
return dbClient.almSettingDao().selectByAlm(dbSession, ALM.GITHUB).stream()
.filter(almSettingDto -> devOpsProjectDescriptor.url().equals(almSettingDto.getUrl()))
.map(almSettingDto -> findInstallationIdAndCreateDevOpsProjectCreator(devOpsProjectDescriptor, almSettingDto))
.flatMap(Optional::stream)
.findFirst();
} | @Test
public void getDevOpsProjectCreator_whenAppHasNoAccessToRepo_shouldReturnEmpty() {
mockAlmSettingDto(true);
when(githubApplicationClient.getInstallationId(any(), eq(GITHUB_REPO_FULL_NAME))).thenReturn(Optional.empty());
Optional<DevOpsProjectCreator> devOpsProjectCreator = githubProjectCreatorFactory.getDevOpsProjectCreator(dbSession, VALID_GITHUB_PROJECT_COORDINATES);
assertThat(devOpsProjectCreator).isEmpty();
} |
public String doLayout(ILoggingEvent event) {
if (!isStarted()) {
return CoreConstants.EMPTY_STRING;
}
return writeLoopOnConverters(event);
} | @Test
public void testOK() {
pl.setPattern("%d %le [%t] %lo{30} - %m%n");
pl.start();
String val = pl.doLayout(getEventObject());
// 2006-02-01 22:38:06,212 INFO [main] c.q.l.pattern.ConverterTest - Some
// message
// 2010-12-29 19:04:26,137 INFO [pool-1-thread-47] c.q.l.c.pattern.ConverterTest
// - Some message
String regex = ISO_REGEX + " INFO " + MAIN_REGEX + " c.q.l.c.pattern.ConverterTest - Some message\\s*";
assertTrue( val.matches(regex), "val=" + val);
} |
public boolean isMatch(
Invocation invocation, Map<String, String> sourceLabels, Set<TracingContextProvider> contextProviders) {
// Match method
if (getMethod() != null) {
if (!getMethod().isMatch(invocation)) {
return false;
}
}
// Match Source Labels
if (getSourceLabels() != null) {
for (Map.Entry<String, String> entry : getSourceLabels().entrySet()) {
String value = sourceLabels.get(entry.getKey());
if (!entry.getValue().equals(value)) {
return false;
}
}
}
// Match attachment
if (getAttachments() != null) {
return getAttachments().isMatch(invocation, contextProviders);
}
// TODO Match headers
return true;
} | @Test
void isMatch() {
DubboMatchRequest dubboMatchRequest = new DubboMatchRequest();
// methodMatch
DubboMethodMatch dubboMethodMatch = new DubboMethodMatch();
StringMatch nameStringMatch = new StringMatch();
nameStringMatch.setExact("sayHello");
dubboMethodMatch.setName_match(nameStringMatch);
dubboMatchRequest.setMethod(dubboMethodMatch);
RpcInvocation rpcInvocation = new RpcInvocation();
rpcInvocation.setMethodName("sayHello");
assertTrue(dubboMatchRequest.isMatch(rpcInvocation, new HashMap<>(), Collections.emptySet()));
rpcInvocation.setMethodName("satHi");
assertFalse(dubboMatchRequest.isMatch(rpcInvocation, new HashMap<>(), Collections.emptySet()));
// sourceLabels
Map<String, String> sourceLabels = new HashMap<>();
sourceLabels.put("key1", "value1");
sourceLabels.put("key2", "value2");
dubboMatchRequest.setSourceLabels(sourceLabels);
Map<String, String> inputSourceLabelsMap = new HashMap<>();
inputSourceLabelsMap.put("key1", "value1");
inputSourceLabelsMap.put("key2", "value2");
inputSourceLabelsMap.put("key3", "value3");
Map<String, String> inputSourceLabelsMap2 = new HashMap<>();
inputSourceLabelsMap2.put("key1", "other");
inputSourceLabelsMap2.put("key2", "value2");
inputSourceLabelsMap2.put("key3", "value3");
rpcInvocation.setMethodName("sayHello");
assertTrue(dubboMatchRequest.isMatch(rpcInvocation, inputSourceLabelsMap, Collections.emptySet()));
assertFalse(dubboMatchRequest.isMatch(rpcInvocation, inputSourceLabelsMap2, Collections.emptySet()));
// tracingContext
DubboAttachmentMatch dubboAttachmentMatch = new DubboAttachmentMatch();
Map<String, StringMatch> tracingContextMatchMap = new HashMap<>();
StringMatch nameMatch = new StringMatch();
nameMatch.setExact("qinliujie");
tracingContextMatchMap.put("name", nameMatch);
dubboAttachmentMatch.setTracingContext(tracingContextMatchMap);
dubboMatchRequest.setAttachments(dubboAttachmentMatch);
Map<String, String> invokeTracingContextMap = new HashMap<>();
invokeTracingContextMap.put("name", "qinliujie");
invokeTracingContextMap.put("machineGroup", "test_host");
invokeTracingContextMap.put("other", "other");
TracingContextProvider tracingContextProvider = (invocation, key) -> invokeTracingContextMap.get(key);
assertTrue(dubboMatchRequest.isMatch(
rpcInvocation, inputSourceLabelsMap, Collections.singleton(tracingContextProvider)));
Map<String, String> invokeTracingContextMap2 = new HashMap<>();
invokeTracingContextMap2.put("name", "jack");
invokeTracingContextMap2.put("machineGroup", "test_host");
invokeTracingContextMap2.put("other", "other");
TracingContextProvider tracingContextProvider2 = (invocation, key) -> invokeTracingContextMap2.get(key);
assertFalse(dubboMatchRequest.isMatch(
rpcInvocation, inputSourceLabelsMap, Collections.singleton(tracingContextProvider2)));
// dubbo context
dubboAttachmentMatch = new DubboAttachmentMatch();
Map<String, StringMatch> eagleeyecontextMatchMap = new HashMap<>();
nameMatch = new StringMatch();
nameMatch.setExact("qinliujie");
eagleeyecontextMatchMap.put("name", nameMatch);
dubboAttachmentMatch.setTracingContext(eagleeyecontextMatchMap);
Map<String, StringMatch> dubboContextMatchMap = new HashMap<>();
StringMatch dpathMatch = new StringMatch();
dpathMatch.setExact("PRE");
dubboContextMatchMap.put("dpath", dpathMatch);
dubboAttachmentMatch.setDubboContext(dubboContextMatchMap);
dubboMatchRequest.setAttachments(dubboAttachmentMatch);
Map<String, String> invokeDubboContextMap = new HashMap<>();
invokeDubboContextMap.put("dpath", "PRE");
rpcInvocation.setAttachments(invokeDubboContextMap);
TracingContextProvider tracingContextProvider3 = (invocation, key) -> invokeTracingContextMap.get(key);
assertTrue(dubboMatchRequest.isMatch(
rpcInvocation, inputSourceLabelsMap, Collections.singleton(tracingContextProvider3)));
Map<String, String> invokeDubboContextMap2 = new HashMap<>();
invokeDubboContextMap.put("dpath", "other");
rpcInvocation.setAttachments(invokeDubboContextMap2);
assertFalse(dubboMatchRequest.isMatch(
rpcInvocation, inputSourceLabelsMap, Collections.singleton(tracingContextProvider3)));
} |
@Override
public BasicTypeDefine reconvert(Column column) {
BasicTypeDefine.BasicTypeDefineBuilder builder =
BasicTypeDefine.builder()
.name(column.getName())
.precision(column.getColumnLength())
.length(column.getColumnLength())
.nullable(column.isNullable())
.comment(column.getComment())
.scale(column.getScale())
.defaultValue(column.getDefaultValue());
switch (column.getDataType().getSqlType()) {
case NULL:
builder.columnType(IRIS_NULL);
builder.dataType(IRIS_NULL);
break;
case STRING:
if (column.getColumnLength() == null || column.getColumnLength() <= 0) {
builder.columnType(String.format("%s(%s)", IRIS_VARCHAR, MAX_VARCHAR_LENGTH));
builder.dataType(IRIS_VARCHAR);
} else if (column.getColumnLength() < MAX_VARCHAR_LENGTH) {
builder.columnType(
String.format("%s(%s)", IRIS_VARCHAR, column.getColumnLength()));
builder.dataType(IRIS_VARCHAR);
} else {
builder.columnType(IRIS_LONG_VARCHAR);
builder.dataType(IRIS_LONG_VARCHAR);
}
break;
case BOOLEAN:
builder.columnType(IRIS_BIT);
builder.dataType(IRIS_BIT);
break;
case TINYINT:
builder.columnType(IRIS_TINYINT);
builder.dataType(IRIS_TINYINT);
break;
case SMALLINT:
builder.columnType(IRIS_SMALLINT);
builder.dataType(IRIS_SMALLINT);
break;
case INT:
builder.columnType(IRIS_INTEGER);
builder.dataType(IRIS_INTEGER);
break;
case BIGINT:
builder.columnType(IRIS_BIGINT);
builder.dataType(IRIS_BIGINT);
break;
case FLOAT:
builder.columnType(IRIS_FLOAT);
builder.dataType(IRIS_FLOAT);
break;
case DOUBLE:
builder.columnType(IRIS_DOUBLE);
builder.dataType(IRIS_DOUBLE);
break;
case DECIMAL:
DecimalType decimalType = (DecimalType) column.getDataType();
long precision = decimalType.getPrecision();
int scale = decimalType.getScale();
if (scale < 0) {
scale = 0;
log.warn(
"The decimal column {} type decimal({},{}) is out of range, "
+ "which is scale less than 0, "
+ "it will be converted to decimal({},{})",
column.getName(),
decimalType.getPrecision(),
decimalType.getScale(),
precision,
scale);
} else if (scale > MAX_SCALE) {
scale = MAX_SCALE;
log.warn(
"The decimal column {} type decimal({},{}) is out of range, "
+ "which exceeds the maximum scale of {}, "
+ "it will be converted to decimal({},{})",
column.getName(),
decimalType.getPrecision(),
decimalType.getScale(),
MAX_SCALE,
precision,
scale);
}
if (precision < scale) {
precision = scale;
}
if (precision <= 0) {
precision = DEFAULT_PRECISION;
scale = DEFAULT_SCALE;
log.warn(
"The decimal column {} type decimal({},{}) is out of range, "
+ "which is precision less than 0, "
+ "it will be converted to decimal({},{})",
column.getName(),
decimalType.getPrecision(),
decimalType.getScale(),
precision,
scale);
} else if (precision > MAX_PRECISION) {
scale = MAX_SCALE;
precision = MAX_PRECISION;
log.warn(
"The decimal column {} type decimal({},{}) is out of range, "
+ "which exceeds the maximum precision of {}, "
+ "it will be converted to decimal({},{})",
column.getName(),
decimalType.getPrecision(),
decimalType.getScale(),
MAX_PRECISION,
precision,
scale);
}
builder.columnType(String.format("%s(%s,%s)", IRIS_DECIMAL, precision, scale));
builder.dataType(IRIS_DECIMAL);
builder.precision(precision);
builder.scale(scale);
break;
case BYTES:
if (column.getColumnLength() == null || column.getColumnLength() <= 0) {
builder.columnType(IRIS_LONG_BINARY);
builder.dataType(IRIS_LONG_BINARY);
} else if (column.getColumnLength() < MAX_BINARY_LENGTH) {
builder.dataType(IRIS_BINARY);
builder.columnType(
String.format("%s(%s)", IRIS_BINARY, column.getColumnLength()));
} else {
builder.columnType(IRIS_LONG_BINARY);
builder.dataType(IRIS_LONG_BINARY);
}
break;
case DATE:
builder.columnType(IRIS_DATE);
builder.dataType(IRIS_DATE);
break;
case TIME:
builder.dataType(IRIS_TIME);
if (Objects.nonNull(column.getScale()) && column.getScale() > 0) {
Integer timeScale = column.getScale();
if (timeScale > MAX_TIME_SCALE) {
timeScale = MAX_TIME_SCALE;
log.warn(
"The time column {} type time({}) is out of range, "
+ "which exceeds the maximum scale of {}, "
+ "it will be converted to time({})",
column.getName(),
column.getScale(),
MAX_TIME_SCALE,
timeScale);
}
builder.columnType(String.format("%s(%s)", IRIS_TIME, timeScale));
builder.scale(timeScale);
} else {
builder.columnType(IRIS_TIME);
}
break;
case TIMESTAMP:
builder.columnType(IRIS_TIMESTAMP2);
builder.dataType(IRIS_TIMESTAMP2);
break;
default:
throw CommonError.convertToConnectorTypeError(
DatabaseIdentifier.IRIS,
column.getDataType().getSqlType().name(),
column.getName());
}
return builder.build();
} | @Test
public void testReconvertDecimal() {
Column column =
PhysicalColumn.builder().name("test").dataType(new DecimalType(0, 0)).build();
BasicTypeDefine typeDefine = IrisTypeConverter.INSTANCE.reconvert(column);
Assertions.assertEquals(column.getName(), typeDefine.getName());
Assertions.assertEquals(
String.format(
"%s(%s,%s)",
IrisTypeConverter.IRIS_DECIMAL,
IrisTypeConverter.DEFAULT_PRECISION,
IrisTypeConverter.DEFAULT_SCALE),
typeDefine.getColumnType());
Assertions.assertEquals(IrisTypeConverter.IRIS_DECIMAL, typeDefine.getDataType());
column = PhysicalColumn.builder().name("test").dataType(new DecimalType(10, 2)).build();
typeDefine = IrisTypeConverter.INSTANCE.reconvert(column);
Assertions.assertEquals(column.getName(), typeDefine.getName());
Assertions.assertEquals(
String.format("%s(%s,%s)", IrisTypeConverter.IRIS_DECIMAL, 10, 2),
typeDefine.getColumnType());
Assertions.assertEquals(IrisTypeConverter.IRIS_DECIMAL, typeDefine.getDataType());
} |
@Override
public boolean syncVerifyData(DistroData verifyData, String targetServer) {
if (isNoExistTarget(targetServer)) {
return true;
}
// replace the target server with this node's own address so that the remote peer can call back.
verifyData.getDistroKey().setTargetServer(memberManager.getSelf().getAddress());
DistroDataRequest request = new DistroDataRequest(verifyData, DataOperation.VERIFY);
Member member = memberManager.find(targetServer);
if (checkTargetServerStatusUnhealthy(member)) {
Loggers.DISTRO
.warn("[DISTRO] Cancel distro verify caused by target server {} unhealthy, key: {}", targetServer,
verifyData.getDistroKey());
return false;
}
try {
Response response = clusterRpcClientProxy.sendRequest(member, request);
return checkResponse(response);
} catch (NacosException e) {
Loggers.DISTRO.error("[DISTRO-FAILED] Verify distro data failed! key: {} ", verifyData.getDistroKey(), e);
}
return false;
} | @Test
void testSyncVerifyDataException() throws NacosException {
DistroData verifyData = new DistroData();
verifyData.setDistroKey(new DistroKey());
when(memberManager.hasMember(member.getAddress())).thenReturn(true);
when(memberManager.find(member.getAddress())).thenReturn(member);
member.setState(NodeState.UP);
when(clusterRpcClientProxy.sendRequest(eq(member), any())).thenThrow(new NacosException());
when(clusterRpcClientProxy.isRunning(member)).thenReturn(true);
assertFalse(transportAgent.syncVerifyData(verifyData, member.getAddress()));
} |
public DeletionServiceDeleteTaskProto convertDeletionTaskToProto() {
DeletionServiceDeleteTaskProto.Builder builder =
getBaseDeletionTaskProtoBuilder();
builder.setTaskType(DeletionTaskType.DOCKER_CONTAINER.name());
if (getContainerId() != null) {
builder.setDockerContainerId(getContainerId());
}
return builder.build();
} | @Test
public void testConvertDeletionTaskToProto() {
YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto proto =
deletionTask.convertDeletionTaskToProto();
assertEquals(ID, proto.getId());
assertEquals(USER, proto.getUser());
assertEquals(CONTAINER_ID, proto.getDockerContainerId());
assertEquals(DeletionTaskType.DOCKER_CONTAINER.name(), proto.getTaskType());
} |
@Override
public Iterator<QueueCapacityVectorEntry> iterator() {
return new Iterator<QueueCapacityVectorEntry>() {
private final Iterator<Map.Entry<String, Double>> resources =
resource.iterator();
private int i = 0;
@Override
public boolean hasNext() {
return resources.hasNext() && capacityTypes.size() > i;
}
@Override
public QueueCapacityVectorEntry next() {
Map.Entry<String, Double> resourceInformation = resources.next();
i++;
return new QueueCapacityVectorEntry(
capacityTypes.get(resourceInformation.getKey()),
resourceInformation.getKey(), resourceInformation.getValue());
}
};
} | @Test
public void testIterator() {
QueueCapacityVector capacityVector = QueueCapacityVector.newInstance();
List<QueueCapacityVectorEntry> entries = Lists.newArrayList(capacityVector);
Assert.assertEquals(3, entries.size());
QueueCapacityVector emptyCapacityVector = new QueueCapacityVector();
List<QueueCapacityVectorEntry> emptyEntries = Lists.newArrayList(emptyCapacityVector);
Assert.assertEquals(0, emptyEntries.size());
} |
@Override
public long findConfigMaxId() {
ConfigInfoMapper configInfoMapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
TableConstant.CONFIG_INFO);
MapperResult mapperResult = configInfoMapper.findConfigMaxId(null);
try {
return jt.queryForObject(mapperResult.getSql(), Long.class);
} catch (NullPointerException e) {
return 0;
}
} | @Test
void testFindConfigMaxId() {
Mockito.when(jdbcTemplate.queryForObject(anyString(), eq(Long.class))).thenReturn(123456L);
long configMaxId = externalConfigInfoPersistService.findConfigMaxId();
assertEquals(123456L, configMaxId);
} |
static boolean solve(RaidRoom[] rooms)
{
if (rooms == null)
{
return false;
}
List<RaidRoom> match = null;
Integer start = null;
Integer index = null;
int known = 0;
for (int i = 0; i < rooms.length; i++)
{
if (rooms[i] == null || rooms[i].getType() != RoomType.COMBAT || rooms[i] == UNKNOWN_COMBAT)
{
continue;
}
if (start == null)
{
start = i;
}
known++;
}
if (known < 2)
{
return false;
}
if (known == rooms.length)
{
return true;
}
for (List rotation : ROTATIONS)
{
COMPARE:
for (int i = 0; i < rotation.size(); i++)
{
if (rooms[start] == rotation.get(i))
{
for (int j = start + 1; j < rooms.length; j++)
{
if (rooms[j].getType() != RoomType.COMBAT || rooms[j] == UNKNOWN_COMBAT)
{
continue;
}
if (rooms[j] != rotation.get(floorMod(i + j - start, rotation.size())))
{
break COMPARE;
}
}
if (match != null && match != rotation)
{
return false;
}
index = i - start;
match = rotation;
}
}
}
if (match == null)
{
return false;
}
for (int i = 0; i < rooms.length; i++)
{
if (rooms[i] == null)
{
continue;
}
if (rooms[i].getType() != RoomType.COMBAT || rooms[i] == UNKNOWN_COMBAT)
{
rooms[i] = match.get(floorMod(index + i, match.size()));
}
}
return true;
} | @Test
public void testSolve2()
{
RaidRoom[] rooms = new RaidRoom[]{UNKNOWN_COMBAT, UNKNOWN_COMBAT, MUTTADILES, TEKTON};
RotationSolver.solve(rooms);
assertArrayEquals(new RaidRoom[]{VESPULA, GUARDIANS, MUTTADILES, TEKTON}, rooms);
} |
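An illustrative failure case for the solver above: with fewer than two known combat rooms there is nothing to pin a rotation against, so it returns false and leaves the array untouched (room constants are the ones used in the test):
RaidRoom[] rooms = {UNKNOWN_COMBAT, UNKNOWN_COMBAT, MUTTADILES, UNKNOWN_COMBAT};
boolean solved = RotationSolver.solve(rooms); // false - only one known combat room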
public static <T> IntermediateCompatibilityResult<T> constructIntermediateCompatibilityResult(
TypeSerializerSnapshot<?>[] newNestedSerializerSnapshots,
TypeSerializerSnapshot<?>[] oldNestedSerializerSnapshots) {
Preconditions.checkArgument(
newNestedSerializerSnapshots.length == oldNestedSerializerSnapshots.length,
"Different number of new serializer snapshots and existing serializer snapshots.");
TypeSerializer<?>[] nestedSerializers =
new TypeSerializer[newNestedSerializerSnapshots.length];
// check nested serializers for compatibility
boolean nestedSerializerRequiresMigration = false;
boolean hasReconfiguredNestedSerializers = false;
for (int i = 0; i < oldNestedSerializerSnapshots.length; i++) {
TypeSerializerSchemaCompatibility<?> compatibility =
resolveCompatibility(
newNestedSerializerSnapshots[i], oldNestedSerializerSnapshots[i]);
// if any one of the new nested serializers is incompatible, we can just short circuit
// the result
if (compatibility.isIncompatible()) {
return IntermediateCompatibilityResult.definedIncompatibleResult();
}
if (compatibility.isCompatibleAfterMigration()) {
nestedSerializerRequiresMigration = true;
} else if (compatibility.isCompatibleWithReconfiguredSerializer()) {
hasReconfiguredNestedSerializers = true;
nestedSerializers[i] = compatibility.getReconfiguredSerializer();
} else if (compatibility.isCompatibleAsIs()) {
nestedSerializers[i] = newNestedSerializerSnapshots[i].restoreSerializer();
} else {
throw new IllegalStateException("Undefined compatibility type.");
}
}
if (nestedSerializerRequiresMigration) {
return IntermediateCompatibilityResult.definedCompatibleAfterMigrationResult();
}
if (hasReconfiguredNestedSerializers) {
return IntermediateCompatibilityResult.undefinedReconfigureResult(nestedSerializers);
}
// ends up here if everything is compatible as is
return IntermediateCompatibilityResult.definedCompatibleAsIsResult(nestedSerializers);
} | @Test
void testIncompatibleIntermediateCompatibilityResult() {
final TypeSerializerSnapshot<?>[] previousSerializerSnapshots =
new TypeSerializerSnapshot<?>[] {
new SchemaCompatibilityTestingSerializer().snapshotConfiguration(),
new SchemaCompatibilityTestingSerializer().snapshotConfiguration(),
new SchemaCompatibilityTestingSerializer().snapshotConfiguration(),
new SchemaCompatibilityTestingSerializer().snapshotConfiguration()
};
final TypeSerializerSnapshot<?>[] newSerializerSnapshots =
new TypeSerializerSnapshot<?>[] {
SchemaCompatibilityTestingSnapshot.thatIsCompatibleWithLastSerializer(),
SchemaCompatibilityTestingSnapshot.thatIsIncompatibleWithTheLastSerializer(),
SchemaCompatibilityTestingSnapshot
.thatIsCompatibleWithLastSerializerAfterReconfiguration(),
SchemaCompatibilityTestingSnapshot
.thatIsCompatibleWithLastSerializerAfterMigration(),
};
IntermediateCompatibilityResult<?> intermediateCompatibilityResult =
CompositeTypeSerializerUtil.constructIntermediateCompatibilityResult(
newSerializerSnapshots, previousSerializerSnapshots);
assertThat(intermediateCompatibilityResult.isIncompatible()).isTrue();
assertThat(intermediateCompatibilityResult.getFinalResult().isIncompatible()).isTrue();
} |
public long appendControlMessages(MemoryRecordsCreator valueCreator) {
appendLock.lock();
try {
ByteBuffer buffer = memoryPool.tryAllocate(maxBatchSize);
if (buffer != null) {
try {
forceDrain();
MemoryRecords memoryRecords = valueCreator.create(
nextOffset,
epoch,
compression,
buffer
);
int numberOfRecords = validateMemoryRecordsAndReturnCount(memoryRecords);
completed.add(
new CompletedBatch<>(
nextOffset,
numberOfRecords,
memoryRecords,
memoryPool,
buffer
)
);
nextOffset += numberOfRecords;
} catch (Exception e) {
// Release the buffer now, since it was not stored in the completed list for delayed release
memoryPool.release(buffer);
throw e;
}
} else {
throw new IllegalStateException("Could not allocate buffer for the control record");
}
return nextOffset - 1;
} finally {
appendLock.unlock();
}
} | @Test
public void testInvalidControlRecordOffset() {
int leaderEpoch = 17;
long baseOffset = 157;
int lingerMs = 50;
int maxBatchSize = 512;
ByteBuffer buffer = ByteBuffer.allocate(maxBatchSize);
Mockito.when(memoryPool.tryAllocate(maxBatchSize))
.thenReturn(buffer);
BatchAccumulator.MemoryRecordsCreator creator = (offset, epoch, compression, buf) -> {
long now = 1234;
try (MemoryRecordsBuilder builder = controlRecordsBuilder(
offset + 1,
epoch,
compression,
now,
buf
)
) {
builder.appendSnapshotHeaderMessage(
now,
new SnapshotHeaderRecord()
.setVersion(ControlRecordUtils.SNAPSHOT_HEADER_CURRENT_VERSION)
.setLastContainedLogTimestamp(now)
);
return builder.build();
}
};
try (BatchAccumulator<String> acc = buildAccumulator(
leaderEpoch,
baseOffset,
lingerMs,
maxBatchSize
)
) {
assertThrows(IllegalArgumentException.class, () -> acc.appendControlMessages(creator));
}
} |
public Optional<Details> updateRuntimeOverview(
WorkflowSummary summary, WorkflowRuntimeOverview overview, Timeline timeline) {
return updateWorkflowInstance(summary, overview, timeline, null, 0);
} | @Test
public void testInvalidWorkflowInstanceUpdate() {
WorkflowSummary summary = new WorkflowSummary();
summary.setWorkflowId(TEST_WORKFLOW_ID);
summary.setWorkflowInstanceId(1);
summary.setWorkflowRunId(2);
Optional<Details> result = instanceDao.updateRuntimeOverview(summary, null, null);
assertTrue(result.isPresent());
assertEquals(
"ERROR: updated [0] (expecting 1) rows for workflow instance [sample-dag-test-3][1][2]",
result.get().getMessage());
result = instanceDao.updateRuntimeOverview(null, null, null);
assertTrue(result.isPresent());
assertEquals(
"ERROR: failed updating Runtime Maestro Workflow with an error", result.get().getMessage());
} |
public static String getRelativeLinkTo(Item p) {
Map<Object, String> ancestors = new HashMap<>();
View view = null;
StaplerRequest request = Stapler.getCurrentRequest();
for (Ancestor a : request.getAncestors()) {
ancestors.put(a.getObject(), a.getRelativePath());
if (a.getObject() instanceof View)
view = (View) a.getObject();
}
String path = ancestors.get(p);
if (path != null) {
return normalizeURI(path + '/');
}
Item i = p;
String url = "";
while (true) {
ItemGroup ig = i.getParent();
url = i.getShortUrl() + url;
if (ig == Jenkins.get() || (view != null && ig == view.getOwner().getItemGroup())) {
assert i instanceof TopLevelItem;
if (view != null) {
// assume p and the current page belong to the same view, so return a relative path
// (even if they did not, View.getItem does not by default verify ownership)
return normalizeURI(ancestors.get(view) + '/' + url);
} else {
// otherwise return a path from the root Hudson
return normalizeURI(request.getContextPath() + '/' + p.getUrl());
}
}
path = ancestors.get(ig);
if (path != null) {
return normalizeURI(path + '/' + url);
}
assert ig instanceof Item; // if not, ig must have been the Hudson instance
i = (Item) ig;
}
} | @Test
public void testGetRelativeLinkTo_JobFromComputer() {
String contextPath = "/jenkins";
StaplerRequest req = createMockRequest(contextPath);
try (
MockedStatic<Stapler> mocked = mockStatic(Stapler.class);
MockedStatic<Jenkins> mockedJenkins = mockStatic(Jenkins.class)
) {
Jenkins j = createMockJenkins(mockedJenkins);
ItemGroup parent = j;
mocked.when(Stapler::getCurrentRequest).thenReturn(req);
Computer computer = mock(Computer.class);
createMockAncestors(req, createAncestor(computer, "."), createAncestor(j, "../.."));
TopLevelItem i = createMockItem(parent, "job/i/");
String result = Functions.getRelativeLinkTo(i);
assertEquals("/jenkins/job/i/", result);
}
} |
public EmailStatusResult getEmailStatus(long accountId) {
Map<String, Object> resultMap = accountClient.getEmailStatus(accountId);
return objectMapper.convertValue(resultMap, EmailStatusResult.class);
} | @Test
public void testGetEmailStatus() {
Map<String, Object> result = Map.of(
"status", "OK",
"error", "custom error",
"email_status", "VERIFIED",
"user_action_needed", "true",
"email_address", "address");
when(accountClient.getEmailStatus(eq(1L))).thenReturn(result);
EmailStatusResult emailStatus = accountService.getEmailStatus(1L);
assertEquals(Status.OK, emailStatus.getStatus());
assertEquals("custom error", emailStatus.getError());
assertEquals(EmailStatus.VERIFIED, emailStatus.getEmailStatus());
assertEquals(true, emailStatus.getActionNeeded());
assertEquals("address", emailStatus.getEmailAddress());
} |
public long getTimeUsedMs() {
return _brokerResponse.has(TIME_USED_MS) ? _brokerResponse.get(TIME_USED_MS).asLong() : -1L;
} | @Test
public void testGetTimeUsedMs() {
// Run the test
final long result = _executionStatsUnderTest.getTimeUsedMs();
// Verify the results
assertEquals(10L, result);
} |
@Override
public PathAttributes find(final Path file, final ListProgressListener listener) throws BackgroundException {
if(file.isRoot()) {
return PathAttributes.EMPTY;
}
final Region region = regionService.lookup(file);
try {
if(containerService.isContainer(file)) {
final ContainerInfo info = session.getClient().getContainerInfo(region,
containerService.getContainer(file).getName());
final PathAttributes attributes = new PathAttributes();
attributes.setSize(info.getTotalSize());
attributes.setRegion(info.getRegion().getRegionId());
return attributes;
}
final ObjectMetadata metadata;
try {
try {
metadata = session.getClient().getObjectMetaData(region,
containerService.getContainer(file).getName(), containerService.getKey(file));
}
catch(GenericException e) {
throw new SwiftExceptionMappingService().map("Failure to read attributes of {0}", e, file);
}
}
catch(NotfoundException e) {
if(file.isDirectory()) {
// Directory placeholder file may be missing. Still return empty attributes when we find children
try {
new SwiftObjectListService(session).list(file, new CancellingListProgressListener());
}
catch(ListCanceledException l) {
// Found common prefix
return PathAttributes.EMPTY;
}
catch(NotfoundException n) {
throw e;
}
// Common prefix only
return PathAttributes.EMPTY;
}
// Try to find pending large file upload
final Write.Append append = new SwiftLargeObjectUploadFeature(session, regionService, new SwiftWriteFeature(session, regionService)).append(file, new TransferStatus());
if(append.append) {
return new PathAttributes().withSize(append.offset);
}
throw e;
}
if(file.isDirectory()) {
if(!StringUtils.equals(SwiftDirectoryFeature.DIRECTORY_MIME_TYPE, metadata.getMimeType())) {
throw new NotfoundException(String.format("File %s has set MIME type %s but expected %s",
file.getAbsolute(), metadata.getMimeType(), SwiftDirectoryFeature.DIRECTORY_MIME_TYPE));
}
}
if(file.isFile()) {
if(StringUtils.equals(SwiftDirectoryFeature.DIRECTORY_MIME_TYPE, metadata.getMimeType())) {
throw new NotfoundException(String.format("File %s has set MIME type %s",
file.getAbsolute(), metadata.getMimeType()));
}
}
return this.toAttributes(metadata);
}
catch(GenericException e) {
throw new SwiftExceptionMappingService().map("Failure to read attributes of {0}", e, file);
}
catch(IOException e) {
throw new DefaultIOExceptionMappingService().map("Failure to read attributes of {0}", e, file);
}
} | @Test
public void testFindNotFound() throws Exception {
final Path container = new Path("test.cyberduck.ch", EnumSet.of(Path.Type.directory, Path.Type.volume));
container.attributes().setRegion("IAD");
final SwiftAttributesFinderFeature f = new SwiftAttributesFinderFeature(session);
try {
f.find(new Path(container, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)));
fail();
}
catch(NotfoundException e) {
// Expected
}
try {
f.find(new Path(container, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)));
fail();
}
catch(NotfoundException e) {
// Expected
}
} |
protected List<Label> getTopLabels(Map<String, Label> distribution) {
return getTopLabels(distribution, this.stackSize);
} | @Test
public void testGetTopOutcomes() {
Map<String, Label> scoredOutcomes = new HashMap<>();
scoredOutcomes.put("A", new Label("A", 0.1d));
scoredOutcomes.put("B", new Label("B", 0.2d));
scoredOutcomes.put("C", new Label("C", 0.15d));
scoredOutcomes.put("D", new Label("D", 0.25d));
List<Label> topOutcomes = ViterbiModel.getTopLabels(scoredOutcomes, 4);
assertEquals(4, topOutcomes.size());
assertEquals("D", topOutcomes.get(0).getLabel());
assertEquals("B", topOutcomes.get(1).getLabel());
assertEquals("C", topOutcomes.get(2).getLabel());
assertEquals("A", topOutcomes.get(3).getLabel());
} |
@Override
public void uploadPart(RefCountedFSOutputStream file) throws IOException {
// this is to guarantee that nobody is
// writing to the file we are uploading.
checkState(file.isClosed());
final CompletableFuture<PartETag> future = new CompletableFuture<>();
uploadsInProgress.add(future);
final long partLength = file.getPos();
currentUploadInfo.registerNewPart(partLength);
file.retain(); // keep the file while the async upload still runs
uploadThreadPool.execute(new UploadTask(s3AccessHelper, currentUploadInfo, file, future));
} | @Test
public void singlePartUploadShouldBeIncluded() throws IOException {
final byte[] part = bytesOf("hello world");
uploadPart(part);
assertThat(stubMultiPartUploader, hasMultiPartUploadWithPart(1, part));
} |
@Override
public Graph<Entity> resolveForInstallation(Entity entity,
Map<String, ValueReference> parameters,
Map<EntityDescriptor, Entity> entities) {
if (entity instanceof EntityV1) {
return resolveForInstallationV1((EntityV1) entity, entities);
} else {
throw new IllegalArgumentException("Unsupported entity version: " + entity.getClass());
}
} | @Test
public void resolveMatchingDependencyForInstallation() {
final Entity grokPatternEntity = EntityV1.builder()
.id(ModelId.of("1"))
.type(ModelTypes.GROK_PATTERN_V1)
.data(objectMapper.convertValue(GrokPatternEntity.create("Test", "%{PORTAL}"), JsonNode.class))
.build();
final Entity grokPatternEntityDependency = EntityV1.builder()
.id(ModelId.of("1"))
.type(ModelTypes.GROK_PATTERN_V1)
.data(objectMapper.convertValue(GrokPatternEntity.create("PORTAL", "\\d\\d"), JsonNode.class))
.build();
final EntityDescriptor dependencyDescriptor = grokPatternEntityDependency.toEntityDescriptor();
final Map<EntityDescriptor, Entity> entityDescriptorEntityMap = new HashMap<>(1);
entityDescriptorEntityMap.put(dependencyDescriptor, grokPatternEntityDependency);
final Map<String, ValueReference> parameters = Collections.emptyMap();
Graph<Entity> graph = facade.resolveForInstallation(grokPatternEntity, parameters, entityDescriptorEntityMap);
assertThat(graph.nodes().toArray()).contains(grokPatternEntityDependency);
} |
@SuppressWarnings("unchecked")
public synchronized T load(File jsonFile)
throws IOException, JsonParseException, JsonMappingException {
if (!jsonFile.exists()) {
throw new FileNotFoundException("No such file: " + jsonFile);
}
if (!jsonFile.isFile()) {
throw new FileNotFoundException("Not a file: " + jsonFile);
}
if (jsonFile.length() == 0) {
throw new EOFException("File is empty: " + jsonFile);
}
try {
return mapper.readValue(jsonFile, classType);
} catch (IOException e) {
LOG.warn("Exception while parsing json file {}", jsonFile, e);
throw e;
}
} | @Test
public void testFileSystemEmptyPath() throws Throwable {
File tempFile = File.createTempFile("Keyval", ".json");
Path tempPath = new Path(tempFile.toURI());
LocalFileSystem fs = FileSystem.getLocal(new Configuration());
try {
LambdaTestUtils.intercept(PathIOException.class,
() -> serDeser.load(fs, tempPath));
fs.delete(tempPath, false);
LambdaTestUtils.intercept(FileNotFoundException.class,
() -> serDeser.load(fs, tempPath));
} finally {
fs.delete(tempPath, false);
}
} |
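
The test above covers the empty-file and missing-file failures of the FileSystem overload. For contrast, a hedged happy-path sketch of the File-based load: a non-empty, existing regular file parses into the requested type via Jackson (the Map target type here is illustrative):

import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.Map;

public class LoadHappyPathSketch {
    public static void main(String[] args) throws IOException {
        File json = File.createTempFile("Keyval", ".json");
        Files.writeString(json.toPath(), "{\"key\":\"value\"}");
        // Passes all three guards in load(File): exists, is a file, non-empty.
        Map<?, ?> loaded = new ObjectMapper().readValue(json, Map.class);
        System.out.println(loaded); // {key=value}
        json.delete();
    }
}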
public static MepId valueOf(short id) {
if (id < 1 || id > 8191) {
throw new IllegalArgumentException(
"Invalid value for Mep Id - must be between 1-8191 inclusive. "
+ "Rejecting " + id);
}
return new MepId(id);
} | @Test
public void testHighRange() {
try {
MepId.valueOf((short) 8192);
fail("Exception expected for MepId = 8192");
} catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("Invalid value for Mep Id"));
}
try {
MepId.valueOf((short) 33333); //Above the range of short
fail("Exception expected for MepId = 33333");
} catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("Invalid value for Mep Id"));
}
} |
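
A subtlety in the second case: 33333 exceeds Short.MAX_VALUE, so the cast wraps to a negative value before valueOf runs, and the id < 1 branch rejects it rather than the upper bound. A quick demonstration of the wrap-around (plain Java arithmetic, not taken from the source):

public class ShortWrapDemo {
    public static void main(String[] args) {
        // 33333 - 65536 = -32203 after narrowing to 16 bits,
        // which fails the id < 1 check in MepId.valueOf.
        short wrapped = (short) 33333;
        System.out.println(wrapped); // -32203
    }
}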
static void addClusterToMirrorMaker2ConnectorConfig(Map<String, Object> config, KafkaMirrorMaker2ClusterSpec cluster, String configPrefix) {
config.put(configPrefix + "alias", cluster.getAlias());
config.put(configPrefix + AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, cluster.getBootstrapServers());
String securityProtocol = addTLSConfigToMirrorMaker2ConnectorConfig(config, cluster, configPrefix);
if (cluster.getAuthentication() != null) {
if (cluster.getAuthentication() instanceof KafkaClientAuthenticationTls) {
config.put(configPrefix + SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, "PKCS12");
config.put(configPrefix + SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, STORE_LOCATION_ROOT + cluster.getAlias() + KEYSTORE_SUFFIX);
config.put(configPrefix + SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "${file:" + CONNECTORS_CONFIG_FILE + ":" + SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG + "}");
} else if (cluster.getAuthentication() instanceof KafkaClientAuthenticationPlain plainAuthentication) {
securityProtocol = cluster.getTls() != null ? "SASL_SSL" : "SASL_PLAINTEXT";
config.put(configPrefix + SaslConfigs.SASL_MECHANISM, "PLAIN");
config.put(configPrefix + SaslConfigs.SASL_JAAS_CONFIG,
AuthenticationUtils.jaasConfig("org.apache.kafka.common.security.plain.PlainLoginModule",
Map.of("username", plainAuthentication.getUsername(),
"password", "${file:" + CONNECTORS_CONFIG_FILE + ":" + cluster.getAlias() + ".sasl.password}")));
} else if (cluster.getAuthentication() instanceof KafkaClientAuthenticationScram scramAuthentication) {
securityProtocol = cluster.getTls() != null ? "SASL_SSL" : "SASL_PLAINTEXT";
config.put(configPrefix + SaslConfigs.SASL_MECHANISM, scramAuthentication instanceof KafkaClientAuthenticationScramSha256 ? "SCRAM-SHA-256" : "SCRAM-SHA-512");
config.put(configPrefix + SaslConfigs.SASL_JAAS_CONFIG,
AuthenticationUtils.jaasConfig("org.apache.kafka.common.security.scram.ScramLoginModule",
Map.of("username", scramAuthentication.getUsername(),
"password", "${file:" + CONNECTORS_CONFIG_FILE + ":" + cluster.getAlias() + ".sasl.password}")));
} else if (cluster.getAuthentication() instanceof KafkaClientAuthenticationOAuth oauthAuthentication) {
securityProtocol = cluster.getTls() != null ? "SASL_SSL" : "SASL_PLAINTEXT";
config.put(configPrefix + SaslConfigs.SASL_MECHANISM, "OAUTHBEARER");
config.put(configPrefix + SaslConfigs.SASL_JAAS_CONFIG,
oauthJaasConfig(cluster, oauthAuthentication));
config.put(configPrefix + SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler");
}
}
// Security protocol
config.put(configPrefix + AdminClientConfig.SECURITY_PROTOCOL_CONFIG, securityProtocol);
config.putAll(cluster.getConfig().entrySet().stream()
.collect(Collectors.toMap(entry -> configPrefix + entry.getKey(), Map.Entry::getValue)));
config.putAll(cluster.getAdditionalProperties());
} | @Test
public void testAddClusterToMirrorMaker2ConnectorConfigWithAccessTokenLocationOauth() {
Map<String, Object> config = new HashMap<>();
KafkaMirrorMaker2ClusterSpec cluster = new KafkaMirrorMaker2ClusterSpecBuilder()
.withAlias("sourceClusterAlias")
.withBootstrapServers("sourceClusterAlias.sourceNamespace.svc:9092")
.withNewKafkaClientAuthenticationOAuth()
.withAccessTokenLocation("/var/run/secrets/kubernetes.io/serviceaccount/token")
.endKafkaClientAuthenticationOAuth()
.build();
KafkaMirrorMaker2Connectors.addClusterToMirrorMaker2ConnectorConfig(config, cluster, PREFIX);
String jaasConfig = (String) config.remove("prefix.sasl.jaas.config");
AppConfigurationEntry configEntry = AuthenticationUtilsTest.parseJaasConfig(jaasConfig);
assertThat(configEntry.getLoginModuleName(), is("org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule"));
assertThat(configEntry.getOptions(),
is(Map.of("oauth.access.token.location", "/var/run/secrets/kubernetes.io/serviceaccount/token")));
assertThat(config,
is(Map.of("prefix.alias", "sourceClusterAlias",
"prefix.bootstrap.servers", "sourceClusterAlias.sourceNamespace.svc:9092",
"prefix.sasl.login.callback.handler.class", "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler",
"prefix.sasl.mechanism", "OAUTHBEARER",
"prefix.security.protocol", "SASL_PLAINTEXT")));
} |
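
Tracing the branches of addClusterToMirrorMaker2ConnectorConfig for the other authentication types works the same way. As one example, a PLAIN-authenticated cluster with TLS unset should yield entries shaped like the map below; this is a sketch derived by reading the method body, with illustrative values, not captured output:

import java.util.Map;
import java.util.TreeMap;

public class PlainAuthConfigSketch {
    public static void main(String[] args) {
        // Entries traced from the KafkaClientAuthenticationPlain branch
        // without TLS (the JAAS entry is abbreviated away here).
        Map<String, String> expected = new TreeMap<>();
        expected.put("prefix.alias", "sourceClusterAlias");
        expected.put("prefix.bootstrap.servers", "source.svc:9092");
        expected.put("prefix.sasl.mechanism", "PLAIN");
        expected.put("prefix.security.protocol", "SASL_PLAINTEXT");
        expected.forEach((k, v) -> System.out.println(k + " = " + v));
    }
}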
@SuppressWarnings("java:S5443")
Path createJarPath() throws IOException {
Path jarPath;
if (jobMetaDataParameterObject.getUploadDirectoryPath() != null) {
Path path = Paths.get(jobMetaDataParameterObject.getUploadDirectoryPath());
// Create a new temporary file in the given directory
jarPath = Files.createTempFile(path, jobMetaDataParameterObject.getFileName(), ".jar");
} else {
// Create a new temporary file in the default temporary file directory
jarPath = Files.createTempFile(jobMetaDataParameterObject.getFileName(), ".jar");
}
return jarPath;
} | @Test
public void testGetJarPath() throws IOException {
Path jarPath = null;
try {
Path path = Paths.get("target");
Files.createDirectories(path);
String uploadPath = path.toAbsolutePath().toString();
when(jobMetaDataParameterObject.getUploadDirectoryPath()).thenReturn(uploadPath);
jarPath = jobUploadStatus.createJarPath();
assertTrue(Files.exists(jarPath));
} finally {
if (jarPath != null) {
Files.delete(jarPath);
}
}
} |
public <T> HttpRestResult<T> postJson(String url, Header header, Query query, String body, Type responseType)
throws Exception {
RequestHttpEntity requestHttpEntity = new RequestHttpEntity(header.setContentType(MediaType.APPLICATION_JSON),
query, body);
return execute(url, HttpMethod.POST, requestHttpEntity, responseType);
} | @Test
void testPostJson() throws Exception {
when(requestClient.execute(any(), eq("POST"), any())).thenReturn(mockResponse);
when(mockResponse.getStatusCode()).thenReturn(200);
when(mockResponse.getBody()).thenReturn(new ByteArrayInputStream("test".getBytes()));
Header header = Header.newInstance().setContentType(MediaType.APPLICATION_XML);
HttpRestResult<String> result = restTemplate.postJson("http://127.0.0.1:8848/nacos/test", header, "body", String.class);
assertTrue(result.ok());
assertEquals(Header.EMPTY, result.getHeader());
assertEquals("test", result.getData());
assertEquals(MediaType.APPLICATION_JSON, header.getValue(HttpHeaderConsts.CONTENT_TYPE));
} |
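
Note the test calls a four-argument postJson while the focal method also takes a Query, so a shorter overload presumably delegates with an empty query. A hedged sketch of that delegation, assumed to live in the same class as the five-argument version — Query.newInstance() as the empty-query factory is an assumption about the API:

// Hedged sketch of the four-argument overload the test exercises.
public <T> HttpRestResult<T> postJson(String url, Header header, String body, Type responseType)
        throws Exception {
    return postJson(url, header, Query.newInstance(), body, responseType);
}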
@Override
public Optional<String> resolveQueryFailure(QueryStats controlQueryStats, QueryException queryException, Optional<QueryObjectBundle> test)
{
if (!test.isPresent()) {
return Optional.empty();
}
// Decouple from com.facebook.presto.hive.HiveErrorCode.HIVE_TOO_MANY_OPEN_PARTITIONS
ErrorCodeSupplier errorCodeSupplier = new ErrorCodeSupplier() {
@Override
public ErrorCode toErrorCode()
{
int errorCodeMask = 0x0100_0000;
return new ErrorCode(21 + errorCodeMask, "HIVE_TOO_MANY_OPEN_PARTITIONS", ErrorType.USER_ERROR);
}
};
return mapMatchingPrestoException(queryException, TEST_MAIN, ImmutableSet.of(errorCodeSupplier),
e -> {
try {
ShowCreate showCreate = new ShowCreate(TABLE, test.get().getObjectName());
String showCreateResult = getOnlyElement(prestoAction.execute(showCreate, DESCRIBE, resultSet -> Optional.of(resultSet.getString(1))).getResults());
CreateTable createTable = (CreateTable) sqlParser.createStatement(showCreateResult, ParsingOptions.builder().setDecimalLiteralTreatment(AS_DOUBLE).build());
List<Property> bucketCountProperty = createTable.getProperties().stream()
.filter(property -> property.getName().getValue().equals("bucket_count"))
.collect(toImmutableList());
if (bucketCountProperty.size() != 1) {
return Optional.empty();
}
long bucketCount = ((LongLiteral) getOnlyElement(bucketCountProperty).getValue()).getValue();
int testClusterSize = this.testClusterSizeSupplier.get();
if (testClusterSize * maxBucketPerWriter < bucketCount) {
return Optional.of("Not enough workers on test cluster");
}
return Optional.empty();
}
catch (Throwable t) {
log.warn(t, "Exception when resolving HIVE_TOO_MANY_OPEN_PARTITIONS");
return Optional.empty();
}
});
} | @Test
public void testResolved()
{
createTable.set(format("CREATE TABLE %s (x varchar, ds varchar) WITH (partitioned_by = ARRAY[\"ds\"], bucket_count = 101)", TABLE_NAME));
getFailureResolver().resolveQueryFailure(CONTROL_QUERY_STATS, HIVE_TOO_MANY_OPEN_PARTITIONS_EXCEPTION, Optional.of(TEST_BUNDLE));
assertEquals(
getFailureResolver().resolveQueryFailure(CONTROL_QUERY_STATS, HIVE_TOO_MANY_OPEN_PARTITIONS_EXCEPTION, Optional.of(TEST_BUNDLE)),
Optional.of("Not enough workers on test cluster"));
} |
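
The resolution hinges on the inequality testClusterSize * maxBucketPerWriter < bucketCount. With bucket_count = 101 from the test's CREATE TABLE, any fixture where that product is at most 100 produces the message. A worked check with illustrative values (the 2 and 50 are not taken from the test fixture):

public class BucketCapacityCheck {
    public static void main(String[] args) {
        long bucketCount = 101;      // from the CREATE TABLE in the test
        int testClusterSize = 2;     // illustrative
        int maxBucketPerWriter = 50; // illustrative
        // 2 * 50 = 100 < 101, so the resolver reports
        // "Not enough workers on test cluster".
        System.out.println(testClusterSize * maxBucketPerWriter < bucketCount);
    }
}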
public void importCounters(String[] counterNames, String[] counterKinds, long[] counterDeltas) {
final int length = counterNames.length;
if (counterKinds.length != length || counterDeltas.length != length) {
throw new AssertionError("array lengths do not match");
}
for (int i = 0; i < length; ++i) {
final CounterName name = CounterName.named(counterPrefix + counterNames[i]);
final String kind = counterKinds[i];
final long delta = counterDeltas[i];
switch (kind) {
case "sum":
counterFactory.longSum(name).addValue(delta);
break;
case "max":
counterFactory.longMax(name).addValue(delta);
break;
case "min":
counterFactory.longMin(name).addValue(delta);
break;
default:
throw new IllegalArgumentException("unsupported counter kind: " + kind);
}
}
} | @Test
public void testArrayLengthMismatch() throws Exception {
String[] names = {"sum_counter"};
String[] kinds = {"sum", "max"};
long[] deltas = {122};
    boolean thrown = false;
    try {
        counters.importCounters(names, kinds, deltas);
    } catch (AssertionError e) {
        thrown = true; // expected
    }
    // fail() inside the try would itself throw AssertionError and be
    // swallowed by the catch above, so a flag is asserted instead.
    assertTrue(thrown);
} |
public Future<KafkaVersionChange> reconcile() {
return getPods()
.compose(this::detectToAndFromVersions)
.compose(i -> prepareVersionChange());
} | @Test
public void testNewClusterWithKafkaVersionOnly(VertxTestContext context) {
VersionChangeCreator vcc = mockVersionChangeCreator(
mockKafka(VERSIONS.defaultVersion().version(), null, null),
mockRos(List.of())
);
Checkpoint async = context.checkpoint();
vcc.reconcile().onComplete(context.succeeding(c -> context.verify(() -> {
assertThat(c.from(), is(VERSIONS.defaultVersion()));
assertThat(c.to(), is(VERSIONS.defaultVersion()));
assertThat(c.metadataVersion(), is(VERSIONS.defaultVersion().metadataVersion()));
async.flag();
})));
} |
@Override
public String toString() {
return "Counter{"
+ "value=" + value
+ '}';
} | @Test
public void test_toString() {
String s = counter.toString();
assertEquals("Counter{value=0}", s);
} |
public double calculateDensity(Graph graph, boolean isGraphDirected) {
double result;
double edgesCount = graph.getEdgeCount();
double nodesCount = graph.getNodeCount();
double multiplier = 1;
if (!isGraphDirected) {
multiplier = 2;
}
result = (multiplier * edgesCount) / (nodesCount * nodesCount - nodesCount);
return result;
} | @Test
public void testOneNodeDensity() {
GraphModel graphModel = GraphGenerator.generateNullUndirectedGraph(1);
Graph graph = graphModel.getGraph();
GraphDensity d = new GraphDensity();
double density = d.calculateDensity(graph, false);
assertEquals(density, Double.NaN);
} |
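
The NaN follows directly from the formula: with one node the denominator n² − n is zero and 0.0/0.0 is NaN in IEEE 754 arithmetic. A worked check using plain arithmetic, no Gephi APIs:

public class DensityDemo {
    public static void main(String[] args) {
        // Undirected, 3 nodes, 2 edges: 2 * 2 / (9 - 3) = 0.666...
        double n = 3, e = 2;
        System.out.println((2 * e) / (n * n - n));
        // One node, no edges: 0.0 / 0.0 = NaN, matching the test.
        double n1 = 1, e1 = 0;
        System.out.println((2 * e1) / (n1 * n1 - n1)); // NaN
    }
}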
private void emit(ByteBuffer record, int targetSubpartition, Buffer.DataType dataType)
throws IOException {
checkInProduceState();
checkNotNull(memoryDataManager).append(record, targetSubpartition, dataType);
} | @Test
void testEmit() throws Exception {
int numBuffers = 100;
int numSubpartitions = 10;
int numRecords = 1000;
Random random = new Random();
BufferPool bufferPool = globalPool.createBufferPool(numBuffers, numBuffers);
try (HsResultPartition partition = createHsResultPartition(numSubpartitions, bufferPool)) {
Queue<Tuple2<ByteBuffer, Buffer.DataType>>[] dataWritten = new Queue[numSubpartitions];
Queue<Buffer>[] buffersRead = new Queue[numSubpartitions];
for (int i = 0; i < numSubpartitions; ++i) {
dataWritten[i] = new ArrayDeque<>();
buffersRead[i] = new ArrayDeque<>();
}
int[] numBytesWritten = new int[numSubpartitions];
int[] numBytesRead = new int[numSubpartitions];
Arrays.fill(numBytesWritten, 0);
Arrays.fill(numBytesRead, 0);
for (int i = 0; i < numRecords; ++i) {
ByteBuffer record = generateRandomData(random.nextInt(2 * bufferSize) + 1, random);
boolean isBroadCast = random.nextBoolean();
if (isBroadCast) {
partition.broadcastRecord(record);
for (int subpartition = 0; subpartition < numSubpartitions; ++subpartition) {
recordDataWritten(
record,
dataWritten,
subpartition,
numBytesWritten,
Buffer.DataType.DATA_BUFFER);
}
} else {
int subpartition = random.nextInt(numSubpartitions);
partition.emitRecord(record, subpartition);
recordDataWritten(
record,
dataWritten,
subpartition,
numBytesWritten,
Buffer.DataType.DATA_BUFFER);
}
}
partition.finish();
for (int subpartition = 0; subpartition < numSubpartitions; ++subpartition) {
ByteBuffer record = EventSerializer.toSerializedEvent(EndOfPartitionEvent.INSTANCE);
recordDataWritten(
record,
dataWritten,
subpartition,
numBytesWritten,
Buffer.DataType.END_OF_PARTITION);
}
Tuple2<ResultSubpartitionView, TestingBufferAvailabilityListener>[] viewAndListeners =
createSubpartitionViews(partition, numSubpartitions);
readData(
viewAndListeners,
(buffer, subpartitionId) -> {
int numBytes = buffer.readableBytes();
numBytesRead[subpartitionId] += numBytes;
MemorySegment segment =
MemorySegmentFactory.allocateUnpooledSegment(numBytes);
segment.put(0, buffer.getNioBufferReadable(), numBytes);
buffersRead[subpartitionId].add(
new NetworkBuffer(
segment, (buf) -> {}, buffer.getDataType(), numBytes));
});
checkWriteReadResult(
numSubpartitions, numBytesWritten, numBytesRead, dataWritten, buffersRead);
}
} |
@Override
public void doInject(RequestResource resource, RamContext context, LoginIdentityContext result) {
String accessKey = context.getAccessKey();
String secretKey = context.getSecretKey();
// STS temporary credentials take priority over AK/SK authentication
if (StsConfig.getInstance().isStsOn()) {
StsCredential stsCredential = StsCredentialHolder.getInstance().getStsCredential();
accessKey = stsCredential.getAccessKeyId();
secretKey = stsCredential.getAccessKeySecret();
result.setParameter(IdentifyConstants.SECURITY_TOKEN_HEADER, stsCredential.getSecurityToken());
}
if (StringUtils.isNotEmpty(accessKey) && StringUtils.isNotBlank(secretKey)) {
result.setParameter(ACCESS_KEY_HEADER, accessKey);
}
String signatureKey = secretKey;
if (StringUtils.isNotEmpty(context.getRegionId())) {
signatureKey = CalculateV4SigningKeyUtil
.finalSigningKeyStringWithDefaultInfo(secretKey, context.getRegionId());
result.setParameter(RamConstants.SIGNATURE_VERSION, RamConstants.V4);
}
Map<String, String> signHeaders = SpasAdapter
.getSignHeaders(getResource(resource.getNamespace(), resource.getGroup()), signatureKey);
result.setParameters(signHeaders);
} | @Test
void testDoInjectForSts() throws NoSuchFieldException, IllegalAccessException {
prepareForSts();
LoginIdentityContext actual = new LoginIdentityContext();
configResourceInjector.doInject(resource, ramContext, actual);
assertEquals(4, actual.getAllKey().size());
assertEquals("test-sts-ak", actual.getParameter("Spas-AccessKey"));
assertTrue(actual.getAllKey().contains("Timestamp"));
assertTrue(actual.getAllKey().contains("Spas-Signature"));
assertTrue(actual.getAllKey().contains(IdentifyConstants.SECURITY_TOKEN_HEADER));
} |
@Override
public void putAll(Map<K, V> map) {
cache.putAll(map);
} | @Test
public void testPutAll() {
Map<Integer, String> expectedResult = new HashMap<>();
expectedResult.put(23, "value-23");
expectedResult.put(42, "value-42");
adapter.putAll(expectedResult);
assertEquals(expectedResult.size(), cache.size());
for (Integer key : expectedResult.keySet()) {
assertTrue(cache.containsKey(key));
}
} |
static void parseRouteAddress(Chain chain, Span span) {
if (span.isNoop()) return;
Connection connection = chain.connection();
if (connection == null) return;
InetSocketAddress socketAddress = connection.route().socketAddress();
span.remoteIpAndPort(socketAddress.getHostString(), socketAddress.getPort());
} | @Test void parseRouteAddress_skipsOnNoop() {
when(span.isNoop()).thenReturn(true);
TracingInterceptor.parseRouteAddress(chain, span);
verify(span).isNoop();
verifyNoMoreInteractions(span);
} |