focal_method: string (13–60.9k chars) | test_case: string (25–109k chars)
public boolean hasAnyMethodHandlerAnnotation() { return !operationsWithHandlerAnnotation.isEmpty(); }
@Test public void testHandlerClass() { BeanInfo info = new BeanInfo(context, MyClass.class); assertTrue(info.hasAnyMethodHandlerAnnotation()); }
String getDownloadURL(double lat, double lon) { int lonInt = getMinLonForTile(lon); int latInt = getMinLatForTile(lat); String north = getNorthString(latInt); String dir; if (north.equals("N")) { dir = "North/"; if (lat >= 30) dir += "North_30_60/"; else dir += "North_0_29/"; } else { dir = "South/"; } return dir + north + getPaddedLatString(latInt) + getEastString(lonInt) + getPaddedLonString(lonInt) + ".hgt"; }
@Test public void testGetDownloadUrl() { // Created a couple of random tests and compared to https://topotools.cr.usgs.gov/gmted_viewer/viewer.htm assertEquals("North/North_30_60/N42E011.hgt", instance.getDownloadURL(42.940339, 11.953125)); assertEquals("North/North_30_60/N38W078.hgt", instance.getDownloadURL(38.548165, -77.167969)); assertEquals("North/North_0_29/N14W005.hgt", instance.getDownloadURL(14.116047, -4.277344)); assertEquals("South/S52W058.hgt", instance.getDownloadURL(-51.015725, -57.621094)); assertEquals("North/North_0_29/N24E120.hgt", instance.getDownloadURL(24.590108, 120.640625)); assertEquals("South/S42W063.hgt", instance.getDownloadURL(-41.015725, -62.949219)); }
private PythonMap( PythonCallableSource pythonFunction, Coder<?> outputCoder, String pythonTransform) { this.pythonFunction = pythonFunction; this.outputCoder = outputCoder; this.pythonTransform = pythonTransform; this.extraPackages = new ArrayList<>(); }
@Test @Category({ValidatesRunner.class, UsesPythonExpansionService.class}) public void testPythonMap() { PCollection<String> output = testPipeline .apply("CreateData", Create.of(ImmutableList.of("a", "b", "c", "d"))) .apply( "ApplyPythonMap", PythonMap.<String, String>viaMapFn("lambda x:3*x", StringUtf8Coder.of()) .withExpansionService(expansionAddr)); PAssert.that(output).containsInAnyOrder("aaa", "bbb", "ccc", "ddd"); }
@Override public InputStream read(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException { final List<Header> headers = new ArrayList<Header>(this.headers()); if(status.isAppend()) { final HttpRange range = HttpRange.withStatus(status); final String header; if(TransferStatus.UNKNOWN_LENGTH == range.getEnd()) { header = String.format("bytes=%d-", range.getStart()); } else { header = String.format("bytes=%d-%d", range.getStart(), range.getEnd()); } if(log.isDebugEnabled()) { log.debug(String.format("Add range header %s for file %s", header, file)); } headers.add(new BasicHeader(HttpHeaders.RANGE, header)); // Disable compression headers.add(new BasicHeader(HttpHeaders.ACCEPT_ENCODING, "identity")); } try { final HttpRequestBase request = this.toRequest(file, status); for(Header header : headers) { request.addHeader(header); } final HttpResponse response = session.getClient().execute(request); final VoidResponseHandler handler = new VoidResponseHandler(); try { handler.handleResponse(response); // Will abort the read when closed before EOF. final ContentLengthStatusInputStream stream = new ContentLengthStatusInputStream(new HttpMethodReleaseInputStream(response, status), response.getEntity().getContentLength(), response.getStatusLine().getStatusCode()); if(status.isAppend()) { if(stream.getCode() == HttpStatus.SC_OK) { if(TransferStatus.UNKNOWN_LENGTH != status.getLength()) { if(stream.getLength() != status.getLength()) { log.warn(String.format("Range header not supported. Skipping %d bytes in file %s.", status.getOffset(), file)); stream.skip(status.getOffset()); } } } } return stream; } catch(IOException ex) { request.abort(); throw ex; } } catch(SardineException e) { throw new DAVExceptionMappingService().map("Download {0} failed", e, file); } catch(IOException e) { throw new HttpExceptionMappingService().map("Download {0} failed", e, file); } }
@Test public void testReadCloseReleaseEntity() throws Exception { final Path test = new DAVTouchFeature(session).touch(new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus()); final TransferStatus status = new TransferStatus(); final CountingInputStream in = new CountingInputStream(new DAVReadFeature(session).read(test, status, new DisabledConnectionCallback())); in.close(); assertEquals(0L, in.getByteCount(), 0L); new DAVDeleteFeature(session).delete(Collections.singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
synchronized boolean processUpdate(FunctionMetaData updateRequestFs) throws IllegalArgumentException { log.debug("Process update request: {}", updateRequestFs); boolean needsScheduling = false; // Worker doesn't know about the function so far if (!this.containsFunctionMetaData(updateRequestFs)) { // Since this is the first time worker has seen function, just put it into internal function metadata store setFunctionMetaData(updateRequestFs); needsScheduling = true; } else { // The request is an update to an existing function since this worker already has a record of this function // in its function metadata store // Check if request is outdated if (!isRequestOutdated(updateRequestFs)) { // update the function metadata setFunctionMetaData(updateRequestFs); needsScheduling = true; } else { throw new IllegalArgumentException("Update request " + "ignored because it is out of date. Please try again."); } } return needsScheduling; }
@Test public void processUpdateTest() throws PulsarClientException { SchedulerManager schedulerManager = mock(SchedulerManager.class); WorkerConfig workerConfig = new WorkerConfig(); workerConfig.setWorkerId("worker-1"); FunctionMetaDataManager functionMetaDataManager = spy( new FunctionMetaDataManager(workerConfig, schedulerManager, mockPulsarClient(), ErrorNotifier.getDefaultImpl())); Function.FunctionMetaData m1 = Function.FunctionMetaData.newBuilder() .setVersion(1) .setFunctionDetails(Function.FunctionDetails.newBuilder().setName("func-1") .setNamespace("namespace-1").setTenant("tenant-1")).build(); Assert.assertTrue(functionMetaDataManager.processUpdate(m1)); verify(functionMetaDataManager, times(1)) .setFunctionMetaData(any(Function.FunctionMetaData.class)); verify(schedulerManager, times(0)).schedule(); Assert.assertEquals(m1, functionMetaDataManager.functionMetaDataMap.get( "tenant-1").get("namespace-1").get("func-1")); Assert.assertEquals(1, functionMetaDataManager.functionMetaDataMap.get( "tenant-1").get("namespace-1").size()); // outdated request try { functionMetaDataManager.processUpdate(m1); Assert.assertTrue(false); } catch (IllegalArgumentException e) { Assert.assertEquals(e.getMessage(), "Update request ignored because it is out of date. Please try again."); } verify(functionMetaDataManager, times(1)) .setFunctionMetaData(any(Function.FunctionMetaData.class)); verify(schedulerManager, times(0)).schedule(); Assert.assertEquals(m1, functionMetaDataManager.functionMetaDataMap.get( "tenant-1").get("namespace-1").get("func-1")); Assert.assertEquals(1, functionMetaDataManager.functionMetaDataMap.get( "tenant-1").get("namespace-1").size()); // update with new version m1 = m1.toBuilder().setVersion(2).build(); Assert.assertTrue(functionMetaDataManager.processUpdate(m1)); verify(functionMetaDataManager, times(2)) .setFunctionMetaData(any(Function.FunctionMetaData.class)); verify(schedulerManager, times(0)).schedule(); Assert.assertEquals(m1, functionMetaDataManager.functionMetaDataMap.get( "tenant-1").get("namespace-1").get("func-1")); Assert.assertEquals(1, functionMetaDataManager.functionMetaDataMap.get( "tenant-1").get("namespace-1").size()); }
public BigMatrix submatrix(int i, int j, int k, int l) { if (i < 0 || i >= m || k < i || k >= m || j < 0 || j >= n || l < j || l >= n) { throw new IllegalArgumentException(String.format("Invalid submatrix range (%d:%d, %d:%d) of %d x %d", i, k, j, l, m, n)); } long offset = index(i, j); long length = index(k, l) - offset + 1; DoublePointer B = A.getPointer(offset).limit(length); if (layout() == COL_MAJOR) { return new BigMatrix(k - i + 1, l - j + 1, ld, B); } else { return new RowMajor(k - i + 1, l - j + 1, ld, B); } }
@Test public void testSubmatrix() { BigMatrix sub = matrix.submatrix(0, 1, 2, 2); System.out.println(matrix); System.out.println(sub); assertEquals(3, sub.nrow()); assertEquals(2, sub.ncol()); assertEquals(0.4, sub.get(0,0), 1E-7); assertEquals(0.8, sub.get(2,1), 1E-7); BigMatrix sub2 = sub.submatrix(0, 0, 1, 1); assertEquals(2, sub2.nrow()); assertEquals(2, sub2.ncol()); assertEquals(0.4, sub2.get(0,0), 1E-7); assertEquals(0.3, sub2.get(1,1), 1E-7); }
public static boolean bindRemotePort(Session session, int bindPort, String host, int port) throws JschRuntimeException { if (session != null && session.isConnected()) { try { session.setPortForwardingR(bindPort, host, port); } catch (JSchException e) { throw new JschRuntimeException(e, "From [{}] mapping to [{}] error!", bindPort, port); } return true; } return false; }
@Test @Disabled public void bindRemotePort() throws InterruptedException { // Establish a session Session session = JschUtil.getSession("looly.centos", 22, "test", "123456"); // Bind port 8089 on the SSH server to port 8000 on the local machine boolean b = JschUtil.bindRemotePort(session, 8089, "localhost", 8000); assertTrue(b); // Keep running forever // while (true){ // Thread.sleep(3000); // } }
private Optional<BindingTableRule> findBindingTableRule(final Collection<String> logicTableNames) { for (String each : logicTableNames) { Optional<BindingTableRule> result = findBindingTableRule(each); if (result.isPresent()) { return result; } } return Optional.empty(); }
@Test void assertGetBindingTableRuleForFound() { ShardingRule actual = createMaximumShardingRule(); assertTrue(actual.findBindingTableRule("logic_Table").isPresent()); assertThat(actual.findBindingTableRule("logic_Table").get().getShardingTables().size(), is(2)); }
public static boolean fullyDeleteContents(final File dir) { return fullyDeleteContents(dir, false); }
@Test (timeout = 30000) public void testFullyDeleteContents() throws IOException { boolean ret = FileUtil.fullyDeleteContents(del); Assert.assertTrue(ret); Verify.exists(del); Assert.assertEquals(0, Objects.requireNonNull(del.listFiles()).length); validateTmpDir(); }
@Override public String named() { return PluginEnum.SPRING_CLOUD.getName(); }
@Test public void named() { final String result = springCloudPlugin.named(); assertEquals(PluginEnum.SPRING_CLOUD.getName(), result); }
public List<CredentialRetriever> asList() throws FileNotFoundException { List<CredentialRetriever> credentialRetrievers = new ArrayList<>(); if (knownCredentialRetriever != null) { credentialRetrievers.add(knownCredentialRetriever); } if (credentialHelper != null) { // If credential helper contains file separator, treat as path; otherwise treat as suffix if (credentialHelper.contains(FileSystems.getDefault().getSeparator())) { if (!Files.exists(Paths.get(credentialHelper))) { String osName = systemProperties.getProperty("os.name").toLowerCase(Locale.ENGLISH); if (!osName.contains("windows") || (!Files.exists(Paths.get(credentialHelper + ".cmd")) && !Files.exists(Paths.get(credentialHelper + ".exe")))) { throw new FileNotFoundException( "Specified credential helper was not found: " + credentialHelper); } } credentialRetrievers.add( credentialRetrieverFactory.dockerCredentialHelper(credentialHelper)); } else { String suffix = credentialHelper; // not path; treat as suffix credentialRetrievers.add( credentialRetrieverFactory.dockerCredentialHelper("docker-credential-" + suffix)); } } if (inferredCredentialRetriever != null) { credentialRetrievers.add(inferredCredentialRetriever); } Set<Path> dockerConfigFiles = new LinkedHashSet<>(); String xdgRuntime = environment.get("XDG_RUNTIME_DIR"); if (xdgRuntime != null) { dockerConfigFiles.add(Paths.get(xdgRuntime).resolve(XDG_AUTH_FILE)); } String xdgConfigHome = environment.get("XDG_CONFIG_HOME"); if (xdgConfigHome != null) { dockerConfigFiles.add(Paths.get(xdgConfigHome).resolve(XDG_AUTH_FILE)); } String homeProperty = systemProperties.getProperty("user.home"); if (homeProperty != null) { dockerConfigFiles.add(Paths.get(homeProperty).resolve(".config").resolve(XDG_AUTH_FILE)); } String homeEnvVar = environment.get("HOME"); if (homeEnvVar != null) { dockerConfigFiles.add(Paths.get(homeEnvVar).resolve(".config").resolve(XDG_AUTH_FILE)); } String dockerConfigEnv = environment.get("DOCKER_CONFIG"); if (dockerConfigEnv != null) { dockerConfigFiles.addAll(getDockerFiles(Paths.get(dockerConfigEnv))); } if (homeProperty != null) { dockerConfigFiles.addAll(getDockerFiles(Paths.get(homeProperty).resolve(".docker"))); } if (homeEnvVar != null) { dockerConfigFiles.addAll(getDockerFiles(Paths.get(homeEnvVar).resolve(".docker"))); } dockerConfigFiles.stream() .map( path -> path.endsWith(LEGACY_DOCKER_CONFIG_FILE) ? credentialRetrieverFactory.legacyDockerConfig(path) : credentialRetrieverFactory.dockerConfig(path)) .forEach(credentialRetrievers::add); credentialRetrievers.add(credentialRetrieverFactory.wellKnownCredentialHelpers()); credentialRetrievers.add(credentialRetrieverFactory.googleApplicationDefaultCredentials()); return credentialRetrievers; }
@Test public void testDockerConfigRetrievers_undefinedHome() throws FileNotFoundException { List<CredentialRetriever> retrievers = new DefaultCredentialRetrievers( mockCredentialRetrieverFactory, new Properties(), new HashMap<>()) .asList(); assertThat(retrievers) .containsExactly( mockWellKnownCredentialHelpersCredentialRetriever, mockApplicationDefaultCredentialRetriever) .inOrder(); }
public static Thread daemonThread(Runnable r, Class<?> context, String description) { return daemonThread(r, "hollow", context, description); }
@Test public void nullPlatform() { try { daemonThread(() -> {}, null, getClass(), "boom"); fail("expected an exception"); } catch (NullPointerException e) { assertEquals("platform required", e.getMessage()); } }
public Flowable<String> getKeys() { return getKeysByPattern(null); }
@Test public void testUnlinkByPattern() { RBucketRx<String> bucket = redisson.getBucket("test1"); sync(bucket.set("someValue")); RMapRx<String, String> map = redisson.getMap("test2"); sync(map.fastPut("1", "2")); Assertions.assertEquals(2, sync(redisson.getKeys().unlinkByPattern("test?")).intValue()); }
private synchronized boolean validateClientAcknowledgement(long h) { if (h < 0) { throw new IllegalArgumentException("Argument 'h' cannot be negative, but was: " + h); } if (h > MASK) { throw new IllegalArgumentException("Argument 'h' cannot be larger than 2^32 -1, but was: " + h); } final long oldH = clientProcessedStanzas.get(); final Long lastUnackedX = unacknowledgedServerStanzas.isEmpty() ? null : unacknowledgedServerStanzas.getLast().x; return validateClientAcknowledgement(h, oldH, lastUnackedX); }
@Test public void testValidateClientAcknowledgement_rollover_edgecase_sent() throws Exception { // Setup test fixture. final long MAX = new BigInteger( "2" ).pow( 32 ).longValue() - 1; final long h = MAX; final long oldH = MAX - 1; final Long lastUnackedX = MAX; // Execute system under test. final boolean result = StreamManager.validateClientAcknowledgement(h, oldH, lastUnackedX); // Verify results. assertTrue(result); }
public boolean isEnabled() { return enabled; }
@Test public void isEnabled() { assertTrue(new MapStoreConfig().isEnabled()); }
public static ParameterTool paramsFromGenericOptionsParser(String[] args) throws IOException { Option[] options = new GenericOptionsParser(args).getCommandLine().getOptions(); Map<String, String> map = new HashMap<String, String>(); for (Option option : options) { String[] split = option.getValue().split("="); map.put(split[0], split[1]); } return ParameterTool.fromMap(map); }
@Test void testParamsFromGenericOptionsParser() throws IOException { ParameterTool parameter = HadoopUtils.paramsFromGenericOptionsParser( new String[] {"-D", "input=myInput", "-DexpectedCount=15"}); validate(parameter); }
@Override public void doFilter(IConfigRequest request, IConfigResponse response, IConfigFilterChain filterChain) throws NacosException { if (Objects.nonNull(request) && request instanceof ConfigRequest && Objects.isNull(response)) { // Publish configuration, encrypt ConfigRequest configRequest = (ConfigRequest) request; String dataId = configRequest.getDataId(); String content = configRequest.getContent(); Pair<String, String> pair = EncryptionHandler.encryptHandler(dataId, content); String secretKey = pair.getFirst(); String encryptContent = pair.getSecond(); if (!StringUtils.isBlank(encryptContent) && !encryptContent.equals(content)) { ((ConfigRequest) request).setContent(encryptContent); } if (!StringUtils.isBlank(secretKey) && !secretKey.equals(((ConfigRequest) request).getEncryptedDataKey())) { ((ConfigRequest) request).setEncryptedDataKey(secretKey); } else if (StringUtils.isBlank(((ConfigRequest) request).getEncryptedDataKey()) && StringUtils.isBlank(secretKey)) { ((ConfigRequest) request).setEncryptedDataKey(""); } } if (Objects.nonNull(response) && response instanceof ConfigResponse && Objects.isNull(request)) { // Get configuration, decrypt ConfigResponse configResponse = (ConfigResponse) response; String dataId = configResponse.getDataId(); String encryptedDataKey = configResponse.getEncryptedDataKey(); String content = configResponse.getContent(); Pair<String, String> pair = EncryptionHandler.decryptHandler(dataId, encryptedDataKey, content); String secretKey = pair.getFirst(); String decryptContent = pair.getSecond(); if (!StringUtils.isBlank(decryptContent) && !decryptContent.equals(content)) { ((ConfigResponse) response).setContent(decryptContent); } if (!StringUtils.isBlank(secretKey) && !secretKey.equals(((ConfigResponse) response).getEncryptedDataKey())) { ((ConfigResponse) response).setEncryptedDataKey(secretKey); } else if (StringUtils.isBlank(((ConfigResponse) response).getEncryptedDataKey()) && StringUtils.isBlank(secretKey)) { ((ConfigResponse) response).setEncryptedDataKey(""); } } filterChain.doFilter(request, response); }
@Test void doFilter() throws NacosException { Mockito.when(configRequest.getDataId()).thenReturn("cipher-aes-test"); Mockito.when(configRequest.getContent()).thenReturn("nacos"); configEncryptionFilter.doFilter(configRequest, null, iConfigFilterChain); Mockito.verify(configRequest, Mockito.atLeast(1)).getDataId(); Mockito.verify(configRequest, Mockito.atLeast(1)).getContent(); Mockito.when(configResponse.getDataId()).thenReturn("test-dataid"); Mockito.when(configResponse.getContent()).thenReturn("nacos"); Mockito.when(configResponse.getEncryptedDataKey()).thenReturn("1234567890"); configEncryptionFilter.doFilter(null, configResponse, iConfigFilterChain); Mockito.verify(configResponse, Mockito.atLeast(1)).getDataId(); Mockito.verify(configResponse, Mockito.atLeast(1)).getContent(); Mockito.verify(configResponse, Mockito.atLeast(1)).getEncryptedDataKey(); }
public static long parseLongAscii(final CharSequence cs, final int index, final int length) { if (length <= 0) { throw new AsciiNumberFormatException("empty string: index=" + index + " length=" + length); } final boolean negative = MINUS_SIGN == cs.charAt(index); int i = index; if (negative) { i++; if (1 == length) { throwParseLongError(cs, index, length); } } final int end = index + length; if (end - i < LONG_MAX_DIGITS) { final long tally = parsePositiveLongAscii(cs, index, length, i, end); return negative ? -tally : tally; } else if (negative) { return -parseLongAsciiOverflowCheck(cs, index, length, LONG_MIN_VALUE_DIGITS, i, end); } else { return parseLongAsciiOverflowCheck(cs, index, length, LONG_MAX_VALUE_DIGITS, i, end); } }
@Test void parseLongAsciiRoundTrip() { final String prefix = "long to test"; final StringBuilder buffer = new StringBuilder(64); buffer.append(prefix); for (int i = 0; i < ITERATIONS; i++) { final long value = ThreadLocalRandom.current().nextLong(); buffer.append(value); final long parsedValue = parseLongAscii(buffer, prefix.length(), buffer.length() - prefix.length()); assertEquals(value, parsedValue); buffer.delete(prefix.length(), 64); } }
public Optional<GroupDto> findGroup(DbSession dbSession, String groupName) { return dbClient.groupDao().selectByName(dbSession, groupName); }
@Test public void findGroup_whenGroupExists_returnsIt() { GroupDto groupDto = mockGroupDto(); when(dbClient.groupDao().selectByName(dbSession, GROUP_NAME)) .thenReturn(Optional.of(groupDto)); assertThat(groupService.findGroup(dbSession, GROUP_NAME)).contains(groupDto); }
@Override protected String buildHandle(final List<URIRegisterDTO> uriList, final SelectorDO selectorDO) { List<DivideUpstream> addList = buildDivideUpstreamList(uriList); List<DivideUpstream> canAddList = new CopyOnWriteArrayList<>(); boolean isEventDeleted = uriList.size() == 1 && EventType.DELETED.equals(uriList.get(0).getEventType()); if (isEventDeleted) { addList.get(0).setStatus(false); } List<DivideUpstream> existList = GsonUtils.getInstance().fromCurrentList(selectorDO.getHandle(), DivideUpstream.class); if (CollectionUtils.isEmpty(existList)) { canAddList = addList; } else { List<DivideUpstream> diffList = addList.stream().filter(upstream -> !existList.contains(upstream)).collect(Collectors.toList()); if (CollectionUtils.isNotEmpty(diffList)) { canAddList.addAll(diffList); existList.addAll(diffList); } List<DivideUpstream> diffStatusList = addList.stream().filter(upstream -> !upstream.isStatus() || existList.stream().anyMatch(e -> e.equals(upstream) && e.isStatus() != upstream.isStatus())).collect(Collectors.toList()); if (CollectionUtils.isNotEmpty(diffStatusList)) { canAddList.addAll(diffStatusList); } } if (doSubmit(selectorDO.getId(), canAddList)) { return null; } return GsonUtils.getInstance().toJson(CollectionUtils.isEmpty(existList) ? canAddList : existList); }
@Test public void testBuildHandle() { shenyuClientRegisterDivideService = spy(shenyuClientRegisterDivideService); final String returnStr = "[{protocol:'http://',upstreamHost:'localhost',upstreamUrl:'localhost:8090',warmup:10,weight:50,status:true,timestamp:1637826588267}," + "{protocol:'http://',upstreamHost:'localhost',upstreamUrl:'localhost:8091',warmup:10,weight:50,status:true,timestamp:1637826588267}]"; final String expected = "[{\"weight\":50,\"warmup\":10,\"protocol\":\"http://\",\"upstreamHost\":\"localhost\",\"upstreamUrl\":\"localhost:8090\",\"status\":true,\"timestamp\":1637826588267}," + "{\"weight\":50,\"warmup\":10,\"protocol\":\"http://\",\"upstreamHost\":\"localhost\",\"upstreamUrl\":\"localhost:8091\",\"status\":true,\"timestamp\":1637826588267}]"; List<URIRegisterDTO> list = new ArrayList<>(); list.add(URIRegisterDTO.builder().protocol("http://").appName("test1").rpcType(RpcTypeEnum.HTTP.getName()).host(LOCALHOST).port(8090).build()); SelectorDO selectorDO = mock(SelectorDO.class); when(selectorDO.getHandle()).thenReturn(returnStr); doReturn(false).when(shenyuClientRegisterDivideService).doSubmit(any(), any()); String actual = shenyuClientRegisterDivideService.buildHandle(list, selectorDO); assertEquals(expected, actual); List<DivideUpstream> resultList = GsonUtils.getInstance().fromCurrentList(actual, DivideUpstream.class); assertEquals(2, resultList.size()); list.clear(); list.add(URIRegisterDTO.builder().appName("test1").rpcType(RpcTypeEnum.HTTP.getName()).host(LOCALHOST).port(8092).build()); selectorDO = mock(SelectorDO.class); when(selectorDO.getHandle()).thenReturn(returnStr); doReturn(false).when(shenyuClientRegisterDivideService).doSubmit(any(), any()); actual = shenyuClientRegisterDivideService.buildHandle(list, selectorDO); resultList = GsonUtils.getInstance().fromCurrentList(actual, DivideUpstream.class); assertEquals(3, resultList.size()); list.clear(); list.add(URIRegisterDTO.builder().appName("test1").rpcType(RpcTypeEnum.HTTP.getName()).host(LOCALHOST).port(8090).build()); doReturn(false).when(shenyuClientRegisterDivideService).doSubmit(any(), any()); selectorDO = mock(SelectorDO.class); actual = shenyuClientRegisterDivideService.buildHandle(list, selectorDO); resultList = GsonUtils.getInstance().fromCurrentList(actual, DivideUpstream.class); assertEquals(1, resultList.size()); }
@Override public AllocateResponse allocate(AllocateRequest allocateRequest) throws YarnException, IOException { AllocateResponse allocateResponse = null; long startTime = System.currentTimeMillis(); synchronized (this) { if(this.shutdown){ throw new YarnException("Allocate called after AMRMClientRelayer for " + "RM " + rmId + " shutdown."); } addNewAllocateRequest(allocateRequest); ArrayList<ResourceRequest> askList = new ArrayList<>(ask.size()); for (ResourceRequest r : ask) { // create a copy of ResourceRequest as we might change it while the // RPC layer is using it to send info across askList.add(ResourceRequest.clone(r)); } allocateRequest = AllocateRequest.newBuilder() .responseId(allocateRequest.getResponseId()) .progress(allocateRequest.getProgress()).askList(askList) .releaseList(new ArrayList<>(this.release)) .resourceBlacklistRequest(ResourceBlacklistRequest.newInstance( new ArrayList<>(this.blacklistAdditions), new ArrayList<>(this.blacklistRemovals))) .updateRequests(new ArrayList<>(this.change.values())) .schedulingRequests(new ArrayList<>(this.schedulingRequest)) .build(); if (this.resetResponseId != -1) { LOG.info("Override allocate responseId from " + allocateRequest.getResponseId() + " to " + this.resetResponseId + " for " + this.appId); allocateRequest.setResponseId(this.resetResponseId); } } // Do the actual allocate call try { allocateResponse = this.rmClient.allocate(allocateRequest); // Heartbeat succeeded, wipe out responseId overriding this.resetResponseId = -1; } catch (ApplicationMasterNotRegisteredException e) { // This is a retriable exception - we will re-register and make a // recursive call to retry LOG.warn("ApplicationMaster is out of sync with RM " + rmId + " for " + this.appId + ", hence resyncing."); this.metrics.incrRMMasterSlaveSwitch(this.rmId); synchronized (this) { // Add all remotePending data into to-send data structures for (ResourceRequestSet requestSet : this.remotePendingAsks .values()) { for (ResourceRequest rr : requestSet.getRRs()) { addResourceRequestToAsk(rr); } } this.release.addAll(this.remotePendingRelease); this.blacklistAdditions.addAll(this.remoteBlacklistedNodes); this.change.putAll(this.remotePendingChange); for (List<SchedulingRequest> reqs : this.remotePendingSchedRequest .values()) { this.schedulingRequest.addAll(reqs); } } // re-register with RM, then retry allocate recursively reRegisterApplicationMaster(this.amRegistrationRequest); // Reset responseId after re-register allocateRequest.setResponseId(0); allocateResponse = allocate(allocateRequest); return allocateResponse; } catch (Throwable t) { // Unexpected exception - rethrow and increment heartbeat failure metric this.metrics.addHeartbeatFailure(this.rmId, System.currentTimeMillis() - startTime); // If RM is complaining about responseId out of sync, force reset next // time if (t instanceof InvalidApplicationMasterRequestException) { int responseId = AMRMClientUtils .parseExpectedResponseIdFromException(t.getMessage()); if (responseId != -1) { this.resetResponseId = responseId; LOG.info("ResponseId out of sync with RM, expect " + responseId + " but " + allocateRequest.getResponseId() + " used by " + this.appId + ". Will override in the next allocate."); } else { LOG.warn("Failed to parse expected responseId out of exception for " + this.appId); } } throw t; } synchronized (this) { if (this.shutdown) { throw new YarnException("Allocate call succeeded for " + this.appId + " after AMRMClientRelayer for RM " + rmId + " shutdown."); } updateMetrics(allocateResponse, startTime); AMRMClientUtils.removeFromOutstandingSchedulingRequests( allocateResponse.getAllocatedContainers(), this.remotePendingSchedRequest); AMRMClientUtils.removeFromOutstandingSchedulingRequests( allocateResponse.getContainersFromPreviousAttempts(), this.remotePendingSchedRequest); this.ask.clear(); this.release.clear(); this.blacklistAdditions.clear(); this.blacklistRemovals.clear(); this.change.clear(); this.schedulingRequest.clear(); return allocateResponse; } }
@Test public void testResendRequestsOnRMRestart() throws YarnException, IOException { ContainerId c1 = createContainerId(1); ContainerId c2 = createContainerId(2); ContainerId c3 = createContainerId(3); // Ask for two containers, one with location preference this.asks.add(createResourceRequest(0, "node1", 2048, 1, 1, ExecutionType.GUARANTEED, 1)); this.asks.add(createResourceRequest(0, "rack", 2048, 1, 1, ExecutionType.GUARANTEED, 1)); this.asks.add(createResourceRequest(0, ResourceRequest.ANY, 2048, 1, 1, ExecutionType.GUARANTEED, 2)); this.releases.add(c1); this.blacklistAdditions.add("node1"); this.blacklistRemoval.add("node0"); // 1. a fully loaded request this.relayer.allocate(getAllocateRequest()); assertAsksAndReleases(3, 1); assertBlacklistAdditionsAndRemovals(1, 1); clearAllocateRequestLists(); // 2. empty request this.relayer.allocate(getAllocateRequest()); assertAsksAndReleases(0, 0); assertBlacklistAdditionsAndRemovals(0, 0); clearAllocateRequestLists(); // Set RM restart and failover flag this.mockAMS.setFailoverFlag(); // More requests this.blacklistAdditions.add("node2"); this.releases.add(c2); this.relayer.allocate(getAllocateRequest()); // verify pending requests are fully re-sent assertAsksAndReleases(3, 2); assertBlacklistAdditionsAndRemovals(2, 0); clearAllocateRequestLists(); }
public static Builder custom() { return new Builder(); }
@Test public void buildTimeoutDurationIsNotWithinLimits() { exception.expect(ThrowableCauseMatcher.hasCause(isA(ArithmeticException.class))); exception.expectMessage("TimeoutDuration too large"); RateLimiterConfig.custom() .timeoutDuration(Duration.ofSeconds(Long.MAX_VALUE)); }
@Override public List<ValidationMessage> validate(ValidationContext context) { return context.query().tokens().stream() .filter(this::isInvalidOperator) .map(token -> { final String errorMessage = String.format(Locale.ROOT, "Query contains invalid operator \"%s\". All AND / OR / NOT operators have to be written uppercase", token.image()); return ValidationMessage.builder(ValidationStatus.WARNING, ValidationType.INVALID_OPERATOR) .errorMessage(errorMessage) .relatedProperty(token.image()) .position(QueryPosition.from(token)) .build(); }).collect(Collectors.toList()); }
@Test void testLongStringOfInvalidTokens() { final ValidationContext context = TestValidationContext.create("and and and or or or") .build(); final List<ValidationMessage> messages = sut.validate(context); assertThat(messages.size()).isEqualTo(6); assertThat(messages.stream().allMatch(v -> v.validationType() == ValidationType.INVALID_OPERATOR)).isTrue(); Assertions.assertThat(messages) .extracting(v -> v.relatedProperty().orElse("invalid-property")) .containsOnly("and", "or"); }
@Override public <T extends State> T state(StateNamespace namespace, StateTag<T> address) { return workItemState.get(namespace, address, StateContexts.nullContext()); }
@Test public void testWatermarkAddBeforeReadEndOfWindow() throws Exception { StateTag<WatermarkHoldState> addr = StateTags.watermarkStateInternal("watermark", TimestampCombiner.END_OF_WINDOW); WatermarkHoldState bag = underTest.state(NAMESPACE, addr); SettableFuture<Instant> future = SettableFuture.create(); when(mockReader.watermarkFuture(key(NAMESPACE, "watermark"), STATE_FAMILY)).thenReturn(future); // Requests a future once bag.readLater(); bag.add(new Instant(3000)); waitAndSet(future, new Instant(3000), 200); // read() requests a future again, receiving the same one assertThat(bag.read(), Matchers.equalTo(new Instant(3000))); Mockito.verify(mockReader, times(2)).watermarkFuture(key(NAMESPACE, "watermark"), STATE_FAMILY); Mockito.verifyNoMoreInteractions(mockReader); // Adding another value doesn't create another future, but does update the result. bag.add(new Instant(3000)); assertThat(bag.read(), Matchers.equalTo(new Instant(3000))); Mockito.verifyNoMoreInteractions(mockReader); }
public SearchQuery parse(String encodedQueryString) { if (Strings.isNullOrEmpty(encodedQueryString) || "*".equals(encodedQueryString)) { return new SearchQuery(encodedQueryString); } final var queryString = URLDecoder.decode(encodedQueryString, StandardCharsets.UTF_8); final Matcher matcher = querySplitterMatcher(requireNonNull(queryString).trim()); final ImmutableMultimap.Builder<String, FieldValue> builder = ImmutableMultimap.builder(); final ImmutableSet.Builder<String> disallowedKeys = ImmutableSet.builder(); while (matcher.find()) { final String entry = matcher.group(); if (!entry.contains(":")) { builder.put(withPrefixIfNeeded(defaultField), createFieldValue(defaultFieldKey.getFieldType(), entry, false)); continue; } final Iterator<String> entryFields = FIELD_VALUE_SPLITTER.splitToList(entry).iterator(); checkArgument(entryFields.hasNext(), INVALID_ENTRY_MESSAGE, entry); final String key = entryFields.next(); // Skip if there are no valid k/v pairs. (i.e. "action:") if (!entryFields.hasNext()) { continue; } final boolean negate = key.startsWith("-"); final String cleanKey = key.replaceFirst("^-", ""); final String value = entryFields.next(); VALUE_SPLITTER.splitToList(value).forEach(v -> { if (!dbFieldMapping.containsKey(cleanKey)) { disallowedKeys.add(cleanKey); } final SearchQueryField translatedKey = dbFieldMapping.get(cleanKey); if (translatedKey != null) { builder.put(withPrefixIfNeeded(translatedKey.getDbField()), createFieldValue(translatedKey.getFieldType(), v, negate)); } else { builder.put(withPrefixIfNeeded(defaultField), createFieldValue(defaultFieldKey.getFieldType(), v, negate)); } }); checkArgument(!entryFields.hasNext(), INVALID_ENTRY_MESSAGE, entry); } return new SearchQuery(queryString, builder.build(), disallowedKeys.build()); }
@Test void explicitAllowedField() { SearchQueryParser parser = new SearchQueryParser("defaultfield", ImmutableSet.of("name", "id")); final SearchQuery query = parser.parse("name:foo"); final Multimap<String, SearchQueryParser.FieldValue> queryMap = query.getQueryMap(); assertThat(queryMap.size()).isEqualTo(1); assertThat(queryMap.get("name")).containsOnly(new SearchQueryParser.FieldValue("foo", false)); assertThat(query.hasDisallowedKeys()).isFalse(); assertThat(query.getDisallowedKeys()).isEmpty(); final DBQuery.Query dbQuery = query.toDBQuery(); final Collection<String> fieldNamesUsed = extractFieldNames(dbQuery.conditions()); assertThat(fieldNamesUsed).containsExactly("name"); }
@Override public void execute(CommandLine commandLine, Options options, RPCHook rpcHook) throws SubCommandException { DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook); defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis())); try { String group = commandLine.getOptionValue("g").trim(); String topic = commandLine.getOptionValue("t").trim(); String originClientId = ""; if (commandLine.hasOption("i")) { originClientId = commandLine.getOptionValue("i").trim(); } if (commandLine.hasOption('n')) { defaultMQAdminExt.setNamesrvAddr(commandLine.getOptionValue('n').trim()); } defaultMQAdminExt.start(); Map<String, Map<MessageQueue, Long>> consumerStatusTable = defaultMQAdminExt.getConsumeStatus(topic, group, originClientId); System.out.printf("get consumer status from client. group=%s, topic=%s, originClientId=%s%n", group, topic, originClientId); System.out.printf("%-50s %-15s %-15s %-20s%n", "#clientId", "#brokerName", "#queueId", "#offset"); for (Map.Entry<String, Map<MessageQueue, Long>> entry : consumerStatusTable.entrySet()) { String clientId = entry.getKey(); Map<MessageQueue, Long> mqTable = entry.getValue(); for (Map.Entry<MessageQueue, Long> entry1 : mqTable.entrySet()) { MessageQueue mq = entry1.getKey(); System.out.printf("%-50s %-15s %-15d %-20d%n", UtilAll.frontStringAtLeast(clientId, 50), mq.getBrokerName(), mq.getQueueId(), mqTable.get(mq)); } } } catch (Exception e) { throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e); } finally { defaultMQAdminExt.shutdown(); } }
@Test public void testExecute() throws SubCommandException { GetConsumerStatusCommand cmd = new GetConsumerStatusCommand(); Options options = ServerUtil.buildCommandlineOptions(new Options()); String[] subargs = new String[] {"-g default-group", "-t unit-test", "-i clientid", String.format("-n localhost:%d", nameServerMocker.listenPort())}; final CommandLine commandLine = ServerUtil.parseCmdLine("mqadmin " + cmd.commandName(), subargs, cmd.buildCommandlineOptions(options), new DefaultParser()); cmd.execute(commandLine, options, null); }
public String join(final Stream<?> parts) { return join(parts.iterator()); }
@Test public void shouldHandleNulls() { assertThat(joiner.join(Arrays.asList("a", null, "c")), is("a, null or c")); }
public static long findAndVerifyWindowGrace(final GraphNode graphNode) { return findAndVerifyWindowGrace(graphNode, ""); }
@Test public void shouldThrowOnNull() { try { GraphGraceSearchUtil.findAndVerifyWindowGrace(null); fail("Should have thrown."); } catch (final TopologyException e) { assertThat(e.getMessage(), is("Invalid topology: Window close time is only defined for windowed computations. Got [].")); } }
public BundleProcessor getProcessor( BeamFnApi.ProcessBundleDescriptor descriptor, List<RemoteInputDestination> remoteInputDesinations) { checkState( !descriptor.hasStateApiServiceDescriptor(), "The %s cannot support a %s containing a state %s.", BundleProcessor.class.getSimpleName(), BeamFnApi.ProcessBundleDescriptor.class.getSimpleName(), Endpoints.ApiServiceDescriptor.class.getSimpleName()); return getProcessor(descriptor, remoteInputDesinations, NoOpStateDelegator.INSTANCE); }
@Test public void testRegisterWithStateRequiresStateDelegator() throws Exception { ProcessBundleDescriptor descriptor = ProcessBundleDescriptor.newBuilder() .setId("test") .setStateApiServiceDescriptor(ApiServiceDescriptor.newBuilder().setUrl("foo")) .build(); List<RemoteInputDestination> remoteInputs = Collections.singletonList( RemoteInputDestination.of( (FullWindowedValueCoder) FullWindowedValueCoder.of(VarIntCoder.of(), GlobalWindow.Coder.INSTANCE), SDK_GRPC_READ_TRANSFORM)); thrown.expect(IllegalStateException.class); thrown.expectMessage("containing a state"); sdkHarnessClient.getProcessor(descriptor, remoteInputs); }
public static List<String> resolveCompsDependency(Service service) { List<String> components = new ArrayList<String>(); for (Component component : service.getComponents()) { int depSize = component.getDependencies().size(); if (!components.contains(component.getName())) { components.add(component.getName()); } if (depSize != 0) { for (String depComp : component.getDependencies()) { if (!components.contains(depComp)) { components.add(0, depComp); } } } } return components; }
@Test public void testResolveCompsDependency() { Service service = createExampleApplication(); List<String> dependencies = new ArrayList<String>(); dependencies.add("compb"); Component compa = createComponent("compa"); compa.setDependencies(dependencies); Component compb = createComponent("compb"); service.addComponent(compa); service.addComponent(compb); List<String> order = ServiceApiUtil.resolveCompsDependency(service); List<String> expected = new ArrayList<String>(); expected.add("compb"); expected.add("compa"); for (int i = 0; i < expected.size(); i++) { Assert.assertEquals("Components are not equal.", expected.get(i), order.get(i)); } }
@Override public void properties(SAPropertiesFetcher saPropertiesFetcher) { // read super property JSONObject superProperties = mSensorsDataAPI.getSuperProperties(); // read dynamic property JSONObject dynamicProperty = mSensorsDataAPI.getDynamicProperty(); // merge super and dynamic property JSONObject removeDuplicateSuperProperties = JSONUtils.mergeSuperJSONObject(dynamicProperty, superProperties); // merge custom property and super property JSONUtils.mergeJSONObject(removeDuplicateSuperProperties, saPropertiesFetcher.getProperties()); }
@Test public void properties() { JSONObject superProperty = new JSONObject(); try { superProperty.put("superProperty", "superProperty"); } catch (JSONException e) { e.printStackTrace(); } SensorsDataAPI sensorsDataAPI = SAHelper.initSensors(mApplication); sensorsDataAPI.registerSuperProperties(superProperty); sensorsDataAPI.setTrackEventCallBack(new SensorsDataTrackEventCallBack() { @Override public boolean onTrackEvent(String eventName, JSONObject eventProperties) { if ("AppTest".equals(eventName)) { Assert.assertTrue(eventProperties.has("superProperty")); } return false; } }); sensorsDataAPI.track("AppTest"); }
@Override @SuppressWarnings("CallToSystemGC") public void execute(Map<String, List<String>> parameters, PrintWriter output) { final int count = parseRuns(parameters); for (int i = 0; i < count; i++) { output.println("Running GC..."); output.flush(); runtime.gc(); } output.println("Done!"); }
@Test void defaultsToOneRunIfTheQueryParamDoesNotParse() throws Exception { task.execute(Collections.singletonMap("runs", Collections.singletonList("$")), output); verify(runtime, times(1)).gc(); }
public static void copyInstances(Collection<InstanceInfo> instances, Applications result) { if (instances != null) { for (InstanceInfo instance : instances) { Application app = result.getRegisteredApplications(instance.getAppName()); if (app == null) { app = new Application(instance.getAppName()); result.addApplication(app); } app.addInstance(instance); } } }
@Test public void testCopyInstancesIfNotNullReturnCollectionOfInstanceInfo() { Application application = createSingleInstanceApp("foo", "foo", InstanceInfo.ActionType.ADDED); Assert.assertEquals(1, EurekaEntityFunctions.copyInstances( new ArrayList<>(Arrays.asList( application.getByInstanceId("foo"))), InstanceInfo.ActionType.ADDED).size()); }
@Override public YamlShardingRuleConfiguration swapToYamlConfiguration(final ShardingRuleConfiguration data) { YamlShardingRuleConfiguration result = new YamlShardingRuleConfiguration(); data.getTables().forEach(each -> result.getTables().put(each.getLogicTable(), tableSwapper.swapToYamlConfiguration(each))); data.getAutoTables().forEach(each -> result.getAutoTables().put(each.getLogicTable(), autoTableSwapper.swapToYamlConfiguration(each))); result.getBindingTables().addAll(data.getBindingTableGroups().stream().map(YamlShardingTableReferenceRuleConfigurationConverter::convertToYamlString).collect(Collectors.toList())); setYamlStrategies(data, result); setYamlAlgorithms(data, result); result.setDefaultShardingColumn(data.getDefaultShardingColumn()); if (null != data.getShardingCache()) { result.setShardingCache(shardingCacheSwapper.swapToYamlConfiguration(data.getShardingCache())); } return result; }
@Test void assertSwapToYamlConfiguration() { YamlShardingRuleConfiguration actual = getSwapper().swapToYamlConfiguration(createMaximumShardingRuleConfiguration()); assertThat(actual.getTables().size(), is(2)); assertThat(actual.getAutoTables().size(), is(1)); assertThat(actual.getBindingTables().size(), is(1)); assertYamlStrategies(actual); assertYamlAlgorithms(actual); assertThat(actual.getDefaultShardingColumn(), is("table_id")); assertThat(actual.getShardingCache().getAllowedMaxSqlLength(), is(100)); }
@Override public Xid[] recover(final int flags) throws XAException { try { return delegate.recover(flags); } catch (final XAException ex) { throw mapXAException(ex); } }
@Test void assertRecover() throws XAException { singleXAResource.recover(1); verify(xaResource).recover(1); }
@Override public ProtobufSystemInfo.Section toProtobuf() { Builder protobuf = ProtobufSystemInfo.Section.newBuilder(); protobuf.setName("Settings"); PropertyDefinitions definitions = settings.getDefinitions(); TreeMap<String, String> orderedProps = new TreeMap<>(settings.getProperties()); orderedProps.entrySet() .stream() .filter(prop -> nodeInformation.isStandalone() || !IGNORED_SETTINGS_IN_CLUSTER.contains(prop.getKey())) .forEach(prop -> includeSetting(protobuf, definitions, prop)); addDefaultNewCodeDefinition(protobuf); return protobuf.build(); }
@Test public void return_default_new_code_definition_with_no_specified_value() { dbTester.newCodePeriods().insert(NewCodePeriodType.PREVIOUS_VERSION, null); ProtobufSystemInfo.Section protobuf = underTest.toProtobuf(); assertThatAttributeIs(protobuf, "Default New Code Definition", "PREVIOUS_VERSION"); }
public static Method getMethod(Class<?> clazz, String methodName, Class<?>... paramTypes) throws SecurityException { return getMethod(clazz, false, methodName, paramTypes); }
@Test @Disabled public void getMethodBenchTest() { // Warm up getMethodWithReturnTypeCheck(TestBenchClass.class, false, "getH"); final TimeInterval timer = DateUtil.timer(); timer.start(); for (int i = 0; i < 100000000; i++) { ReflectUtil.getMethod(TestBenchClass.class, false, "getH"); } Console.log(timer.interval()); timer.restart(); for (int i = 0; i < 100000000; i++) { getMethodWithReturnTypeCheck(TestBenchClass.class, false, "getH"); } Console.log(timer.interval()); }
@Override public boolean equals(@Nullable Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } CeTask ceTask = (CeTask) o; return uuid.equals(ceTask.uuid); }
@Test public void submitter_equals_and_hashCode_on_uuid() { CeTask.User user1 = new CeTask.User("UUID_1", null); CeTask.User user1bis = new CeTask.User("UUID_1", null); CeTask.User user2 = new CeTask.User("UUID_2", null); CeTask.User user1_diff_login = new CeTask.User("UUID_1", "LOGIN"); assertThat(user1.equals(null)).isFalse(); assertThat(user1) .isEqualTo(user1) .isEqualTo(user1bis) .isNotEqualTo(user2) .hasSameHashCodeAs(user1) .hasSameHashCodeAs(user1bis) .hasSameHashCodeAs(user1_diff_login); }
public static Type instantiateType(String solidityType, Object value) throws InvocationTargetException, NoSuchMethodException, InstantiationException, IllegalAccessException, ClassNotFoundException { return instantiateType(makeTypeReference(solidityType), value); }
@SuppressWarnings("unchecked") @Test public void multiDimArrays() throws Exception { byte[] bytes1d = new byte[] {1, 2, 3}; byte[][] bytes2d = new byte[][] {bytes1d, bytes1d, bytes1d}; final byte[][][] bytes3d = new byte[][][] {bytes2d, bytes2d, bytes2d}; assertEquals(TypeDecoder.instantiateType("bytes", bytes1d), (new DynamicBytes(bytes1d))); Type twoDim = TypeDecoder.instantiateType("uint256[][3]", bytes2d); assertTrue(twoDim instanceof StaticArray3); StaticArray3<DynamicArray<Uint256>> staticArray3 = (StaticArray3<DynamicArray<Uint256>>) twoDim; assertEquals(staticArray3.getComponentType(), DynamicArray.class); DynamicArray<Uint256> row1 = staticArray3.getValue().get(1); assertEquals(row1.getValue().get(2), new Uint256(3)); Type threeDim = TypeDecoder.instantiateType("uint256[][3][3]", bytes3d); assertTrue(threeDim instanceof StaticArray3); StaticArray3<StaticArray3<DynamicArray<Uint256>>> staticArray3StaticArray3 = (StaticArray3<StaticArray3<DynamicArray<Uint256>>>) threeDim; assertEquals(staticArray3StaticArray3.getComponentType(), StaticArray3.class); row1 = staticArray3StaticArray3.getValue().get(1).getValue().get(1); assertEquals(row1.getValue().get(1), (new Uint256(2))); }
@Override public boolean equals(@Nullable Object obj) { if (!(obj instanceof S3ResourceId)) { return false; } S3ResourceId o = (S3ResourceId) obj; return scheme.equals(o.scheme) && bucket.equals(o.bucket) && key.equals(o.key); }
@Test public void testEquals() { S3ResourceId a = S3ResourceId.fromComponents("s3", "bucket", "a/b/c"); S3ResourceId b = S3ResourceId.fromComponents("s3", "bucket", "a/b/c"); assertEquals(a, b); b = S3ResourceId.fromComponents("s3", a.getBucket(), "a/b/c/"); assertNotEquals(a, b); b = S3ResourceId.fromComponents("s3", a.getBucket(), "x/y/z"); assertNotEquals(a, b); b = S3ResourceId.fromComponents("s3", "other-bucket", a.getKey()); assertNotEquals(a, b); assertNotEquals(b, a); b = S3ResourceId.fromComponents("other", "bucket", "a/b/c"); assertNotEquals(a, b); assertNotEquals(b, a); }
public void validate(AlmSettingDto dto) { try { azureDevOpsHttpClient.checkPAT(requireNonNull(dto.getUrl()), requireNonNull(dto.getDecryptedPersonalAccessToken(settings.getEncryption()))); } catch (IllegalArgumentException e) { throw new IllegalArgumentException("Invalid Azure URL or Personal Access Token", e); } }
@Test public void validate_givenHttpClientThrowingException_throwException() { AlmSettingDto dto = createMockDto(); doThrow(new IllegalArgumentException()).when(azureDevOpsHttpClient).checkPAT(any(), any()); assertThatExceptionOfType(IllegalArgumentException.class) .isThrownBy(() -> underTest.validate(dto)) .withMessage("Invalid Azure URL or Personal Access Token"); }
public static boolean setFieldValue(Object target, String fieldName, Object value) { final Optional<Field> fieldOption = getField(target, fieldName); if (!fieldOption.isPresent()) { return false; } final Field field = fieldOption.get(); if (isFinalField(field)) { updateFinalModifierField(field); } try { field.set(target, value); return true; } catch (IllegalAccessException ex) { LOGGER.warning(String.format(Locale.ENGLISH, "Set value for field [%s] failed! %s", fieldName, ex.getMessage())); return false; } }
@Test public void setFieldValue() throws NoSuchFieldException, IllegalAccessException { final TestReflect reflect = new TestReflect(); int x = 102, y = 1899; ReflectUtils.setFieldValue(reflect, "x", x); ReflectUtils.setFieldValue(reflect, "y", y); Assert.assertEquals(reflect.x + reflect.y, x + y); }
public static Duration parse(final String text) { try { final String[] parts = text.split("\\s"); if (parts.length != 2) { throw new IllegalArgumentException("Expected 2 tokens, got: " + parts.length); } final long size = parseNumeric(parts[0]); return buildDuration(size, parts[1]); } catch (final Exception e) { throw new IllegalArgumentException("Invalid duration: '" + text + "'. " + e.getMessage(), e); } }
@Test public void shouldParseSingular() { assertThat(DurationParser.parse("1 Second"), is(Duration.ofSeconds(1))); }
@VisibleForTesting void handleResponse(DiscoveryResponseData response) { ResourceType resourceType = response.getResourceType(); switch (resourceType) { case NODE: handleD2NodeResponse(response); break; case D2_URI_MAP: handleD2URIMapResponse(response); break; case D2_URI: handleD2URICollectionResponse(response); break; default: throw new AssertionError("Missing case in enum switch: " + resourceType); } }
@Test public void testHandleD2URICollectionResponseWithRemoval() { DiscoveryResponseData removeClusterResponse = new DiscoveryResponseData(D2_URI, null, Collections.singletonList(CLUSTER_GLOB_COLLECTION), NONCE, null); XdsClientImplFixture fixture = new XdsClientImplFixture(); fixture._clusterSubscriber.setData(D2_URI_MAP_UPDATE_WITH_DATA1); fixture._xdsClientImpl.handleResponse(removeClusterResponse); fixture.verifyAckSent(1); verify(fixture._resourceWatcher).onChanged(eq(D2_URI_MAP_UPDATE_WITH_DATA1)); verify(fixture._clusterSubscriber).onRemoval(); verifyZeroInteractions(fixture._serverMetricsProvider); D2URIMapUpdate actualData = (D2URIMapUpdate) fixture._clusterSubscriber.getData(); // removed resource will not overwrite the original valid data Assert.assertEquals(actualData.getURIMap(), D2_URI_MAP_UPDATE_WITH_DATA1.getURIMap()); }
public static DataSchema avroToDataSchema(String avroSchemaInJson, AvroToDataSchemaTranslationOptions options) throws IllegalArgumentException { ValidationOptions validationOptions = SchemaParser.getDefaultSchemaParserValidationOptions(); validationOptions.setAvroUnionMode(true); SchemaParserFactory parserFactory = SchemaParserFactory.instance(validationOptions); DataSchemaResolver resolver = getResolver(parserFactory, options); PegasusSchemaParser parser = parserFactory.create(resolver); parser.parse(avroSchemaInJson); if (parser.hasError()) { throw new IllegalArgumentException(parser.errorMessage()); } assert(parser.topLevelDataSchemas().size() == 1); DataSchema dataSchema = parser.topLevelDataSchemas().get(0); DataSchema resultDataSchema = null; AvroToDataSchemaTranslationMode translationMode = options.getTranslationMode(); if (translationMode == AvroToDataSchemaTranslationMode.RETURN_EMBEDDED_SCHEMA || translationMode == AvroToDataSchemaTranslationMode.VERIFY_EMBEDDED_SCHEMA) { // check for embedded schema Object dataProperty = dataSchema.getProperties().get(SchemaTranslator.DATA_PROPERTY); if (dataProperty != null && dataProperty.getClass() == DataMap.class) { Object schemaProperty = ((DataMap) dataProperty).get(SchemaTranslator.SCHEMA_PROPERTY); if (schemaProperty.getClass() == DataMap.class) { SchemaParser embeddedSchemaParser = SchemaParserFactory.instance().create(null); embeddedSchemaParser.parse(Arrays.asList(schemaProperty)); if (embeddedSchemaParser.hasError()) { throw new IllegalArgumentException("Embedded schema is invalid\n" + embeddedSchemaParser.errorMessage()); } assert(embeddedSchemaParser.topLevelDataSchemas().size() == 1); resultDataSchema = embeddedSchemaParser.topLevelDataSchemas().get(0); if (translationMode == AvroToDataSchemaTranslationMode.VERIFY_EMBEDDED_SCHEMA) { // additional verification to make sure that embedded schema translates to Avro schema DataToAvroSchemaTranslationOptions dataToAvroSchemaOptions = new DataToAvroSchemaTranslationOptions(); Object optionalDefaultModeProperty = ((DataMap) dataProperty).get(SchemaTranslator.OPTIONAL_DEFAULT_MODE_PROPERTY); dataToAvroSchemaOptions.setOptionalDefaultMode(OptionalDefaultMode.valueOf(optionalDefaultModeProperty.toString())); Schema avroSchemaFromEmbedded = dataToAvroSchema(resultDataSchema, dataToAvroSchemaOptions); Schema avroSchemaFromJson = AvroCompatibilityHelper.parse(avroSchemaInJson, SchemaParseConfiguration.STRICT, null).getMainSchema(); Object embededSchemaPropertyVal = avroSchemaFromJson.getObjectProp(DATA_PROPERTY); if (embededSchemaPropertyVal != null) { avroSchemaFromEmbedded.addProp(DATA_PROPERTY, embededSchemaPropertyVal); } if (!avroSchemaFromEmbedded.equals(avroSchemaFromJson)) { throw new IllegalArgumentException("Embedded schema does not translate to input Avro schema: " + avroSchemaInJson); } } } } } if (resultDataSchema == null) { // translationMode == TRANSLATE or no embedded schema DataSchemaTraverse traverse = new DataSchemaTraverse(); traverse.traverse(dataSchema, AvroToDataSchemaConvertCallback.INSTANCE); // convert default values traverse.traverse(dataSchema, DefaultAvroToDataConvertCallback.INSTANCE); // make sure it can round-trip String dataSchemaJson = dataSchema.toString(); resultDataSchema = DataTemplateUtil.parseSchema(dataSchemaJson); } return resultDataSchema; }
@Test(dataProvider = "fromAvroSchemaData") public void testFromAvroSchema(String avroText, String schemaText) throws Exception { AvroToDataSchemaTranslationOptions options[] = { new AvroToDataSchemaTranslationOptions(AvroToDataSchemaTranslationMode.TRANSLATE), new AvroToDataSchemaTranslationOptions(AvroToDataSchemaTranslationMode.RETURN_EMBEDDED_SCHEMA), new AvroToDataSchemaTranslationOptions(AvroToDataSchemaTranslationMode.VERIFY_EMBEDDED_SCHEMA) }; // test generating Pegasus schema from Avro schema for (AvroToDataSchemaTranslationOptions option : options) { DataSchema schema = SchemaTranslator.avroToDataSchema(avroText, option); String schemaTextFromAvro = SchemaToJsonEncoder.schemaToJson(schema, JsonBuilder.Pretty.SPACES); assertEquals(TestUtil.dataMapFromString(schemaTextFromAvro), TestUtil.dataMapFromString(schemaText)); Schema avroSchema = AvroCompatibilityHelper.parse(avroText, new SchemaParseConfiguration(false, false), null).getMainSchema(); String preTranslateAvroSchema = avroSchema.toString(); schema = SchemaTranslator.avroToDataSchema(avroSchema, option); schemaTextFromAvro = SchemaToJsonEncoder.schemaToJson(schema, JsonBuilder.Pretty.SPACES); assertEquals(TestUtil.dataMapFromString(schemaTextFromAvro), TestUtil.dataMapFromString(schemaText)); String postTranslateAvroSchema = avroSchema.toString(); assertEquals(preTranslateAvroSchema, postTranslateAvroSchema); } }
@Override public String retrieveIPfilePath(String id, String dstDir, Map<Path, List<String>> localizedResources) { // Assume .aocx IP file is distributed by DS to local dir String ipFilePath = null; LOG.info("Got environment: " + id + ", search IP file in localized resources"); if (null == id || id.isEmpty()) { LOG.warn("IP_ID environment is empty, skip downloading"); return null; } if (localizedResources != null) { Optional<Path> aocxPath = localizedResources .keySet() .stream() .filter(path -> matchesIpid(path, id)) .findFirst(); if (aocxPath.isPresent()) { ipFilePath = aocxPath.get().toString(); LOG.info("Found: {}", ipFilePath); } else { LOG.warn("Requested IP file not found"); } } else { LOG.warn("Localized resource is null!"); } return ipFilePath; }
@Test public void testIPfileNotDefined() { Map<Path, List<String>> resources = createResources(); String path = plugin.retrieveIPfilePath(null, "workDir", resources); assertNull("Retrieved IP file path", path); }
@GetMapping(value = "/{appId}/{clusterName}/{namespace:.+}") public ApolloConfig queryConfig(@PathVariable String appId, @PathVariable String clusterName, @PathVariable String namespace, @RequestParam(value = "dataCenter", required = false) String dataCenter, @RequestParam(value = "releaseKey", defaultValue = "-1") String clientSideReleaseKey, @RequestParam(value = "ip", required = false) String clientIp, @RequestParam(value = "label", required = false) String clientLabel, @RequestParam(value = "messages", required = false) String messagesAsString, HttpServletRequest request, HttpServletResponse response) throws IOException { String originalNamespace = namespace; //strip out .properties suffix namespace = namespaceUtil.filterNamespaceName(namespace); //fix the character case issue, such as FX.apollo <-> fx.apollo namespace = namespaceUtil.normalizeNamespace(appId, namespace); if (Strings.isNullOrEmpty(clientIp)) { clientIp = WebUtils.tryToGetClientIp(request); } ApolloNotificationMessages clientMessages = transformMessages(messagesAsString); List<Release> releases = Lists.newLinkedList(); String appClusterNameLoaded = clusterName; if (!ConfigConsts.NO_APPID_PLACEHOLDER.equalsIgnoreCase(appId)) { Release currentAppRelease = configService.loadConfig(appId, clientIp, clientLabel, appId, clusterName, namespace, dataCenter, clientMessages); if (currentAppRelease != null) { releases.add(currentAppRelease); //we have cluster search process, so the cluster name might be overridden appClusterNameLoaded = currentAppRelease.getClusterName(); } } //if namespace does not belong to this appId, should check if there is a public configuration if (!namespaceBelongsToAppId(appId, namespace)) { Release publicRelease = this.findPublicConfig(appId, clientIp, clientLabel, clusterName, namespace, dataCenter, clientMessages); if (Objects.nonNull(publicRelease)) { releases.add(publicRelease); } } if (releases.isEmpty()) { response.sendError(HttpServletResponse.SC_NOT_FOUND, String.format( "Could not load configurations with appId: %s, clusterName: %s, namespace: %s", appId, clusterName, originalNamespace)); Tracer.logEvent("Apollo.Config.NotFound", assembleKey(appId, clusterName, originalNamespace, dataCenter)); return null; } auditReleases(appId, clusterName, dataCenter, clientIp, releases); String mergedReleaseKey = releases.stream().map(Release::getReleaseKey) .collect(Collectors.joining(ConfigConsts.CLUSTER_NAMESPACE_SEPARATOR)); if (mergedReleaseKey.equals(clientSideReleaseKey)) { // Client side configuration is the same with server side, return 304 response.setStatus(HttpServletResponse.SC_NOT_MODIFIED); Tracer.logEvent("Apollo.Config.NotModified", assembleKey(appId, appClusterNameLoaded, originalNamespace, dataCenter)); return null; } ApolloConfig apolloConfig = new ApolloConfig(appId, appClusterNameLoaded, originalNamespace, mergedReleaseKey); apolloConfig.setConfigurations(mergeReleaseConfigurations(releases)); Tracer.logEvent("Apollo.Config.Found", assembleKey(appId, appClusterNameLoaded, originalNamespace, dataCenter)); return apolloConfig; }
@Test public void testQueryConfigForNoAppIdPlaceHolder() throws Exception { String someClientSideReleaseKey = "1"; HttpServletResponse someResponse = mock(HttpServletResponse.class); String appId = ConfigConsts.NO_APPID_PLACEHOLDER; ApolloConfig result = configController.queryConfig(appId, someClusterName, defaultNamespaceName, someDataCenter, someClientSideReleaseKey, someClientIp, someClientLabel, someMessagesAsString, someRequest, someResponse); verify(configService, never()).loadConfig(appId, someClientIp, someAppId, someClientLabel, someClusterName, defaultNamespaceName, someDataCenter, someNotificationMessages); verify(appNamespaceService, never()).findPublicNamespaceByName(defaultNamespaceName); assertNull(result); verify(someResponse, times(1)).sendError(eq(HttpServletResponse.SC_NOT_FOUND), anyString()); }
@Override @Deprecated public <VR> KStream<K, VR> transformValues(final org.apache.kafka.streams.kstream.ValueTransformerSupplier<? super V, ? extends VR> valueTransformerSupplier, final String... stateStoreNames) { Objects.requireNonNull(valueTransformerSupplier, "valueTransformerSupplier can't be null"); return doTransformValues( toValueTransformerWithKeySupplier(valueTransformerSupplier), NamedInternal.empty(), stateStoreNames); }
@Test @SuppressWarnings("deprecation") public void shouldNotAllowNullNamedOnTransformValuesWithValueTransformerSupplierAndStores() { final NullPointerException exception = assertThrows( NullPointerException.class, () -> testStream.transformValues( valueTransformerSupplier, (Named) null, "storeName")); assertThat(exception.getMessage(), equalTo("named can't be null")); }
public static <T> RetryTransformer<T> of(Retry retry) { return new RetryTransformer<>(retry); }
@Test public void retryOnResultFailAfterMaxAttemptsUsingSingle() throws InterruptedException { RetryConfig config = RetryConfig.<String>custom() .retryOnResult("retry"::equals) .waitDuration(Duration.ofMillis(50)) .maxAttempts(3).build(); Retry retry = Retry.of("testName", config); given(helloWorldService.returnHelloWorld()) .willReturn("retry"); Single.fromCallable(helloWorldService::returnHelloWorld) .compose(RetryTransformer.of(retry)) .test() .await() .assertValue("retry") .assertComplete(); then(helloWorldService).should(times(3)).returnHelloWorld(); }
@Override public UnderFileSystem create(String path, UnderFileSystemConfiguration conf) { Preconditions.checkNotNull(path, "Unable to create UnderFileSystem instance:" + " URI path should not be null"); if (checkOSSCredentials(conf)) { try { return OSSUnderFileSystem.createInstance(new AlluxioURI(path), conf); } catch (Exception e) { throw Throwables.propagate(e); } } String err = "OSS Credentials not available, cannot create OSS Under File System."; throw Throwables.propagate(new IOException(err)); }
@Test public void createInstanceWithNullPath() { Exception e = Assert.assertThrows(NullPointerException.class, () -> mFactory.create( null, mConf)); Assert.assertTrue(e.getMessage().contains("Unable to create UnderFileSystem instance: URI " + "path should not be null")); }
public static void delete(final File file, final boolean ignoreFailures) { if (file.exists()) { if (file.isDirectory()) { final File[] files = file.listFiles(); if (null != files) { for (final File f : files) { delete(f, ignoreFailures); } } } if (!file.delete() && !ignoreFailures) { try { Files.delete(file.toPath()); } catch (final IOException ex) { LangUtil.rethrowUnchecked(ex); } } } }
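// [Illustration] A runnable sketch of the recursive delete above, assuming the
// Agrona-style IoUtil shown here is importable; the directory layout is made up.
import java.io.File;
import java.nio.file.Files;
import org.agrona.IoUtil; // assumed package path; adjust to the actual module

public final class DeleteExample {
    public static void main(final String[] args) throws Exception {
        final File dir = Files.createTempDirectory("io-util-demo").toFile();
        new File(dir, "child.txt").createNewFile();
        // Recurses into children first; with ignoreFailures=false a failed
        // File.delete falls through to Files.delete, surfacing the IOException.
        IoUtil.delete(dir, false);
        System.out.println(dir.exists()); // false
    }
}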
@Test void deleteIgnoreFailuresShouldThrowExceptionIfDeleteOfADirectoryFails() { final File dir = mock(File.class); when(dir.exists()).thenReturn(true); when(dir.isDirectory()).thenReturn(true); when(dir.delete()).thenReturn(false); assertThrows(NullPointerException.class, () -> IoUtil.delete(dir, false)); }
@Override public void onEvent(Event event) { Set<NacosTraceSubscriber> subscribers = interestedEvents.get(event.getClass()); if (null == subscribers) { return; } TraceEvent traceEvent = (TraceEvent) event; for (NacosTraceSubscriber each : subscribers) { if (null != each.executor()) { each.executor().execute(() -> onEvent0(each, traceEvent)); } else { onEvent0(each, traceEvent); } } }
@Test void testOnEventWithExecutor() { Executor executor = mock(Executor.class); doAnswer(invocationOnMock -> { invocationOnMock.getArgument(0, Runnable.class).run(); return null; }).when(executor).execute(any(Runnable.class)); when(mockInstanceSubscriber.executor()).thenReturn(executor); RegisterInstanceTraceEvent event = new RegisterInstanceTraceEvent(1L, "", true, "", "", "", "", 1); combinedTraceSubscriber.onEvent(event); verify(mockInstanceSubscriber).onEvent(event); }
@Override public boolean canPass(Node node, int acquireCount) { return canPass(node, acquireCount, false); }
@Test public void testCanPassForThreadCount() { int threshold = 8; TrafficShapingController controller = new DefaultController(threshold, RuleConstant.FLOW_GRADE_THREAD); Node node = mock(Node.class); when(node.curThreadNum()).thenReturn(threshold - 1) .thenReturn(threshold); assertTrue(controller.canPass(node, 1)); assertFalse(controller.canPass(node, 1)); }
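// [Illustration] A minimal admission guard over the controller above; the Node
// statistics are maintained by Sentinel itself, so only the call shape is shown
// and the method name is hypothetical.
public boolean tryAdmit(TrafficShapingController controller, Node node) {
    // one permit per request; false once node.curThreadNum() reaches the threshold
    return controller.canPass(node, 1);
}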
protected SQLStatement parseAdd() { Lexer.SavePoint mark = lexer.mark(); lexer.nextToken(); if (lexer.identifierEquals("JAR")) { lexer.nextPath(); String path = lexer.stringVal(); HiveAddJarStatement stmt = new HiveAddJarStatement(); stmt.setPath(path); lexer.nextToken(); return stmt; } lexer.reset(mark); return null; }
@Test public void testAddJarStatement() { String s = "add jar hdfs:///hadoop/parser.h.file"; HiveStatementParser hiveStatementParser = new HiveStatementParser(s); SQLStatement sqlStatement = hiveStatementParser.parseAdd(); assertTrue(sqlStatement instanceof HiveAddJarStatement); assertEquals("ADD JAR hdfs:///hadoop/parser.h.file", sqlStatement.toString()); }
@Override public HttpAction restore(final CallContext ctx, final String defaultUrl) { val webContext = ctx.webContext(); val sessionStore = ctx.sessionStore(); val optRequestedUrl = sessionStore.get(webContext, Pac4jConstants.REQUESTED_URL); HttpAction requestedAction = null; if (optRequestedUrl.isPresent()) { sessionStore.set(webContext, Pac4jConstants.REQUESTED_URL, null); val requestedUrl = optRequestedUrl.get(); if (requestedUrl instanceof String) { requestedAction = new FoundAction((String) requestedUrl); } else if (requestedUrl instanceof RedirectionAction) { requestedAction = (RedirectionAction) requestedUrl; } } if (requestedAction == null) { requestedAction = new FoundAction(defaultUrl); } LOGGER.debug("requestedAction: {}", requestedAction.getMessage()); if (requestedAction instanceof FoundAction) { return HttpActionHelper.buildRedirectUrlAction(webContext, ((FoundAction) requestedAction).getLocation()); } else { return HttpActionHelper.buildFormPostContentAction(webContext, ((OkAction) requestedAction).getContent()); } }
@Test public void testRestoreEmptyString() { val context = MockWebContext.create(); val sessionStore = new MockSessionStore(); sessionStore.set(context, Pac4jConstants.REQUESTED_URL, null); val action = handler.restore(new CallContext(context, sessionStore), LOGIN_URL); assertTrue(action instanceof FoundAction); assertEquals(LOGIN_URL, ((FoundAction) action).getLocation()); assertFalse(sessionStore.get(context, Pac4jConstants.REQUESTED_URL).isPresent()); }
public PaginationContext createPaginationContext(final TopProjectionSegment topProjectionSegment, final Collection<ExpressionSegment> expressions, final List<Object> params) { Collection<AndPredicate> andPredicates = expressions.stream().flatMap(each -> ExpressionExtractUtils.getAndPredicates(each).stream()).collect(Collectors.toList()); Optional<ExpressionSegment> rowNumberPredicate = expressions.isEmpty() ? Optional.empty() : getRowNumberPredicate(andPredicates, topProjectionSegment.getAlias()); Optional<PaginationValueSegment> offset = rowNumberPredicate.isPresent() ? createOffsetWithRowNumber(rowNumberPredicate.get()) : Optional.empty(); PaginationValueSegment rowCount = topProjectionSegment.getTop(); return new PaginationContext(offset.orElse(null), rowCount, params); }
@Test void assertCreatePaginationContextWhenPredicateInRightValue() { String name = "rowNumberAlias"; ColumnSegment columnSegment = new ColumnSegment(0, 10, new IdentifierValue(name)); InExpression inExpression = new InExpression(0, 0, columnSegment, new ListExpression(0, 0), false); PaginationContext paginationContext = topPaginationContextEngine.createPaginationContext( new TopProjectionSegment(0, 10, null, name), Collections.singletonList(inExpression), Collections.emptyList()); assertFalse(paginationContext.getOffsetSegment().isPresent()); assertFalse(paginationContext.getRowCountSegment().isPresent()); }
public static void main(final String[] args) { SpringApplication.run(SimpleDemoApplication.class, args); }
@Test void checkPossibilityToSimplyStartAndRestartApplication() { this.configuration.getStorageInstance().stop(); SimpleDemoApplication.main(new String[]{}); }
public static Collection<ExternalResource> getExternalResourcesCollection( Configuration config) { return getExternalResourceAmountMap(config).entrySet().stream() .map(entry -> new ExternalResource(entry.getKey(), entry.getValue())) .collect(Collectors.toList()); }
@Test public void testRecognizeEmptyResourceList() { final Configuration config = new Configuration(); config.setString( ExternalResourceOptions.EXTERNAL_RESOURCE_LIST.key(), ExternalResourceOptions.NONE); config.setLong( ExternalResourceOptions.getAmountConfigOptionForResource(RESOURCE_NAME_1), RESOURCE_AMOUNT_1); final Collection<ExternalResource> externalResources = ExternalResourceUtils.getExternalResourcesCollection(config); assertThat(externalResources, is(empty())); }
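// [Illustration] For contrast with the empty-list case above: a configuration
// that does yield resources, using the same option helpers as the test. The
// "gpu" name and amount are illustrative.
Configuration config = new Configuration();
config.setString(ExternalResourceOptions.EXTERNAL_RESOURCE_LIST.key(), "gpu");
config.setLong(ExternalResourceOptions.getAmountConfigOptionForResource("gpu"), 2L);
Collection<ExternalResource> resources =
    ExternalResourceUtils.getExternalResourcesCollection(config);
// resources now holds a single ExternalResource named "gpu" with amount 2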
private Function<KsqlConfig, Kudf> getUdfFactory( final Method method, final UdfDescription udfDescriptionAnnotation, final String functionName, final FunctionInvoker invoker, final String sensorName ) { return ksqlConfig -> { final Object actualUdf = FunctionLoaderUtils.instantiateFunctionInstance( method.getDeclaringClass(), udfDescriptionAnnotation.name()); if (actualUdf instanceof Configurable) { ExtensionSecurityManager.INSTANCE.pushInUdf(); try { ((Configurable) actualUdf) .configure(ksqlConfig.getKsqlFunctionsConfigProps(functionName)); } finally { ExtensionSecurityManager.INSTANCE.popOutUdf(); } } final PluggableUdf theUdf = new PluggableUdf(invoker, actualUdf); return metrics.<Kudf>map(m -> new UdfMetricProducer( m.getSensor(sensorName), theUdf, Time.SYSTEM )).orElse(theUdf); }; }
@Test
public void shouldNotLoadCustomUDfsIfLoadCustomUdfsFalse() {
    // udf in udf-example.jar
    try {
        FUNC_REG_WITHOUT_CUSTOM.getUdfFactory(FunctionName.of("tostring"));
        fail("Should have thrown as function doesn't exist");
    } catch (final KsqlException e) {
        // pass
    }
}
@Override public void configure(final Map<String, ?> config) { configure( config, new Options(), org.rocksdb.LRUCache::new, org.rocksdb.WriteBufferManager::new ); }
@Test
public void shouldUseConfiguredBlockCacheSize() {
    KsqlBoundedMemoryRocksDBConfigSetter.configure(
        CONFIG_PROPS,
        rocksOptions,
        cacheFactory,
        bufferManagerFactory
    );
    // Then:
    verify(cacheFactory).create(eq(16 * 1024 * 1024 * 1024L), anyInt(), anyBoolean(), anyDouble());
}
@Override
public String[] getManagedIndices() {
    final Set<String> indexNames = indices.getIndexNamesAndAliases(getIndexWildcard()).keySet();
    // also allow restore archives to be returned
    final List<String> result = indexNames.stream()
        .filter(this::isManagedIndex)
        .toList();
    return result.toArray(new String[result.size()]);
}
@Test public void getAllGraylogIndexNames() { final Map<String, Set<String>> indexNameAliases = ImmutableMap.of( "graylog_1", Collections.emptySet(), "graylog_2", Collections.emptySet(), "graylog_3", Collections.emptySet(), "graylog_4_restored_archive", Collections.emptySet(), "graylog_5", Collections.singleton("graylog_deflector")); when(indices.getIndexNamesAndAliases(anyString())).thenReturn(indexNameAliases); final MongoIndexSet mongoIndexSet = createIndexSet(config); final String[] allGraylogIndexNames = mongoIndexSet.getManagedIndices(); assertThat(allGraylogIndexNames).containsExactlyElementsOf(indexNameAliases.keySet()); }
public File getDatabaseFile(String filename) { File dbFile = null; if (filename != null && filename.trim().length() > 0) { dbFile = new File(filename); } if (dbFile == null || dbFile.isDirectory()) { dbFile = new File(new AndroidContextUtil().getDatabasePath("logback.db")); } return dbFile; }
@Test public void setsDatabaseFilename() throws IOException { final File tmpFile = tmp.newFile(); final File file = appender.getDatabaseFile(tmpFile.getAbsolutePath()); assertThat(file, is(notNullValue())); assertThat(file.getName(), is(tmpFile.getName())); }
public static ErrorDetail createErrorDetail(String idempotentId, String title, Exception e, boolean canSkip) { return ErrorDetail.builder() .setId(idempotentId) .setTitle(title) .setException(Throwables.getStackTraceAsString(e)) .setCanSkip(canSkip).build(); }
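// [Illustration] Usage sketch for the factory above; the id and title literals
// are hypothetical, and the exception is captured as a stack-trace string.
ErrorDetail detail = GoogleErrorLogger.createErrorDetail(
    "photo-42",                   // idempotent id for retry bookkeeping (made up)
    "Failed to import item",      // human-readable title (made up)
    new IOException("timed out"), // serialized via Throwables.getStackTraceAsString
    true);                        // canSkip: caller may continue past this item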
@Test public void test_createErrorDetail() { String id = "testId"; String title = "testTitle"; Exception exception = new IOException(); boolean canSkip = false; ErrorDetail expected = ErrorDetail.builder() .setId(id) .setTitle(title) .setException(Throwables.getStackTraceAsString(exception)) .setCanSkip(canSkip) .build(); ErrorDetail result = GoogleErrorLogger.createErrorDetail( id, title, exception, canSkip ); assertThat(result).isEqualTo(expected); }
static JavaInput reorderModifiers(String text) throws FormatterException { return reorderModifiers( new JavaInput(text), ImmutableList.of(Range.closedOpen(0, text.length()))); }
@Test public void sealedClass() throws FormatterException { assume().that(Runtime.version().feature()).isAtLeast(16); assertThat(ModifierOrderer.reorderModifiers("non-sealed sealed public").getText()) .isEqualTo("public sealed non-sealed"); }
@ProcessElement public void processElement( @Element DataChangeRecord dataChangeRecord, OutputReceiver<DataChangeRecord> receiver) { final Instant commitInstant = new Instant(dataChangeRecord.getCommitTimestamp().toSqlTimestamp().getTime()); metrics.incDataRecordCounter(); measureCommitTimestampToEmittedMillis(dataChangeRecord); measureStreamMillis(dataChangeRecord); receiver.outputWithTimestamp(dataChangeRecord, commitInstant); }
@Test public void testPostProcessingMetrics() { DataChangeRecord dataChangeRecord = new DataChangeRecord( "partitionToken", Timestamp.ofTimeMicroseconds(1L), "serverTransactionId", true, "recordSequence", "tableName", Arrays.asList( new ColumnType("column1", new TypeCode("type1"), true, 1L), new ColumnType("column2", new TypeCode("type2"), false, 2L)), Collections.singletonList( new Mod( "{\"column1\": \"value1\"}", "{\"column2\": \"oldValue2\"}", "{\"column2\": \"newValue2\"}")), ModType.UPDATE, ValueCaptureType.OLD_AND_NEW_VALUES, 10L, 2L, "transactionTag", true, ChangeStreamRecordMetadata.newBuilder() .withRecordStreamStartedAt(Timestamp.ofTimeMicroseconds(1L)) .withRecordStreamEndedAt(Timestamp.ofTimeMicroseconds(2L)) .build()); doNothing().when(changeStreamMetrics).incDataRecordCounter(); doNothing().when(changeStreamMetrics).updateDataRecordCommittedToEmitted(any()); processingMetricsDoFn.processElement(dataChangeRecord, receiver); verify(changeStreamMetrics, times(1)).incDataRecordCounter(); verify(changeStreamMetrics, times(1)).updateDataRecordCommittedToEmitted(any()); }
public Stream<SqlMigration> getMigrations() { SqlMigrationProvider migrationProvider = getMigrationProvider(); try { final Map<String, SqlMigration> commonMigrations = getCommonMigrations(migrationProvider).stream().collect(toMap(SqlMigration::getFileName, m -> m)); final Map<String, SqlMigration> databaseSpecificMigrations = getDatabaseSpecificMigrations(migrationProvider).stream().collect(toMap(SqlMigration::getFileName, p -> p)); final HashMap<String, SqlMigration> actualMigrations = new HashMap<>(commonMigrations); actualMigrations.putAll(databaseSpecificMigrations); return actualMigrations.values().stream(); } catch (IllegalStateException e) { if(e.getMessage().startsWith("Duplicate key")) { throw new IllegalStateException("It seems you have JobRunr twice on your classpath. Please make sure to only have one JobRunr jar in your classpath.", e); } throw e; } }
@Test void testDatabaseSpecificMigrations() { final DatabaseMigrationsProvider databaseCreator = new DatabaseMigrationsProvider(MariaDbStorageProviderStub.class); final Stream<SqlMigration> databaseSpecificMigrations = databaseCreator.getMigrations(); assertThat(databaseSpecificMigrations).anyMatch(migration -> contains(migration, "DATETIME(6)")); }
public static String encodingParams(Map<String, String> params, String encoding) throws UnsupportedEncodingException { StringBuilder sb = new StringBuilder(); if (null == params || params.isEmpty()) { return null; } for (Map.Entry<String, String> entry : params.entrySet()) { if (StringUtils.isEmpty(entry.getValue())) { continue; } sb.append(entry.getKey()).append('='); sb.append(URLEncoder.encode(entry.getValue(), encoding)); sb.append('&'); } return sb.toString(); }
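// [Illustration] Worked example for the encoder above: only values are
// URL-encoded, empty values are skipped, and a trailing '&' is left in place.
Map<String, String> params = new LinkedHashMap<>();
params.put("dataId", "app.properties");
params.put("group", "DEFAULT GROUP");
params.put("tag", "");                  // skipped: empty value
String encoded = HttpUtils.encodingParams(params, "UTF-8");
// encoded -> "dataId=app.properties&group=DEFAULT+GROUP&"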
@Test void testEncodingParamsMapWithNullOrEmpty() throws UnsupportedEncodingException { assertNull(HttpUtils.encodingParams((Map<String, String>) null, "UTF-8")); assertNull(HttpUtils.encodingParams(Collections.emptyMap(), "UTF-8")); }
@SuppressWarnings({ "nullness" // TODO(https://github.com/apache/beam/issues/21068) }) /* * Returns an iterables containing all distinct keys in this multimap. */ public PrefetchableIterable<K> keys() { checkState( !isClosed, "Multimap user state is no longer usable because it is closed for %s", keysStateRequest.getStateKey()); if (isCleared) { List<K> keys = new ArrayList<>(pendingAdds.size()); for (Map.Entry<?, KV<K, List<V>>> entry : pendingAdds.entrySet()) { keys.add(entry.getValue().getKey()); } return PrefetchableIterables.concat(keys); } Set<Object> pendingRemovesNow = new HashSet<>(pendingRemoves.keySet()); Map<Object, K> pendingAddsNow = new HashMap<>(); for (Map.Entry<Object, KV<K, List<V>>> entry : pendingAdds.entrySet()) { pendingAddsNow.put(entry.getKey(), entry.getValue().getKey()); } return new PrefetchableIterables.Default<K>() { @Override public PrefetchableIterator<K> createIterator() { return new PrefetchableIterator<K>() { PrefetchableIterator<K> persistedKeysIterator = persistedKeys.iterator(); Iterator<K> pendingAddsNowIterator; boolean hasNext; K nextKey; @Override public boolean isReady() { return persistedKeysIterator.isReady(); } @Override public void prefetch() { if (!isReady()) { persistedKeysIterator.prefetch(); } } @Override public boolean hasNext() { if (hasNext) { return true; } while (persistedKeysIterator.hasNext()) { nextKey = persistedKeysIterator.next(); Object nextKeyStructuralValue = mapKeyCoder.structuralValue(nextKey); if (!pendingRemovesNow.contains(nextKeyStructuralValue)) { // Remove all keys that we will visit when passing over the persistedKeysIterator // so we do not revisit them when passing over the pendingAddsNowIterator if (pendingAddsNow.containsKey(nextKeyStructuralValue)) { pendingAddsNow.remove(nextKeyStructuralValue); } hasNext = true; return true; } } if (pendingAddsNowIterator == null) { pendingAddsNowIterator = pendingAddsNow.values().iterator(); } while (pendingAddsNowIterator.hasNext()) { nextKey = pendingAddsNowIterator.next(); hasNext = true; return true; } return false; } @Override public K next() { if (!hasNext()) { throw new NoSuchElementException(); } hasNext = false; return nextKey; } }; } }; }
@Test public void testKeys() throws Exception { FakeBeamFnStateClient fakeClient = new FakeBeamFnStateClient( ImmutableMap.of( createMultimapKeyStateKey(), KV.of(ByteArrayCoder.of(), singletonList(A1)), createMultimapValueStateKey(A1), KV.of(StringUtf8Coder.of(), asList("V1", "V2")))); MultimapUserState<byte[], String> userState = new MultimapUserState<>( Caches.noop(), fakeClient, "instructionId", createMultimapKeyStateKey(), ByteArrayCoder.of(), StringUtf8Coder.of()); userState.put(A2, "V1"); Iterable<byte[]> initKeys = userState.keys(); userState.put(A3, "V1"); userState.put(A1, "V3"); assertArrayEquals(new byte[][] {A1, A2}, Iterables.toArray(initKeys, byte[].class)); assertArrayEquals(new byte[][] {A1, A2, A3}, Iterables.toArray(userState.keys(), byte[].class)); userState.clear(); assertArrayEquals(new byte[][] {A1, A2}, Iterables.toArray(initKeys, byte[].class)); assertArrayEquals(new byte[][] {}, Iterables.toArray(userState.keys(), byte[].class)); userState.asyncClose(); assertThrows(IllegalStateException.class, () -> userState.keys()); }
@Override public String toString() { return String.format( "%s{leaderLatchPath='%s'}", getClass().getSimpleName(), leaderLatchPath); }
@Test void testToStringContainingLeaderLatchPath() throws Exception { new Context() { { runTest( () -> assertThat(leaderElectionDriver.toString()) .as( "toString() should contain the leader latch path for human-readable representation of the driver instance.") .contains( ZooKeeperUtils.generateLeaderLatchPath( curatorFramework .asCuratorFramework() .getNamespace()))); } }; }
@Override public Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException { synchronized (getClassLoadingLock(name)) { Class<?> loadedClass = findLoadedClass(name); if (loadedClass != null) { return loadedClass; } if (isClosed) { throw new ClassNotFoundException("This ClassLoader is closed"); } if (config.shouldAcquire(name)) { loadedClass = PerfStatsCollector.getInstance() .measure("load sandboxed class", () -> maybeInstrumentClass(name)); } else { loadedClass = getParent().loadClass(name); } if (resolve) { resolveClass(loadedClass); } return loadedClass; } }
@Test public void shouldMakeClassesNonFinal() throws Exception { Class<?> clazz = loadClass(AFinalClass.class); assertEquals(0, clazz.getModifiers() & Modifier.FINAL); }
public int read(final MessageHandler handler) { return read(handler, Integer.MAX_VALUE); }
@Test void shouldNotReadSingleMessagePartWayThroughWriting() { final long head = 0L; final int headIndex = (int)head; when(buffer.getLong(HEAD_COUNTER_INDEX)).thenReturn(head); when(buffer.getIntVolatile(lengthOffset(headIndex))).thenReturn(0); final MutableInteger times = new MutableInteger(); final MessageHandler handler = (msgTypeId, buffer, index, length) -> times.increment(); final int messagesRead = ringBuffer.read(handler); assertThat(messagesRead, is(0)); assertThat(times.get(), is(0)); final InOrder inOrder = inOrder(buffer); inOrder.verify(buffer, times(1)).getIntVolatile(lengthOffset(headIndex)); inOrder.verify(buffer, times(0)).setMemory(headIndex, 0, (byte)0); inOrder.verify(buffer, times(0)).putLongOrdered(HEAD_COUNTER_INDEX, headIndex); }
public static NodeBadge number(int n) {
    // TODO: consider constraints, e.g. 1 <= n <= 999
    return new NodeBadge(Status.INFO, false, Integer.toString(n), null);
}
@Test public void numberError() { badge = NodeBadge.number(Status.ERROR, NUM); checkFields(badge, Status.ERROR, false, NUM_STR, null); }
public static String javaforLoop(String str) { StringBuilder result = new StringBuilder(); for (int i = 0; i < str.length(); i++) { char c = str.charAt(i); result.append(c); } return result.toString(); }
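// [Illustration] An equivalent iteration using the streams API, for comparison
// with the index-based loop above; the method name is hypothetical. Both
// versions append UTF-16 code units one at a time.
public static String javaCharsStream(String str) {
    StringBuilder result = new StringBuilder();
    str.chars().forEach(c -> result.append((char) c));
    return result.toString();
}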
@Test public void whenUseJavaForLoop_thenIterate() { String input = "Hello, Baeldung!"; String expectedOutput = "Hello, Baeldung!"; String result = StringIterator.javaforLoop(input); assertEquals(expectedOutput, result); }
@Override
public List<MetricFamilySamples> collect() {
    try {
        return exporter.export("Prometheus")
            .<List<MetricFamilySamples>>map(optional -> Collections.singletonList((GaugeMetricFamily) optional.getRawMetricFamilyObject())).orElse(Collections.emptyList());
        // CHECKSTYLE:OFF
    } catch (final Exception ex) {
        // CHECKSTYLE:ON
        log.warn("Collect metrics error: {}", ex.getMessage());
    }
    return Collections.emptyList();
}
@Test void assertCollectWithPresentMetricsExporter() { MetricsExporter exporter = mock(MetricsExporter.class); when(exporter.export("Prometheus")).thenReturn(Optional.of(mock(GaugeMetricFamilyMetricsCollector.class))); assertThat(new PrometheusMetricsExporter(exporter).collect().size(), is(1)); }
public ModelMBeanInfo getMBeanInfo(Object defaultManagedBean, Object customManagedBean, String objectName) throws JMException {
    if ((defaultManagedBean == null && customManagedBean == null) || objectName == null)
        return null;
    // skip proxy classes
    if (defaultManagedBean != null && Proxy.isProxyClass(defaultManagedBean.getClass())) {
        LOGGER.trace("Skip creating ModelMBeanInfo due proxy class {}", defaultManagedBean.getClass());
        return null;
    }
    // maps and lists to contain information about attributes and operations
    Map<String, ManagedAttributeInfo> attributes = new LinkedHashMap<>();
    Set<ManagedOperationInfo> operations = new LinkedHashSet<>();
    Set<ModelMBeanAttributeInfo> mBeanAttributes = new LinkedHashSet<>();
    Set<ModelMBeanOperationInfo> mBeanOperations = new LinkedHashSet<>();
    Set<ModelMBeanNotificationInfo> mBeanNotifications = new LinkedHashSet<>();
    // extract details from default managed bean
    if (defaultManagedBean != null) {
        extractAttributesAndOperations(defaultManagedBean.getClass(), attributes, operations);
        extractMbeanAttributes(defaultManagedBean, attributes, mBeanAttributes, mBeanOperations);
        extractMbeanOperations(defaultManagedBean, operations, mBeanOperations);
        extractMbeanNotifications(defaultManagedBean, mBeanNotifications);
    }
    // extract details from custom managed bean
    if (customManagedBean != null) {
        extractAttributesAndOperations(customManagedBean.getClass(), attributes, operations);
        extractMbeanAttributes(customManagedBean, attributes, mBeanAttributes, mBeanOperations);
        extractMbeanOperations(customManagedBean, operations, mBeanOperations);
        extractMbeanNotifications(customManagedBean, mBeanNotifications);
    }
    // create the ModelMBeanInfo
    String name = getName(customManagedBean != null ? customManagedBean : defaultManagedBean, objectName);
    String description = getDescription(customManagedBean != null ? customManagedBean : defaultManagedBean, objectName);
    ModelMBeanAttributeInfo[] arrayAttributes = mBeanAttributes.toArray(new ModelMBeanAttributeInfo[mBeanAttributes.size()]);
    ModelMBeanOperationInfo[] arrayOperations = mBeanOperations.toArray(new ModelMBeanOperationInfo[mBeanOperations.size()]);
    ModelMBeanNotificationInfo[] arrayNotifications = mBeanNotifications.toArray(new ModelMBeanNotificationInfo[mBeanNotifications.size()]);
    ModelMBeanInfo info = new ModelMBeanInfoSupport(name, description, arrayAttributes, null, arrayOperations, arrayNotifications);
    LOGGER.trace("Created ModelMBeanInfo {}", info);
    return info;
}
@Test(expected = IllegalArgumentException.class) public void testAttributeGetterNameNotCapitial() throws JMException { mbeanInfoAssembler.getMBeanInfo(new BadAttributeGetterNameNotCapital(), null, "someName"); }
public static Row toBeamRow(GenericRecord record, Schema schema, ConversionOptions options) { List<Object> valuesInOrder = schema.getFields().stream() .map( field -> { try { org.apache.avro.Schema.Field avroField = record.getSchema().getField(field.getName()); Object value = avroField != null ? record.get(avroField.pos()) : null; return convertAvroFormat(field.getType(), value, options); } catch (Exception cause) { throw new IllegalArgumentException( "Error converting field " + field + ": " + cause.getMessage(), cause); } }) .collect(toList()); return Row.withSchema(schema).addValues(valuesInOrder).build(); }
@Test public void testToBeamRow_inlineArrayNulls() { Row beamRow = BigQueryUtils.toBeamRow(ARRAY_TYPE_NULLS, BQ_INLINE_ARRAY_ROW_NULLS); assertEquals(ARRAY_ROW_NULLS, beamRow); }
public synchronized MutableQuantiles newQuantiles(String name, String desc, String sampleName, String valueName, int interval) { checkMetricName(name); if (interval <= 0) { throw new MetricsException("Interval should be positive. Value passed" + " is: " + interval); } MutableQuantiles ret = new MutableQuantiles(name, desc, sampleName, valueName, interval); metricsMap.put(name, ret); return ret; }
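// [Illustration] Registration sketch for the factory above (Hadoop metrics2
// style); metric names are illustrative. A non-positive interval raises
// MetricsException, as the paired test below demonstrates.
MetricsRegistry registry = new MetricsRegistry("ExampleRegistry");
MutableQuantiles latency = registry.newQuantiles(
    "rpcLatency",            // unique metric name within the registry
    "RPC latency",           // description
    "ops", "latencyMicros",  // sample and value names
    60);                     // rollover interval in seconds; must be > 0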
@Test public void testAddIllegalParameters() { final MetricsRegistry r = new MetricsRegistry("IllegalParamTest"); expectMetricsException("Interval should be positive. Value passed is: -20", new Runnable() { @Override public void run() { r.newQuantiles("q1", "New Quantile 1", "qq1", "qv1", (int)-20); } }); }
@Override public FilterBindings get() { return filterBindings; }
@Test void requireThatEmptyInputGivesEmptyOutput() { final FilterChainRepository filterChainRepository = new FilterChainRepository( new ChainsConfig(new ChainsConfig.Builder()), new ComponentRegistry<>(), new ComponentRegistry<>(), new ComponentRegistry<>(), new ComponentRegistry<>()); final FilterBindingsProvider provider = new FilterBindingsProvider( new ComponentId("foo"), new ServerConfig(configBuilder), filterChainRepository, new ComponentRegistry<>()); final FilterBindings filterBindings = provider.get(); assertNotNull(filterBindings); assertTrue(filterBindings.requestFilterIds().isEmpty()); assertTrue(filterBindings.responseFilterIds().isEmpty()); }
@Override public Serializable read(final MySQLBinlogColumnDef columnDef, final MySQLPacketPayload payload) { DecimalMetaData decimalMetaData = new DecimalMetaData(columnDef.getColumnMeta()); return toDecimal(decimalMetaData, payload.readStringFixByBytes(decimalMetaData.getTotalByteLength())); }
@Test void assertDecodeNegativeNewDecimalWithLargeNumber() { columnDef = new MySQLBinlogColumnDef(MySQLBinaryColumnType.NEWDECIMAL); columnDef.setColumnMeta(32 << 8 | 6); byte[] newDecimalBytes = ByteBufUtil.decodeHexDump("7DFEFDB5CC2741EFDEBE4154FD52E7"); when(payload.readStringFixByBytes(newDecimalBytes.length)).thenReturn(newDecimalBytes); BigDecimal actual = (BigDecimal) new MySQLDecimalBinlogProtocolValue().read(columnDef, payload); assertThat(actual.toString(), is("-33620554869842448557956779.175384")); }
public Map<String, ParamDefinition> getDefaultWorkflowParams() { return preprocessParams(defaultSystemParams); }
@Test public void testValidDefaultWorkflowParams() { assertFalse(defaultParamManager.getDefaultWorkflowParams().isEmpty()); assertNotNull(defaultParamManager.getDefaultWorkflowParams().get("TARGET_RUN_HOUR").getName()); }
@Override public boolean containsFreeSlot(AllocationID allocationId) { return freeSlots.contains(allocationId); }
@Test void testContainsFreeSlotReturnsFalseIfSlotDoesNotExist() { final DefaultAllocatedSlotPool slotPool = new DefaultAllocatedSlotPool(); assertThat(slotPool.containsFreeSlot(new AllocationID())).isFalse(); }
List<Condition> run(boolean useKRaft) {
    List<Condition> warnings = new ArrayList<>();
    checkKafkaReplicationConfig(warnings);
    checkKafkaBrokersStorage(warnings);
    if (useKRaft) {
        // Additional checks done for KRaft clusters
        checkKRaftControllerStorage(warnings);
        checkKRaftControllerCount(warnings);
        checkKafkaMetadataVersion(warnings);
        checkInterBrokerProtocolVersionInKRaft(warnings);
        checkLogMessageFormatVersionInKRaft(warnings);
    } else {
        // Additional checks done for ZooKeeper-based clusters
        checkKafkaLogMessageFormatVersion(warnings);
        checkKafkaInterBrokerProtocolVersion(warnings);
        checkKRaftMetadataStorageConfiguredForZooBasedCLuster(warnings);
    }
    return warnings;
}
@Test
public void checkKafkaSingleMixedNodeIntentionalProducesNoWarning() {
    Kafka kafka = new KafkaBuilder(KAFKA)
            .editSpec()
                .editKafka()
                    .withConfig(Map.of(
                            // We want to avoid unrelated warnings
                            KafkaConfiguration.DEFAULT_REPLICATION_FACTOR, 1,
                            KafkaConfiguration.MIN_INSYNC_REPLICAS, 1
                    ))
                .endKafka()
            .endSpec()
            .build();
    KafkaNodePool singleNode = new KafkaNodePoolBuilder(MIXED)
            .editSpec()
                .withReplicas(1)
            .endSpec()
            .build();
    KafkaSpecChecker checker = generateChecker(kafka, List.of(singleNode), KafkaVersionTestUtils.DEFAULT_KRAFT_VERSION_CHANGE);
    List<Condition> warnings = checker.run(true);
    assertThat(warnings, hasSize(0));
}
@Override public AppResponse process(Flow flow, CancelFlowRequest request) { if (appAuthenticator != null && "no_nfc".equals(request.getCode())) { appAuthenticator.setNfcSupport(false); } appSession.setAbortCode(request.getCode()); return new OkResponse(); }
@Test
public void processReturnsOkResponseWithAppAuthenticatorAndNoNfcCode() {
    //given
    cancelFlowRequest.setCode("no_nfc");
    //when
    AppResponse appResponse = aborted.process(mockedFlow, cancelFlowRequest);
    //then
    assertTrue(appResponse instanceof OkResponse);
    Assertions.assertEquals(false, mockedAppAuthenticator.getNfcSupport());
    Assertions.assertEquals("no_nfc", mockedAppSession.getAbortCode());
}
public void setContract(@Nullable Produce contract) { this.contract = contract; setStoredContract(contract); handleContractState(); }
@Test
public void cabbageContractCabbageGrowingAndCabbageDead() {
    final long unixNow = Instant.now().getEpochSecond();
    final long expected = unixNow + 60;
    // Get the two allotment patches
    final FarmingPatch patch1 = farmingGuildPatches.get(Varbits.FARMING_4773);
    final FarmingPatch patch2 = farmingGuildPatches.get(Varbits.FARMING_4774);
    assertNotNull(patch1);
    assertNotNull(patch2);
    // Specify the two allotment patches
    when(farmingTracker.predictPatch(patch1))
        .thenReturn(new PatchPrediction(Produce.CABBAGE, CropState.GROWING, expected, 2, 3));
    when(farmingTracker.predictPatch(patch2))
        .thenReturn(new PatchPrediction(Produce.CABBAGE, CropState.DEAD, 0, 2, 3));
    farmingContractManager.setContract(Produce.CABBAGE);
    assertEquals(SummaryState.IN_PROGRESS, farmingContractManager.getSummary());
}
@PostMapping()
@Secured(action = ActionTypes.WRITE, signType = SignType.CONFIG)
public Result<Boolean> publishConfig(ConfigForm configForm, HttpServletRequest request) throws NacosException {
    // check required field
    configForm.validate();
    String encryptedDataKeyFinal = configForm.getEncryptedDataKey();
    if (StringUtils.isBlank(encryptedDataKeyFinal)) {
        // encrypted
        Pair<String, String> pair = EncryptionHandler.encryptHandler(configForm.getDataId(), configForm.getContent());
        configForm.setContent(pair.getSecond());
        encryptedDataKeyFinal = pair.getFirst();
    }
    //fix issue #9783
    configForm.setNamespaceId(NamespaceUtil.processNamespaceParameter(configForm.getNamespaceId()));
    // check param
    ParamUtils.checkTenantV2(configForm.getNamespaceId());
    ParamUtils.checkParam(configForm.getDataId(), configForm.getGroup(), "datumId", configForm.getContent());
    ParamUtils.checkParamV2(configForm.getTag());
    if (StringUtils.isBlank(configForm.getSrcUser())) {
        configForm.setSrcUser(RequestUtil.getSrcUserName(request));
    }
    if (!ConfigType.isValidType(configForm.getType())) {
        configForm.setType(ConfigType.getDefaultType().getType());
    }
    ConfigRequestInfo configRequestInfo = new ConfigRequestInfo();
    configRequestInfo.setSrcIp(RequestUtil.getRemoteIp(request));
    configRequestInfo.setRequestIpApp(RequestUtil.getAppName(request));
    configRequestInfo.setBetaIps(request.getHeader("betaIps"));
    configRequestInfo.setCasMd5(request.getHeader("casMd5"));
    return Result.success(configOperationService.publishConfig(configForm, configRequestInfo, encryptedDataKeyFinal));
}
@Test void testPublishConfigWhenNameSpaceIsPublic() throws Exception { ConfigForm configForm = new ConfigForm(); configForm.setDataId(TEST_DATA_ID); configForm.setGroup(TEST_GROUP); configForm.setNamespaceId(TEST_NAMESPACE_ID_PUBLIC); configForm.setContent(TEST_CONTENT); MockHttpServletRequest request = new MockHttpServletRequest(); when(configOperationService.publishConfig(any(ConfigForm.class), any(ConfigRequestInfo.class), anyString())).thenAnswer( (Answer<Boolean>) invocation -> { if (invocation.getArgument(0, ConfigForm.class).getNamespaceId().equals(TEST_NAMESPACE_ID)) { return true; } return false; }); Result<Boolean> booleanResult = configControllerV2.publishConfig(configForm, request); verify(configOperationService).publishConfig(any(ConfigForm.class), any(ConfigRequestInfo.class), anyString()); assertEquals(ErrorCode.SUCCESS.getCode(), booleanResult.getCode()); assertTrue(booleanResult.getData()); }
@Override public TypeDescriptor<Set<T>> getEncodedTypeDescriptor() { return new TypeDescriptor<Set<T>>() {}.where( new TypeParameter<T>() {}, getElemCoder().getEncodedTypeDescriptor()); }
@Test public void testEncodedTypeDescriptor() throws Exception { TypeDescriptor<Set<Integer>> typeDescriptor = new TypeDescriptor<Set<Integer>>() {}; assertThat(TEST_CODER.getEncodedTypeDescriptor(), equalTo(typeDescriptor)); }
@Override
public CEFParserResult evaluate(FunctionArgs args, EvaluationContext context) {
    final String cef = valueParam.required(args, context);
    final boolean useFullNames = useFullNamesParam.optional(args, context).orElse(false);
    final CEFParser parser = CEFParserFactory.create();
    if (cef == null || cef.isEmpty()) {
        LOG.debug("NULL or empty parameter passed to CEF parser function. Not evaluating.");
        return null;
    }
    LOG.debug("Running CEF parser for [{}].", cef);
    final MappedMessage message;
    try (Timer.Context timer = parseTime.time()) {
        message = new MappedMessage(parser.parse(cef.trim()), useFullNames);
    } catch (Exception e) {
        LOG.error("Error while parsing CEF message: {}", cef, e);
        return null;
    }
    final Map<String, Object> fields = new HashMap<>();
    /*
     * Add all CEF standard fields. We are prefixing with cef_ to avoid overwriting existing fields or to be
     * overwritten ourselves later in the processing. The user is encouraged to run another pipeline function
     * to clean up field names if desired.
     */
    fields.put("cef_version", message.cefVersion());
    fields.put("device_vendor", message.deviceVendor());
    fields.put("device_product", message.deviceProduct());
    fields.put("device_version", message.deviceVersion());
    fields.put("device_event_class_id", message.deviceEventClassId());
    fields.put("name", message.name());
    fields.put("severity", message.severity());
    // Add all custom CEF fields.
    fields.putAll(message.mappedExtensions());
    return new CEFParserResult(fields);
}
@Test public void evaluate_returns_null_for_missing_CEF_string() throws Exception { final FunctionArgs functionArgs = new FunctionArgs(function, Collections.emptyMap()); final Message message = messageFactory.createMessage("__dummy", "__dummy", DateTime.parse("2010-07-30T16:03:25Z")); final EvaluationContext evaluationContext = new EvaluationContext(message); final CEFParserResult result = function.evaluate(functionArgs, evaluationContext); assertNull(result); }
@Udf(description = "Returns the inverse (arc) cosine of an INT value") public Double acos( @UdfParameter( value = "value", description = "The value to get the inverse cosine of." ) final Integer value ) { return acos(value == null ? null : value.doubleValue()); }
@Test public void shouldHandleNegative() { assertThat(udf.acos(-0.43), closeTo(2.0152891037307157, 0.000000000000001)); assertThat(udf.acos(-0.5), closeTo(2.0943951023931957, 0.000000000000001)); assertThat(udf.acos(-1.0), closeTo(3.141592653589793, 0.000000000000001)); assertThat(udf.acos(-1), closeTo(3.141592653589793, 0.000000000000001)); assertThat(udf.acos(-1L), closeTo(3.141592653589793, 0.000000000000001)); }
@POST @Path("{noteId}") @ZeppelinApi public Response cloneNote(@PathParam("noteId") String noteId, String message) throws IOException, IllegalArgumentException { LOGGER.info("Clone note by JSON {}", message); checkIfUserCanWrite(noteId, "Insufficient privileges you cannot clone this note"); NewNoteRequest request = GSON.fromJson(message, NewNoteRequest.class); String newNoteName = null; String revisionId = null; if (request != null) { newNoteName = request.getName(); revisionId = request.getRevisionId(); } AuthenticationInfo subject = new AuthenticationInfo(authenticationService.getPrincipal()); String newNoteId = notebookService.cloneNote(noteId, revisionId, newNoteName, getServiceContext(), new RestServiceCallback<Note>() { @Override public void onSuccess(Note newNote, ServiceContext context) throws IOException { notebookServer.broadcastNote(newNote); notebookServer.broadcastNoteList(subject, context.getUserAndRoles()); } }); return new JsonResponse<>(Status.OK, "", newNoteId).build(); }
@Test
void testCloneNote() throws IOException {
    LOG.info("Running testCloneNote");
    String note1Id = null;
    List<String> clonedNoteIds = new ArrayList<>();
    String text1 = "%text clone note";
    String text2 = "%text clone revision of note";
    try {
        note1Id = notebook.createNote("note1", anonymous);
        // add text and commit note
        NotebookRepoWithVersionControl.Revision first_commit = notebook.processNote(note1Id, note -> {
            Paragraph p1 = note.addNewParagraph(anonymous);
            p1.setText(text2);
            notebook.saveNote(note, AuthenticationInfo.ANONYMOUS);
            return notebook.checkpointNote(note.getId(), note.getPath(), "first commit", anonymous);
        });
        // change the text of note
        notebook.processNote(note1Id, note -> {
            note.getParagraph(0).setText(text1);
            return null;
        });
        // Clone a note
        CloseableHttpResponse post1 = httpPost("/notebook/" + note1Id, "");
        // Clone a revision of note
        CloseableHttpResponse post2 = httpPost("/notebook/" + note1Id, "{ revisionId: " + first_commit.id + "}");
        // Verify the responses
        for (int i = 0; i < 2; i++) {
            CloseableHttpResponse post = Arrays.asList(post1, post2).get(i);
            String text = Arrays.asList(text1, text2).get(i);
            String postResponse = EntityUtils.toString(post.getEntity(), StandardCharsets.UTF_8);
            LOG.info("testCloneNote response: {}", postResponse);
            assertThat(post, isAllowed());
            Map<String, Object> resp = gson.fromJson(postResponse, new TypeToken<Map<String, Object>>() {}.getType());
            clonedNoteIds.add((String) resp.get("body"));
            post.close();
            CloseableHttpResponse get = httpGet("/notebook/" + clonedNoteIds.get(clonedNoteIds.size() - 1));
            assertThat(get, isAllowed());
            Map<String, Object> resp2 = gson.fromJson(EntityUtils.toString(get.getEntity(), StandardCharsets.UTF_8),
                new TypeToken<Map<String, Object>>() {}.getType());
            Map<String, Object> resp2Body = (Map<String, Object>) resp2.get("body");
            List<Map<String, String>> paragraphs = (List<Map<String, String>>) resp2Body.get("paragraphs");
            // Verify that the original and copied text are consistent
            assertEquals(text, paragraphs.get(0).get("text"));
            // assertEquals(resp2Body.get("name"), "Note " + clonedNoteId);
            get.close();
        }
    } finally {
        // cleanup
        if (null != note1Id) {
            notebook.removeNote(note1Id, anonymous);
        }
        if (null != clonedNoteIds) {
            for (String clonedNoteId : clonedNoteIds) {
                notebook.removeNote(clonedNoteId, anonymous);
            }
        }
    }
}
public AuthenticationRequest startAuthenticationProcess(HttpServletRequest httpRequest) throws ComponentInitializationException, MessageDecodingException, SamlValidationException, SharedServiceClientException, DienstencatalogusException, SamlSessionException { BaseHttpServletRequestXMLMessageDecoder decoder = decodeXMLRequest(httpRequest); AuthenticationRequest authenticationRequest = createAuthenticationRequest(httpRequest, decoder); SAMLBindingContext bindingContext = createAndValidateBindingContext(decoder); validateAuthenticationRequest(authenticationRequest); parseAuthentication(authenticationRequest); validateWithOtherDomainServices(authenticationRequest, bindingContext); return authenticationRequest; }
@Test //entrance
public void parseAuthenticationSuccessfulEntranceForBvdTest() throws SamlSessionException, SharedServiceClientException, DienstencatalogusException, ComponentInitializationException, SamlValidationException, MessageDecodingException, SamlParseException {
    String samlRequest = readXMLFile(authnRequestEntranceBvdFile);
    String decodeSAMLRequest = encodeAuthnRequest(samlRequest);
    httpServletRequestMock.setParameter("SAMLRequest", decodeSAMLRequest);
    AuthenticationRequest result = authenticationService.startAuthenticationProcess(httpServletRequestMock);
    assertNotNull(result);
    assertEquals("urn:nl-eid-gdi:1.0:BVD:00000004003214345001:entities:9000", result.getAuthnRequest().getScoping().getRequesterIDs().get(0).getRequesterID());
}
@Override public AttributedList<Path> run(final Session<?> session) throws BackgroundException { try { final AttributedList<Path> list; listener.reset(); if(this.isCached()) { list = cache.get(directory); listener.chunk(directory, list); } else { final ListService service = session.getFeature(ListService.class); if(log.isDebugEnabled()) { log.debug(String.format("Run with feature %s", service)); } list = service.list(directory, listener); if(list.isEmpty()) { listener.chunk(directory, list); } if(log.isDebugEnabled()) { log.debug(String.format("Notify listener %s", listener)); } } listener.finish(directory, list, Optional.empty()); return list; } catch(ListCanceledException e) { if(log.isWarnEnabled()) { log.warn(String.format("Return partial directory listing for %s", directory)); } listener.finish(directory, e.getChunk(), Optional.of(e)); return e.getChunk(); } catch(BackgroundException e) { if(log.isWarnEnabled()) { log.warn(String.format("Notify listener for %s with error %s", directory, e)); } listener.finish(directory, AttributedList.emptyList(), Optional.of(e)); throw e; } }
@Test public void testCacheListCanceledWithController() throws Exception { final Host host = new Host(new TestProtocol(), "localhost"); final Session<?> session = new NullSession(host) { @Override public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException { throw new ListCanceledException(AttributedList.emptyList()); } }; final PathCache cache = new PathCache(1); final Path directory = new Path("/home/notfound", EnumSet.of(Path.Type.directory)); cache.put(directory, new AttributedList<>(Collections.singletonList(new Path(directory, "f", EnumSet.of(Path.Type.file))))); final ListWorker worker = new ListWorker(cache, directory, new DisabledListProgressListener()); final Controller c = new AbstractController() { @Override public void invoke(final MainAction runnable, final boolean wait) { runnable.run(); } }; final Future<AttributedList<Path>> task = c.background(new WorkerBackgroundAction<>(c, new StatelessSessionPool( new TestLoginConnectionService(), session, new DisabledTranscriptListener(), new DefaultVaultRegistry(new DisabledPasswordCallback())), worker)); assertNotNull(task.get()); assertTrue(cache.containsKey(directory)); assertEquals(1, cache.get(directory).size()); }
public static Schema removeFields(Schema schema, Set<String> fieldsToRemove) { List<Schema.Field> filteredFields = schema.getFields() .stream() .filter(field -> !fieldsToRemove.contains(field.name())) .map(field -> new Schema.Field(field.name(), field.schema(), field.doc(), field.defaultVal())) .collect(Collectors.toList()); return createNewSchemaFromFieldsWithReference(schema, filteredFields); }
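// [Illustration] A minimal sketch for the schema overload above; the schema
// literal is made up but mirrors the shape used in the paired test below.
String schemaStr = "{\"type\":\"record\",\"name\":\"testrec\",\"fields\":["
    + "{\"name\":\"_row_key\",\"type\":\"string\"},"
    + "{\"name\":\"pii_col\",\"type\":\"string\"}]}";
Schema schema = new Schema.Parser().parse(schemaStr);
// drops pii_col while keeping the remaining fields, docs and defaults intact
Schema pruned = HoodieAvroUtils.removeFields(schema, Collections.singleton("pii_col"));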
@Test
public void testRemoveFields() {
    // partitioned table test.
    String schemaStr = "{\"type\": \"record\",\"name\": \"testrec\",\"fields\": [ "
        + "{\"name\": \"timestamp\",\"type\": \"double\"},{\"name\": \"_row_key\", \"type\": \"string\"},"
        + "{\"name\": \"non_pii_col\", \"type\": \"string\"}]}";
    Schema expectedSchema = new Schema.Parser().parse(schemaStr);
    GenericRecord rec = new GenericData.Record(new Schema.Parser().parse(EXAMPLE_SCHEMA));
    rec.put("_row_key", "key1");
    rec.put("non_pii_col", "val1");
    rec.put("pii_col", "val2");
    rec.put("timestamp", 3.5);
    GenericRecord rec1 = HoodieAvroUtils.removeFields(rec, Collections.singleton("pii_col"));
    assertEquals("key1", rec1.get("_row_key"));
    assertEquals("val1", rec1.get("non_pii_col"));
    assertEquals(3.5, rec1.get("timestamp"));
    if (HoodieAvroUtils.gteqAvro1_10()) {
        GenericRecord finalRec1 = rec1;
        assertThrows(AvroRuntimeException.class, () -> finalRec1.get("pii_col"));
    } else {
        assertNull(rec1.get("pii_col"));
    }
    assertEquals(expectedSchema, rec1.getSchema());
    // non-partitioned table test with empty list of fields.
    schemaStr = "{\"type\": \"record\",\"name\": \"testrec\",\"fields\": [ "
        + "{\"name\": \"timestamp\",\"type\": \"double\"},{\"name\": \"_row_key\", \"type\": \"string\"},"
        + "{\"name\": \"non_pii_col\", \"type\": \"string\"},"
        + "{\"name\": \"pii_col\", \"type\": \"string\"}]}";
    expectedSchema = new Schema.Parser().parse(schemaStr);
    rec1 = HoodieAvroUtils.removeFields(rec, Collections.singleton(""));
    assertEquals(expectedSchema, rec1.getSchema());
}
public static JRTClientConfigRequest createFromRaw(RawConfig config, long serverTimeout) {
    // TODO: Get trace from caller
    return JRTClientConfigRequestV3.createFromRaw(config, serverTimeout, Trace.createNew(), compressionType, getVespaVersion());
}
@Test public void testCreateFromRaw() { Class<FunctionTestConfig> clazz = FunctionTestConfig.class; final String configId = "foo"; RawConfig config = new RawConfig(new ConfigKey<>(clazz, configId), "595f44fec1e92a71d3e9e77456ba80d1"); JRTClientConfigRequest request = JRTConfigRequestFactory.createFromRaw(config, 1000); assertThat(request.getVespaVersion().get(), is(defaultVespaVersion)); }
public <T> T unmarshal(XStream xs) { return (T) xs.unmarshal(newReader()); }
@Test public void testUnmarshal() throws Exception { Foo foo; try (InputStream is = XStreamDOMTest.class.getResourceAsStream("XStreamDOMTest.data1.xml")) { foo = (Foo) xs.fromXML(is); } assertEquals("test1", foo.bar.getTagName()); assertEquals("value", foo.bar.getAttribute("key")); assertEquals("text!", foo.bar.getValue()); }
@Override public RowIdLifetime getRowIdLifetime() { return null; }
@Test void assertGetRowIdLifetime() { assertNull(metaData.getRowIdLifetime()); }
public void isEqualTo(@Nullable Object expected) { standardIsEqualTo(expected); }
@SuppressWarnings("TruthIncompatibleType") // Intentional for testing purposes. @Test public void toStringsAreIdentical() { IntWrapper wrapper = new IntWrapper(); wrapper.wrapped = 5; expectFailure.whenTesting().that(5).isEqualTo(wrapper); assertFailureKeys("expected", "an instance of", "but was", "an instance of"); assertFailureValue("expected", "5"); assertFailureValueIndexed( "an instance of", 0, "com.google.common.truth.SubjectTest$IntWrapper"); assertFailureValue("but was", "(non-equal value with same string representation)"); assertFailureValueIndexed("an instance of", 1, "java.lang.Integer"); }
@Override public synchronized boolean onReportingPeriodEnd() { firstEventReceived = false; return false; }
@Test public void testOnReportingPeriodEnd() { assertTrue(strategy.onActivity(), "First call of onActivity() should return true."); assertFalse(strategy.onReportingPeriodEnd(), "onReportingPeriodEnd() should always return false."); assertTrue(strategy.onActivity(), "onActivity() should return true after onReportingPeriodEnd()."); assertFalse(strategy.onReportingPeriodEnd(), "onReportingPeriodEnd() should always return false."); }
@Override public String toString() { return "MemberStateImpl{" + "address=" + address + ", uuid=" + uuid + ", cpMemberUuid=" + cpMemberUuid + ", name=" + name + ", clients=" + clients + ", mapsWithStats=" + mapsWithStats + ", multiMapsWithStats=" + multiMapsWithStats + ", replicatedMapsWithStats=" + replicatedMapsWithStats + ", queuesWithStats=" + queuesWithStats + ", topicsWithStats=" + topicsWithStats + ", reliableTopicsWithStats=" + reliableTopicsWithStats + ", pnCountersWithStats=" + pnCountersWithStats + ", executorStats=" + executorsWithStats + ", scheduledExecutorStats=" + scheduledExecutorsWithStats + ", durableExecutorStats=" + durableExecutorsWithStats + ", cachesWithStats=" + cachesWithStats + ", flakeIdGeneratorsWithStats=" + flakeIdGeneratorsWithStats + ", userCodeNamespacesWithStats=" + userCodeNamespacesWithStats + ", vectorCollections=" + vectorCollections + ", wanStats=" + wanStats + ", operationStats=" + operationStats + ", memberPartitionState=" + memberPartitionState + ", nodeState=" + nodeState + ", hotRestartState=" + hotRestartState + ", clusterHotRestartStatus=" + clusterHotRestartStatus + '}'; }
@Test public void testDefaultConstructor() { MemberStateImpl memberState = new MemberStateImpl(); assertNotNull(memberState.toString()); }
public static <T extends Throwable> Optional<T> findThrowable( Throwable throwable, Class<T> searchType) { if (throwable == null || searchType == null) { return Optional.empty(); } Throwable t = throwable; while (t != null) { if (searchType.isAssignableFrom(t.getClass())) { return Optional.of(searchType.cast(t)); } else { t = t.getCause(); } } return Optional.empty(); }
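// [Illustration] Usage sketch for the cause-chain search above; the nesting is
// illustrative. The search walks getCause() until the requested type matches.
Throwable failure = new RuntimeException(new IllegalStateException("boom"));
Optional<IllegalStateException> cause =
    ExceptionUtils.findThrowable(failure, IllegalStateException.class);
cause.ifPresent(e -> System.out.println(e.getMessage())); // prints "boom"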
@Test void testFindThrowableByType() { assertThat( ExceptionUtils.findThrowable( new RuntimeException(new IllegalStateException()), IllegalStateException.class)) .isPresent(); }
public static TraceTransferBean encoderFromContextBean(TraceContext ctx) {
    if (ctx == null) {
        return null;
    }
    //build message trace of the transferring entity content bean
    TraceTransferBean transferBean = new TraceTransferBean();
    StringBuilder sb = new StringBuilder(256);
    switch (ctx.getTraceType()) {
        case Pub: {
            TraceBean bean = ctx.getTraceBeans().get(0);
            //append the content of context and traceBean to transferBean's TransData
            sb.append(ctx.getTraceType()).append(TraceConstants.CONTENT_SPLITOR)
                .append(ctx.getTimeStamp()).append(TraceConstants.CONTENT_SPLITOR)
                .append(ctx.getRegionId()).append(TraceConstants.CONTENT_SPLITOR)
                .append(ctx.getGroupName()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getTopic()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getMsgId()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getTags()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getKeys()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getStoreHost()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getBodyLength()).append(TraceConstants.CONTENT_SPLITOR)
                .append(ctx.getCostTime()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getMsgType().ordinal()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getOffsetMsgId()).append(TraceConstants.CONTENT_SPLITOR)
                .append(ctx.isSuccess()).append(TraceConstants.FIELD_SPLITOR);
        }
        break;
        case SubBefore: {
            for (TraceBean bean : ctx.getTraceBeans()) {
                sb.append(ctx.getTraceType()).append(TraceConstants.CONTENT_SPLITOR)
                    .append(ctx.getTimeStamp()).append(TraceConstants.CONTENT_SPLITOR)
                    .append(ctx.getRegionId()).append(TraceConstants.CONTENT_SPLITOR)
                    .append(ctx.getGroupName()).append(TraceConstants.CONTENT_SPLITOR)
                    .append(ctx.getRequestId()).append(TraceConstants.CONTENT_SPLITOR)
                    .append(bean.getMsgId()).append(TraceConstants.CONTENT_SPLITOR)
                    .append(bean.getRetryTimes()).append(TraceConstants.CONTENT_SPLITOR)
                    .append(bean.getKeys()).append(TraceConstants.FIELD_SPLITOR);
            }
        }
        break;
        case SubAfter: {
            for (TraceBean bean : ctx.getTraceBeans()) {
                sb.append(ctx.getTraceType()).append(TraceConstants.CONTENT_SPLITOR)
                    .append(ctx.getRequestId()).append(TraceConstants.CONTENT_SPLITOR)
                    .append(bean.getMsgId()).append(TraceConstants.CONTENT_SPLITOR)
                    .append(ctx.getCostTime()).append(TraceConstants.CONTENT_SPLITOR)
                    .append(ctx.isSuccess()).append(TraceConstants.CONTENT_SPLITOR)
                    .append(bean.getKeys()).append(TraceConstants.CONTENT_SPLITOR)
                    .append(ctx.getContextCode()).append(TraceConstants.CONTENT_SPLITOR);
                if (!ctx.getAccessChannel().equals(AccessChannel.CLOUD)) {
                    sb.append(ctx.getTimeStamp()).append(TraceConstants.CONTENT_SPLITOR);
                    sb.append(ctx.getGroupName());
                }
                sb.append(TraceConstants.FIELD_SPLITOR);
            }
        }
        break;
        case EndTransaction: {
            TraceBean bean = ctx.getTraceBeans().get(0);
            sb.append(ctx.getTraceType()).append(TraceConstants.CONTENT_SPLITOR)
                .append(ctx.getTimeStamp()).append(TraceConstants.CONTENT_SPLITOR)
                .append(ctx.getRegionId()).append(TraceConstants.CONTENT_SPLITOR)
                .append(ctx.getGroupName()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getTopic()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getMsgId()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getTags()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getKeys()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getStoreHost()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getMsgType().ordinal()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getTransactionId()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getTransactionState().name()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.isFromTransactionCheck()).append(TraceConstants.FIELD_SPLITOR);
        }
        break;
        default:
    }
    transferBean.setTransData(sb.toString());
    for (TraceBean bean : ctx.getTraceBeans()) {
        transferBean.getTransKey().add(bean.getMsgId());
        if (bean.getKeys() != null && bean.getKeys().length() > 0) {
            String[] keys = bean.getKeys().split(MessageConst.KEY_SEPARATOR);
            transferBean.getTransKey().addAll(Arrays.asList(keys));
        }
    }
    return transferBean;
}
@Test public void testSubAfterTraceDataFormatTest() { TraceContext subAfterContext = new TraceContext(); subAfterContext.setTraceType(TraceType.SubAfter); subAfterContext.setRequestId("3455848576927"); subAfterContext.setCostTime(20); subAfterContext.setSuccess(true); subAfterContext.setTimeStamp(1625883640000L); subAfterContext.setGroupName("GroupName-test"); subAfterContext.setContextCode(98623046); subAfterContext.setAccessChannel(AccessChannel.LOCAL); TraceBean bean = new TraceBean(); bean.setMsgId("AC1415116D1418B4AAC217FE1B4E0000"); bean.setKeys("keys"); subAfterContext.setTraceBeans(new ArrayList<>(1)); subAfterContext.getTraceBeans().add(bean); TraceTransferBean traceTransferBean = TraceDataEncoder.encoderFromContextBean(subAfterContext); String transData = traceTransferBean.getTransData(); Assert.assertNotNull(transData); String[] items = transData.split(String.valueOf(TraceConstants.CONTENT_SPLITOR)); Assert.assertEquals(9, items.length); }