focal_method: string, lengths 13 to 60.9k
test_case: string, lengths 25 to 109k
@VisibleForTesting synchronized void updateStateStore() { String routerId = router.getRouterId(); if (routerId == null) { LOG.error("Cannot heartbeat for router: unknown router id"); return; } if (isStoreAvailable()) { RouterStore routerStore = router.getRouterStateManager(); try { RouterState record = RouterState.newInstance( routerId, router.getStartTime(), router.getRouterState()); StateStoreVersion stateStoreVersion = StateStoreVersion.newInstance( getStateStoreVersion(MembershipStore.class), getStateStoreVersion(MountTableStore.class)); record.setStateStoreVersion(stateStoreVersion); // if admin server not started then hostPort will be empty String hostPort = StateStoreUtils.getHostPortString(router.getAdminServerAddress()); record.setAdminAddress(hostPort); RouterHeartbeatRequest request = RouterHeartbeatRequest.newInstance(record); RouterHeartbeatResponse response = routerStore.routerHeartbeat(request); if (!response.getStatus()) { LOG.warn("Cannot heartbeat router {}", routerId); } else { LOG.debug("Router heartbeat for router {}", routerId); } } catch (IOException e) { LOG.error("Cannot heartbeat router {}", routerId, e); } } else { LOG.warn("Cannot heartbeat router {}: State Store unavailable", routerId); } }
@Test public void testStateStoreUnavailable() throws IOException { curatorFramework.close(); testingServer.stop(); router.getStateStore().stop(); // The driver is not ready assertFalse(router.getStateStore().isDriverReady()); // Do a heartbeat, and no exception thrown out RouterHeartbeatService heartbeatService = new RouterHeartbeatService(router); heartbeatService.updateStateStore(); }
@SuppressWarnings("deprecation") public boolean setSocketOpt(int option, Object optval) { final ValueReference<Boolean> result = new ValueReference<>(false); switch (option) { case ZMQ.ZMQ_SNDHWM: sendHwm = (Integer) optval; if (sendHwm < 0) { throw new IllegalArgumentException("sendHwm " + optval); } return true; case ZMQ.ZMQ_RCVHWM: recvHwm = (Integer) optval; if (recvHwm < 0) { throw new IllegalArgumentException("recvHwm " + optval); } return true; case ZMQ.ZMQ_AFFINITY: affinity = (Long) optval; return true; case ZMQ.ZMQ_IDENTITY: byte[] val = parseBytes(option, optval); if (val == null || val.length > 255) { throw new IllegalArgumentException("identity must not be null or less than 255 " + optval); } identity = Arrays.copyOf(val, val.length); identitySize = (short) identity.length; return true; case ZMQ.ZMQ_RATE: rate = (Integer) optval; return true; case ZMQ.ZMQ_RECOVERY_IVL: recoveryIvl = (Integer) optval; return true; case ZMQ.ZMQ_SNDBUF: sndbuf = (Integer) optval; return true; case ZMQ.ZMQ_RCVBUF: rcvbuf = (Integer) optval; return true; case ZMQ.ZMQ_TOS: tos = (Integer) optval; return true; case ZMQ.ZMQ_LINGER: linger = (Integer) optval; return true; case ZMQ.ZMQ_RECONNECT_IVL: reconnectIvl = (Integer) optval; if (reconnectIvl < -1) { throw new IllegalArgumentException("reconnectIvl " + optval); } return true; case ZMQ.ZMQ_RECONNECT_IVL_MAX: reconnectIvlMax = (Integer) optval; if (reconnectIvlMax < 0) { throw new IllegalArgumentException("reconnectIvlMax " + optval); } return true; case ZMQ.ZMQ_BACKLOG: backlog = (Integer) optval; return true; case ZMQ.ZMQ_MAXMSGSIZE: maxMsgSize = (Long) optval; return true; case ZMQ.ZMQ_MULTICAST_HOPS: multicastHops = (Integer) optval; return true; case ZMQ.ZMQ_RCVTIMEO: recvTimeout = (Integer) optval; return true; case ZMQ.ZMQ_SNDTIMEO: sendTimeout = (Integer) optval; return true; /* Deprecated in favor of ZMQ_IPV6 */ case ZMQ.ZMQ_IPV4ONLY: return setSocketOpt(ZMQ.ZMQ_IPV6, !parseBoolean(option, optval)); /* To replace the somewhat surprising IPV4ONLY */ case ZMQ.ZMQ_IPV6: ipv6 = parseBoolean(option, optval); return true; case ZMQ.ZMQ_SOCKS_PROXY: socksProxyAddress = parseString(option, optval); return true; case ZMQ.ZMQ_TCP_KEEPALIVE: tcpKeepAlive = ((Number) optval).intValue(); if (tcpKeepAlive != -1 && tcpKeepAlive != 0 && tcpKeepAlive != 1) { throw new IllegalArgumentException("tcpKeepAlive only accepts one of -1,0,1 " + optval); } return true; case ZMQ.ZMQ_TCP_KEEPALIVE_CNT: this.tcpKeepAliveCnt = ((Number) optval).intValue(); return true; case ZMQ.ZMQ_TCP_KEEPALIVE_IDLE: this.tcpKeepAliveIdle = ((Number) optval).intValue(); return true; case ZMQ.ZMQ_TCP_KEEPALIVE_INTVL: this.tcpKeepAliveIntvl = ((Number) optval).intValue(); return true; case ZMQ.ZMQ_IMMEDIATE: immediate = parseBoolean(option, optval); return true; case ZMQ.ZMQ_DELAY_ATTACH_ON_CONNECT: immediate = !parseBoolean(option, optval); return true; case ZMQ.ZMQ_TCP_ACCEPT_FILTER: String filterStr = parseString(option, optval); if (filterStr == null) { tcpAcceptFilters.clear(); } else if (filterStr.isEmpty() || filterStr.length() > 255) { throw new IllegalArgumentException("tcp_accept_filter " + optval); } else { TcpAddressMask filter = new TcpAddressMask(filterStr, ipv6); tcpAcceptFilters.add(filter); } return true; case ZMQ.ZMQ_PLAIN_SERVER: asServer = parseBoolean(option, optval); mechanism = (asServer ? 
Mechanisms.PLAIN : Mechanisms.NULL); return true; case ZMQ.ZMQ_PLAIN_USERNAME: if (optval == null) { mechanism = Mechanisms.NULL; asServer = false; return true; } plainUsername = parseString(option, optval); asServer = false; mechanism = Mechanisms.PLAIN; return true; case ZMQ.ZMQ_PLAIN_PASSWORD: if (optval == null) { mechanism = Mechanisms.NULL; asServer = false; return true; } plainPassword = parseString(option, optval); asServer = false; mechanism = Mechanisms.PLAIN; return true; case ZMQ.ZMQ_ZAP_DOMAIN: String domain = parseString(option, optval); if (domain != null && domain.length() < 256) { zapDomain = domain; return true; } throw new IllegalArgumentException("zap domain length shall be < 256 : " + optval); case ZMQ.ZMQ_CURVE_SERVER: asServer = parseBoolean(option, optval); mechanism = (asServer ? Mechanisms.CURVE : Mechanisms.NULL); return true; case ZMQ.ZMQ_CURVE_PUBLICKEY: curvePublicKey = setCurveKey(option, optval, result); return result.get(); case ZMQ.ZMQ_CURVE_SECRETKEY: curveSecretKey = setCurveKey(option, optval, result); return result.get(); case ZMQ.ZMQ_CURVE_SERVERKEY: curveServerKey = setCurveKey(option, optval, result); if (curveServerKey == null) { asServer = false; } return result.get(); case ZMQ.ZMQ_CONFLATE: conflate = parseBoolean(option, optval); return true; case ZMQ.ZMQ_GSSAPI_SERVER: asServer = parseBoolean(option, optval); mechanism = Mechanisms.GSSAPI; return true; case ZMQ.ZMQ_GSSAPI_PRINCIPAL: gssPrincipal = parseString(option, optval); mechanism = Mechanisms.GSSAPI; return true; case ZMQ.ZMQ_GSSAPI_SERVICE_PRINCIPAL: gssServicePrincipal = parseString(option, optval); mechanism = Mechanisms.GSSAPI; return true; case ZMQ.ZMQ_GSSAPI_PLAINTEXT: gssPlaintext = parseBoolean(option, optval); return true; case ZMQ.ZMQ_HANDSHAKE_IVL: handshakeIvl = (Integer) optval; if (handshakeIvl < 0) { throw new IllegalArgumentException("handshakeIvl only accept positive values " + optval); } return true; case ZMQ.ZMQ_HEARTBEAT_IVL: heartbeatInterval = (Integer) optval; if (heartbeatInterval < 0) { throw new IllegalArgumentException("heartbeatInterval only accept positive values " + optval); } return true; case ZMQ.ZMQ_HEARTBEAT_TIMEOUT: heartbeatTimeout = (Integer) optval; if (heartbeatTimeout < 0) { throw new IllegalArgumentException("heartbeatTimeout only accept positive values " + optval); } return true; case ZMQ.ZMQ_HEARTBEAT_TTL: Integer value = (Integer) optval; // Convert this to deciseconds from milliseconds value /= 100; if (value >= 0 && value <= 6553) { heartbeatTtl = value; } else { throw new IllegalArgumentException("heartbeatTtl is out of range [0..655399]" + optval); } return true; case ZMQ.ZMQ_HEARTBEAT_CONTEXT: heartbeatContext = (byte[]) optval; if (heartbeatContext == null) { throw new IllegalArgumentException("heartbeatContext cannot be null"); } return true; case ZMQ.ZMQ_DECODER: decoder = checkCustomCodec(optval, IDecoder.class); rawSocket = true; // failure throws ZError.InstantiationException // if that line is reached, everything is fine return true; case ZMQ.ZMQ_ENCODER: encoder = checkCustomCodec(optval, IEncoder.class); rawSocket = true; // failure throws ZError.InstantiationException // if that line is reached, everything is fine return true; case ZMQ.ZMQ_MSG_ALLOCATOR: if (optval instanceof String) { try { allocator = allocator(Class.forName((String) optval)); return true; } catch (ClassNotFoundException e) { throw new IllegalArgumentException(e); } } else if (optval instanceof Class) { allocator = allocator((Class<?>) optval); return true; } 
else if (optval instanceof MsgAllocator) { allocator = (MsgAllocator) optval; return true; } return false; case ZMQ.ZMQ_MSG_ALLOCATION_HEAP_THRESHOLD: Integer allocationHeapThreshold = (Integer) optval; allocator = new MsgAllocatorThreshold(allocationHeapThreshold); return true; case ZMQ.ZMQ_SELECTOR_PROVIDERCHOOSER: if (optval instanceof String) { try { selectorChooser = chooser(Class.forName((String) optval)); return true; } catch (ClassNotFoundException e) { throw new IllegalArgumentException(e); } } else if (optval instanceof Class) { selectorChooser = chooser((Class<?>) optval); return true; } else if (optval instanceof SelectorProviderChooser) { selectorChooser = (SelectorProviderChooser) optval; return true; } return false; case ZMQ.ZMQ_HELLO_MSG: if (optval == null) { helloMsg = null; } else { byte[] bytes = parseBytes(option, optval); if (bytes.length == 0) { helloMsg = null; } else { helloMsg = new Msg(Arrays.copyOf(bytes, bytes.length)); } } return true; case ZMQ.ZMQ_DISCONNECT_MSG: if (optval == null) { disconnectMsg = null; } else { byte[] bytes = parseBytes(option, optval); if (bytes.length == 0) { disconnectMsg = null; } else { disconnectMsg = new Msg(Arrays.copyOf(bytes, bytes.length)); } } return true; case ZMQ.ZMQ_HICCUP_MSG: if (optval == null) { hiccupMsg = null; } else { byte[] bytes = parseBytes(option, optval); if (bytes.length == 0) { hiccupMsg = null; } else { hiccupMsg = new Msg(Arrays.copyOf(bytes, bytes.length)); } } return true; case ZMQ.ZMQ_AS_TYPE: this.asType = (Integer) optval; return true; case ZMQ.ZMQ_SELFADDR_PROPERTY_NAME: this.selfAddressPropertyName = parseString(option, optval); return true; default: throw new IllegalArgumentException("Unknown Option " + option); } }
@Test(expected = IllegalArgumentException.class) public void testHeartbeatTtlOverflow() { options.setSocketOpt(ZMQ.ZMQ_HEARTBEAT_TTL, 655400); }
public static void saveStartType() { try { Storage storage = new Storage(Config.meta_dir + "/image"); String hostType = useFqdn ? HostType.FQDN.toString() : HostType.IP.toString(); storage.writeFeStartFeHostType(hostType); } catch (IOException e) { LOG.error("fail to write fe start host type:" + e.getMessage()); System.exit(-1); } }
@Test public void testSaveStartType() throws FileNotFoundException, IOException { Config.meta_dir = "feOpTestDir2"; String metaPath = Config.meta_dir + "/"; // fqdn File dir = new File(metaPath); deleteDir(dir); mkdir(false, metaPath); FrontendOptions.saveStartType(); String roleFilePath = Config.meta_dir + "/image/ROLE"; File roleFile = new File(roleFilePath); Properties prop = new Properties(); String hostType; try (FileInputStream in = new FileInputStream(roleFile)) { prop.load(in); } hostType = prop.getProperty("hostType", null); Assert.assertTrue((hostType.equals("IP") || hostType.equals("FQDN"))); dir = new File(metaPath); deleteDir(dir); }
@Override public GoApiResponse submit(final GoApiRequest request) { if (requestProcessorRegistry.canProcess(request)) { try { GoPluginApiRequestProcessor processor = requestProcessorRegistry.processorFor(request); return processor.process(pluginDescriptor, request); } catch (Exception e) { LOGGER.warn("Error while processing request api [{}]", request.api(), e); throw new RuntimeException(String.format("Error while processing request api %s", request.api()), e); } } LOGGER.warn("Plugin {} sent a request that could not be understood {} at version {}", request.pluginIdentifier().getExtension(), request.api(), request.apiVersion()); return unhandledApiRequest(); }
@Test void shouldHandleExceptionThrownByProcessor() { String api = "api-uri"; GoPluginApiRequestProcessor processor = mock(GoPluginApiRequestProcessor.class); GoApiRequest goApiRequest = mock(GoApiRequest.class); GoPluginDescriptor descriptor = mock(GoPluginDescriptor.class); when(goApiRequest.api()).thenReturn(api); Throwable cause = new RuntimeException("error"); when(processor.process(descriptor, goApiRequest)).thenThrow(cause); PluginRequestProcessorRegistry pluginRequestProcessorRegistry = new PluginRequestProcessorRegistry(); pluginRequestProcessorRegistry.registerProcessorFor(api, processor); PluginAwareDefaultGoApplicationAccessor accessor = new PluginAwareDefaultGoApplicationAccessor(descriptor, pluginRequestProcessorRegistry); assertThatCode(() -> accessor.submit(goApiRequest)) .hasMessage(String.format("Error while processing request api %s", api)) .hasCause(cause); }
public static Expression normalize(Expression expression) { if (expression instanceof NotExpression) { NotExpression not = (NotExpression) expression; if (not.getValue() instanceof ComparisonExpression && ((ComparisonExpression) not.getValue()).getOperator() != IS_DISTINCT_FROM) { ComparisonExpression comparison = (ComparisonExpression) not.getValue(); return new ComparisonExpression(comparison.getOperator().negate(), comparison.getLeft(), comparison.getRight()); } if (not.getValue() instanceof NotExpression) { return normalize(((NotExpression) not.getValue()).getValue()); } } return expression; }
@Test public void testNormalize() { assertNormalize(new ComparisonExpression(EQUAL, name("a"), new LongLiteral("1"))); assertNormalize(new IsNullPredicate(name("a"))); assertNormalize(new NotExpression(new LikePredicate(name("a"), new StringLiteral("x%"), Optional.empty()))); assertNormalize( new NotExpression(new ComparisonExpression(EQUAL, name("a"), new LongLiteral("1"))), new ComparisonExpression(NOT_EQUAL, name("a"), new LongLiteral("1"))); assertNormalize( new NotExpression(new ComparisonExpression(NOT_EQUAL, name("a"), new LongLiteral("1"))), new ComparisonExpression(EQUAL, name("a"), new LongLiteral("1"))); // Cannot normalize IS DISTINCT FROM yet assertNormalize(new NotExpression(new ComparisonExpression(IS_DISTINCT_FROM, name("a"), new LongLiteral("1")))); }
public static void deleteQuietly(File file) { Objects.requireNonNull(file, "file"); FileUtils.deleteQuietly(file); }
@Test void deleteQuietly() throws IOException { File tmpFile = DiskUtils.createTmpFile(UUID.randomUUID().toString(), ".ut"); DiskUtils.deleteQuietly(tmpFile); assertFalse(tmpFile.exists()); }
void precheckMaxResultLimitOnLocalPartitions(String mapName) { // check if feature is enabled if (!isPreCheckEnabled) { return; } // limit number of local partitions to check to keep runtime constant PartitionIdSet localPartitions = mapServiceContext.getCachedOwnedPartitions(); int partitionsToCheck = min(localPartitions.size(), maxLocalPartitionsLimitForPreCheck); if (partitionsToCheck == 0) { return; } // calculate size of local partitions int localPartitionSize = getLocalPartitionSize(mapName, localPartitions, partitionsToCheck); if (localPartitionSize == 0) { return; } // check local result size long localResultLimit = getNodeResultLimit(partitionsToCheck); if (localPartitionSize > localResultLimit * MAX_RESULT_LIMIT_FACTOR_FOR_PRECHECK) { var localMapStatsProvider = mapServiceContext.getLocalMapStatsProvider(); if (localMapStatsProvider != null && localMapStatsProvider.hasLocalMapStatsImpl(mapName)) { localMapStatsProvider.getLocalMapStatsImpl(mapName).incrementQueryResultSizeExceededCount(); } throw new QueryResultSizeExceededException(maxResultLimit, " Result size exceeded in local pre-check."); } }
@Test(expected = QueryResultSizeExceededException.class) public void testLocalPreCheckEnabledWitPartitionOverLimit() { int[] partitionsSizes = {1090}; populatePartitions(partitionsSizes); initMocksWithConfiguration(200000, 1); limiter.precheckMaxResultLimitOnLocalPartitions(ANY_MAP_NAME); }
public void setResource(Resource resource) { this.resource = resource; }
@Test void testSetResource() { Resource resource = new Resource("", "", "", "", new Properties()); assertNull(authContext.getResource()); authContext.setResource(resource); assertSame(resource, authContext.getResource()); }
String formatsTimeZone(TimeZone tz) { // package-private for test. int seconds = Math.abs(tz.getOffset(System.currentTimeMillis())) / 1000; int hours = seconds / 3600; int minutes = (seconds % 3600) / 60; return (tz.getRawOffset() < 0 ? "-" : "+") + String.format("%02d:%02d", hours, minutes); }
@Test public void testTimeZone() { IQEntityTimeHandler iqEntityTimeHandler = new IQEntityTimeHandler(); assertEquals(iqEntityTimeHandler.formatsTimeZone(TimeZone.getTimeZone("GMT-8:00")), "-08:00"); }
public ProviderGroup add(ProviderInfo providerInfo) { if (providerInfo == null) { return this; } ConcurrentHashSet<ProviderInfo> tmp = new ConcurrentHashSet<ProviderInfo>(providerInfos); tmp.add(providerInfo); // deduplicate this.providerInfos = new ArrayList<ProviderInfo>(tmp); return this; }
@Test public void add() throws Exception { ProviderGroup pg = new ProviderGroup("xxx", null); Assert.assertTrue(pg.size() == 0); pg.add(null); Assert.assertTrue(pg.size() == 0); pg.add(ProviderHelper.toProviderInfo("127.0.0.1:12200")); Assert.assertTrue(pg.size() == 1); pg.add(ProviderHelper.toProviderInfo("127.0.0.1:12201")); Assert.assertTrue(pg.size() == 2); // duplicate pg.add(ProviderHelper.toProviderInfo("127.0.0.1:12200")); Assert.assertTrue(pg.size() == 2); }
public static Executor scopeToJob(JobID jobID, Executor executor) { checkArgument(!(executor instanceof MdcAwareExecutor)); return new MdcAwareExecutor<>(executor, asContextData(jobID)); }
@Test void testScopeScheduledExecutorService() throws Exception { ScheduledExecutorService ses = java.util.concurrent.Executors.newSingleThreadScheduledExecutor(); try { assertJobIDLogged( jobID -> MdcUtils.scopeToJob(jobID, ses) .schedule(LOGGING_RUNNABLE, 1L, TimeUnit.MILLISECONDS) .get()); } finally { ses.shutdownNow(); } }
@Override public void pre(SpanAdapter span, Exchange exchange, Endpoint endpoint) { super.pre(span, exchange, endpoint); span.setTag(TagConstants.DB_SYSTEM, "sql"); Object body = exchange.getIn().getBody(); if (body instanceof String) { span.setTag(TagConstants.DB_STATEMENT, (String) body); } }
@Test public void testPre() { Endpoint endpoint = Mockito.mock(Endpoint.class); Exchange exchange = Mockito.mock(Exchange.class); Message message = Mockito.mock(Message.class); Mockito.when(endpoint.getEndpointUri()).thenReturn("test"); Mockito.when(exchange.getIn()).thenReturn(message); Mockito.when(message.getBody()).thenReturn(SQL_STATEMENT); SpanDecorator decorator = new JdbcSpanDecorator(); MockSpanAdapter span = new MockSpanAdapter(); decorator.pre(span, exchange, endpoint); assertEquals("sql", span.tags().get(TagConstants.DB_SYSTEM)); assertEquals(SQL_STATEMENT, span.tags().get(TagConstants.DB_STATEMENT)); }
@Override public String toString() { String bounds = printExtendsClause() ? " extends " + joinTypeNames(upperBounds) : ""; return getClass().getSimpleName() + '{' + getName() + bounds + '}'; }
@Test public void toString_unbounded() { @SuppressWarnings("unused") class Unbounded<NAME> { } JavaTypeVariable<JavaClass> typeVariable = new ClassFileImporter().importClass(Unbounded.class).getTypeParameters().get(0); assertThat(typeVariable.toString()) .contains(JavaTypeVariable.class.getSimpleName()) .contains("NAME") .doesNotContain("extends"); }
@Override public int getPriority() { return 6; }
@Test public void testGetPriority() { assertEquals(6, roundRobinLoadBalancerProvider.getPriority()); }
public CompletableFuture<Triple<MessageExt, String, Boolean>> getMessageAsync(String topic, long offset, int queueId, String brokerName, boolean deCompressBody) { MessageStore messageStore = brokerController.getMessageStoreByBrokerName(brokerName); if (messageStore != null) { return messageStore.getMessageAsync(innerConsumerGroupName, topic, queueId, offset, 1, null) .thenApply(result -> { if (result == null) { LOG.warn("getMessageResult is null , innerConsumerGroupName {}, topic {}, offset {}, queueId {}", innerConsumerGroupName, topic, offset, queueId); return Triple.of(null, "getMessageResult is null", false); // local store, so no retry } List<MessageExt> list = decodeMsgList(result, deCompressBody); if (list == null || list.isEmpty()) { // OFFSET_FOUND_NULL returned by TieredMessageStore indicates exception occurred boolean needRetry = GetMessageStatus.OFFSET_FOUND_NULL.equals(result.getStatus()) && messageStore instanceof TieredMessageStore; LOG.warn("Can not get msg , topic {}, offset {}, queueId {}, needRetry {}, result is {}", topic, offset, queueId, needRetry, result); return Triple.of(null, "Can not get msg", needRetry); } return Triple.of(list.get(0), "", false); }); } else { return getMessageFromRemoteAsync(topic, offset, queueId, brokerName); } }
@Test public void getMessageAsyncTest_remoteStore_addressNotFound() throws Exception { when(brokerController.getMessageStoreByBrokerName(any())).thenReturn(null); // just test address not found, since we have complete tests of getMessageFromRemoteAsync() when(topicRouteInfoManager.findBrokerAddressInSubscribe(anyString(), anyLong(), anyBoolean())).thenReturn(null); Triple<MessageExt, String, Boolean> rst = escapeBridge.getMessageAsync(TEST_TOPIC, 0, DEFAULT_QUEUE_ID, BROKER_NAME, false).join(); Assert.assertNull(rst.getLeft()); Assert.assertEquals("brokerAddress not found", rst.getMiddle()); Assert.assertTrue(rst.getRight()); // need retry }
public static Message toProto(final Map<?, ?> inputData, final Message defaultInstance) { ObjectHelper.notNull(inputData, "inputData"); ObjectHelper.notNull(defaultInstance, "defaultInstance"); final Descriptor descriptor = defaultInstance.getDescriptorForType(); final Builder target = defaultInstance.newBuilderForType(); return convertMapToMessage(descriptor, target, inputData); }
@Test public void testIfThrowsErrorInCaseNestedMessageNotMap() { final Map<String, Object> input = new HashMap<>(); input.put("name", "Martin"); input.put("id", 1234); input.put("address", "wrong address"); final AddressBookProtos.Person defaultInstance = AddressBookProtos.Person.getDefaultInstance(); assertThrows(IllegalArgumentException.class, () -> ProtobufConverter.toProto(input, defaultInstance)); }
@Override public List<ResourceReference> getResourceDependencies( TransMeta transMeta, StepMeta stepInfo ) { List<ResourceReference> references = new ArrayList<>( 5 ); String realFilename = transMeta.environmentSubstitute( transformationPath ); ResourceReference reference = new ResourceReference( stepInfo ); references.add( reference ); if ( !Utils.isEmpty( realFilename ) ) { // Add the filename to the references, including a reference to this step // meta data. // reference.getEntries().add( new ResourceEntry( realFilename, ResourceEntry.ResourceType.ACTIONFILE ) ); } return references; }
@Test public void testGetResourceDependencies() { String stepId = "KafkConsumerInput"; String path = "/home/bgroves/fake.ktr"; StepMeta stepMeta = new StepMeta(); stepMeta.setStepID( stepId ); StuffStreamMeta inputMeta = new StuffStreamMeta(); List<ResourceReference> resourceDependencies = inputMeta.getResourceDependencies( new TransMeta(), stepMeta ); assertEquals( 0, resourceDependencies.get( 0 ).getEntries().size() ); inputMeta.setTransformationPath( path ); resourceDependencies = inputMeta.getResourceDependencies( new TransMeta(), stepMeta ); assertEquals( 1, resourceDependencies.get( 0 ).getEntries().size() ); assertEquals( path, resourceDependencies.get( 0 ).getEntries().get( 0 ).getResource() ); assertEquals( ResourceEntry.ResourceType.ACTIONFILE, resourceDependencies.get( 0 ).getEntries().get( 0 ).getResourcetype() ); testRoundTrip( inputMeta ); }
public FEELFnResult<BigDecimal> invoke(@ParameterName( "list" ) List list) { if ( list == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "cannot be null")); } FEELFnResult<BigDecimal> s = sum.invoke( list ); Function<FEELEvent, FEELFnResult<BigDecimal>> ifLeft = event -> FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "unable to sum the elements which is required to calculate the mean")); Function<BigDecimal, FEELFnResult<BigDecimal>> ifRight = (sum) -> { try { return FEELFnResult.ofResult( sum.divide( BigDecimal.valueOf( list.size() ), MathContext.DECIMAL128 ) ); } catch (Exception e) { return FEELFnResult.ofError( new InvalidParametersEvent(Severity.ERROR, "unable to perform division to calculate the mean", e) ); } }; return s.cata(ifLeft, ifRight); }
@Test void invokeNumberFloat() { FunctionTestUtil.assertResult(meanFunction.invoke(Arrays.asList(10.1f)), BigDecimal.valueOf(10.1)); }
public void validate() { final String[] patterns = value().toArray(new String[0]); Automaton automaton = Regex.simpleMatchToAutomaton(patterns); final CharacterRunAutomaton characterRunAutomaton = new CharacterRunAutomaton(automaton); final boolean isHostMatching = characterRunAutomaton.run(remoteHost.getHost() + ":" + remoteHost.getPort()); if (!isHostMatching) { throw new IllegalArgumentException("Provided allowlist[" + rawValue + "] doesn't match remote host address[" + remoteHost + "]"); } }
@Test void testRegexValidationOK() { final RemoteReindexAllowlist allowlist = new RemoteReindexAllowlist(URI.create("http://10.0.1.28:9200"), "10.0.1.*:9200"); allowlist.validate(); }
@Override public DataSink createDataSink(Context context) { FactoryHelper.createFactoryHelper(this, context) .validateExcept(TABLE_CREATE_PROPERTIES_PREFIX, SINK_PROPERTIES_PREFIX); StarRocksSinkOptions sinkOptions = buildSinkConnectorOptions(context.getFactoryConfiguration()); TableCreateConfig tableCreateConfig = TableCreateConfig.from(context.getFactoryConfiguration()); SchemaChangeConfig schemaChangeConfig = SchemaChangeConfig.from(context.getFactoryConfiguration()); String zoneStr = context.getFactoryConfiguration().get(PIPELINE_LOCAL_TIME_ZONE); ZoneId zoneId = PIPELINE_LOCAL_TIME_ZONE.defaultValue().equals(zoneStr) ? ZoneId.systemDefault() : ZoneId.of(zoneStr); return new StarRocksDataSink(sinkOptions, tableCreateConfig, schemaChangeConfig, zoneId); }
@Test void testPrefixRequireOption() { DataSinkFactory sinkFactory = FactoryDiscoveryUtils.getFactoryByIdentifier("starrocks", DataSinkFactory.class); Assertions.assertThat(sinkFactory).isInstanceOf(StarRocksDataSinkFactory.class); Configuration conf = Configuration.fromMap( ImmutableMap.<String, String>builder() .put("jdbc-url", "jdbc:mysql://127.0.0.1:9030") .put("load-url", "127.0.0.1:8030") .put("username", "root") .put("password", "") .put("table.create.properties.replication_num", "1") .put("sink.properties.format", "json") .build()); DataSink dataSink = sinkFactory.createDataSink( new FactoryHelper.DefaultContext( conf, conf, Thread.currentThread().getContextClassLoader())); Assertions.assertThat(dataSink).isInstanceOf(StarRocksDataSink.class); }
public EnumNode() { }
@Test void testEnumNode() { MyNode n = new MyNode(); assertNull(n.getValue()); assertEquals("(null)", n.toString()); assertTrue(n.doSetValue("ONE")); assertEquals("ONE", n.getValue()); assertEquals("ONE", n.toString()); assertFalse(n.doSetValue("THREE")); }
@Override public void checkBeforeUpdate(final CreateReadwriteSplittingRuleStatement sqlStatement) { ReadwriteSplittingRuleStatementChecker.checkCreation(database, sqlStatement.getRules(), null == rule ? null : rule.getConfiguration(), sqlStatement.isIfNotExists()); }
@Test void assertCheckSQLStatementWithIfNotExists() { ReadwriteSplittingRuleSegment staticSegment = new ReadwriteSplittingRuleSegment("readwrite_ds_0", "write_ds_0", Arrays.asList("read_ds_2", "read_ds_3"), new AlgorithmSegment(null, new Properties())); ReadwriteSplittingRule rule = mock(ReadwriteSplittingRule.class); when(rule.getConfiguration()).thenReturn(createCurrentRuleConfiguration()); executor.setRule(rule); executor.checkBeforeUpdate(createSQLStatement(true, staticSegment)); }
static <K, V> CacheConfig<K, V> createCacheConfig(HazelcastClientInstanceImpl client, CacheConfig<K, V> newCacheConfig, boolean urgent) { try { String nameWithPrefix = newCacheConfig.getNameWithPrefix(); int partitionId = client.getClientPartitionService().getPartitionId(nameWithPrefix); InternalSerializationService serializationService = client.getSerializationService(); ClientMessage request = CacheCreateConfigCodec .encodeRequest(CacheConfigHolder.of(newCacheConfig, serializationService), true); ClientInvocation clientInvocation = new ClientInvocation(client, request, nameWithPrefix, partitionId); Future<ClientMessage> future = urgent ? clientInvocation.invokeUrgent() : clientInvocation.invoke(); final ClientMessage response = future.get(); final CacheConfigHolder cacheConfigHolder = CacheCreateConfigCodec.decodeResponse(response); if (cacheConfigHolder == null) { return null; } return cacheConfigHolder.asCacheConfig(serializationService); } catch (Exception e) { throw rethrow(e); } }
@Test(expected = IllegalArgumentException.class) public void testCreateCacheConfig_rethrowsExceptions() { createCacheConfig(exceptionThrowingClient, newCacheConfig, false); }
public void generateAcknowledgementPayload( MllpSocketBuffer mllpSocketBuffer, byte[] hl7MessageBytes, String acknowledgementCode) throws MllpAcknowledgementGenerationException { generateAcknowledgementPayload(mllpSocketBuffer, hl7MessageBytes, acknowledgementCode, null); }
@Test public void testGenerateAcknowledgementPayloadWithoutEndOfSegment() throws Exception { String junkMessage = "MSH|^~\\&|REQUESTING|ICE|INHOUSE|RTH00|20161206193919||ORM^O01|00001|D|2.3|||||||" + "PID|1||ICE999999^^^ICE^ICE||Testpatient^Testy^^^Mr||19740401|M|||123 Barrel Drive^^^^SW18 4RT|||||2||||||||||||||"; MllpSocketBuffer mllpSocketBuffer = new MllpSocketBuffer(new MllpEndpointStub()); hl7util.generateAcknowledgementPayload(mllpSocketBuffer, junkMessage.getBytes(), "AA"); String actual = mllpSocketBuffer.toString(); assertThat(actual, startsWith(EXPECTED_ACKNOWLEDGEMENT_PAYLOAD_START)); assertThat(actual, endsWith( "PID|1||ICE999999^^^ICE^ICE||Testpatient^Testy^^^Mr||19740401|M|||123 Barrel Drive^^^^SW18 4RT|||||2|||||||||||||\r" + "MSA|AA|00001\r" + MllpProtocolConstants.END_OF_BLOCK + MllpProtocolConstants.END_OF_DATA)); }
public boolean isZero() { return value.compareTo(BigDecimal.ZERO) == 0; }
@Test void testIsZero() { final Resource resource1 = new TestResource(0.0); final Resource resource2 = new TestResource(1.0); assertThat(resource1.isZero()).isTrue(); assertThat(resource2.isZero()).isFalse(); }
public final DisposableServer bindNow() { return bindNow(Duration.ofSeconds(45)); }
@Test void testBindTimeoutLongOverflow() { assertThatExceptionOfType(ArithmeticException.class) .isThrownBy(() -> new TestServerTransport(Mono.never()).bindNow(Duration.ofMillis(Long.MAX_VALUE))); }
String getServiceProviderCertificate() { return configuration.get(SERVICE_PROVIDER_CERTIFICATE).orElseThrow(() -> new IllegalArgumentException("Service provider certificate is missing")); }
@Test public void return_service_provider_certificate() { settings.setProperty("sonar.auth.saml.sp.certificate.secured", "my_certificate"); assertThat(underTest.getServiceProviderCertificate()).isEqualTo("my_certificate"); }
@Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null || getClass() != obj.getClass()) { return false; } JsonPrimitive other = (JsonPrimitive) obj; if (value == null) { return other.value == null; } if (isIntegral(this) && isIntegral(other)) { return (this.value instanceof BigInteger || other.value instanceof BigInteger) ? this.getAsBigInteger().equals(other.getAsBigInteger()) : this.getAsNumber().longValue() == other.getAsNumber().longValue(); } if (value instanceof Number && other.value instanceof Number) { if (value instanceof BigDecimal && other.value instanceof BigDecimal) { // Uses compareTo to ignore scale of values, e.g. `0` and `0.00` should be considered equal return this.getAsBigDecimal().compareTo(other.getAsBigDecimal()) == 0; } double thisAsDouble = this.getAsDouble(); double otherAsDouble = other.getAsDouble(); // Don't use Double.compare(double, double) because that considers -0.0 and +0.0 not equal return (thisAsDouble == otherAsDouble) || (Double.isNaN(thisAsDouble) && Double.isNaN(otherAsDouble)); } return value.equals(other.value); }
@Test public void testEqualsIntegerAndBigInteger() { JsonPrimitive a = new JsonPrimitive(5L); JsonPrimitive b = new JsonPrimitive(new BigInteger("18446744073709551621")); assertWithMessage("%s not equals %s", a, b).that(a.equals(b)).isFalse(); }
@Override public PageResult<NotifyTemplateDO> getNotifyTemplatePage(NotifyTemplatePageReqVO pageReqVO) { return notifyTemplateMapper.selectPage(pageReqVO); }
@Test public void testGetNotifyTemplatePage() { // mock data NotifyTemplateDO dbNotifyTemplate = randomPojo(NotifyTemplateDO.class, o -> { // to be found by the query later o.setName("芋头"); o.setCode("test_01"); o.setStatus(CommonStatusEnum.ENABLE.getStatus()); o.setCreateTime(buildTime(2022, 2, 3)); }); notifyTemplateMapper.insert(dbNotifyTemplate); // test name mismatch notifyTemplateMapper.insert(cloneIgnoreId(dbNotifyTemplate, o -> o.setName("投"))); // test code mismatch notifyTemplateMapper.insert(cloneIgnoreId(dbNotifyTemplate, o -> o.setCode("test_02"))); // test status mismatch notifyTemplateMapper.insert(cloneIgnoreId(dbNotifyTemplate, o -> o.setStatus(CommonStatusEnum.DISABLE.getStatus()))); // test createTime mismatch notifyTemplateMapper.insert(cloneIgnoreId(dbNotifyTemplate, o -> o.setCreateTime(buildTime(2022, 1, 5)))); // prepare parameters NotifyTemplatePageReqVO reqVO = new NotifyTemplatePageReqVO(); reqVO.setName("芋"); reqVO.setCode("est_01"); reqVO.setStatus(CommonStatusEnum.ENABLE.getStatus()); reqVO.setCreateTime(buildBetweenTime(2022, 2, 1, 2022, 2, 5)); // invoke PageResult<NotifyTemplateDO> pageResult = notifyTemplateService.getNotifyTemplatePage(reqVO); // assert assertEquals(1, pageResult.getTotal()); assertEquals(1, pageResult.getList().size()); assertPojoEquals(dbNotifyTemplate, pageResult.getList().get(0)); }
public static SqlAggregation from(QueryDataType operandType, boolean distinct) { SqlAggregation aggregation = from(operandType); return distinct ? new DistinctSqlAggregation(aggregation) : aggregation; }
@Test public void test_serialization() { SqlAggregation original = AvgSqlAggregations.from(QueryDataType.DECIMAL, false); original.accumulate(BigDecimal.ONE); InternalSerializationService ss = new DefaultSerializationServiceBuilder().build(); SqlAggregation serialized = ss.toObject(ss.toData(original)); assertThat(serialized).isEqualToComparingFieldByField(original); }
public static DataMap convertToDataMap(Map<String, Object> queryParams) { return convertToDataMap(queryParams, Collections.<String, Class<?>>emptyMap(), AllProtocolVersions.RESTLI_PROTOCOL_1_0_0.getProtocolVersion(), RestLiProjectionDataMapSerializer.DEFAULT_SERIALIZER); }
@Test public void testPreSerializedProjectionParams() { Map<String, Object> queryParams = new HashMap<>(); queryParams.put(RestConstants.FIELDS_PARAM, "fields"); queryParams.put(RestConstants.PAGING_FIELDS_PARAM, "paging"); queryParams.put(RestConstants.METADATA_FIELDS_PARAM, "metadata"); DataMap dataMap = QueryParamsUtil.convertToDataMap(queryParams, Collections.emptyMap(), AllProtocolVersions.LATEST_PROTOCOL_VERSION, (paramName, pathSpecs) -> null); Assert.assertEquals("fields", dataMap.getString(RestConstants.FIELDS_PARAM)); Assert.assertEquals("paging", dataMap.getString(RestConstants.PAGING_FIELDS_PARAM)); Assert.assertEquals("metadata", dataMap.getString(RestConstants.METADATA_FIELDS_PARAM)); }
@Override public Serializable read(final MySQLBinlogColumnDef columnDef, final MySQLPacketPayload payload) { return payload.readStringFixByBytes(readLengthFromMeta(columnDef.getColumnMeta(), payload)); }
@Test void assertReadWithUnknownMetaValue() { columnDef.setColumnMeta(5); assertThrows(UnsupportedSQLOperationException.class, () -> new MySQLBlobBinlogProtocolValue().read(columnDef, payload)); }
@Override public Collection<SchemaMetaData> load(final MetaDataLoaderMaterial material) throws SQLException { try (Connection connection = material.getDataSource().getConnection()) { Collection<String> schemaNames = SchemaMetaDataLoader.loadSchemaNames(connection, TypedSPILoader.getService(DatabaseType.class, "openGauss")); Map<String, Multimap<String, IndexMetaData>> schemaIndexMetaDataMap = loadIndexMetaDataMap(connection, schemaNames); Map<String, Multimap<String, ColumnMetaData>> schemaColumnMetaDataMap = loadColumnMetaDataMap(connection, material.getActualTableNames(), schemaNames); Collection<SchemaMetaData> result = new LinkedList<>(); for (String each : schemaNames) { Multimap<String, IndexMetaData> tableIndexMetaDataMap = schemaIndexMetaDataMap.getOrDefault(each, LinkedHashMultimap.create()); Multimap<String, ColumnMetaData> tableColumnMetaDataMap = schemaColumnMetaDataMap.getOrDefault(each, LinkedHashMultimap.create()); result.add(new SchemaMetaData(each, createTableMetaDataList(tableIndexMetaDataMap, tableColumnMetaDataMap))); } return result; } }
@Test void assertLoadWithoutTables() throws SQLException { DataSource dataSource = mockDataSource(); ResultSet schemaResultSet = mockSchemaMetaDataResultSet(); when(dataSource.getConnection().getMetaData().getSchemas()).thenReturn(schemaResultSet); ResultSet tableResultSet = mockTableMetaDataResultSet(); when(dataSource.getConnection().prepareStatement(TABLE_META_DATA_SQL_WITHOUT_TABLES).executeQuery()).thenReturn(tableResultSet); ResultSet primaryKeyResultSet = mockPrimaryKeyMetaDataResultSet(); when(dataSource.getConnection().prepareStatement(PRIMARY_KEY_META_DATA_SQL).executeQuery()).thenReturn(primaryKeyResultSet); ResultSet indexResultSet = mockIndexMetaDataResultSet(); when(dataSource.getConnection().prepareStatement(BASIC_INDEX_META_DATA_SQL).executeQuery()).thenReturn(indexResultSet); ResultSet advanceIndexResultSet = mockAdvanceIndexMetaDataResultSet(); when(dataSource.getConnection().prepareStatement(ADVANCE_INDEX_META_DATA_SQL).executeQuery()).thenReturn(advanceIndexResultSet); assertTableMetaDataMap(getDialectTableMetaDataLoader().load(new MetaDataLoaderMaterial(Collections.emptyList(), dataSource, new OpenGaussDatabaseType(), "sharding_db"))); }
@Override public List<Class<? extends Event>> subscribeTypes() { List<Class<? extends Event>> result = new LinkedList<>(); result.add(ClientOperationEvent.ClientRegisterServiceEvent.class); result.add(ClientOperationEvent.ClientDeregisterServiceEvent.class); result.add(ClientOperationEvent.ClientSubscribeServiceEvent.class); result.add(ClientOperationEvent.ClientUnsubscribeServiceEvent.class); result.add(ClientOperationEvent.ClientReleaseEvent.class); return result; }
@Test void testSubscribeTypes() { List<Class<? extends Event>> classes = clientServiceIndexesManager.subscribeTypes(); assertNotNull(classes); assertEquals(5, classes.size()); }
@Override protected void requestSubpartitions() throws IOException { boolean retriggerRequest = false; boolean notifyDataAvailable = false; // The lock is required to request only once in the presence of retriggered requests. synchronized (requestLock) { checkState(!isReleased, "LocalInputChannel has been released already"); if (subpartitionView == null) { LOG.debug( "{}: Requesting LOCAL subpartitions {} of partition {}. {}", this, consumedSubpartitionIndexSet, partitionId, channelStatePersister); try { ResultSubpartitionView subpartitionView = partitionManager.createSubpartitionView( partitionId, consumedSubpartitionIndexSet, this); if (subpartitionView == null) { throw new IOException("Error requesting subpartition."); } // make the subpartition view visible this.subpartitionView = subpartitionView; // check if the channel was released in the meantime if (isReleased) { subpartitionView.releaseAllResources(); this.subpartitionView = null; } else { notifyDataAvailable = true; } } catch (PartitionNotFoundException notFound) { if (increaseBackoff()) { retriggerRequest = true; } else { throw notFound; } } } } if (notifyDataAvailable) { notifyDataAvailable(this.subpartitionView); } // Do this outside of the lock scope as this might lead to a // deadlock with a concurrent release of the channel via the // input gate. if (retriggerRequest) { inputGate.retriggerPartitionRequest(partitionId.getPartitionId(), channelInfo); } }
@Test void testConcurrentReleaseAndRetriggerPartitionRequest() throws Exception { final SingleInputGate gate = createSingleInputGate(1); ResultPartitionManager partitionManager = mock(ResultPartitionManager.class); when(partitionManager.createSubpartitionView( any(ResultPartitionID.class), any(ResultSubpartitionIndexSet.class), any(BufferAvailabilityListener.class))) .thenAnswer( (Answer<ResultSubpartitionView>) invocationOnMock -> { // Sleep here a little to give the releaser Thread // time to acquire the input gate lock. We throw // the Exception to retrigger the request. Thread.sleep(100); throw new PartitionNotFoundException(new ResultPartitionID()); }); final LocalInputChannel channel = createLocalInputChannel(gate, partitionManager, 1, 1); Thread releaser = new Thread( () -> { try { gate.close(); } catch (IOException ignored) { } }); Thread requester = new Thread( () -> { try { channel.requestSubpartitions(); } catch (IOException ignored) { } }); requester.start(); releaser.start(); releaser.join(); requester.join(); }
@Override public boolean decide(final SelectStatementContext selectStatementContext, final List<Object> parameters, final RuleMetaData globalRuleMetaData, final ShardingSphereDatabase database, final ShardingRule rule, final Collection<DataNode> includedDataNodes) { Collection<String> tableNames = rule.getShardingLogicTableNames(selectStatementContext.getTablesContext().getTableNames()); if (tableNames.isEmpty()) { return false; } includedDataNodes.addAll(getTableDataNodes(rule, tableNames, database)); if (selectStatementContext.isContainsSubquery() || selectStatementContext.isContainsHaving() || selectStatementContext.isContainsCombine() || selectStatementContext.isContainsPartialDistinctAggregation()) { return true; } if (!selectStatementContext.isContainsJoinQuery() || rule.isAllTablesInSameDataSource(tableNames)) { return false; } if (1 == tableNames.size() && selectStatementContext.isContainsJoinQuery() && !rule.isAllBindingTables(database, selectStatementContext, tableNames)) { return true; } return tableNames.size() > 1 && !rule.isAllBindingTables(database, selectStatementContext, tableNames); }
@Test void assertDecideWhenAllTablesIsBindingTables() { SelectStatementContext select = createStatementContext(); when(select.isContainsJoinQuery()).thenReturn(true); ShardingRule shardingRule = createShardingRule(); ShardingSphereDatabase database = createDatabase(shardingRule); when(shardingRule.isAllBindingTables(database, select, Arrays.asList("t_order", "t_order_item"))).thenReturn(true); Collection<DataNode> includedDataNodes = new HashSet<>(); assertFalse(new ShardingSQLFederationDecider().decide(select, Collections.emptyList(), mock(RuleMetaData.class), database, shardingRule, includedDataNodes)); assertThat(includedDataNodes.size(), is(4)); }
public ConnectionDetails createConnectionDetails( String scheme ) { try { ConnectionProvider<? extends ConnectionDetails> provider = connectionProviders.get( scheme ); return provider.getClassType().newInstance(); } catch ( Exception e ) { logger.error( "Error in createConnectionDetails {}", scheme, e ); return null; } }
@Test public void testCreateConnectionDetails() { addProvider(); assertNotNull( connectionManager.createConnectionDetails( TestConnectionWithBucketsProvider.SCHEME ) ); }
public boolean removeWatchedAddress(final Address address) { return removeWatchedAddresses(Collections.singletonList(address)); }
@Test public void removeWatchedAddress() { Address watchedAddress = new ECKey().toAddress(ScriptType.P2PKH, TESTNET); wallet.addWatchedAddress(watchedAddress); wallet.removeWatchedAddress(watchedAddress); assertFalse(wallet.isAddressWatched(watchedAddress)); }
public static <InputT> KeyByBuilder<InputT> of(PCollection<InputT> input) { return named(null).of(input); }
@Test public void testWindow_applyIfNot() { final PCollection<String> dataset = TestUtils.createMockDataset(TypeDescriptors.strings()); final PCollection<KV<String, Long>> reduced = ReduceByKey.of(dataset) .keyBy(s -> s) .valueBy(s -> 1L) .combineBy(Sums.ofLongs()) .applyIf( false, b -> b.windowBy(FixedWindows.of(Duration.standardHours(1))) .triggeredBy(DefaultTrigger.of()) .accumulationMode(AccumulationMode.DISCARDING_FIRED_PANES)) .output(); final ReduceByKey reduce = (ReduceByKey) TestUtils.getProducer(reduced); assertFalse(reduce.getWindow().isPresent()); }
@Override public int getOrder() { return PluginEnum.LOGGING_CONSOLE.getCode(); }
@Test public void testGetOrder() { assertEquals(loggingConsolePlugin.getOrder(), PluginEnum.LOGGING_CONSOLE.getCode()); }
@Override public MethodDescriptor<?, ?> methodDescriptor() { return methodDescriptor; }
@Test void methodDescriptor() { assertThat(request.methodDescriptor()).isSameAs(methodDescriptor); }
@Override void handle(Connection connection, DatabaseCharsetChecker.State state) throws SQLException { expectCaseSensitiveDefaultCollation(connection); if (state == DatabaseCharsetChecker.State.UPGRADE || state == DatabaseCharsetChecker.State.STARTUP) { repairColumns(connection); } }
@Test public void fresh_install_fails_if_default_collation_is_not_CS_AS() throws SQLException { answerDefaultCollation("Latin1_General_CI_AI"); assertThatThrownBy(() -> underTest.handle(connection, DatabaseCharsetChecker.State.FRESH_INSTALL)) .isInstanceOf(MessageException.class) .hasMessage("Database collation must be case-sensitive and accent-sensitive. It is Latin1_General_CI_AI but should be Latin1_General_CS_AS."); }
public SendResult putMessageToRemoteBroker(MessageExtBrokerInner messageExt, String brokerNameToSend) { if (this.brokerController.getBrokerConfig().getBrokerName().equals(brokerNameToSend)) { // not remote broker return null; } final boolean isTransHalfMessage = TransactionalMessageUtil.buildHalfTopic().equals(messageExt.getTopic()); MessageExtBrokerInner messageToPut = messageExt; if (isTransHalfMessage) { messageToPut = TransactionalMessageUtil.buildTransactionalMessageFromHalfMessage(messageExt); } final TopicPublishInfo topicPublishInfo = this.brokerController.getTopicRouteInfoManager().tryToFindTopicPublishInfo(messageToPut.getTopic()); if (null == topicPublishInfo || !topicPublishInfo.ok()) { LOG.warn("putMessageToRemoteBroker: no route info of topic {} when escaping message, msgId={}", messageToPut.getTopic(), messageToPut.getMsgId()); return null; } final MessageQueue mqSelected; if (StringUtils.isEmpty(brokerNameToSend)) { mqSelected = topicPublishInfo.selectOneMessageQueue(this.brokerController.getBrokerConfig().getBrokerName()); messageToPut.setQueueId(mqSelected.getQueueId()); brokerNameToSend = mqSelected.getBrokerName(); if (this.brokerController.getBrokerConfig().getBrokerName().equals(brokerNameToSend)) { LOG.warn("putMessageToRemoteBroker failed, remote broker not found. Topic: {}, MsgId: {}, Broker: {}", messageExt.getTopic(), messageExt.getMsgId(), brokerNameToSend); return null; } } else { mqSelected = new MessageQueue(messageExt.getTopic(), brokerNameToSend, messageExt.getQueueId()); } final String brokerAddrToSend = this.brokerController.getTopicRouteInfoManager().findBrokerAddressInPublish(brokerNameToSend); if (null == brokerAddrToSend) { LOG.warn("putMessageToRemoteBroker failed, remote broker address not found. Topic: {}, MsgId: {}, Broker: {}", messageExt.getTopic(), messageExt.getMsgId(), brokerNameToSend); return null; } final long beginTimestamp = System.currentTimeMillis(); try { final SendResult sendResult = this.brokerController.getBrokerOuterAPI().sendMessageToSpecificBroker( brokerAddrToSend, brokerNameToSend, messageToPut, this.getProducerGroup(messageToPut), SEND_TIMEOUT); if (null != sendResult && SendStatus.SEND_OK.equals(sendResult.getSendStatus())) { return sendResult; } else { LOG.error("Escaping failed! cost {}ms, Topic: {}, MsgId: {}, Broker: {}", System.currentTimeMillis() - beginTimestamp, messageExt.getTopic(), messageExt.getMsgId(), brokerNameToSend); } } catch (RemotingException | MQBrokerException e) { LOG.error(String.format("putMessageToRemoteBroker exception, MsgId: %s, RT: %sms, Broker: %s", messageToPut.getMsgId(), System.currentTimeMillis() - beginTimestamp, mqSelected), e); } catch (InterruptedException e) { LOG.error(String.format("putMessageToRemoteBroker interrupted, MsgId: %s, RT: %sms, Broker: %s", messageToPut.getMsgId(), System.currentTimeMillis() - beginTimestamp, mqSelected), e); Thread.currentThread().interrupt(); } return null; }
@Test public void testPutMessageToRemoteBroker_specificBrokerName_addressNotFound() throws Exception { MessageExtBrokerInner message = new MessageExtBrokerInner(); message.setTopic(TEST_TOPIC); TopicPublishInfo publishInfo = mockTopicPublishInfo(BROKER_NAME); when(topicRouteInfoManager.tryToFindTopicPublishInfo(anyString())).thenReturn(publishInfo); escapeBridge.putMessageToRemoteBroker(message, "whatever"); verify(topicRouteInfoManager).findBrokerAddressInPublish(eq("whatever")); verify(brokerOuterAPI, times(0)).sendMessageToSpecificBroker(anyString(), anyString(), any(MessageExtBrokerInner.class), anyString(), anyLong()); }
@Override public Collection<ParameterRewriter> getParameterRewriters() { Collection<ParameterRewriter> result = new LinkedList<>(); addParameterRewriter(result, new EncryptAssignmentParameterRewriter(encryptRule)); addParameterRewriter(result, new EncryptPredicateParameterRewriter(encryptRule)); addParameterRewriter(result, new EncryptInsertValueParameterRewriter(encryptRule)); addParameterRewriter(result, new EncryptInsertOnDuplicateKeyUpdateValueParameterRewriter(encryptRule)); return result; }
@Test void assertGetParameterRewritersWhenPredicateIsNeedRewrite() { EncryptRule encryptRule = mock(EncryptRule.class, RETURNS_DEEP_STUBS); when(encryptRule.findEncryptTable("t_order").isPresent()).thenReturn(true); SelectStatementContext sqlStatementContext = mock(SelectStatementContext.class, RETURNS_DEEP_STUBS); when(sqlStatementContext.getTablesContext().getTableNames()).thenReturn(Collections.singleton("t_order")); Collection<ParameterRewriter> actual = new EncryptParameterRewriterBuilder( encryptRule, DefaultDatabase.LOGIC_NAME, Collections.singletonMap("test", mock(ShardingSphereSchema.class)), sqlStatementContext, Collections.emptyList()).getParameterRewriters(); assertThat(actual.size(), is(1)); assertThat(actual.iterator().next(), instanceOf(EncryptPredicateParameterRewriter.class)); }
public PendingTransactionFilter(Web3j web3j, Callback<String> callback) { super(web3j, callback); }
@Test public void testPendingTransactionFilter() throws Exception { EthLog ethLog = objectMapper.readValue( "{\"jsonrpc\":\"2.0\",\"id\":1,\"result\":[\"0x31c2342b1e0b8ffda1507fbffddf213c4b3c1e819ff6a84b943faabb0ebf2403\",\"0xccc0d2e07c1febcaca0c3341c4e1268204b06fefa4bb0c8c0d693d8e581c82da\"]}", EthLog.class); runTest(ethLog, web3j.ethPendingTransactionHashFlowable()); }
public void forEachInt(final IntConsumer action) { if (sizeOfArrayValues > 0) { final int[] values = this.values; for (final int v : values) { if (MISSING_VALUE != v) { action.accept(v); } } } if (containsMissingValue) { action.accept(MISSING_VALUE); } }
@Test void forEachIntIsANoOpIfTheSetIsEmpty() { final IntConsumer consumer = mock(IntConsumer.class); testSet.forEachInt(consumer); verifyNoInteractions(consumer); }
@Activate public void activate(ComponentContext context) { cfgService.registerProperties(getClass()); alarmsExecutor = newScheduledThreadPool(CORE_POOL_SIZE, groupedThreads("onos/pollingalarmprovider", "alarm-executor-%d", log)); eventHandlingExecutor = Executors.newFixedThreadPool(CORE_POOL_SIZE, groupedThreads("onos/pollingalarmprovider", "device-installer-%d", log)); providerService = providerRegistry.register(this); deviceService.addListener(deviceListener); mastershipService.addListener(mastershipListener); if (context == null) { alarmPollFrequencySeconds = POLL_FREQUENCY_SECONDS_DEFAULT; log.info("No component configuration"); } else { Dictionary<?, ?> properties = context.getProperties(); alarmPollFrequencySeconds = getNewPollFrequency(properties, alarmPollFrequencySeconds); } scheduledTask = schedulePolling(); log.info("Started"); }
@Test public void activate() throws Exception { assertFalse("Provider should be registered", providerRegistry.getProviders().contains(provider.id())); assertEquals("Device listener should be added", 1, deviceListeners.size()); assertEquals("Incorrect alarm provider service", alarmProviderService, provider.providerService); assertEquals("Mastership listener should be added", 1, mastershipListeners.size()); assertEquals("Incorrect polling frequency", 1, provider.alarmPollFrequencySeconds); assertFalse("Executor should be running", provider.alarmsExecutor.isShutdown()); provider.activate(null); assertEquals("Incorrect polling frequency, should be default", 60, provider.alarmPollFrequencySeconds); }
@GetMapping("/price") public String getPrice() { return priceService.getPrice(); }
@Test void getPriceTest() { var priceController = new PriceController(new PriceServiceImpl()); var price = priceController.getPrice(); assertEquals("20", price); }
@Override public void define(WebService.NewController controller) { controller .createAction(VALIDATION_INIT_KEY) .setInternal(true) .setPost(false) .setHandler(ServletFilterHandler.INSTANCE) .setDescription("Initiate a SAML request to the identity Provider for configuration validation purpose.") .setContentType(Response.ContentType.NO_CONTENT) .setSince("9.7"); }
@Test public void verify_definition() { String controllerKey = "foo"; WebService.Context context = new WebService.Context(); WebService.NewController newController = context.createController(controllerKey); underTest.define(newController); newController.done(); WebService.Action validationInitAction = context.controller(controllerKey).action("validation_init"); assertThat(validationInitAction).isNotNull(); assertThat(validationInitAction.description()).isNotEmpty(); assertThat(validationInitAction.handler()).isNotNull(); }
@Override public boolean publishConfig(String dataId, String group, String content) throws NacosException { return publishConfig(dataId, group, content, ConfigType.getDefaultType().getType()); }
@Test void testPublishConfig() throws NacosException { String dataId = "1"; String group = "2"; String content = "123"; String namespace = ""; String type = ConfigType.getDefaultType().getType(); Mockito.when(mockWoker.publishConfig(dataId, group, namespace, null, null, null, content, "", null, type)).thenReturn(true); final boolean b = nacosConfigService.publishConfig(dataId, group, content); assertTrue(b); Mockito.verify(mockWoker, Mockito.times(1)).publishConfig(dataId, group, namespace, null, null, null, content, "", null, type); }
public static Tags empty() { return new Tags(Set.of()); }
@Test public void testEmpty() { assertEquals(Tags.empty(), Tags.fromString(null)); assertEquals(Tags.empty(), Tags.fromString("")); assertEquals(Tags.empty(), Tags.fromString(" ")); }
@Override public void execute(String commandName, BufferedReader reader, BufferedWriter writer) throws Py4JException, IOException { String subCommand = safeReadLine(reader); boolean unknownSubCommand = false; String param = reader.readLine(); String returnCommand = null; try { final String[] names; if (subCommand.equals(DIR_FIELDS_SUBCOMMAND_NAME)) { Object targetObject = gateway.getObject(param); names = reflectionEngine.getPublicFieldNames(targetObject); } else if (subCommand.equals(DIR_METHODS_SUBCOMMAND_NAME)) { Object targetObject = gateway.getObject(param); names = reflectionEngine.getPublicMethodNames(targetObject); } else if (subCommand.equals(DIR_STATIC_SUBCOMMAND_NAME)) { Class<?> clazz = TypeUtil.forName(param); names = reflectionEngine.getPublicStaticNames(clazz); } else if (subCommand.equals(DIR_JVMVIEW_SUBCOMMAND_NAME)) { names = getJvmViewNames(param, reader); } else { names = null; unknownSubCommand = true; } // Read and discard end of command reader.readLine(); if (unknownSubCommand) { returnCommand = Protocol.getOutputErrorCommand("Unknown Array SubCommand Name: " + subCommand); } else if (names == null) { ReturnObject returnObject = gateway.getReturnObject(null); returnCommand = Protocol.getOutputCommand(returnObject); } else { StringBuilder namesJoinedBuilder = new StringBuilder(); for (String name : names) { namesJoinedBuilder.append(name); namesJoinedBuilder.append("\n"); } final String namesJoined; if (namesJoinedBuilder.length() > 0) { namesJoined = namesJoinedBuilder.substring(0, namesJoinedBuilder.length() - 1); } else { namesJoined = ""; } ReturnObject returnObject = gateway.getReturnObject(namesJoined); returnCommand = Protocol.getOutputCommand(returnObject); } } catch (Exception e) { logger.log(Level.FINEST, "Error in a dir subcommand", e); returnCommand = Protocol.getOutputErrorCommand(); } logger.finest("Returning command: " + returnCommand); writer.write(returnCommand); writer.flush(); }
@Test public void testDirMethods() throws Exception { String inputCommand = "m\n" + target + "\ne\n"; assertTrue(gateway.getBindings().containsKey(target)); command.execute("d", new BufferedReader(new StringReader(inputCommand)), writer); Set<String> methods = convertResponse(sWriter.toString()); assertEquals(ExampleClassMethods, methods); }
@Override public Result analysis( final Result result, final StreamAccessLogsMessage.Identifier identifier, final HTTPAccessLogEntry entry, final Role role ) { switch (role) { case PROXY: return analyzeProxy(result, entry); case SIDECAR: if (result.hasResult()) { return result; } return analyzeSideCar(result, entry); } return Result.builder().build(); }
@Test public void testIngress2SidecarMetric() throws IOException { try (InputStreamReader isr = new InputStreamReader(getResourceAsStream("envoy-ingress2sidecar.msg"))) { StreamAccessLogsMessage.Builder requestBuilder = StreamAccessLogsMessage.newBuilder(); JsonFormat.parser().merge(isr, requestBuilder); AccessLogAnalyzer.Result result = this.analysis.analysis(AccessLogAnalyzer.Result.builder().build(), requestBuilder.getIdentifier(), requestBuilder.getHttpLogs().getLogEntry(0), Role.SIDECAR); Assertions.assertEquals(1, result.getMetrics().getHttpMetrics().getMetricsCount()); HTTPServiceMeshMetric incoming = result.getMetrics().getHttpMetrics().getMetrics(0); Assertions.assertEquals("", incoming.getSourceServiceName()); Assertions.assertEquals("productpage", incoming.getDestServiceName()); Assertions.assertEquals(DetectPoint.server, incoming.getDetectPoint()); } }
@VisibleForTesting static Optional<String> getChildValue(@Nullable Xpp3Dom dom, String... childNodePath) { if (dom == null) { return Optional.empty(); } Xpp3Dom node = dom; for (String child : childNodePath) { node = node.getChild(child); if (node == null) { return Optional.empty(); } } return Optional.ofNullable(node.getValue()); }
@Test public void testGetChildValue_nullValue() { Xpp3Dom root = new Xpp3Dom("root"); addXpp3DomChild(root, "foo", null); assertThat(MavenProjectProperties.getChildValue(root)).isEmpty(); assertThat(MavenProjectProperties.getChildValue(root, "foo")).isEmpty(); }
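A complementary sketch for the same helper, exercising the null-dom and missing-child branches visible in the focal method; it mirrors the assertThat(...).isEmpty() pattern already used in the test above, and the child names are illustrative only.

@Test
public void testGetChildValue_missingChildAndNullDom() {
    // a null dom short-circuits to Optional.empty()
    assertThat(MavenProjectProperties.getChildValue(null, "foo")).isEmpty();
    // a child that does not exist along the path also yields Optional.empty()
    Xpp3Dom root = new Xpp3Dom("root");
    assertThat(MavenProjectProperties.getChildValue(root, "missing", "deeper")).isEmpty();
}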
@Override public void updateUserPassword(Long id, UserProfileUpdatePasswordReqVO reqVO) { // Validate the old password validateOldPassword(id, reqVO.getOldPassword()); // Perform the update AdminUserDO updateObj = new AdminUserDO().setId(id); updateObj.setPassword(encodePassword(reqVO.getNewPassword())); // encrypt the password userMapper.updateById(updateObj); }
@Test public void testUpdateUserPassword02_success() { // mock data AdminUserDO dbUser = randomAdminUserDO(); userMapper.insert(dbUser); // prepare parameters Long userId = dbUser.getId(); String password = "yudao"; // mock method when(passwordEncoder.encode(anyString())).then( (Answer<String>) invocationOnMock -> "encode:" + invocationOnMock.getArgument(0)); // invoke userService.updateUserPassword(userId, password); // assert AdminUserDO user = userMapper.selectById(userId); assertEquals("encode:" + password, user.getPassword()); }
public StatisticRange addAndMaxDistinctValues(StatisticRange other) { double newDistinctValues = max(distinctValues, other.distinctValues); return expandRangeWithNewDistinct(newDistinctValues, other); }
@Test public void testAddAndMaxDistinctValues() { assertEquals(unboundedRange(NaN).addAndMaxDistinctValues(unboundedRange(NaN)), unboundedRange(NaN)); assertEquals(unboundedRange(NaN).addAndMaxDistinctValues(unboundedRange(1)), unboundedRange(NaN)); assertEquals(unboundedRange(1).addAndMaxDistinctValues(unboundedRange(NaN)), unboundedRange(NaN)); assertEquals(unboundedRange(1).addAndMaxDistinctValues(unboundedRange(2)), unboundedRange(2)); assertEquals(StatisticRange.empty().addAndMaxDistinctValues(StatisticRange.empty()), StatisticRange.empty()); assertEquals(range(0, 1, 1).addAndMaxDistinctValues(StatisticRange.empty()), range(0, 1, 1)); assertEquals(range(0, 1, 1).addAndMaxDistinctValues(range(1, 2, 1)), range(0, 2, 1)); }
@Override public void exportData(JsonWriter writer) throws IOException { // version tag at the root writer.name(THIS_VERSION); writer.beginObject(); // clients list writer.name(CLIENTS); writer.beginArray(); writeClients(writer); writer.endArray(); writer.name(GRANTS); writer.beginArray(); writeGrants(writer); writer.endArray(); writer.name(WHITELISTEDSITES); writer.beginArray(); writeWhitelistedSites(writer); writer.endArray(); writer.name(BLACKLISTEDSITES); writer.beginArray(); writeBlacklistedSites(writer); writer.endArray(); writer.name(AUTHENTICATIONHOLDERS); writer.beginArray(); writeAuthenticationHolders(writer); writer.endArray(); writer.name(ACCESSTOKENS); writer.beginArray(); writeAccessTokens(writer); writer.endArray(); writer.name(REFRESHTOKENS); writer.beginArray(); writeRefreshTokens(writer); writer.endArray(); writer.name(SYSTEMSCOPES); writer.beginArray(); writeSystemScopes(writer); writer.endArray(); for (MITREidDataServiceExtension extension : extensions) { if (extension.supportsVersion(THIS_VERSION)) { extension.exportExtensionData(writer); break; } } writer.endObject(); // end mitreid-connect-1.3 }
@Test public void testExportGrants() throws IOException, ParseException { Date creationDate1 = formatter.parse("2014-09-10T22:49:44.090+00:00", Locale.ENGLISH); Date accessDate1 = formatter.parse("2014-09-10T23:49:44.090+00:00", Locale.ENGLISH); OAuth2AccessTokenEntity mockToken1 = mock(OAuth2AccessTokenEntity.class); when(mockToken1.getId()).thenReturn(1L); ApprovedSite site1 = new ApprovedSite(); site1.setId(1L); site1.setClientId("foo"); site1.setCreationDate(creationDate1); site1.setAccessDate(accessDate1); site1.setUserId("user1"); site1.setAllowedScopes(ImmutableSet.of("openid", "phone")); when(mockToken1.getApprovedSite()).thenReturn(site1); Date creationDate2 = formatter.parse("2014-09-11T18:49:44.090+00:00", Locale.ENGLISH); Date accessDate2 = formatter.parse("2014-09-11T20:49:44.090+00:00", Locale.ENGLISH); Date timeoutDate2 = formatter.parse("2014-10-01T20:49:44.090+00:00", Locale.ENGLISH); ApprovedSite site2 = new ApprovedSite(); site2.setId(2L); site2.setClientId("bar"); site2.setCreationDate(creationDate2); site2.setAccessDate(accessDate2); site2.setUserId("user2"); site2.setAllowedScopes(ImmutableSet.of("openid", "offline_access", "email", "profile")); site2.setTimeoutDate(timeoutDate2); Set<ApprovedSite> allApprovedSites = ImmutableSet.of(site1, site2); Mockito.when(clientRepository.getAllClients()).thenReturn(new HashSet<ClientDetailsEntity>()); Mockito.when(approvedSiteRepository.getAll()).thenReturn(allApprovedSites); Mockito.when(blSiteRepository.getAll()).thenReturn(new HashSet<BlacklistedSite>()); Mockito.when(wlSiteRepository.getAll()).thenReturn(new HashSet<WhitelistedSite>()); Mockito.when(authHolderRepository.getAll()).thenReturn(new ArrayList<AuthenticationHolderEntity>()); Mockito.when(tokenRepository.getAllAccessTokens()).thenReturn(new HashSet<OAuth2AccessTokenEntity>()); Mockito.when(tokenRepository.getAllRefreshTokens()).thenReturn(new HashSet<OAuth2RefreshTokenEntity>()); Mockito.when(sysScopeRepository.getAll()).thenReturn(new HashSet<SystemScope>()); // do the data export StringWriter stringWriter = new StringWriter(); JsonWriter writer = new JsonWriter(stringWriter); writer.beginObject(); dataService.exportData(writer); writer.endObject(); writer.close(); // parse the output as a JSON object for testing JsonElement elem = new JsonParser().parse(stringWriter.toString()); JsonObject root = elem.getAsJsonObject(); // make sure the root is there assertThat(root.has(MITREidDataService.MITREID_CONNECT_1_3), is(true)); JsonObject config = root.get(MITREidDataService.MITREID_CONNECT_1_3).getAsJsonObject(); // make sure all the root elements are there assertThat(config.has(MITREidDataService.CLIENTS), is(true)); assertThat(config.has(MITREidDataService.GRANTS), is(true)); assertThat(config.has(MITREidDataService.WHITELISTEDSITES), is(true)); assertThat(config.has(MITREidDataService.BLACKLISTEDSITES), is(true)); assertThat(config.has(MITREidDataService.REFRESHTOKENS), is(true)); assertThat(config.has(MITREidDataService.ACCESSTOKENS), is(true)); assertThat(config.has(MITREidDataService.SYSTEMSCOPES), is(true)); assertThat(config.has(MITREidDataService.AUTHENTICATIONHOLDERS), is(true)); // make sure the root elements are all arrays assertThat(config.get(MITREidDataService.CLIENTS).isJsonArray(), is(true)); assertThat(config.get(MITREidDataService.GRANTS).isJsonArray(), is(true)); assertThat(config.get(MITREidDataService.WHITELISTEDSITES).isJsonArray(), is(true)); assertThat(config.get(MITREidDataService.BLACKLISTEDSITES).isJsonArray(), is(true)); 
assertThat(config.get(MITREidDataService.REFRESHTOKENS).isJsonArray(), is(true)); assertThat(config.get(MITREidDataService.ACCESSTOKENS).isJsonArray(), is(true)); assertThat(config.get(MITREidDataService.SYSTEMSCOPES).isJsonArray(), is(true)); assertThat(config.get(MITREidDataService.AUTHENTICATIONHOLDERS).isJsonArray(), is(true)); // check our scope list (this test) JsonArray sites = config.get(MITREidDataService.GRANTS).getAsJsonArray(); assertThat(sites.size(), is(2)); // check for both of our sites in turn Set<ApprovedSite> checked = new HashSet<>(); for (JsonElement e : sites) { assertThat(e.isJsonObject(), is(true)); JsonObject site = e.getAsJsonObject(); ApprovedSite compare = null; if (site.get("id").getAsLong() == site1.getId().longValue()) { compare = site1; } else if (site.get("id").getAsLong() == site2.getId().longValue()) { compare = site2; } if (compare == null) { fail("Could not find matching whitelisted site id: " + site.get("id").getAsString()); } else { assertThat(site.get("clientId").getAsString(), equalTo(compare.getClientId())); assertThat(site.get("creationDate").getAsString(), equalTo(formatter.print(compare.getCreationDate(), Locale.ENGLISH))); assertThat(site.get("accessDate").getAsString(), equalTo(formatter.print(compare.getAccessDate(), Locale.ENGLISH))); if(site.get("timeoutDate").isJsonNull()) { assertNull(compare.getTimeoutDate()); } else { assertThat(site.get("timeoutDate").getAsString(), equalTo(formatter.print(compare.getTimeoutDate(), Locale.ENGLISH))); } assertThat(site.get("userId").getAsString(), equalTo(compare.getUserId())); assertThat(jsonArrayToStringSet(site.getAsJsonArray("allowedScopes")), equalTo(compare.getAllowedScopes())); checked.add(compare); } } // make sure all of our clients were found assertThat(checked.containsAll(allApprovedSites), is(true)); }
@Override public Iterator<E> iterator() { return Iterators.transform(items.iterator(), serializer::decode); }
@Test public void testIterator() throws Exception { //Test iterator behavior (no order guarantees are made) Set<Integer> validationSet = Sets.newHashSet(); fillSet(10, this.set); fillSet(10, validationSet); set.iterator().forEachRemaining(item -> assertTrue("Items were mismatched.", validationSet.remove(item))); //All values should have been seen and removed assertTrue("All entries in the validation set should have been removed.", validationSet.isEmpty()); }
public static <T> PTransform<PCollection<T>, PCollection<KV<T, Long>>> perElement() { return new PerElement<>(); }
@Test @Category(NeedsRunner.class) @SuppressWarnings("unchecked") public void testCountPerElementBasic() { PCollection<String> input = p.apply(Create.of(WORDS)); PCollection<KV<String, Long>> output = input.apply(Count.perElement()); PAssert.that(output) .containsInAnyOrder( KV.of("hi", 4L), KV.of("there", 1L), KV.of("sue", 2L), KV.of("bob", 2L), KV.of("", 3L), KV.of("ZOW", 1L)); p.run(); }
public IndexerDirectoryInformation parse(Path path) { if (!Files.exists(path)) { throw new IndexerInformationParserException("Path " + path + " does not exist."); } if (!Files.isDirectory(path)) { throw new IndexerInformationParserException("Path " + path + " is not a directory"); } if (!Files.isReadable(path)) { throw new IndexerInformationParserException("Path " + path + " is not readable"); } final Path nodesPath = path.resolve("nodes"); if (!Files.exists(nodesPath)) { return IndexerDirectoryInformation.empty(path); } try (final Stream<Path> nodes = Files.list(nodesPath)) { final List<NodeInformation> nodeInformation = nodes.filter(Files::isDirectory) .filter(p -> p.getFileName().toString().matches("\\d+")) .map(this::parseNode) .filter(node -> !node.isEmpty()) .toList(); return new IndexerDirectoryInformation(path, nodeInformation); } catch (IOException e) { throw new IndexerInformationParserException("Failed to list nodes", e); } }
@Test void testEmptyDataDir(@TempDir Path tempDir) { final IndexerDirectoryInformation result = parser.parse(tempDir); Assertions.assertThat(result).isNotNull(); Assertions.assertThat(result.nodes()).isEmpty(); }
public ApplicationBuilder addRegistries(List<? extends RegistryConfig> registries) { if (this.registries == null) { this.registries = new ArrayList<>(); } this.registries.addAll(registries); return getThis(); }
@Test void addRegistries() { RegistryConfig registry = new RegistryConfig(); ApplicationBuilder builder = new ApplicationBuilder(); builder.addRegistries(Collections.singletonList(registry)); Assertions.assertNotNull(builder.build().getRegistry()); Assertions.assertEquals(1, builder.build().getRegistries().size()); Assertions.assertSame(registry, builder.build().getRegistry()); }
@Override public <VAgg> KTable<K, VAgg> aggregate(final Initializer<VAgg> initializer, final Aggregator<? super K, ? super V, VAgg> adder, final Aggregator<? super K, ? super V, VAgg> subtractor, final Materialized<K, VAgg, KeyValueStore<Bytes, byte[]>> materialized) { return aggregate(initializer, adder, subtractor, NamedInternal.empty(), materialized); }
@Test public void shouldThrowNullPointerOnAggregateWhenAdderIsNull() { assertThrows(NullPointerException.class, () -> groupedTable.aggregate( MockInitializer.STRING_INIT, null, MockAggregator.TOSTRING_REMOVER, Materialized.as("store"))); }
@Override public void login(final LoginCallback prompt, final CancelCallback cancel) throws BackgroundException { final Credentials credentials = authentication.get(); if(credentials.isAnonymousLogin()) { if(log.isDebugEnabled()) { log.debug(String.format("Connect with no credentials to %s", host)); } client.setProviderCredentials(null); } else { if(credentials.getTokens().validate()) { if(log.isDebugEnabled()) { log.debug(String.format("Connect with session credentials to %s", host)); } client.setProviderCredentials(new AWSSessionCredentials( credentials.getTokens().getAccessKeyId(), credentials.getTokens().getSecretAccessKey(), credentials.getTokens().getSessionToken())); } else { if(log.isDebugEnabled()) { log.debug(String.format("Connect with basic credentials to %s", host)); } client.setProviderCredentials(new AWSCredentials(credentials.getUsername(), credentials.getPassword())); } } if(host.getCredentials().isPassed()) { log.warn(String.format("Skip verifying credentials with previous successful authentication event for %s", this)); return; } try { final Path home = new DelegatingHomeFeature(new DefaultPathHomeFeature(host)).find(); final Location.Name location = new S3LocationFeature(S3Session.this, regions).getLocation(home); if(log.isDebugEnabled()) { log.debug(String.format("Retrieved region %s", location)); } if(!Location.unknown.equals(location)) { if(log.isDebugEnabled()) { log.debug(String.format("Set default region to %s determined from %s", location, home)); } // host.setProperty("s3.location", location.getIdentifier()); } } catch(AccessDeniedException | InteroperabilityException e) { log.warn(String.format("Failure %s querying region", e)); final Path home = new DefaultHomeFinderService(this).find(); if(log.isDebugEnabled()) { log.debug(String.format("Retrieved %s", home)); } } }
@Test public void testConnectDefaultPath() throws Exception { final ProtocolFactory factory = new ProtocolFactory(new HashSet<>(Collections.singleton(new S3Protocol()))); final Profile profile = new ProfilePlistReader(factory).read( this.getClass().getResourceAsStream("/S3 (HTTPS).cyberduckprofile")); final Host host = new Host(profile, profile.getDefaultHostname(), new Credentials( PROPERTIES.get("s3.key"), PROPERTIES.get("s3.secret") )); host.setDefaultPath("/test-eu-west-1-cyberduck"); final S3Session session = new S3Session(host); session.open(new DisabledProxyFinder(), new DisabledHostKeyCallback(), new DisabledLoginCallback(), new DisabledCancelCallback()); session.login(new DisabledLoginCallback(), new DisabledCancelCallback()); session.close(); }
public static AggregateFunctionInitArguments createAggregateFunctionInitArgs( final int numInitArgs, final FunctionCall functionCall ) { return createAggregateFunctionInitArgs( numInitArgs, Collections.emptyList(), functionCall, KsqlConfig.empty() ); }
@Test public void shouldNotThrowIfSecondParamIsColArgAndIsNotALiteral() { // Given: when(functionCall.getArguments()).thenReturn(ImmutableList.of( new UnqualifiedColumnReferenceExp(ColumnName.of("Bob")), new UnqualifiedColumnReferenceExp(ColumnName.of("Col2")), new StringLiteral("No issue here") )); // When: UdafUtil.createAggregateFunctionInitArgs( Math.max(0, functionCall.getArguments().size() - 2), Arrays.asList(0, 1), functionCall, KsqlConfig.empty() ); // Then: did not throw. }
@Override public EncodedMessage transform(ActiveMQMessage message) throws Exception { if (message == null) { return null; } long messageFormat = 0; Header header = null; Properties properties = null; Map<Symbol, Object> daMap = null; Map<Symbol, Object> maMap = null; Map<String,Object> apMap = null; Map<Object, Object> footerMap = null; Section body = convertBody(message); if (message.isPersistent()) { if (header == null) { header = new Header(); } header.setDurable(true); } byte priority = message.getPriority(); if (priority != Message.DEFAULT_PRIORITY) { if (header == null) { header = new Header(); } header.setPriority(UnsignedByte.valueOf(priority)); } String type = message.getType(); if (type != null) { if (properties == null) { properties = new Properties(); } properties.setSubject(type); } MessageId messageId = message.getMessageId(); if (messageId != null) { if (properties == null) { properties = new Properties(); } properties.setMessageId(getOriginalMessageId(message)); } ActiveMQDestination destination = message.getDestination(); if (destination != null) { if (properties == null) { properties = new Properties(); } properties.setTo(destination.getQualifiedName()); if (maMap == null) { maMap = new HashMap<>(); } maMap.put(JMS_DEST_TYPE_MSG_ANNOTATION, destinationType(destination)); } ActiveMQDestination replyTo = message.getReplyTo(); if (replyTo != null) { if (properties == null) { properties = new Properties(); } properties.setReplyTo(replyTo.getQualifiedName()); if (maMap == null) { maMap = new HashMap<>(); } maMap.put(JMS_REPLY_TO_TYPE_MSG_ANNOTATION, destinationType(replyTo)); } String correlationId = message.getCorrelationId(); if (correlationId != null) { if (properties == null) { properties = new Properties(); } try { properties.setCorrelationId(AMQPMessageIdHelper.INSTANCE.toIdObject(correlationId)); } catch (AmqpProtocolException e) { properties.setCorrelationId(correlationId); } } long expiration = message.getExpiration(); if (expiration != 0) { long ttl = expiration - System.currentTimeMillis(); if (ttl < 0) { ttl = 1; } if (header == null) { header = new Header(); } header.setTtl(new UnsignedInteger((int) ttl)); if (properties == null) { properties = new Properties(); } properties.setAbsoluteExpiryTime(new Date(expiration)); } long timeStamp = message.getTimestamp(); if (timeStamp != 0) { if (properties == null) { properties = new Properties(); } properties.setCreationTime(new Date(timeStamp)); } // JMSX Message Properties int deliveryCount = message.getRedeliveryCounter(); if (deliveryCount > 0) { if (header == null) { header = new Header(); } header.setDeliveryCount(UnsignedInteger.valueOf(deliveryCount)); } String userId = message.getUserID(); if (userId != null) { if (properties == null) { properties = new Properties(); } properties.setUserId(new Binary(userId.getBytes(StandardCharsets.UTF_8))); } String groupId = message.getGroupID(); if (groupId != null) { if (properties == null) { properties = new Properties(); } properties.setGroupId(groupId); } int groupSequence = message.getGroupSequence(); if (groupSequence > 0) { if (properties == null) { properties = new Properties(); } properties.setGroupSequence(UnsignedInteger.valueOf(groupSequence)); } final Map<String, Object> entries; try { entries = message.getProperties(); } catch (IOException e) { throw JMSExceptionSupport.create(e); } for (Map.Entry<String, Object> entry : entries.entrySet()) { String key = entry.getKey(); Object value = entry.getValue(); if (key.startsWith(JMS_AMQP_PREFIX)) { if 
(key.startsWith(NATIVE, JMS_AMQP_PREFIX_LENGTH)) { // skip transformer appended properties continue; } else if (key.startsWith(ORIGINAL_ENCODING, JMS_AMQP_PREFIX_LENGTH)) { // skip transformer appended properties continue; } else if (key.startsWith(MESSAGE_FORMAT, JMS_AMQP_PREFIX_LENGTH)) { messageFormat = (long) TypeConversionSupport.convert(entry.getValue(), Long.class); continue; } else if (key.startsWith(HEADER, JMS_AMQP_PREFIX_LENGTH)) { if (header == null) { header = new Header(); } continue; } else if (key.startsWith(PROPERTIES, JMS_AMQP_PREFIX_LENGTH)) { if (properties == null) { properties = new Properties(); } continue; } else if (key.startsWith(MESSAGE_ANNOTATION_PREFIX, JMS_AMQP_PREFIX_LENGTH)) { if (maMap == null) { maMap = new HashMap<>(); } String name = key.substring(JMS_AMQP_MESSAGE_ANNOTATION_PREFIX.length()); maMap.put(Symbol.valueOf(name), value); continue; } else if (key.startsWith(FIRST_ACQUIRER, JMS_AMQP_PREFIX_LENGTH)) { if (header == null) { header = new Header(); } header.setFirstAcquirer((boolean) TypeConversionSupport.convert(value, Boolean.class)); continue; } else if (key.startsWith(CONTENT_TYPE, JMS_AMQP_PREFIX_LENGTH)) { if (properties == null) { properties = new Properties(); } properties.setContentType(Symbol.getSymbol((String) TypeConversionSupport.convert(value, String.class))); continue; } else if (key.startsWith(CONTENT_ENCODING, JMS_AMQP_PREFIX_LENGTH)) { if (properties == null) { properties = new Properties(); } properties.setContentEncoding(Symbol.getSymbol((String) TypeConversionSupport.convert(value, String.class))); continue; } else if (key.startsWith(REPLYTO_GROUP_ID, JMS_AMQP_PREFIX_LENGTH)) { if (properties == null) { properties = new Properties(); } properties.setReplyToGroupId((String) TypeConversionSupport.convert(value, String.class)); continue; } else if (key.startsWith(DELIVERY_ANNOTATION_PREFIX, JMS_AMQP_PREFIX_LENGTH)) { if (daMap == null) { daMap = new HashMap<>(); } String name = key.substring(JMS_AMQP_DELIVERY_ANNOTATION_PREFIX.length()); daMap.put(Symbol.valueOf(name), value); continue; } else if (key.startsWith(FOOTER_PREFIX, JMS_AMQP_PREFIX_LENGTH)) { if (footerMap == null) { footerMap = new HashMap<>(); } String name = key.substring(JMS_AMQP_FOOTER_PREFIX.length()); footerMap.put(Symbol.valueOf(name), value); continue; } } else if (key.startsWith(AMQ_SCHEDULED_MESSAGE_PREFIX )) { // strip off the scheduled message properties continue; } // The property didn't map into any other slot so we store it in the // Application Properties section of the message. 
if (apMap == null) { apMap = new HashMap<>(); } apMap.put(key, value); int messageType = message.getDataStructureType(); if (messageType == CommandTypes.ACTIVEMQ_MESSAGE) { // Type of command to recognize advisory message Object data = message.getDataStructure(); if(data != null) { apMap.put("ActiveMqDataStructureType", data.getClass().getSimpleName()); } } } final AmqpWritableBuffer buffer = new AmqpWritableBuffer(); encoder.setByteBuffer(buffer); if (header != null) { encoder.writeObject(header); } if (daMap != null) { encoder.writeObject(new DeliveryAnnotations(daMap)); } if (maMap != null) { encoder.writeObject(new MessageAnnotations(maMap)); } if (properties != null) { encoder.writeObject(properties); } if (apMap != null) { encoder.writeObject(new ApplicationProperties(apMap)); } if (body != null) { encoder.writeObject(body); } if (footerMap != null) { encoder.writeObject(new Footer(footerMap)); } return new EncodedMessage(messageFormat, buffer.getArray(), 0, buffer.getArrayLength()); }
@Test public void testConvertCompressedObjectMessageToAmqpMessageWithAmqpValueBody() throws Exception { ActiveMQObjectMessage outbound = createObjectMessage(TEST_OBJECT_VALUE, true); outbound.setShortProperty(JMS_AMQP_ORIGINAL_ENCODING, AMQP_VALUE_BINARY); outbound.onSend(); outbound.storeContent(); JMSMappingOutboundTransformer transformer = new JMSMappingOutboundTransformer(); EncodedMessage encoded = transformer.transform(outbound); assertNotNull(encoded); Message amqp = encoded.decode(); assertNotNull(amqp.getBody()); assertTrue(amqp.getBody() instanceof AmqpValue); assertTrue(((AmqpValue)amqp.getBody()).getValue() instanceof Binary); assertFalse(0 == ((Binary) ((AmqpValue) amqp.getBody()).getValue()).getLength()); Object value = deserialize(((Binary) ((AmqpValue) amqp.getBody()).getValue()).getArray()); assertNotNull(value); assertTrue(value instanceof UUID); }
public ModuleBuilder addRegistry(RegistryConfig registry) { if (this.registries == null) { this.registries = new ArrayList<>(); } this.registries.add(registry); return getThis(); }
@Test void addRegistry() { RegistryConfig registry = new RegistryConfig(); ModuleBuilder builder = ModuleBuilder.newBuilder(); builder.addRegistry(registry); Assertions.assertTrue(builder.build().getRegistries().contains(registry)); Assertions.assertEquals(1, builder.build().getRegistries().size()); }
public static UserGroupInformation getUGIFromSubject(Subject subject) throws IOException { if (subject == null) { throw new KerberosAuthException(SUBJECT_MUST_NOT_BE_NULL); } if (subject.getPrincipals(KerberosPrincipal.class).isEmpty()) { throw new KerberosAuthException(SUBJECT_MUST_CONTAIN_PRINCIPAL); } // null params indicate external subject login. no login context will // be attached. return doSubjectLogin(subject, null); }
@Test (timeout = 30000) public void testGetUGIFromSubject() throws Exception { KerberosPrincipal p = new KerberosPrincipal("guest"); Subject subject = new Subject(); subject.getPrincipals().add(p); UserGroupInformation ugi = UserGroupInformation.getUGIFromSubject(subject); assertNotNull(ugi); assertEquals("guest@DEFAULT.REALM", ugi.getUserName()); }
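A hedged negative-path sketch for the same focal method: per the guard clauses shown above, a null subject and a subject without a KerberosPrincipal should both be rejected with a KerberosAuthException. JUnit 4 style and a static import of Assert.fail are assumed to match the test above.

@Test (timeout = 30000)
public void testGetUGIFromSubjectRejectsInvalidSubjects() throws Exception {
    // a null subject is rejected outright
    try {
        UserGroupInformation.getUGIFromSubject(null);
        fail("Expected KerberosAuthException for null subject");
    } catch (KerberosAuthException e) {
        // expected
    }
    // a subject without a KerberosPrincipal is also rejected
    try {
        UserGroupInformation.getUGIFromSubject(new Subject());
        fail("Expected KerberosAuthException for subject without principal");
    } catch (KerberosAuthException e) {
        // expected
    }
}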
@Override public <T extends State> T state(StateNamespace namespace, StateTag<T> address) { return workItemState.get(namespace, address, StateContexts.nullContext()); }
@Test public void testMapViaMultimapRemoveAndGet() { final String tag = "map"; StateTag<MapState<byte[], Integer>> addr = StateTags.map(tag, ByteArrayCoder.of(), VarIntCoder.of()); MapState<byte[], Integer> mapViaMultiMapState = underTestMapViaMultimap.state(NAMESPACE, addr); final byte[] key = "key".getBytes(StandardCharsets.UTF_8); SettableFuture<Iterable<Integer>> future = SettableFuture.create(); when(mockReader.multimapFetchSingleEntryFuture( encodeWithCoder(key, ByteArrayCoder.of()), key(NAMESPACE, tag), STATE_FAMILY, VarIntCoder.of())) .thenReturn(future); ReadableState<Integer> result1 = mapViaMultiMapState.get(key).readLater(); ReadableState<Integer> result2 = mapViaMultiMapState.get(dup(key)).readLater(); waitAndSet(future, Collections.singletonList(1), 30); assertEquals(Integer.valueOf(1), result1.read()); mapViaMultiMapState.remove(key); assertNull(mapViaMultiMapState.get(dup(key)).read()); assertNull(result2.read()); }
public static String getStringInMillis(final Duration duration) { return duration.toMillis() + TimeUnit.MILLISECONDS.labels.get(0); }
@Test void testGetStringInMillis() { assertThat(TimeUtils.getStringInMillis(Duration.ofMillis(4567L))).isEqualTo("4567ms"); assertThat(TimeUtils.getStringInMillis(Duration.ofSeconds(4567L))).isEqualTo("4567000ms"); assertThat(TimeUtils.getStringInMillis(Duration.of(4567L, ChronoUnit.MICROS))) .isEqualTo("4ms"); }
TeamsMessage createTeamsMessage(EventNotificationContext ctx, TeamsEventNotificationConfig config) throws PermanentEventNotificationException { String messageTitle = buildDefaultMessage(ctx); String description = buildMessageDescription(ctx); String customMessage = null; String template = config.customMessage(); String summary = ctx.eventDefinition().map(EventDefinitionDto::title).orElse("Graylog Event"); if (!isNullOrEmpty(template)) { customMessage = buildCustomMessage(ctx, config, template); } TeamsMessage.Sections section = TeamsMessage.Sections.builder() .activityImage(config.iconUrl()) .activitySubtitle(description) .text(customMessage) .build(); return TeamsMessage.builder() .color(config.color()) .text(messageTitle) .summary(summary) .sections(Collections.singleton(section)) .build(); }
@Test public void createTeamsMessage() throws EventNotificationException { String expectedText = "**Alert Event Definition Test Title triggered:**\n"; String expectedSubtitle = "_Event Definition Test Description_"; TeamsMessage actual = teamsEventNotification.createTeamsMessage(eventNotificationContext, teamsEventNotificationConfig); assertThat(actual.type()).isEqualTo(TeamsMessage.VALUE_TYPE); assertThat(actual.context()).isEqualTo(TeamsMessage.VALUE_CONTEXT); assertThat(actual.color()).isEqualTo(expectedColor); assertThat(actual.text()).isEqualTo(expectedText); assertThat(actual.sections().size()).isEqualTo(1); TeamsMessage.Sections section = actual.sections().iterator().next(); assertThat(section.activitySubtitle()).isEqualTo(expectedSubtitle); assertThat(section.activityImage()).isEqualTo(expectedImage); assertThat(section.text().contains("a custom message")).isTrue(); }
@Around("pageableCut()") public Object mapperAround(final ProceedingJoinPoint point) { // CHECKSTYLE:OFF try { Object query = point.getArgs()[0]; PageParameter pageParameter = (PageParameter) ReflectUtils.getFieldValue(query, "pageParameter"); if (Objects.isNull(pageParameter)) { return point.proceed(); } Page<?> page = PageMethod.startPage(pageParameter.getCurrentPage(), pageParameter.getPageSize()); Object proceed = point.proceed(); CommonPager<?> commonPager = (CommonPager<?>) proceed; PageParameter result = commonPager.getPage(); result = convert(page, result); commonPager.setPage(result); return proceed; } catch (Throwable throwable) { PageMethod.clearPage(); throw new ShenyuException(throwable); } // CHECKSTYLE:ON }
@Test public void testMapperAround() { MetaDataQuery metaDataQuery = new MetaDataQuery(); PageParameter pageParameter = new PageParameter(); MetaDataQuery[] metaDataQueries = {metaDataQuery}; /** * test null return. */ when(point.getArgs()).thenReturn(metaDataQueries); Object result = pageableAspect.mapperAround(point); assertTrue(Objects.isNull(result)); /** * test thrown Exception. */ metaDataQuery.setPageParameter(pageParameter); when(point.getArgs()).thenReturn(metaDataQueries); boolean thrown = false; try { pageableAspect.mapperAround(point); } catch (ShenyuException e) { thrown = true; } assertTrue(thrown); /** * test return proceed. */ CommonPager commonPager = mock(CommonPager.class); metaDataQuery.setPageParameter(pageParameter); when(point.getArgs()).thenReturn(metaDataQueries); pageParameter.setCurrentPage(1); pageParameter.setPageSize(50); pageParameter.setOffset(50); try { when(point.proceed()).thenReturn(commonPager); } catch (Throwable throwable) { throw new ShenyuException(throwable); } PageParameter newPageParameter = new PageParameter(); when(commonPager.getPage()).thenReturn(newPageParameter); CommonPager resultCommonPager = (CommonPager) pageableAspect.mapperAround(point); assertEquals(pageParameter, newPageParameter); assertEquals(pageParameter, resultCommonPager.getPage()); }
@Override public <VO, VR> KStream<K, VR> leftJoin(final KStream<K, VO> otherStream, final ValueJoiner<? super V, ? super VO, ? extends VR> joiner, final JoinWindows windows) { return leftJoin(otherStream, toValueJoinerWithKey(joiner), windows); }
@Test public void shouldNotAllowNullValueJoinerWithKeyOnTableLeftJoin() { final NullPointerException exception = assertThrows( NullPointerException.class, () -> testStream.leftJoin(testTable, (ValueJoinerWithKey<? super String, ? super String, ? super String, ?>) null)); assertThat(exception.getMessage(), equalTo("joiner can't be null")); }
public Optional<YamlRuleConfiguration> swapToYamlRuleConfiguration(final Collection<RepositoryTuple> repositoryTuples, final Class<? extends YamlRuleConfiguration> toBeSwappedType) { RepositoryTupleEntity tupleEntity = toBeSwappedType.getAnnotation(RepositoryTupleEntity.class); if (null == tupleEntity) { return Optional.empty(); } return tupleEntity.leaf() ? swapToYamlRuleConfiguration(repositoryTuples, toBeSwappedType, tupleEntity) : swapToYamlRuleConfiguration(repositoryTuples, toBeSwappedType, getFields(toBeSwappedType)); }
@Test void assertSwapToYamlRuleConfigurationWithInvalidNodeYamlRuleConfiguration() { Optional<YamlRuleConfiguration> actual = new RepositoryTupleSwapperEngine().swapToYamlRuleConfiguration( Collections.singleton(new RepositoryTuple("/invalid", "foo")), NodeYamlRuleConfiguration.class); assertFalse(actual.isPresent()); }
public static Statement sanitize( final Statement node, final MetaStore metaStore) { return sanitize(node, metaStore, true); }
@Test public void shouldThrowIfRightJoinSourceDoesNotExist() { // Given: final Statement stmt = givenQuery("SELECT * FROM TEST1 JOIN UNKNOWN" + " ON test1.col1 = UNKNOWN.col1;"); // When: final Exception e = assertThrows( KsqlException.class, () -> AstSanitizer.sanitize(stmt, META_STORE) ); // Then: assertThat(e.getMessage(), containsString( "UNKNOWN does not exist.")); }
@Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ServiceInfo that = (ServiceInfo) o; if (ports != null ? !ports.equals(that.ports) : that.ports != null) return false; if (properties != null ? !properties.equals(that.properties) : that.properties != null) return false; if (!serviceName.equals(that.serviceName)) return false; if (!serviceType.equals(that.serviceType)) return false; if (!configId.equals(that.configId)) return false; if (!hostName.equals(that.hostName)) return false; return true; }
@Test public void testEquals() { String commonConfigId = "common-config-id"; String commonHostName = "common-host"; ServiceInfo a = new ServiceInfo("0", "0", List.of(new PortInfo(33, null)), Map.of("foo", "bar"), commonConfigId, commonHostName); ServiceInfo b = new ServiceInfo("0", "0", List.of(new PortInfo(33, null)), Map.of("foo", "bar"), commonConfigId, commonHostName); ServiceInfo c = new ServiceInfo("0", "0", List.of(new PortInfo(33, null)), Map.of("foo", "baz"), commonConfigId, commonHostName); ServiceInfo d = new ServiceInfo("0", "0", List.of(new PortInfo(33, null)), Map.of("bar", "bar"), commonConfigId, commonHostName); ServiceInfo e = new ServiceInfo("0", "1", List.of(new PortInfo(33, null)), Map.of("foo", "bar"), commonConfigId, commonHostName); ServiceInfo f = new ServiceInfo("1", "0", List.of(new PortInfo(33, null)), Map.of("foo", "bar"), commonConfigId, commonHostName); ServiceInfo g = new ServiceInfo("1", "0", List.of(new PortInfo(33, null)), Map.of("foo", "bar"), "different-config-id", commonHostName); ServiceInfo h = new ServiceInfo("1", "0", List.of(new PortInfo(33, null)), Map.of("foo", "bar"), commonConfigId, "different-host"); assertEquals(a, b); assertNotEquals(a, c); assertNotEquals(a, d); assertNotEquals(a, e); assertNotEquals(a, f); assertNotEquals(a, g); assertNotEquals(a, h); assertNotEquals(c, d); assertNotEquals(c, e); assertNotEquals(c, f); assertNotEquals(c, g); assertNotEquals(c, h); assertNotEquals(d, e); assertNotEquals(d, f); assertNotEquals(d, g); assertNotEquals(d, h); assertNotEquals(e, f); assertNotEquals(e, g); assertNotEquals(e, h); assertNotEquals(f, g); assertNotEquals(f, h); assertNotEquals(g, h); }
@Override public void reportConsumerRunningInfo(TreeMap<String, ConsumerRunningInfo> criTable) { { boolean result = ConsumerRunningInfo.analyzeSubscription(criTable); if (!result) { logger.info(String.format(LOG_NOTIFY + "reportConsumerRunningInfo: ConsumerGroup: %s, Subscription different", criTable .firstEntry().getValue().getProperties().getProperty("consumerGroup"))); } } { Iterator<Entry<String, ConsumerRunningInfo>> it = criTable.entrySet().iterator(); while (it.hasNext()) { Entry<String, ConsumerRunningInfo> next = it.next(); String result = ConsumerRunningInfo.analyzeProcessQueue(next.getKey(), next.getValue()); if (!result.isEmpty()) { logger.info(String.format(LOG_NOTIFY + "reportConsumerRunningInfo: ConsumerGroup: %s, ClientId: %s, %s", criTable.firstEntry().getValue().getProperties().getProperty("consumerGroup"), next.getKey(), result)); } } } }
@Test public void testReportConsumerRunningInfo() { TreeMap<String, ConsumerRunningInfo> criTable = new TreeMap<>(); ConsumerRunningInfo consumerRunningInfo = new ConsumerRunningInfo(); consumerRunningInfo.setSubscriptionSet(new TreeSet<>()); consumerRunningInfo.setStatusTable(new TreeMap<>()); consumerRunningInfo.setSubscriptionSet(new TreeSet<>()); consumerRunningInfo.setMqTable(new TreeMap<>()); consumerRunningInfo.setProperties(new Properties()); criTable.put("test", consumerRunningInfo); defaultMonitorListener.reportConsumerRunningInfo(criTable); }
<T> List<List<T>> computePaths(Translator<Step, T> translator) { List<List<T>> taskPaths = new ArrayList<>(); while (!startNodes.isEmpty()) { Iterator<GraphNode> iterator = startNodes.iterator(); GraphNode start = iterator.next(); iterator.remove(); List<T> path = getPath(start, translator); taskPaths.add(path); } return taskPaths; }
@Test public void testComputePaths() throws Exception { WorkflowCreateRequest request = loadObject( "fixtures/workflows/request/sample-conditional-wf.json", WorkflowCreateRequest.class); WorkflowGraph graph = WorkflowGraph.build( request.getWorkflow(), WorkflowGraph.computeDag(request.getWorkflow(), null, null)); List<List<String>> paths = graph.computePaths(new TestTranslator()); Assert.assertEquals(3, paths.size()); Assert.assertEquals( Arrays.asList("job.1", "job.2", "job.3", "#job.6", "job.6", "job.7"), paths.get(0)); Assert.assertEquals(Collections.singletonList("job.4"), paths.get(1)); Assert.assertEquals(Arrays.asList("#job.5", "job.5"), paths.get(2)); }
public static TbMathArgumentValue fromString(String value) { try { return new TbMathArgumentValue(Double.parseDouble(value)); } catch (NumberFormatException ne) { throw new RuntimeException("Can't convert value '" + value + "' to double!"); } }
@Test public void test_fromString_thenOK() { var value = "5.0"; TbMathArgumentValue result = TbMathArgumentValue.fromString(value); Assertions.assertNotNull(result); Assertions.assertEquals(5.0, result.getValue(), 0d); }
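A complementary negative-path sketch, assuming the same JUnit 5 Assertions API as the test above; the non-numeric input is only an illustrative value.

@Test
public void test_fromString_nonNumeric_thenThrows() {
    var value = "not-a-number";
    // the focal method wraps the NumberFormatException in a RuntimeException
    RuntimeException ex = Assertions.assertThrows(RuntimeException.class,
            () -> TbMathArgumentValue.fromString(value));
    Assertions.assertTrue(ex.getMessage().contains("Can't convert value"));
}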
public static Type convertType(TypeInfo typeInfo) { switch (typeInfo.getOdpsType()) { case BIGINT: return Type.BIGINT; case INT: return Type.INT; case SMALLINT: return Type.SMALLINT; case TINYINT: return Type.TINYINT; case FLOAT: return Type.FLOAT; case DECIMAL: DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo; return ScalarType.createUnifiedDecimalType(decimalTypeInfo.getPrecision(), decimalTypeInfo.getScale()); case DOUBLE: return Type.DOUBLE; case CHAR: CharTypeInfo charTypeInfo = (CharTypeInfo) typeInfo; return ScalarType.createCharType(charTypeInfo.getLength()); case VARCHAR: VarcharTypeInfo varcharTypeInfo = (VarcharTypeInfo) typeInfo; return ScalarType.createVarcharType(varcharTypeInfo.getLength()); case STRING: case JSON: return ScalarType.createDefaultCatalogString(); case BINARY: return Type.VARBINARY; case BOOLEAN: return Type.BOOLEAN; case DATE: return Type.DATE; case TIMESTAMP: case DATETIME: return Type.DATETIME; case MAP: MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo; return new MapType(convertType(mapTypeInfo.getKeyTypeInfo()), convertType(mapTypeInfo.getValueTypeInfo())); case ARRAY: ArrayTypeInfo arrayTypeInfo = (ArrayTypeInfo) typeInfo; return new ArrayType(convertType(arrayTypeInfo.getElementTypeInfo())); case STRUCT: StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo; List<Type> fieldTypeList = structTypeInfo.getFieldTypeInfos().stream().map(EntityConvertUtils::convertType) .collect(Collectors.toList()); return new StructType(fieldTypeList); default: return Type.VARCHAR; } }
@Test public void testConvertTypeCaseTinyint() { TypeInfo typeInfo = TypeInfoFactory.TINYINT; Type result = EntityConvertUtils.convertType(typeInfo); assertEquals(Type.TINYINT, result); }
@Override public BufferedSink writeUtf8(String string) throws IOException { if (closed) throw new IllegalStateException("closed"); buffer.writeUtf8(string); return emitCompleteSegments(); }
@Test public void bytesNotEmittedToSinkWithoutFlush() throws Exception { Buffer sink = new Buffer(); BufferedSink bufferedSink = new RealBufferedSink(sink); bufferedSink.writeUtf8("abc"); assertEquals(0, sink.size()); }
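A hedged sketch of the closed-sink guard in the focal method, assuming RealBufferedSink.close() marks the sink as closed per Okio's Sink contract and that Assert.fail is statically imported as in typical JUnit tests.

@Test
public void writeUtf8OnClosedSinkThrows() throws Exception {
    Buffer sink = new Buffer();
    BufferedSink bufferedSink = new RealBufferedSink(sink);
    bufferedSink.close();
    try {
        bufferedSink.writeUtf8("abc");
        fail();
    } catch (IllegalStateException expected) {
        // the "closed" guard in writeUtf8 triggered as expected
    }
}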
@Override public void configure(Map<String, ?> props) { final SimpleConfig config = new SimpleConfig(CONFIG_DEF, props); casts = parseFieldTypes(config.getList(SPEC_CONFIG)); wholeValueCastType = casts.get(WHOLE_VALUE_CAST); schemaUpdateCache = new SynchronizedCache<>(new LRUCache<>(16)); replaceNullWithDefault = config.getBoolean(REPLACE_NULL_WITH_DEFAULT_CONFIG); }
@Test public void testUnsupportedTargetType() { assertThrows(ConfigException.class, () -> xformKey.configure(Collections.singletonMap(Cast.SPEC_CONFIG, "foo:bytes"))); }
public EvictionConfig getEvictionConfig() { return evictionConfig; }
@Test public void testSetEvictionPolicy() { assertEquals(EvictionPolicy.LRU, new MapConfig().getEvictionConfig() .setEvictionPolicy(EvictionPolicy.LRU) .getEvictionPolicy()); }
@Override public Set<TransferItem> find(final CommandLine input, final TerminalAction action, final Path remote) { if(input.getOptionValues(action.name()).length == 2) { switch(action) { case download: return new DownloadTransferItemFinder().find(input, action, remote); case upload: case synchronize: return new UploadTransferItemFinder().find(input, action, remote); } } else { switch(action) { case upload: case synchronize: return Collections.emptySet(); } } // Relative to current working directory using prefix finder. return Collections.singleton( new TransferItem(remote, LocalFactory.get(prefixer.normalize(remote.getName()))) ); }
@Test public void testNoLocalInOptionsUploadFile() throws Exception { final CommandLineParser parser = new PosixParser(); final CommandLine input = parser.parse(TerminalOptionsBuilder.options(), new String[]{"--upload", "rackspace://cdn.cyberduck.ch/remote"}); final Set<TransferItem> found = new SingleTransferItemFinder().find(input, TerminalAction.upload, new Path("/cdn.cyberduck.ch/remote", EnumSet.of(Path.Type.file))); assertTrue(found.isEmpty()); }
public static void writeText(File target, String text) { target.getParentFile().mkdirs(); GFileUtils.writeFile(text, target); }
@Test public void writesTextOnNonExistentFile() throws IOException { Path tempDirectory = Files.createTempDirectory(getClass().getSimpleName()); File f = tempDirectory.resolve("foo/bar/baz.txt").toFile(); f.delete(); f.getParentFile().delete(); assertFalse(f.exists()); assertFalse(f.getParentFile().exists()); //when IOUtil.writeText(f, "foo"); //then assertEquals(GFileUtils.readFile(f), "foo"); deleteDirectory(tempDirectory); }
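A second sketch for the same utility, assuming GFileUtils.writeFile overwrites existing content rather than appending; deleteDirectory is the helper already used in the test above.

@Test
public void overwritesExistingFile() throws IOException {
    Path tempDirectory = Files.createTempDirectory(getClass().getSimpleName());
    File f = tempDirectory.resolve("existing.txt").toFile();
    IOUtil.writeText(f, "first");
    IOUtil.writeText(f, "second");
    // the second write replaces the first
    assertEquals(GFileUtils.readFile(f), "second");
    deleteDirectory(tempDirectory);
}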
public static String desensitizeSingleKeyword(final boolean desensitized, final String keyWord, final String source, final KeyWordMatch keyWordMatch, final String desensitizedAlg) { if (StringUtils.hasLength(source) && desensitized && keyWordMatch.matches(keyWord)) { return DataDesensitizeFactory.selectDesensitize(source, desensitizedAlg); } else { return source; } }
@Test public void desensitizeSingleKeywordTest() { String noDesensitizedData = DataDesensitizeUtils.desensitizeSingleKeyword(false, "name", JSON_TEXT, keyWordMatch, DataDesensitizeEnum.MD5_ENCRYPT.getDataDesensitizeAlg()); Assertions.assertEquals(JSON_TEXT, noDesensitizedData); String desensitizedData = DataDesensitizeUtils.desensitizeSingleKeyword(true, "name", JSON_TEXT, keyWordMatch, DataDesensitizeEnum.MD5_ENCRYPT.getDataDesensitizeAlg()); Assertions.assertEquals(DigestUtils.md5Hex(JSON_TEXT), desensitizedData); }
public static Properties updateSplitSchema(Properties splitSchema, List<HiveColumnHandle> columns) { requireNonNull(splitSchema, "splitSchema is null"); requireNonNull(columns, "columns is null"); // clone split properties for update so as not to affect the original one Properties updatedSchema = new Properties(); updatedSchema.putAll(splitSchema); updatedSchema.setProperty(LIST_COLUMNS, buildColumns(columns)); updatedSchema.setProperty(LIST_COLUMN_TYPES, buildColumnTypes(columns)); ThriftTable thriftTable = parseThriftDdl(splitSchema.getProperty(SERIALIZATION_DDL)); updatedSchema.setProperty(SERIALIZATION_DDL, thriftTableToDdl(pruneThriftTable(thriftTable, columns))); return updatedSchema; }
@Test(expectedExceptions = NullPointerException.class) public void shouldThrowNullPointerExceptionWhenColumnsIsNull() { updateSplitSchema(new Properties(), null); }
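A matching TestNG sketch for the other requireNonNull guard in the focal method (splitSchema being null); java.util.Collections.emptyList() is assumed available for the column list.

@Test(expectedExceptions = NullPointerException.class)
public void shouldThrowNullPointerExceptionWhenSplitSchemaIsNull()
{
    // splitSchema is checked before columns, so null here fails fast
    updateSplitSchema(null, Collections.emptyList());
}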
static boolean isTLSv13Cipher(String cipher) { // See https://tools.ietf.org/html/rfc8446#appendix-B.4 return TLSV13_CIPHERS.contains(cipher); }
@Test public void testIsTLSv13Cipher() { assertTrue(SslUtils.isTLSv13Cipher("TLS_AES_128_GCM_SHA256")); assertTrue(SslUtils.isTLSv13Cipher("TLS_AES_256_GCM_SHA384")); assertTrue(SslUtils.isTLSv13Cipher("TLS_CHACHA20_POLY1305_SHA256")); assertTrue(SslUtils.isTLSv13Cipher("TLS_AES_128_CCM_SHA256")); assertTrue(SslUtils.isTLSv13Cipher("TLS_AES_128_CCM_8_SHA256")); assertFalse(SslUtils.isTLSv13Cipher("TLS_DHE_RSA_WITH_AES_128_GCM_SHA256")); }
@Override public boolean passes(final String artifactType) { return StringUtils.isNotEmpty(regex) && StringUtils.isNotEmpty(artifactType) && artifactType.matches(regex); }
@Test public void testPasses() { String artifactType = null; ArtifactTypeExcluded instance = new ArtifactTypeExcluded(null); boolean expResult = false; boolean result = instance.passes(artifactType); assertEquals(expResult, result); artifactType = "pom"; instance = new ArtifactTypeExcluded(null); expResult = false; result = instance.passes(artifactType); assertEquals(expResult, result); artifactType = null; instance = new ArtifactTypeExcluded("jar"); expResult = false; result = instance.passes(artifactType); assertEquals(expResult, result); artifactType = "pom"; instance = new ArtifactTypeExcluded(""); expResult = false; result = instance.passes(artifactType); assertEquals(expResult, result); artifactType = "pom"; instance = new ArtifactTypeExcluded("jar"); expResult = false; result = instance.passes(artifactType); assertEquals(expResult, result); artifactType = "pom"; instance = new ArtifactTypeExcluded("pom"); expResult = true; result = instance.passes(artifactType); assertEquals(expResult, result); artifactType = "pom"; instance = new ArtifactTypeExcluded(".*"); expResult = true; result = instance.passes(artifactType); assertEquals(expResult, result); }
@Override void handle(Connection connection, DatabaseCharsetChecker.State state) throws SQLException { // PostgreSQL does not have concept of case-sensitive collation. Only charset ("encoding" in postgresql terminology) // must be verified. expectUtf8AsDefault(connection); if (state == DatabaseCharsetChecker.State.UPGRADE || state == DatabaseCharsetChecker.State.STARTUP) { // no need to check columns on fresh installs... as they are not supposed to exist! expectUtf8Columns(connection); } }
@Test public void column_charset_can_be_empty() throws Exception { answerDefaultCharset("utf8"); answerColumns(asList( new String[] {TABLE_ISSUES, COLUMN_KEE, "utf8"}, new String[] {TABLE_PROJECTS, COLUMN_NAME, "" /* unset -> uses db collation */})); // no error assertThatCode(() -> underTest.handle(connection, DatabaseCharsetChecker.State.UPGRADE)) .doesNotThrowAnyException(); verify(sqlExecutor).select(same(connection), eq("select table_name, column_name," + " collation_name " + "from information_schema.columns " + "where table_schema='public' " + "and table_name in (" + SqTables.TABLES.stream().map(s -> "'" + s + "'").collect(Collectors.joining(",")) + ") " + "and udt_name='varchar' order by table_name, column_name"), any(SqlExecutor.StringsConverter.class)); }
public QueryCacheEventData getEventData() { return eventData; }
@Test public void testGetEventData() { assertEquals(queryCacheEventData, singleIMapEvent.getEventData()); }
public ClientSession toClientSession() { return new ClientSession( parseServer(server), user, source, Optional.empty(), parseClientTags(clientTags), clientInfo, catalog, schema, TimeZone.getDefault().getID(), Locale.getDefault(), toResourceEstimates(resourceEstimates), toProperties(sessionProperties), emptyMap(), emptyMap(), toExtraCredentials(extraCredentials), null, clientRequestTimeout, disableCompression, emptyMap(), emptyMap(), validateNextUriSource); }
@Test public void testSource() { ClientOptions options = new ClientOptions(); options.source = "test"; ClientSession session = options.toClientSession(); assertEquals(session.getSource(), "test"); }
@ApiOperation(value = "Delete a deployment", tags = { "Deployment" }, code = 204) @ApiResponses(value = { @ApiResponse(code = 204, message = "Indicates the deployment was found and has been deleted. Response-body is intentionally empty."), @ApiResponse(code = 404, message = "Indicates the requested deployment was not found.") }) @DeleteMapping(value = "/repository/deployments/{deploymentId}", produces = "application/json") @ResponseStatus(HttpStatus.NO_CONTENT) public void deleteDeployment(@ApiParam(name = "deploymentId") @PathVariable String deploymentId, @RequestParam(value = "cascade", required = false, defaultValue = "false") Boolean cascade) { Deployment deployment = repositoryService.createDeploymentQuery().deploymentId(deploymentId).singleResult(); if (deployment == null) { throw new FlowableObjectNotFoundException("Could not find a deployment with id '" + deploymentId + "'.", Deployment.class); } if (restApiInterceptor != null) { restApiInterceptor.deleteDeployment(deployment); } if (cascade) { repositoryService.deleteDeployment(deploymentId, true); } else { repositoryService.deleteDeployment(deploymentId); } }
@Test public void testPostNewDeploymentBPMNFile() throws Exception { try { // Upload a valid BPMN-file using multipart-data HttpPost httpPost = new HttpPost(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_DEPLOYMENT_COLLECTION)); httpPost.setEntity(HttpMultipartHelper.getMultiPartEntity("oneTaskProcess.bpmn20.xml", "application/xml", ReflectUtil.getResourceAsStream("org/flowable/rest/service/api/repository/oneTaskProcess.bpmn20.xml"), null)); CloseableHttpResponse response = executeBinaryRequest(httpPost, HttpStatus.SC_CREATED); // Check deployment JsonNode responseNode = objectMapper.readTree(response.getEntity().getContent()); closeResponse(response); String deploymentId = responseNode.get("id").textValue(); assertThatJson(responseNode) .when(Option.IGNORING_EXTRA_FIELDS) .isEqualTo("{" + "id: '${json-unit.any-string}'," + "name: 'oneTaskProcess'," + "url: '" + SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_DEPLOYMENT, deploymentId) + "'," + "category: null," + "deploymentTime: '${json-unit.any-string}'," + "tenantId: ''" + "}"); assertThat(repositoryService.createDeploymentQuery().deploymentId(deploymentId).count()).isEqualTo(1); // Check if process is actually deployed in the deployment List<String> resources = repositoryService.getDeploymentResourceNames(deploymentId); assertThat(resources) .containsExactly("oneTaskProcess.bpmn20.xml"); assertThat(repositoryService.createProcessDefinitionQuery().deploymentId(deploymentId).count()).isEqualTo(1); } finally { // Always cleanup any created deployments, even if the test failed List<Deployment> deployments = repositoryService.createDeploymentQuery().list(); for (Deployment deployment : deployments) { repositoryService.deleteDeployment(deployment.getId(), true); } } }
public static boolean isValidEmail(String email) { return StringUtils.isNotBlank(email) && email.matches(EMAIL_REGEX); }
@Test void validateEmailTest() { var cases = new HashMap<String, Boolean>(); // Valid cases cases.put("simple@example.com", true); cases.put("very.common@example.com", true); cases.put("disposable.style.email.with+symbol@example.com", true); cases.put("other.email-with-hyphen@example.com", true); cases.put("fully-qualified-domain@example.com", true); cases.put("user.name+tag+sorting@example.com", true); cases.put("x@example.com", true); cases.put("example-indeed@strange-example.com", true); cases.put("example@s.example", true); cases.put("john.doe@example.com", true); cases.put("a.little.lengthy.but.fine@dept.example.com", true); cases.put("123ada@halo.co", true); cases.put("23ad@halo.top", true); // Invalid cases cases.put("Abc.example.com", false); cases.put("admin@mailserver1", false); cases.put("\" \"@example.org", false); cases.put("A@b@c@example.com", false); cases.put("a\"b(c)d,e:f;g<h>i[j\\k]l@example.com", false); cases.put("just\"not\"right@example.com", false); cases.put("this is\"not\\allowed@example.com", false); cases.put("this\\ still\\\"not\\\\allowed@example.com", false); cases.put("123456789012345678901234567890123456789012345", false); cases.forEach((email, expected) -> assertThat(ValidationUtils.isValidEmail(email)) .isEqualTo(expected)); }
public ProjectList searchProjects(String gitlabUrl, String personalAccessToken, @Nullable String projectName, @Nullable Integer pageNumber, @Nullable Integer pageSize) { String url = format("%s/projects?archived=false&simple=true&membership=true&order_by=name&sort=asc&search=%s%s%s", gitlabUrl, projectName == null ? "" : urlEncode(projectName), pageNumber == null ? "" : format("&page=%d", pageNumber), pageSize == null ? "" : format("&per_page=%d", pageSize) ); LOG.debug("get projects : [{}]", url); Request request = new Request.Builder() .addHeader(PRIVATE_TOKEN, personalAccessToken) .url(url) .get() .build(); try (Response response = client.newCall(request).execute()) { Headers headers = response.headers(); checkResponseIsSuccessful(response, "Could not get projects from GitLab instance"); List<Project> projectList = Project.parseJsonArray(response.body().string()); int returnedPageNumber = parseAndGetIntegerHeader(headers.get("X-Page")); int returnedPageSize = parseAndGetIntegerHeader(headers.get("X-Per-Page")); String xtotal = headers.get("X-Total"); Integer totalProjects = Strings.isEmpty(xtotal) ? null : parseAndGetIntegerHeader(xtotal); return new ProjectList(projectList, returnedPageNumber, returnedPageSize, totalProjects); } catch (JsonSyntaxException e) { throw new IllegalArgumentException("Could not parse GitLab answer to search projects. Got a non-json payload as result."); } catch (IOException e) { logException(url, e); throw new IllegalStateException(e.getMessage(), e); } }
@Test public void search_projects() throws InterruptedException { MockResponse projects = new MockResponse() .setResponseCode(200) .setBody("[\n" + " {\n" + " \"id\": 1,\n" + " \"name\": \"SonarQube example 1\",\n" + " \"name_with_namespace\": \"SonarSource / SonarQube / SonarQube example 1\",\n" + " \"path\": \"sonarqube-example-1\",\n" + " \"path_with_namespace\": \"sonarsource/sonarqube/sonarqube-example-1\",\n" + " \"web_url\": \"https://example.gitlab.com/sonarsource/sonarqube/sonarqube-example-1\"\n" + " },\n" + " {\n" + " \"id\": 2,\n" + " \"name\": \"SonarQube example 2\",\n" + " \"name_with_namespace\": \"SonarSource / SonarQube / SonarQube example 2\",\n" + " \"path\": \"sonarqube-example-2\",\n" + " \"path_with_namespace\": \"sonarsource/sonarqube/sonarqube-example-2\",\n" + " \"web_url\": \"https://example.gitlab.com/sonarsource/sonarqube/sonarqube-example-2\"\n" + " },\n" + " {\n" + " \"id\": 3,\n" + " \"name\": \"SonarQube example 3\",\n" + " \"name_with_namespace\": \"SonarSource / SonarQube / SonarQube example 3\",\n" + " \"path\": \"sonarqube-example-3\",\n" + " \"path_with_namespace\": \"sonarsource/sonarqube/sonarqube-example-3\",\n" + " \"web_url\": \"https://example.gitlab.com/sonarsource/sonarqube/sonarqube-example-3\"\n" + " }\n" + "]"); projects.addHeader("X-Page", 1); projects.addHeader("X-Per-Page", 10); projects.addHeader("X-Total", 3); server.enqueue(projects); ProjectList projectList = underTest.searchProjects(gitlabUrl, "pat", "example", 1, 10); assertThat(projectList.getPageNumber()).isOne(); assertThat(projectList.getPageSize()).isEqualTo(10); assertThat(projectList.getTotal()).isEqualTo(3); assertThat(projectList.getProjects()).hasSize(3); assertThat(projectList.getProjects()).extracting( Project::getId, Project::getName, Project::getNameWithNamespace, Project::getPath, Project::getPathWithNamespace, Project::getWebUrl).containsExactly( tuple(1L, "SonarQube example 1", "SonarSource / SonarQube / SonarQube example 1", "sonarqube-example-1", "sonarsource/sonarqube/sonarqube-example-1", "https://example.gitlab.com/sonarsource/sonarqube/sonarqube-example-1"), tuple(2L, "SonarQube example 2", "SonarSource / SonarQube / SonarQube example 2", "sonarqube-example-2", "sonarsource/sonarqube/sonarqube-example-2", "https://example.gitlab.com/sonarsource/sonarqube/sonarqube-example-2"), tuple(3L, "SonarQube example 3", "SonarSource / SonarQube / SonarQube example 3", "sonarqube-example-3", "sonarsource/sonarqube/sonarqube-example-3", "https://example.gitlab.com/sonarsource/sonarqube/sonarqube-example-3")); RecordedRequest projectGitlabRequest = server.takeRequest(10, TimeUnit.SECONDS); String gitlabUrlCall = projectGitlabRequest.getRequestUrl().toString(); assertThat(gitlabUrlCall).isEqualTo(server.url("") + "projects?archived=false&simple=true&membership=true&order_by=name&sort=asc&search=example&page=1&per_page=10"); assertThat(projectGitlabRequest.getMethod()).isEqualTo("GET"); }
@Override
public void beforeRejectedExecution(Runnable runnable, ThreadPoolExecutor executor) {
    // only extensible executors carry a thread-pool id; any other executor is ignored
    if (!(executor instanceof ExtensibleThreadPoolExecutor)) {
        return;
    }
    // send the rejected-task alarm asynchronously so the rejection path is not blocked
    String threadPoolId = ((ExtensibleThreadPoolExecutor) executor).getThreadPoolId();
    threadPoolCheckAlarm.asyncSendRejectedAlarm(threadPoolId);
}
@Test
public void testBeforeRejectedExecution() throws InterruptedException {
    ExtensibleThreadPoolExecutor executor = new ExtensibleThreadPoolExecutor(
        "test", new DefaultThreadPoolPluginManager(),
        1, 1, 1000L, TimeUnit.MILLISECONDS,
        new ArrayBlockingQueue<>(1), Thread::new, new ThreadPoolExecutor.DiscardPolicy());

    TestAlarm alarm = new TestAlarm();
    executor.register(new TaskRejectNotifyAlarmPlugin(alarm));

    // one worker thread and a queue of capacity 1, so the third task is rejected and triggers the alarm
    executor.submit(() -> ThreadUtil.sleep(200L));
    executor.submit(() -> ThreadUtil.sleep(200L));
    executor.submit(() -> ThreadUtil.sleep(200L));

    // wait for the accepted tasks to finish before checking the alarm count
    executor.shutdown();
    Assert.assertTrue(executor.awaitTermination(5L, TimeUnit.SECONDS));
    Assert.assertEquals(1, alarm.getNumberOfAlarms().get());
}
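A hypothetical companion check, assuming the same `TestAlarm` helper as above (with its counter starting at zero): executors that are not `ExtensibleThreadPoolExecutor` instances are ignored by the plugin, so no alarm is recorded.

@Test
public void testBeforeRejectedExecutionIgnoresPlainExecutor() {
    TestAlarm alarm = new TestAlarm();
    TaskRejectNotifyAlarmPlugin plugin = new TaskRejectNotifyAlarmPlugin(alarm);
    // a plain ThreadPoolExecutor exposes no thread-pool id, so the plugin returns early
    ThreadPoolExecutor plainExecutor = new ThreadPoolExecutor(
        1, 1, 1000L, TimeUnit.MILLISECONDS, new ArrayBlockingQueue<>(1));
    plugin.beforeRejectedExecution(() -> { }, plainExecutor);
    Assert.assertEquals(0, alarm.getNumberOfAlarms().get());
    plainExecutor.shutdown();
}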
public ReadOperation getReadOperation() { if (operations == null || operations.isEmpty()) { throw new IllegalStateException("Map task has no operation."); } Operation readOperation = operations.get(0); if (!(readOperation instanceof ReadOperation)) { throw new IllegalStateException("First operation in the map task is not a ReadOperation."); } return (ReadOperation) readOperation; }
@Test public void testNoReadOperation() throws Exception { // Test MapTaskExecutor without ReadOperation. List<Operation> operations = Arrays.<Operation>asList(createOperation("o1", 1), createOperation("o2", 2)); ExecutionStateTracker stateTracker = ExecutionStateTracker.newForTest(); try (MapTaskExecutor executor = new MapTaskExecutor(operations, counterSet, stateTracker)) { thrown.expect(IllegalStateException.class); thrown.expectMessage("is not a ReadOperation"); executor.getReadOperation(); } }
@Override public void upgrade() { if (shouldSkip()) { return; } final ImmutableSet<String> eventIndexPrefixes = ImmutableSet.of( elasticsearchConfig.getDefaultEventsIndexPrefix(), elasticsearchConfig.getDefaultSystemEventsIndexPrefix()); elasticsearch.addGl2MessageIdFieldAlias(eventIndexPrefixes); writeMigrationCompleted(eventIndexPrefixes); }
@Test void doesNotRunIfMigrationHasCompletedBefore() { when(clusterConfigService.get(V20200730000000_AddGl2MessageIdFieldAliasForEvents.MigrationCompleted.class)) .thenReturn(V20200730000000_AddGl2MessageIdFieldAliasForEvents.MigrationCompleted.create(ImmutableSet.of())); this.sut.upgrade(); verify(elasticsearchAdapter, never()).addGl2MessageIdFieldAlias(any()); }
@SuppressWarnings("WeakerAccess")
public Map<String, Object> getProducerConfigs(final String clientId) {
    final Map<String, Object> clientProvidedProps = getClientPropsWithPrefix(PRODUCER_PREFIX, ProducerConfig.configNames());

    checkIfUnexpectedUserSpecifiedConsumerConfig(clientProvidedProps, NON_CONFIGURABLE_PRODUCER_EOS_CONFIGS);

    // generate producer configs from original properties and overridden maps
    final Map<String, Object> props = new HashMap<>(eosEnabled ? PRODUCER_EOS_OVERRIDES : PRODUCER_DEFAULT_OVERRIDES);
    props.putAll(getClientCustomProps());
    props.putAll(clientProvidedProps);

    // When using EOS alpha, Streams should auto-downgrade the transactional commit protocol
    // to be compatible with older brokers.
    if (StreamsConfigUtils.processingMode(this) == StreamsConfigUtils.ProcessingMode.EXACTLY_ONCE_ALPHA) {
        props.put("internal.auto.downgrade.txn.commit", true);
    }

    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, originals().get(BOOTSTRAP_SERVERS_CONFIG));
    // add client id with stream client id prefix
    props.put(CommonClientConfigs.CLIENT_ID_CONFIG, clientId);

    return props;
}
@Test public void shouldSupportNonPrefixedProducerConfigs() { props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 10); props.put(ConsumerConfig.METRICS_NUM_SAMPLES_CONFIG, 1); final StreamsConfig streamsConfig = new StreamsConfig(props); final Map<String, Object> configs = streamsConfig.getProducerConfigs(clientId); assertEquals(10, configs.get(ProducerConfig.BUFFER_MEMORY_CONFIG)); assertEquals(1, configs.get(ProducerConfig.METRICS_NUM_SAMPLES_CONFIG)); }
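A hedged companion sketch, reusing the `props` and `clientId` fixtures from the test above: the same setting can also be supplied through the producer prefix, which `getProducerConfigs` collects via `getClientPropsWithPrefix(PRODUCER_PREFIX, ...)`.

@Test
public void shouldSupportPrefixedProducerConfigsSketch() {
    // "producer."-prefixed keys are stripped of the prefix and passed through to the producer config
    props.put(StreamsConfig.producerPrefix(ProducerConfig.BUFFER_MEMORY_CONFIG), 10);
    final StreamsConfig streamsConfig = new StreamsConfig(props);
    final Map<String, Object> configs = streamsConfig.getProducerConfigs(clientId);
    assertEquals(10, configs.get(ProducerConfig.BUFFER_MEMORY_CONFIG));
}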
public static MepId valueOf(short id) { if (id < 1 || id > 8191) { throw new IllegalArgumentException( "Invalid value for Mep Id - must be between 1-8191 inclusive. " + "Rejecting " + id); } return new MepId(id); }
@Test public void testLowRange() { try { MepId.valueOf((short) -1); fail("Exception expected for MepId = -1"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("Invalid value for Mep Id")); } try { MepId.valueOf((short) 0); fail("Exception expected for MepId = 0"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("Invalid value for Mep Id")); } }
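A hypothetical upper-bound companion to testLowRange, using the same assertion style; MepId.valueOf rejects any value above 8191.

@Test
public void testHighRange() {
    try {
        MepId.valueOf((short) 8192);
        fail("Exception expected for MepId = 8192");
    } catch (IllegalArgumentException e) {
        assertTrue(e.getMessage().contains("Invalid value for Mep Id"));
    }
}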
@Override
public AwsProxyResponse handle(Throwable ex) {
    log.error("Called exception handler for:", ex);

    // Also print the stack trace, in case we have no appender or we are running inside SAM local,
    // where we need the output to go to stderr.
    ex.printStackTrace();
    if (ex instanceof InvalidRequestEventException || ex instanceof InternalServerErrorException) {
        return new AwsProxyResponse(500, HEADERS, getErrorJson(INTERNAL_SERVER_ERROR));
    } else {
        return new AwsProxyResponse(502, HEADERS, getErrorJson(GATEWAY_TIMEOUT_ERROR));
    }
}
@Test void typedHandle_InvalidRequestEventException_jsonContentTypeHeader() { AwsProxyResponse resp = exceptionHandler.handle(new InvalidRequestEventException(INVALID_REQUEST_MESSAGE, null)); assertNotNull(resp); assertTrue(resp.getMultiValueHeaders().containsKey(HttpHeaders.CONTENT_TYPE)); assertEquals(MediaType.APPLICATION_JSON, resp.getMultiValueHeaders().getFirst(HttpHeaders.CONTENT_TYPE)); }
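A hedged sketch of the fallback branch, assuming the same `exceptionHandler` fixture as the test above: any exception other than InvalidRequestEventException or InternalServerErrorException is mapped to a 502 response.

@Test
void typedHandle_unexpectedException_502Response() {
    // RuntimeException falls through to the else branch of handle(Throwable)
    AwsProxyResponse resp = exceptionHandler.handle(new RuntimeException("unexpected"));
    assertNotNull(resp);
    assertEquals(502, resp.getStatusCode());
}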