focal_method (string, 13-60.9k chars) | test_case (string, 25-109k chars)
public AggregateAnalysisResult analyze( final ImmutableAnalysis analysis, final List<SelectExpression> finalProjection ) { if (!analysis.getGroupBy().isPresent()) { throw new IllegalArgumentException("Not an aggregate query"); } final AggAnalyzer aggAnalyzer = new AggAnalyzer(analysis, functionRegistry); aggAnalyzer.process(finalProjection); return aggAnalyzer.result(); }
@Test public void shouldCaptureHavingNonAggregateFunctionArgumentsAsRequired() { // Given: when(analysis.getHavingExpression()).thenReturn(Optional.of( new FunctionCall(FunctionName.of("MAX"), ImmutableList.of(COL2)) )); // When: final AggregateAnalysisResult result = analyzer .analyze(analysis, selects); // Then: assertThat(result.getRequiredColumns(), hasItem(COL2)); }
public static String generateKey() { return UUID.randomUUID().toString().replaceAll("-", "").toUpperCase(); }
@Test public void testGenerateKey() { assertNotNull(SignUtils.generateKey()); }
@Deprecated @Override public void init(final ProcessorContext context, final StateStore root) { this.context = context instanceof InternalProcessorContext ? (InternalProcessorContext<?, ?>) context : null; taskId = context.taskId(); initStoreSerde(context); streamsMetrics = (StreamsMetricsImpl) context.metrics(); registerMetrics(); final Sensor restoreSensor = StateStoreMetrics.restoreSensor(taskId.toString(), metricsScope, name(), streamsMetrics); // register and possibly restore the state from the logs maybeMeasureLatency(() -> super.init(context, root), time, restoreSensor); }
@SuppressWarnings("deprecation") @Test public void shouldDelegateDeprecatedInit() { final MeteredWindowStore<String, String> outer = new MeteredWindowStore<>( innerStoreMock, WINDOW_SIZE_MS, // any size STORE_TYPE, new MockTime(), Serdes.String(), new SerdeThatDoesntHandleNull() ); when(innerStoreMock.name()).thenReturn("store"); doNothing().when(innerStoreMock).init((ProcessorContext) context, outer); outer.init((ProcessorContext) context, outer); }
@Override public void process(Exchange exchange) throws Exception { final String msg = exchange.getIn().getBody(String.class); final String sendTo = exchange.getIn().getHeader(IrcConstants.IRC_SEND_TO, String.class); if (connection == null || !connection.isConnected()) { reconnect(); } if (connection == null || !connection.isConnected()) { throw new RuntimeCamelException("Lost connection" + (connection == null ? "" : " to " + connection.getHost())); } if (msg != null) { if (isMessageACommand(msg)) { LOG.debug("Sending command: {}", msg); connection.send(msg); } else if (sendTo != null) { LOG.debug("Sending to: {} message: {}", sendTo, msg); connection.doPrivmsg(sendTo, msg); } else { for (IrcChannel channel : getEndpoint().getConfiguration().getChannelList()) { LOG.debug("Sending to: {} message: {}", channel, msg); connection.doPrivmsg(channel.getName(), msg); } } } }
@Test public void processTest() throws Exception { when(connection.isConnected()).thenReturn(true); when(exchange.getIn()).thenReturn(message); when(message.getBody(String.class)).thenReturn("PART foo"); when(message.getHeader(IrcConstants.IRC_SEND_TO, String.class)).thenReturn("bottest"); producer.process(exchange); verify(connection).send("PART foo"); when(message.getBody(String.class)).thenReturn("foo"); producer.process(exchange); verify(connection).doPrivmsg("bottest", "foo"); when(message.getHeader(IrcConstants.IRC_SEND_TO, String.class)).thenReturn(null); producer.process(exchange); verify(connection).doPrivmsg("#chan1", "foo"); verify(connection).doPrivmsg("#chan2", "foo"); }
void sendInternalMetadataRequest(MetadataRequest.Builder builder, String nodeConnectionId, long now) { ClientRequest clientRequest = newClientRequest(nodeConnectionId, builder, now, true); doSend(clientRequest, true, now); }
@Test public void testUnsupportedVersionDuringInternalMetadataRequest() { List<String> topics = Collections.singletonList("topic_1"); // disabling auto topic creation for versions less than 4 is not supported MetadataRequest.Builder builder = new MetadataRequest.Builder(topics, false, (short) 3); client.sendInternalMetadataRequest(builder, node.idString(), time.milliseconds()); assertEquals(UnsupportedVersionException.class, metadataUpdater.getAndClearFailure().getClass()); }
@Override public PropertiesConfiguration getConfiguration(final LoggerContext loggerContext, final ConfigurationSource source) { final Properties properties = new Properties(); try (final InputStream configStream = source.getInputStream()) { properties.load(configStream); } catch (final IOException ioe) { throw new ConfigurationException("Unable to load " + source.toString(), ioe); } PropertiesConfiguration propertiesConfiguration = new PropertiesConfigurationBuilder() .setConfigurationSource(source) .setRootProperties(properties) .setLoggerContext(loggerContext) .build(); if (System.getProperty(PIPELINE_SEPARATE_LOGS, "false").equals("false")) { // force init to avoid overwrite of appenders section propertiesConfiguration.initialize(); propertiesConfiguration.removeAppender(PIPELINE_ROUTING_APPENDER_NAME); } return propertiesConfiguration; }
@Test public void testDisableAppenderPerPipelineIsCreatedAfterLogLine() { System.setProperty(LogstashConfigurationFactory.PIPELINE_SEPARATE_LOGS, Boolean.FALSE.toString()); forceLog4JContextRefresh(); Logger logger = LogManager.getLogger(LogstashConfigurationFactoryTest.class); ThreadContext.put("pipeline.id", "pipeline_1"); logger.info("log for pipeline 1"); ThreadContext.remove("pipeline.id"); ThreadContext.put("pipeline.id", "pipeline_2"); logger.info("log for pipeline 2"); LoggerContext context = LoggerContext.getContext(false); final Configuration config = context.getConfiguration(); RoutingAppender routingApp = config.getAppender(LogstashConfigurationFactory.PIPELINE_ROUTING_APPENDER_NAME); assertNull("No routing appender should be present", routingApp); }
@Override @NonNull public Flux<Object> decode(@NonNull Publisher<DataBuffer> input, @NonNull ResolvableType elementType, @Nullable MimeType mimeType, @Nullable Map<String, Object> hints) { ObjectMapper mapper = getObjectMapper(); Flux<TokenBuffer> tokens = Jackson2Tokenizer.tokenize( Flux.from(input), mapper.getFactory(), mapper, true); ObjectReader reader = getObjectReader(elementType, hints); return tokens .as(LocaleUtils::transform) .handle((tokenBuffer, sink) -> { try { Object value = reader.readValue(tokenBuffer.asParser(getObjectMapper())); logValue(value, hints); if (value != null) { sink.next(value); } } catch (IOException ex) { sink.error(processException(ex)); } }); }
@Test @SneakyThrows public void testDecodeList() { ObjectMapper mapper = new ObjectMapper(); CustomJackson2JsonDecoder decoder = new CustomJackson2JsonDecoder(new MapperEntityFactory(), mapper); ResolvableType type = ResolvableType.forClassWithGenerics(List.class, MyEntity.class); DataBuffer buffer = new DefaultDataBufferFactory().wrap("[{\"id\":\"test\"}]".getBytes()); Object object = decoder.decode(buffer, type, MediaType.APPLICATION_JSON, Collections.emptyMap()); assertTrue(object instanceof List); assertTrue(((List<?>) object).size() > 0); assertTrue(((List<?>) object).get(0) instanceof MyEntity); assertEquals(((MyEntity) ((List<?>) object).get(0)).getId(), "test"); }
BrokerResponse createQueueResponse(String queueName) throws IOException { String queryUrl = createQueueEndpoint(messageVpn); ImmutableMap<String, Object> params = ImmutableMap.<String, Object>builder() .put("accessType", "non-exclusive") .put("queueName", queueName) .put("owner", username) .put("permission", "consume") .put("ingressEnabled", true) .put("egressEnabled", true) .build(); HttpResponse response = executePost(new GenericUrl(baseUrl + queryUrl), params); return BrokerResponse.fromHttpResponse(response); }
@Test public void testCreateQueueResponseEncoding() throws IOException { MockHttpTransport transport = new MockHttpTransport() { @Override public LowLevelHttpRequest buildRequest(String method, String url) { return new MockLowLevelHttpRequest() { @Override public LowLevelHttpResponse execute() throws IOException { MockLowLevelHttpResponse response = new MockLowLevelHttpResponse(); assertTrue(this.getContentAsString().contains("\"queueName\":\"queue/xxx/yyy\"")); assertTrue(url.contains("msgVpns/vpnName%232")); return response; } }; } }; HttpRequestFactory requestFactory = transport.createRequestFactory(); SempBasicAuthClientExecutor client = new SempBasicAuthClientExecutor( "http://host", "username", "password", "vpnName#2", requestFactory); client.createQueueResponse("queue/xxx/yyy"); }
@Override public Serializable read(final MySQLBinlogColumnDef columnDef, final MySQLPacketPayload payload) { ByteBuf newlyByteBuf = payload.getByteBuf().readBytes(readLengthFromMeta(columnDef.getColumnMeta(), payload)); try { return MySQLJsonValueDecoder.decode(newlyByteBuf); } finally { newlyByteBuf.release(); } }
@Test void assertReadJsonValueWithMeta1() { columnDef.setColumnMeta(1); when(byteBuf.readUnsignedByte()).thenReturn((short) 1); when(byteBuf.readBytes(1)).thenReturn(jsonValueByteBuf); assertThat(new MySQLJsonBinlogProtocolValue().read(columnDef, payload), is(EXPECTED_JSON)); }
public <T> SchemaProvider getSchemaProvider(TypeDescriptor<T> typeDescriptor) throws NoSuchSchemaException { for (SchemaProvider provider : providers) { Schema schema = provider.schemaFor(typeDescriptor); if (schema != null) { return provider; } } throw new NoSuchSchemaException(); }
@Test public void testGetSchemaProvider() throws NoSuchSchemaException { SchemaRegistry registry = SchemaRegistry.createDefault(); SchemaProvider testDefaultSchemaProvider = registry.getSchemaProvider(TestDefaultSchemaClass.class); assertEquals(DefaultSchemaProvider.class, testDefaultSchemaProvider.getClass()); assertEquals( TestDefaultSchemaProvider.class, ((DefaultSchemaProvider) testDefaultSchemaProvider) .getUnderlyingSchemaProvider(TestDefaultSchemaClass.class) .getClass()); SchemaProvider autoValueSchemaProvider = registry.getSchemaProvider(TestAutoValue.class); assertEquals(DefaultSchemaProvider.class, autoValueSchemaProvider.getClass()); assertEquals( AutoValueSchema.class, ((DefaultSchemaProvider) autoValueSchemaProvider) .getUnderlyingSchemaProvider(TestAutoValue.class) .getClass()); SchemaProvider simpleBeanSchemaProvider = registry.getSchemaProvider(SimpleBean.class); assertEquals(DefaultSchemaProvider.class, simpleBeanSchemaProvider.getClass()); assertEquals( JavaBeanSchema.class, ((DefaultSchemaProvider) simpleBeanSchemaProvider) .getUnderlyingSchemaProvider(SimpleBean.class) .getClass()); }
@Override @Nonnull public <T extends DataConnection> T getAndRetainDataConnection(String name, Class<T> clazz) { DataConnectionEntry dataConnection = dataConnections.computeIfPresent(name, (k, v) -> { if (!clazz.isInstance(v.instance)) { throw new HazelcastException("Data connection '" + name + "' must be an instance of " + clazz); } v.instance.retain(); return v; }); if (dataConnection == null) { throw new HazelcastException("Data connection '" + name + "' not found"); } //noinspection unchecked return (T) dataConnection.instance; }
@Test public void should_return_true_when_config_data_connection_exists() { DataConnection dataConnection = dataConnectionService.getAndRetainDataConnection(TEST_CONFIG, DataConnection.class); assertThat(dataConnection) .describedAs("DataConnection created via config should exist") .isNotNull(); }
@Override public long getRowCount(HoodieStorage storage, StoragePath filePath) { ParquetMetadata footer; long rowCount = 0; footer = readMetadata(storage, filePath); for (BlockMetaData b : footer.getBlocks()) { rowCount += b.getRowCount(); } return rowCount; }
@Test public void testReadCounts() throws Exception { String filePath = Paths.get(basePath, "test.parquet").toUri().toString(); List<String> rowKeys = new ArrayList<>(); for (int i = 0; i < 123; i++) { rowKeys.add(UUID.randomUUID().toString()); } writeParquetFile(BloomFilterTypeCode.SIMPLE.name(), filePath, rowKeys); assertEquals(123, parquetUtils.getRowCount( HoodieTestUtils.getStorage(filePath), new StoragePath(filePath))); }
@Override public void handleWayTags(int edgeId, EdgeIntAccess edgeIntAccess, ReaderWay readerWay, IntsRef relationFlags) { if (readerWay.hasTag("hazmat:water", "no")) { hazWaterEnc.setEnum(false, edgeId, edgeIntAccess, HazmatWater.NO); } else if (readerWay.hasTag("hazmat:water", "permissive")) { hazWaterEnc.setEnum(false, edgeId, edgeIntAccess, HazmatWater.PERMISSIVE); } }
@Test public void testNoNPE() { ReaderWay readerWay = new ReaderWay(1); EdgeIntAccess edgeIntAccess = new ArrayEdgeIntAccess(1); int edgeId = 0; parser.handleWayTags(edgeId, edgeIntAccess, readerWay, relFlags); assertEquals(HazmatWater.YES, hazWaterEnc.getEnum(false, edgeId, edgeIntAccess)); }
static List<MappingField> resolveFields(Map<String, Object> json) { Map<String, MappingField> fields = new LinkedHashMap<>(); for (Entry<String, Object> entry : json.entrySet()) { String name = entry.getKey(); QueryDataType type = resolveType(entry.getValue()); MappingField field = new MappingField(name, type); fields.putIfAbsent(field.name(), field); } return new ArrayList<>(fields.values()); }
@Test public void test_resolveFields() { // given Map<String, Object> json = new LinkedHashMap<>() { { put("boolean", true); put("number", 1); put("string", "string"); put("object", emptyMap()); put("array", emptyList()); put("nullValue", null); put("null", null); } }; // when List<MappingField> fields = JsonResolver.resolveFields(json); // then assertThat(fields).hasSize(7); assertThat(fields.get(0)).isEqualTo(new MappingField("boolean", QueryDataType.BOOLEAN)); assertThat(fields.get(1)).isEqualTo(new MappingField("number", QueryDataType.DOUBLE)); assertThat(fields.get(2)).isEqualTo(new MappingField("string", QueryDataType.VARCHAR)); assertThat(fields.get(3)).isEqualTo(new MappingField("object", QueryDataType.OBJECT)); assertThat(fields.get(4)).isEqualTo(new MappingField("array", QueryDataType.OBJECT)); assertThat(fields.get(5)).isEqualTo(new MappingField("nullValue", QueryDataType.OBJECT)); assertThat(fields.get(6)).isEqualTo(new MappingField("null", QueryDataType.OBJECT)); }
@Override public Token login(LoginRequest loginRequest) { final UserEntity userEntityFromDB = userRepository .findUserEntityByEmail(loginRequest.getEmail()) .orElseThrow( () -> new UserNotFoundException("Can't find with given email: " + loginRequest.getEmail()) ); if (Boolean.FALSE.equals(passwordEncoder.matches( loginRequest.getPassword(), userEntityFromDB.getPassword()))) { throw new PasswordNotValidException(); } return tokenService.generateToken(userEntityFromDB.getClaims()); }
@Test void login_InvalidEmail_ThrowsAdminNotFoundException() { // Given LoginRequest loginRequest = LoginRequest.builder() .email("nonexistent@example.com") .password("password123") .build(); // When when(userRepository.findUserEntityByEmail(loginRequest.getEmail())) .thenReturn(Optional.empty()); // Then UserNotFoundException exception = assertThrows(UserNotFoundException.class, () -> userLoginService.login(loginRequest)); assertEquals("User not found!\n Can't find with given email: " + loginRequest.getEmail(), exception.getMessage()); // Verify verify(userRepository).findUserEntityByEmail(loginRequest.getEmail()); verifyNoInteractions(passwordEncoder, tokenService); }
@GetMapping public String getHealth() { // TODO UP DOWN WARN StringBuilder sb = new StringBuilder(); String dbStatus = dataSourceService.getHealth(); boolean addressServerHealthy = isAddressServerHealthy(); if (dbStatus.contains(HEALTH_UP) && addressServerHealthy && ServerMemberManager.isInIpList()) { sb.append(HEALTH_UP); } else if (dbStatus.contains(HEALTH_WARN) && addressServerHealthy && ServerMemberManager.isInIpList()) { sb.append("WARN:"); sb.append("slave db (").append(dbStatus.split(":")[1]).append(") down. "); } else { sb.append("DOWN:"); if (dbStatus.contains(HEALTH_DOWN)) { sb.append("master db (").append(dbStatus.split(":")[1]).append(") down. "); } if (!addressServerHealthy) { sb.append("address server down. "); } if (!ServerMemberManager.isInIpList()) { sb.append("server ip ").append(InetUtils.getSelfIP()) .append(" is not in the serverList of address server. "); } } return sb.toString(); }
@Test void testGetHealth() throws Exception { when(dataSourceService.getHealth()).thenReturn("UP"); MockHttpServletRequestBuilder builder = MockMvcRequestBuilders.get(Constants.HEALTH_CONTROLLER_PATH); String actualValue = mockmvc.perform(builder).andReturn().getResponse().getContentAsString(); assertEquals("UP", actualValue); }
public double asMHz() { return (double) frequency / MHZ; }
@Test public void testasMHz() { Frequency frequency = Frequency.ofGHz(1); assertThat(frequency.asMHz(), is(1000.0)); }
public void setGroupId(String groupId) { this.groupId = groupId; }
@Test public void testSetGroupId() { String groupId = "aaa"; String expected = "aaa"; Model instance = new Model(); instance.setGroupId(groupId); assertEquals(expected, instance.getGroupId()); }
void move() { //moves by 1 unit in either direction this.coordinateX += RANDOM.nextInt(3) - 1; this.coordinateY += RANDOM.nextInt(3) - 1; }
@Test void moveTest() { var b = new Bubble(10, 10, 1, 2); var initialX = b.coordinateX; var initialY = b.coordinateY; b.move(); //change in x and y < |2| assertTrue(b.coordinateX - initialX < 2 && b.coordinateX - initialX > -2); assertTrue(b.coordinateY - initialY < 2 && b.coordinateY - initialY > -2); }
public static void validateJarOnClient(SubmitJobParameters parameterObject) throws IOException { if (parameterObject.isJarOnMember()) { throw new JetException("SubmitJobParameters is configured for jar on member"); } Path jarPath = parameterObject.getJarPath(); validateJarPathNotNull(jarPath); validateFileSizeIsNotZero(jarPath); validateFileExtension(jarPath); validateJobParameters(parameterObject.getJobParameters()); }
@Test public void failJarOnMemberConfiguration() { SubmitJobParameters parameterObject = SubmitJobParameters.withJarOnMember(); assertThatThrownBy(() -> SubmitJobParametersValidator.validateJarOnClient(parameterObject)) .isInstanceOf(JetException.class) .hasMessageContaining("SubmitJobParameters is configured for jar on member"); }
public int maxAllowedPlanningFailures() { return maxAllowedPlanningFailures; }
@Test void testMaxAllowedPlanningFailures() { ScanContext context = ScanContext.builder().maxAllowedPlanningFailures(-2).build(); assertException( context, "Cannot set maxAllowedPlanningFailures to a negative number other than -1."); }
public static ObjectInputDecoder createDecoder(Type type, TypeManager typeManager) { String base = type.getTypeSignature().getBase(); switch (base) { case UnknownType.NAME: return o -> o; case BIGINT: return o -> (Long) o; case INTEGER: return o -> ((Long) o).intValue(); case SMALLINT: return o -> ((Long) o).shortValue(); case TINYINT: return o -> ((Long) o).byteValue(); case BOOLEAN: return o -> (Boolean) o; case DATE: return DateTimeUtils::createDate; case DECIMAL: if (Decimals.isShortDecimal(type)) { final int scale = ((DecimalType) type).getScale(); return o -> HiveDecimal.create(BigInteger.valueOf((long) o), scale); } else if (Decimals.isLongDecimal(type)) { final int scale = ((DecimalType) type).getScale(); return o -> HiveDecimal.create(Decimals.decodeUnscaledValue((Slice) o), scale); } break; case REAL: return o -> intBitsToFloat(((Number) o).intValue()); case DOUBLE: return o -> ((Double) o); case TIMESTAMP: return o -> new Timestamp(((long) o)); case VARBINARY: return o -> ((Slice) o).getBytes(); case VARCHAR: return o -> ((Slice) o).toStringUtf8(); case CHAR: return o -> ((Slice) o).toStringUtf8(); case ROW: return RowObjectInputDecoder.create(((RowType) type), typeManager); case ARRAY: return ArrayObjectInputDecoder.create(((ArrayType) type), typeManager); case MAP: return MapObjectInputDecoder.create(((MapType) type), typeManager); } throw unsupportedType(type); }
@Test public void testPrimitiveObjectDecoders() { ObjectInputDecoder decoder; decoder = createDecoder(BIGINT, typeManager); assertTrue(decoder.decode(123456L) instanceof Long); decoder = createDecoder(INTEGER, typeManager); assertTrue(decoder.decode(12345L) instanceof Integer); decoder = createDecoder(SMALLINT, typeManager); assertTrue(decoder.decode(1234L) instanceof Short); decoder = createDecoder(TINYINT, typeManager); assertTrue(decoder.decode(123L) instanceof Byte); decoder = createDecoder(BOOLEAN, typeManager); assertTrue(decoder.decode(true) instanceof Boolean); decoder = createDecoder(REAL, typeManager); assertTrue(decoder.decode(((float) 0.2)) instanceof Float); decoder = createDecoder(DOUBLE, typeManager); assertTrue(decoder.decode(0.1) instanceof Double); }
static String buildServiceName(AbstractInterfaceConfig config, String protocol) { if (RpcConstants.PROTOCOL_TYPE_BOLT.equals(protocol) || RpcConstants.PROTOCOL_TYPE_TR.equals(protocol)) { return ConfigUniqueNameGenerator.getServiceName(config) + ":DEFAULT"; } else { return ConfigUniqueNameGenerator.getServiceName(config) + ":" + protocol; } }
@Test public void buildServiceName() { ServerConfig serverConfig = new ServerConfig() .setProtocol("bolt") .setHost("0.0.0.0") .setPort(12200); ProviderConfig<?> provider = new ProviderConfig(); provider.setInterfaceId("com.alipay.xxx.TestService") .setApplication(new ApplicationConfig().setAppName("test-server")) .setUniqueId("nacos-test") .setProxy("javassist") .setRegister(true) .setSerialization("hessian2") .setServer(serverConfig) .setWeight(222) .setTimeout(3000); String serviceName = NacosRegistryHelper.buildServiceName(provider, RpcConstants.PROTOCOL_TYPE_BOLT); assertEquals(serviceName, "com.alipay.xxx.TestService:nacos-test:DEFAULT"); serviceName = NacosRegistryHelper.buildServiceName(provider, RpcConstants.PROTOCOL_TYPE_TR); assertEquals(serviceName, "com.alipay.xxx.TestService:nacos-test:DEFAULT"); serviceName = NacosRegistryHelper.buildServiceName(provider, RpcConstants.PROTOCOL_TYPE_TRIPLE); assertEquals(serviceName, "com.alipay.xxx.TestService:nacos-test:" + RpcConstants.PROTOCOL_TYPE_TRIPLE); serviceName = NacosRegistryHelper.buildServiceName(provider, RpcConstants.PROTOCOL_TYPE_REST); assertEquals(serviceName, "com.alipay.xxx.TestService:nacos-test:" + RpcConstants.PROTOCOL_TYPE_REST); }
@Override public void handle(SeckillWebMockRequestDTO request) { Seckill entity = new Seckill(); entity.setSeckillId(request.getSeckillId()); entity.setNumber(request.getSeckillCount()); seckillMapper.updateById(entity); // clean up records of successful seckills SuccessKilled example = new SuccessKilled(); example.setSeckillId(request.getSeckillId()); successKilledMapper.delete(new QueryWrapper<>(example)); }
@Test public void shouldUpdateSeckillAndDeleteSuccessKilled() { SeckillWebMockRequestDTO request = new SeckillWebMockRequestDTO(); request.setSeckillId(123L); request.setSeckillCount(10); databasePreRequestHandler.handle(request); verify(seckillMapper, times(1)).updateById(any(Seckill.class)); verify(successKilledMapper, times(1)).delete(any()); }
@Override public ConnectorPageSource createPageSource( ConnectorTransactionHandle transaction, ConnectorSession session, ConnectorSplit split, ConnectorTableLayoutHandle layout, List<ColumnHandle> columns, SplitContext splitContext, RuntimeStats runtimeStats) { HiveTableLayoutHandle hiveLayout = (HiveTableLayoutHandle) layout; List<HiveColumnHandle> selectedColumns = columns.stream() .map(HiveColumnHandle.class::cast) .collect(toList()); HiveSplit hiveSplit = (HiveSplit) split; Path path = new Path(hiveSplit.getFileSplit().getPath()); Configuration configuration = hdfsEnvironment.getConfiguration( new HdfsContext( session, hiveSplit.getDatabase(), hiveSplit.getTable(), hiveLayout.getTablePath(), false), path); Optional<EncryptionInformation> encryptionInformation = hiveSplit.getEncryptionInformation(); CacheQuota cacheQuota = generateCacheQuota(hiveSplit); HiveFileContext fileContext = new HiveFileContext( splitContext.isCacheable(), cacheQuota, hiveSplit.getFileSplit().getExtraFileInfo().map(BinaryExtraHiveFileInfo::new), OptionalLong.of(hiveSplit.getFileSplit().getFileSize()), OptionalLong.of(hiveSplit.getFileSplit().getStart()), OptionalLong.of(hiveSplit.getFileSplit().getLength()), hiveSplit.getFileSplit().getFileModifiedTime(), HiveSessionProperties.isVerboseRuntimeStatsEnabled(session), runtimeStats); if (columns.stream().anyMatch(columnHandle -> ((HiveColumnHandle) columnHandle).getColumnType().equals(AGGREGATED))) { checkArgument(columns.stream().allMatch(columnHandle -> ((HiveColumnHandle) columnHandle).getColumnType().equals(AGGREGATED)), "Not all columns are of 'AGGREGATED' type"); if (hiveLayout.isFooterStatsUnreliable()) { throw new PrestoException(HIVE_UNSUPPORTED_FORMAT, format("Partial aggregation pushdown is not supported when footer stats are unreliable. " + "Table %s has file %s with unreliable footer stats. " + "Set session property [catalog-name].pushdown_partial_aggregations_into_scan=false and execute query again.", hiveLayout.getSchemaTableName(), hiveSplit.getFileSplit().getPath())); } return createAggregatedPageSource(aggregatedPageSourceFactories, configuration, session, hiveSplit, hiveLayout, selectedColumns, fileContext, encryptionInformation); } if (hiveLayout.isPushdownFilterEnabled()) { Optional<ConnectorPageSource> selectivePageSource = createSelectivePageSource( selectivePageSourceFactories, configuration, session, hiveSplit, hiveLayout, selectedColumns, hiveStorageTimeZone, typeManager, optimizedRowExpressionCache, splitContext, fileContext, encryptionInformation); if (selectivePageSource.isPresent()) { return selectivePageSource.get(); } } TupleDomain<HiveColumnHandle> effectivePredicate = hiveLayout.getDomainPredicate() .transform(Subfield::getRootName) .transform(hiveLayout.getPredicateColumns()::get); if (shouldSkipBucket(hiveLayout, hiveSplit, splitContext, isLegacyTimestampBucketing(session))) { return new HiveEmptySplitPageSource(); } if (shouldSkipPartition(typeManager, hiveLayout, hiveStorageTimeZone, hiveSplit, splitContext)) { return new HiveEmptySplitPageSource(); } Optional<ConnectorPageSource> pageSource = createHivePageSource( cursorProviders, pageSourceFactories, configuration, session, hiveSplit.getFileSplit(), hiveSplit.getTableBucketNumber(), hiveSplit.getStorage(), splitContext.getDynamicFilterPredicate().map(filter -> filter.transform(handle -> (HiveColumnHandle) handle).intersect(effectivePredicate)).orElse(effectivePredicate), selectedColumns, hiveLayout.getPredicateColumns(), hiveSplit.getPartitionKeys(), hiveStorageTimeZone, typeManager, hiveLayout.getSchemaTableName(), hiveLayout.getPartitionColumns().stream().map(HiveColumnHandle.class::cast).collect(toList()), hiveLayout.getDataColumns(), hiveLayout.getTableParameters(), hiveSplit.getPartitionDataColumnCount(), hiveSplit.getTableToPartitionMapping(), hiveSplit.getBucketConversion(), hiveSplit.isS3SelectPushdownEnabled(), fileContext, hiveLayout.getRemainingPredicate(), hiveLayout.isPushdownFilterEnabled(), rowExpressionService, encryptionInformation, hiveSplit.getRowIdPartitionComponent()); if (pageSource.isPresent()) { return pageSource.get(); } throw new IllegalStateException("Could not find a file reader for split " + hiveSplit); }
@Test public void testCreatePageSource_withRowID() { HivePageSourceProvider pageSourceProvider = createPageSourceProvider(); HiveSplit hiveSplit = makeHiveSplit(ORC, Optional.of(new byte[20])); try (HivePageSource pageSource = (HivePageSource) pageSourceProvider.createPageSource( new HiveTransactionHandle(), SESSION, hiveSplit, getHiveTableLayout(false, true, false), ImmutableList.of(LONG_COLUMN, HiveColumnHandle.rowIdColumnHandle()), new SplitContext(false), new RuntimeStats())) { assertEquals(0, pageSource.getCompletedBytes()); } }
public AstNode rewrite(final AstNode node, final C context) { return rewriter.process(node, context); }
@Test public void shouldRewriteCSAS() { final CreateStreamAsSelect csas = new CreateStreamAsSelect( location, sourceName, query, false, false, csasProperties ); when(mockRewriter.apply(query, context)).thenReturn(rewrittenQuery); final AstNode rewritten = rewriter.rewrite(csas, context); assertThat( rewritten, equalTo( new CreateStreamAsSelect( location, sourceName, rewrittenQuery, false, false, csasProperties ) ) ); }
public StepExpression createExpression(StepDefinition stepDefinition) { List<ParameterInfo> parameterInfos = stepDefinition.parameterInfos(); if (parameterInfos.isEmpty()) { return createExpression( stepDefinition.getPattern(), stepDefinitionDoesNotTakeAnyParameter(stepDefinition), false); } ParameterInfo parameterInfo = parameterInfos.get(parameterInfos.size() - 1); return createExpression( stepDefinition.getPattern(), parameterInfo.getTypeResolver()::resolve, parameterInfo.isTransposed()); }
@SuppressWarnings("unchecked") @Test void empty_table_cells_are_presented_as_null_to_transformer() { registry.setDefaultDataTableEntryTransformer( (map, valueType, tableCellByTypeTransformer) -> objectMapper.convertValue(map, objectMapper.constructType(valueType))); StepDefinition stepDefinition = new StubStepDefinition("Given some stuff:", getTypeFromStepDefinition()); StepExpression expression = stepExpressionFactory.createExpression(stepDefinition); List<List<String>> table = asList(asList("name", "amount", "unit"), asList("chocolate", null, "tbsp")); List<Argument> match = expression.match("Given some stuff:", table); List<Ingredient> ingredients = (List<Ingredient>) match.get(0).getValue(); Ingredient ingredient = ingredients.get(0); assertThat(ingredient.name, is(equalTo("chocolate"))); }
public void createAcl(String addr, AclInfo aclInfo, long millis) throws RemotingConnectException, RemotingSendRequestException, RemotingTimeoutException, InterruptedException, MQBrokerException { CreateAclRequestHeader requestHeader = new CreateAclRequestHeader(aclInfo.getSubject()); RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.AUTH_CREATE_ACL, requestHeader); request.setBody(RemotingSerializable.encode(aclInfo)); RemotingCommand response = this.remotingClient.invokeSync(addr, request, millis); assert response != null; switch (response.getCode()) { case ResponseCode.SUCCESS: { return; } default: break; } throw new MQBrokerException(response.getCode(), response.getRemark()); }
@Test public void testCreateAcl() throws RemotingException, InterruptedException, MQBrokerException { mockInvokeSync(); mqClientAPI.createAcl(defaultBrokerAddr, new AclInfo(), defaultTimeout); }
public static String decode(String str, String encode) throws UnsupportedEncodingException { return innerDecode(null, str, encode); }
@Test void testDecode() throws UnsupportedEncodingException { // % - %25, { - %7B, } - %7D assertEquals("{k,v}", HttpUtils.decode("%7Bk,v%7D", "UTF-8")); assertEquals("{k,v}", HttpUtils.decode("%257Bk,v%257D", "UTF-8")); }
@Override public void execute(ComputationStep.Context context) { try (DbSession dbSession = dbClient.openSession(false)) { branchPersister.persist(dbSession); projectPersister.persist(dbSession); String projectUuid = treeRootHolder.getRoot().getUuid(); // safeguard, reset all rows to b-changed=false dbClient.componentDao().resetBChangedForBranchUuid(dbSession, projectUuid); Map<String, ComponentDto> existingDtosByUuids = indexExistingDtosByUuids(dbSession); boolean isRootPrivate = isRootPrivate(treeRootHolder.getRoot(), existingDtosByUuids); // Insert or update the components in database. They are removed from existingDtosByUuids // at the same time. new PathAwareCrawler<>(new PersistComponentStepsVisitor(existingDtosByUuids, dbSession)) .visit(treeRootHolder.getRoot()); disableRemainingComponents(dbSession, existingDtosByUuids.values()); dbClient.componentDao().setPrivateForBranchUuidWithoutAudit(dbSession, projectUuid, isRootPrivate); dbSession.commit(); } }
@Test public void should_fail_if_project_is_not_stored_in_database_yet() { TreeRootHolder treeRootHolder = mock(TreeRootHolder.class); Component component = mock(Component.class); DbClient dbClient = mock(DbClient.class); ComponentDao componentDao = mock(ComponentDao.class); String projectKey = randomAlphabetic(20); doReturn(component).when(treeRootHolder).getRoot(); doReturn(projectKey).when(component).getKey(); doReturn(componentDao).when(dbClient).componentDao(); doReturn(emptyList()).when(componentDao).selectByBranchUuid(eq(projectKey), any(DbSession.class)); assertThatThrownBy(() -> new PersistComponentsStep( dbClient, treeRootHolder, System2.INSTANCE, mock(MutableDisabledComponentsHolder.class), mock(BranchPersister.class), mock(ProjectPersister.class)).execute(new TestComputationStepContext())) .isInstanceOf(IllegalStateException.class) .hasMessageContaining("The project '" + projectKey + "' is not stored in the database, during a project analysis"); }
@Override public void removeProcessor(PacketProcessor processor) { // Remove the processor entry. for (int i = 0; i < processors.size(); i++) { if (processors.get(i).processor() == processor) { processors.remove(i); break; } } }
@Test public void removeProcessorTest() { PacketProcessor testProcessor = new TestProcessor(); packetManager1.addProcessor(testProcessor, PROCESSOR_PRIORITY); assertEquals("1 processor expected", 1, packetManager1.getProcessors().size()); assertEquals("0 processor expected", 0, packetManager2.getProcessors().size()); packetManager1.removeProcessor(testProcessor); assertEquals("0 processor expected", 0, packetManager1.getProcessors().size()); assertEquals("0 processor expected", 0, packetManager2.getProcessors().size()); }
public int getDumpSchedulerLogsFailedRetrieved() { return numDumpSchedulerLogsFailedRetrieved.value(); }
@Test public void testDumpSchedulerLogsRetrievedFailed() { long totalBadBefore = metrics.getDumpSchedulerLogsFailedRetrieved(); badSubCluster.getDumpSchedulerLogsFailed(); Assert.assertEquals(totalBadBefore + 1, metrics.getDumpSchedulerLogsFailedRetrieved()); }
public static String[] split(String str) { return split(str, ESCAPE_CHAR, COMMA); }
@Test (timeout = 30000) public void testSimpleSplit() throws Exception { final String[] TO_TEST = { "a/b/c", "a/b/c////", "///a/b/c", "", "/", "////"}; for (String testSubject : TO_TEST) { assertArrayEquals("Testing '" + testSubject + "'", testSubject.split("/"), StringUtils.split(testSubject, '/')); } }
@Override public Long createProject(GoViewProjectCreateReqVO createReqVO) { // insert GoViewProjectDO goViewProject = GoViewProjectConvert.INSTANCE.convert(createReqVO) .setStatus(CommonStatusEnum.DISABLE.getStatus()); goViewProjectMapper.insert(goViewProject); // return the new id return goViewProject.getId(); }
@Test public void testCreateProject_success() { // prepare parameters GoViewProjectCreateReqVO reqVO = randomPojo(GoViewProjectCreateReqVO.class); // invoke Long goViewProjectId = goViewProjectService.createProject(reqVO); // assert assertNotNull(goViewProjectId); // verify the record's fields are correct GoViewProjectDO goViewProject = goViewProjectMapper.selectById(goViewProjectId); assertPojoEquals(reqVO, goViewProject); }
@Override public Path copy(final Path file, final Path target, final TransferStatus status, final ConnectionCallback callback, final StreamListener listener) throws BackgroundException { try { if(status.isExists()) { if(log.isWarnEnabled()) { log.warn(String.format("Delete file %s to be replaced with %s", target, file)); } new DropboxDeleteFeature(session).delete(Collections.singletonMap(target, status), callback, new Delete.DisabledCallback()); } // If the source path is a folder all its contents will be copied. final RelocationResult result = new DbxUserFilesRequests(session.getClient(file)).copyV2(containerService.getKey(file), containerService.getKey(target)); listener.sent(status.getLength()); return target.withAttributes(new DropboxAttributesFinderFeature(session).toAttributes(result.getMetadata())); } catch(DbxException e) { throw new DropboxExceptionMappingService().map("Cannot copy {0}", e, file); } }
@Test public void testCopyNotFound() throws Exception { final DropboxCopyFeature feature = new DropboxCopyFeature(session); final Path home = new DefaultHomeFinderService(session).find(); final Path test = new Path(home, UUID.randomUUID().toString(), EnumSet.of(Path.Type.file)); assertThrows(NotfoundException.class, () -> feature.copy(test, new Path(home, UUID.randomUUID().toString(), EnumSet.of(Path.Type.file)), new TransferStatus(), new DisabledConnectionCallback(), new DisabledStreamListener())); }
public static ThreadPoolExecutor newFixedThreadPool(int corePoolSize) { return new ThreadPoolExecutor(corePoolSize, corePoolSize, 0, TimeUnit.MILLISECONDS, new SynchronousQueue<Runnable>()); }
@Test public void newFixedThreadPool3() throws Exception { BlockingQueue<Runnable> queue = new SynchronousQueue<Runnable>(); RejectedExecutionHandler handler = new RejectedExecutionHandler() { @Override public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) { } }; ThreadFactory factory = new NamedThreadFactory("xxx"); ThreadPoolExecutor executor = ThreadPoolUtils.newFixedThreadPool(10, queue, factory, handler); Assert.assertEquals(executor.getCorePoolSize(), 10); Assert.assertEquals(executor.getMaximumPoolSize(), 10); Assert.assertEquals(executor.getQueue(), queue); Assert.assertEquals(executor.getThreadFactory(), factory); Assert.assertEquals(executor.getRejectedExecutionHandler(), handler); }
public static ResourceId matchNewDirectory(String singleResourceSpec, String... baseNames) { ResourceId currentDir = matchNewResource(singleResourceSpec, true); for (String dir : baseNames) { currentDir = currentDir.resolve(dir, StandardResolveOptions.RESOLVE_DIRECTORY); } return currentDir; }
@Test public void testMatchNewDirectory() { List<KV<String, KV<String, String[]>>> testCases = ImmutableList.<KV<String, KV<String, String[]>>>builder() .add(KV.of("/abc/d/", KV.of("/abc", new String[] {"d"}))) .add(KV.of("/abc/d/", KV.of("/abc/", new String[] {"d"}))) .add(KV.of("/abc/d/", KV.of("/abc", new String[] {"d/"}))) .add(KV.of("/abc/d/e/f/", KV.of("/abc", new String[] {"d", "e", "f"}))) .add(KV.of("/abc/", KV.of("/abc", new String[] {}))) .build(); for (KV<String, KV<String, String[]>> testCase : testCases) { ResourceId expected = FileSystems.matchNewResource(testCase.getKey(), true); ResourceId actual = FileSystems.matchNewDirectory( testCase.getValue().getKey(), testCase.getValue().getValue()); assertEquals(expected, actual); } }
@Nullable public static URI uriWithTrailingSlash(@Nullable final URI uri) { if (uri == null) { return null; } final String path = firstNonNull(uri.getPath(), "/"); if (path.endsWith("/")) { return uri; } else { try { return new URI( uri.getScheme(), uri.getUserInfo(), uri.getHost(), uri.getPort(), path + "/", uri.getQuery(), uri.getFragment()); } catch (URISyntaxException e) { throw new RuntimeException("Could not parse URI.", e); } } }
@Test public void uriWithTrailingSlashReturnsNullIfURIIsNull() { assertNull(Tools.uriWithTrailingSlash(null)); }
@Bean public ShenyuPlugin tarsPlugin() { return new TarsPlugin(); }
@Test public void testTarsPlugin() { applicationContextRunner.run(context -> { ShenyuPlugin plugin = context.getBean("tarsPlugin", ShenyuPlugin.class); assertNotNull(plugin); assertThat(plugin.named()).isEqualTo(PluginEnum.TARS.getName()); } ); }
@Operation(summary = "queryAllClusterList", description = "QUERY_ALL_CLUSTER_LIST_NOTES") @GetMapping(value = "/query-cluster-list") @ResponseStatus(HttpStatus.OK) @ApiException(QUERY_CLUSTER_ERROR) public Result<List<ClusterDto>> queryAllClusterList(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { List<ClusterDto> clusterDtos = clusterService.queryAllClusterList(); return Result.success(clusterDtos); }
@Test public void testQueryAllClusterList() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); MvcResult mvcResult = mockMvc.perform(get("/cluster/query-cluster-list") .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); logger.info(result.toString()); Assertions.assertTrue(result != null && result.isSuccess()); logger.info("query all cluster return result:{}", mvcResult.getResponse().getContentAsString()); }
@Override public Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException { synchronized (getClassLoadingLock(name)) { Class<?> loadedClass = findLoadedClass(name); if (loadedClass != null) { return loadedClass; } if (isClosed) { throw new ClassNotFoundException("This ClassLoader is closed"); } if (config.shouldAcquire(name)) { loadedClass = PerfStatsCollector.getInstance() .measure("load sandboxed class", () -> maybeInstrumentClass(name)); } else { loadedClass = getParent().loadClass(name); } if (resolve) { resolveClass(loadedClass); } return loadedClass; } }
@Test public void shouldPerformClassLoadAndInstrumentLoadForInstrumentedClasses() throws Exception { ClassLoader classLoader = new SandboxClassLoader(configureBuilder().build()); Class<?> exampleClass = classLoader.loadClass(AnExampleClass.class.getName()); assertSame(classLoader, exampleClass.getClassLoader()); Field roboDataField = exampleClass.getField(ShadowConstants.CLASS_HANDLER_DATA_FIELD_NAME); assertNotNull(roboDataField); assertThat(Modifier.isPublic(roboDataField.getModifiers())).isTrue(); assertThat(Modifier.isTransient(roboDataField.getModifiers())).isTrue(); // Java 9 doesn't allow updates to final fields from outside <init> or <clinit>: // https://bugs.openjdk.java.net/browse/JDK-8157181 // Therefore, these fields need to be nonfinal / be made nonfinal. assertThat(Modifier.isFinal(roboDataField.getModifiers())).isFalse(); assertThat(Modifier.isFinal(exampleClass.getField("STATIC_FINAL_FIELD").getModifiers())) .isFalse(); assertThat(Modifier.isFinal(exampleClass.getField("nonstaticFinalField").getModifiers())) .isFalse(); }
public static <T> Class<T> getClass(T t) { @SuppressWarnings("unchecked") Class<T> clazz = (Class<T>)t.getClass(); return clazz; }
@Test public void testGetClass() { //test with Integer Integer x = new Integer(42); Class<Integer> c = GenericsUtil.getClass(x); assertEquals("Correct generic type is acquired from object", Integer.class, c); //test with GenericClass<Integer> GenericClass<Integer> testSubject = new GenericClass<Integer>(); Class<GenericClass<Integer>> c2 = GenericsUtil.getClass(testSubject); assertEquals("Inner generics are acquired from object.", GenericClass.class, c2); }
@Override public Object getObject(final int columnIndex) throws SQLException { return mergeResultSet.getValue(columnIndex, Object.class); }
@Test void assertGetObjectWithColumnIndex() throws SQLException { when(mergeResultSet.getValue(1, Object.class)).thenReturn("object_value"); assertThat(shardingSphereResultSet.getObject(1), is("object_value")); }
public List<RoleConfig> getRoleConfigs() { return filterRolesBy(RoleConfig.class); }
@Test public void getRoleConfigsShouldReturnOnlyNonPluginRoles() { Role admin = new RoleConfig(new CaseInsensitiveString("admin")); Role view = new RoleConfig(new CaseInsensitiveString("view")); Role blackbird = new PluginRoleConfig("blackbird", "foo"); Role spacetiger = new PluginRoleConfig("spacetiger", "foo"); RolesConfig rolesConfig = new RolesConfig(admin, blackbird, view, spacetiger); List<RoleConfig> roles = rolesConfig.getRoleConfigs(); assertThat(roles, hasSize(2)); assertThat(roles, contains(admin, view)); }
public MapBuilder getMap(String name) { MapBuilder a = mapBuilderMap.get(name); if (a == null) { validateMap(name); a = new MapBuilder(configDefinition, name); mapBuilderMap.put(name, a); } return a; }
@Test public void require_that_maps_support_simple_values() { ConfigPayloadBuilder builder = new ConfigPayloadBuilder(); ConfigPayloadBuilder.MapBuilder map = builder.getMap("foo"); map.put("fookey", "foovalue"); map.put("barkey", "barvalue"); map.put("bazkey", "bazvalue"); map.put("fookey", "lolvalue"); assertEquals(3, map.getElements().size()); Cursor root = createSlime(builder); Cursor a = root.field("foo"); assertEquals("barvalue", a.field("barkey").asString()); assertEquals("bazvalue", a.field("bazkey").asString()); assertEquals("lolvalue", a.field("fookey").asString()); }
private synchronized RemotingCommand updateColdDataFlowCtrGroupConfig(ChannelHandlerContext ctx, RemotingCommand request) { final RemotingCommand response = RemotingCommand.createResponseCommand(null); LOGGER.info("updateColdDataFlowCtrGroupConfig called by {}", RemotingHelper.parseChannelRemoteAddr(ctx.channel())); byte[] body = request.getBody(); if (body != null) { try { String bodyStr = new String(body, MixAll.DEFAULT_CHARSET); Properties properties = MixAll.string2Properties(bodyStr); if (properties != null) { LOGGER.info("updateColdDataFlowCtrGroupConfig new config: {}, client: {}", properties, ctx.channel().remoteAddress()); properties.forEach((key, value) -> { try { String consumerGroup = String.valueOf(key); Long threshold = Long.valueOf(String.valueOf(value)); this.brokerController.getColdDataCgCtrService() .addOrUpdateGroupConfig(consumerGroup, threshold); } catch (Exception e) { LOGGER.error("updateColdDataFlowCtrGroupConfig properties on entry error, key: {}, val: {}", key, value, e); } }); } else { LOGGER.error("updateColdDataFlowCtrGroupConfig string2Properties error"); response.setCode(ResponseCode.SYSTEM_ERROR); response.setRemark("string2Properties error"); return response; } } catch (UnsupportedEncodingException e) { LOGGER.error("updateColdDataFlowCtrGroupConfig UnsupportedEncodingException", e); response.setCode(ResponseCode.SYSTEM_ERROR); response.setRemark("UnsupportedEncodingException " + e); return response; } } response.setCode(ResponseCode.SUCCESS); response.setRemark(null); return response; }
@Test public void testUpdateColdDataFlowCtrGroupConfig() throws RemotingCommandException { RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.UPDATE_COLD_DATA_FLOW_CTR_CONFIG, null); RemotingCommand response = adminBrokerProcessor.processRequest(handlerContext, request); assertThat(response.getCode()).isEqualTo(ResponseCode.SUCCESS); request.setBody("consumerGroup1=1".getBytes()); response = adminBrokerProcessor.processRequest(handlerContext, request); assertThat(response.getCode()).isEqualTo(ResponseCode.SUCCESS); request.setBody("".getBytes()); response = adminBrokerProcessor.processRequest(handlerContext, request); assertThat(response.getCode()).isEqualTo(ResponseCode.SUCCESS); }
public CompletableFuture<ChangeInvisibleDurationResponse> changeInvisibleDuration(ProxyContext ctx, ChangeInvisibleDurationRequest request) { CompletableFuture<ChangeInvisibleDurationResponse> future = new CompletableFuture<>(); try { validateTopicAndConsumerGroup(request.getTopic(), request.getGroup()); validateInvisibleTime(Durations.toMillis(request.getInvisibleDuration())); ReceiptHandle receiptHandle = ReceiptHandle.decode(request.getReceiptHandle()); String group = request.getGroup().getName(); MessageReceiptHandle messageReceiptHandle = messagingProcessor.removeReceiptHandle(ctx, grpcChannelManager.getChannel(ctx.getClientID()), group, request.getMessageId(), receiptHandle.getReceiptHandle()); if (messageReceiptHandle != null) { receiptHandle = ReceiptHandle.decode(messageReceiptHandle.getReceiptHandleStr()); } return this.messagingProcessor.changeInvisibleTime( ctx, receiptHandle, request.getMessageId(), group, request.getTopic().getName(), Durations.toMillis(request.getInvisibleDuration()) ).thenApply(ackResult -> convertToChangeInvisibleDurationResponse(ctx, request, ackResult)); } catch (Throwable t) { future.completeExceptionally(t); } return future; }
@Test public void testChangeInvisibleDurationInvisibleTimeTooLarge() throws Throwable { try { this.changeInvisibleDurationActivity.changeInvisibleDuration( createContext(), ChangeInvisibleDurationRequest.newBuilder() .setInvisibleDuration(Durations.fromDays(7)) .setTopic(Resource.newBuilder().setName(TOPIC).build()) .setGroup(Resource.newBuilder().setName(CONSUMER_GROUP).build()) .setMessageId("msgId") .setReceiptHandle(buildReceiptHandle(TOPIC, System.currentTimeMillis(), 3000)) .build() ).get(); fail("expected a GrpcProxyException for an invisible time that is too large"); } catch (ExecutionException executionException) { GrpcProxyException exception = (GrpcProxyException) executionException.getCause(); assertEquals(Code.ILLEGAL_INVISIBLE_TIME, exception.getCode()); } }
@Override public SortedSet<String> languages() { return globalLanguagesCache; }
@Test public void should_add_languages_per_module_and_globally() { InputComponentStoreTester tester = new InputComponentStoreTester(sonarRuntime); String mod1Key = "mod1"; tester.addFile(mod1Key, "src/main/java/Foo.java", "java"); String mod2Key = "mod2"; tester.addFile(mod2Key, "src/main/groovy/Foo.groovy", "groovy"); assertThat(tester.languages(mod1Key)).containsExactly("java"); assertThat(tester.languages(mod2Key)).containsExactly("groovy"); assertThat(tester.languages()).containsExactlyInAnyOrder("java", "groovy"); }
@Override public Map<QueryId, PersistentQueryMetadata> getPersistentQueries() { return Collections.unmodifiableMap(persistentQueries); }
@Test public void shouldGetPersistentQueries() { // Given: final PersistentQueryMetadata q1 = givenCreate(registry, "q1", "source", Optional.of("sink1"), CREATE_AS); final PersistentQueryMetadata q2 = givenCreate(registry, "q2", "source", Optional.of("sink2"), INSERT); final PersistentQueryMetadata q3 = givenCreate(registry, "q3", "source", Optional.empty(), CREATE_SOURCE); // When: final Map<QueryId, PersistentQueryMetadata> persistent = registry.getPersistentQueries(); // Then: assertThat(persistent.size(), is(3)); assertThat(persistent.get(new QueryId("q1")), is(q1)); assertThat(persistent.get(new QueryId("q2")), is(q2)); assertThat(persistent.get(new QueryId("q3")), is(q3)); }
@Override public boolean containsMessage(String queueName, String messageId) { if (!queues.containsKey(queueName)) { queues.put(queueName, new ConcurrentLinkedDeque<>()); } return queues.get(queueName).contains(messageId); }
@Test public void testContainsMessage() { String queueName = "test-queue"; String id = "abcd-1234-defg-5678"; assertFalse(queueDao.containsMessage(queueName, id)); assertEquals(1, internalQueue.size()); assertTrue(internalQueue.get(queueName).isEmpty()); assertTrue(queueDao.pushIfNotExists(queueName, id, 123)); assertTrue(queueDao.containsMessage(queueName, id)); assertEquals(1, internalQueue.size()); assertEquals(1, internalQueue.get(queueName).size()); assertEquals(id, internalQueue.get(queueName).peek()); }
static Object parseCell(String cell, Schema.Field field) { Schema.FieldType fieldType = field.getType(); try { switch (fieldType.getTypeName()) { case STRING: return cell; case INT16: return Short.parseShort(cell); case INT32: return Integer.parseInt(cell); case INT64: return Long.parseLong(cell); case BOOLEAN: return Boolean.parseBoolean(cell); case BYTE: return Byte.parseByte(cell); case DECIMAL: return new BigDecimal(cell); case DOUBLE: return Double.parseDouble(cell); case FLOAT: return Float.parseFloat(cell); case DATETIME: return Instant.parse(cell); default: throw new UnsupportedOperationException( "Unsupported type: " + fieldType + ", consider using withCustomRecordParsing"); } } catch (IllegalArgumentException e) { throw new IllegalArgumentException( e.getMessage() + " field " + field.getName() + " was received -- type mismatch"); } }
@Test public void givenValidIntegerCell_parses() { DefaultMapEntry cellToExpectedValue = new DefaultMapEntry("12", 12); Schema schema = Schema.builder().addInt32Field("an_integer").addInt64Field("a_long").build(); assertEquals( cellToExpectedValue.getValue(), CsvIOParseHelpers.parseCell( cellToExpectedValue.getKey().toString(), schema.getField("an_integer"))); }
public static Read<JmsRecord> read() { return new AutoValue_JmsIO_Read.Builder<JmsRecord>() .setMaxNumRecords(Long.MAX_VALUE) .setCoder(SerializableCoder.of(JmsRecord.class)) .setCloseTimeout(DEFAULT_CLOSE_TIMEOUT) .setRequiresDeduping(false) .setMessageMapper( new MessageMapper<JmsRecord>() { @Override public JmsRecord mapMessage(Message message) throws Exception { TextMessage textMessage = (TextMessage) message; Map<String, Object> properties = new HashMap<>(); @SuppressWarnings("rawtypes") Enumeration propertyNames = textMessage.getPropertyNames(); while (propertyNames.hasMoreElements()) { String propertyName = (String) propertyNames.nextElement(); properties.put(propertyName, textMessage.getObjectProperty(propertyName)); } return new JmsRecord( textMessage.getJMSMessageID(), textMessage.getJMSTimestamp(), textMessage.getJMSCorrelationID(), textMessage.getJMSReplyTo(), textMessage.getJMSDestination(), textMessage.getJMSDeliveryMode(), textMessage.getJMSRedelivered(), textMessage.getJMSType(), textMessage.getJMSExpiration(), textMessage.getJMSPriority(), properties, textMessage.getText()); } }) .build(); }
@Test public void testReadMessages() throws Exception { long count = 5; produceTestMessages(count, JmsIOTest::createTextMessage); // read from the queue PCollection<JmsRecord> output = pipeline.apply( JmsIO.read() .withConnectionFactory(connectionFactory) .withQueue(QUEUE) .withUsername(USERNAME) .withPassword(PASSWORD) .withMaxNumRecords(count)); PAssert.thatSingleton(output.apply("Count", Count.globally())).isEqualTo(count); pipeline.run(); assertQueueIsEmpty(); }
public static HiveFileInfo createHiveFileInfo(LocatedFileStatus locatedFileStatus, Optional<byte[]> extraFileContext) throws IOException { return createHiveFileInfo( locatedFileStatus, extraFileContext, ImmutableMap.of()); }
@Test
public void testThriftRoundTrip() throws IOException {
    ThriftCodec<HiveFileInfo> hiveFileInfoThriftCodec = new ThriftCodecManager().getCodec(HiveFileInfo.class);
    HiveFileInfo hiveFileInfo = createHiveFileInfo(new LocatedFileStatus(
            100, false, 0, 0L, 0L, 0L, null, null, null, null,
            new Path("test"),
            new org.apache.hadoop.fs.BlockLocation[] {new BlockLocation(new String[1], new String[] {"localhost"}, 0, 100)}),
            Optional.empty());
    int thriftBufferSize = toIntExact(new DataSize(1000, BYTE).toBytes());
    SliceOutput dynamicSliceOutput = new DynamicSliceOutput(thriftBufferSize);
    ThriftProtocolUtils.write(hiveFileInfo, hiveFileInfoThriftCodec, FB_COMPACT, dynamicSliceOutput);
    byte[] serialized = dynamicSliceOutput.slice().getBytes();
    HiveFileInfo copy = ThriftProtocolUtils.read(hiveFileInfoThriftCodec, FB_COMPACT, Slices.wrappedBuffer(serialized).getInput());
    assertEquals(copy, hiveFileInfo);
}

public String parseEmbeddedExample() throws IOException, SAXException, TikaException {
    AutoDetectParser parser = new AutoDetectParser();
    BodyContentHandler handler = new BodyContentHandler();
    Metadata metadata = new Metadata();
    ParseContext context = new ParseContext();
    context.set(Parser.class, parser);
    try (InputStream stream = ParsingExample.class.getResourceAsStream("test_recursive_embedded.docx")) {
        parser.parse(stream, handler, metadata, context);
        return handler.toString();
    }
}

@Test
public void testRecursiveParseExample() throws IOException, SAXException, TikaException {
    String result = parsingExample.parseEmbeddedExample();
    assertContains("embed_0", result);
    assertContains("embed1/embed1a.txt", result);
    assertContains("embed3/embed3.txt", result);
    assertContains("When in the Course", result);
}

public static <T> RetryOperator<T> of(Retry retry) {
    return new RetryOperator<>(retry);
}

@Test
public void shouldFailWithExceptionFlux() {
    RetryConfig config = retryConfig();
    Retry retry = Retry.of("testName", config);
    RetryOperator<Object> retryOperator = RetryOperator.of(retry);

    StepVerifier.create(Flux.error(new HelloWorldException())
            .transformDeferred(retryOperator))
            .expectSubscription()
            .expectError(HelloWorldException.class)
            .verify(Duration.ofSeconds(1));

    Retry.Metrics metrics = retry.getMetrics();
    assertThat(metrics.getNumberOfSuccessfulCallsWithoutRetryAttempt()).isZero();
    assertThat(metrics.getNumberOfSuccessfulCallsWithRetryAttempt()).isZero();
    assertThat(metrics.getNumberOfFailedCallsWithRetryAttempt()).isEqualTo(1);
    assertThat(metrics.getNumberOfFailedCallsWithoutRetryAttempt()).isZero();
}

void prioritizeCopiesAndShiftUps(List<MigrationInfo> migrations) {
    for (int i = 0; i < migrations.size(); i++) {
        prioritize(migrations, i);
    }
    if (logger.isFinestEnabled()) {
        StringBuilder s = new StringBuilder("Migration order after prioritization: [");
        int ix = 0;
        for (MigrationInfo migration : migrations) {
            s.append("\n\t").append(ix++).append("- ").append(migration).append(",");
        }
        s.deleteCharAt(s.length() - 1);
        s.append("]");
        logger.finest(s.toString());
    }
}

@Test
public void testCopyPrioritizationAgainstShiftDownToColderIndex() throws UnknownHostException {
    List<MigrationInfo> migrations = new ArrayList<>();
    final MigrationInfo migration1 = new MigrationInfo(0,
            new PartitionReplica(new Address("localhost", 5701), uuids[0]),
            new PartitionReplica(new Address("localhost", 5702), uuids[1]), 0, 2, -1, 0);
    final MigrationInfo migration2 = new MigrationInfo(0, null,
            new PartitionReplica(new Address("localhost", 5703), uuids[2]), -1, -1, -1, 1);
    migrations.add(migration1);
    migrations.add(migration2);
    migrationPlanner.prioritizeCopiesAndShiftUps(migrations);
    assertEquals(asList(migration2, migration1), migrations);
}

public String scope() {
    return scope;
}

@Test
public void testMethods() {
    BadConfigurationException ex = new BadConfigurationException("my-scope", "error");
    assertThat(ex.scope()).isEqualTo("my-scope");
    assertThat(ex).hasToString("BadConfigurationException{scope=my-scope, errors=[error]}");
}

@Override
public Object handle(String targetService, List<Object> invokers, Object invocation,
        Map<String, String> queryMap, String serviceInterface) {
    if (!shouldHandle(invokers)) {
        return invokers;
    }
    List<Object> result = getTargetInvokersByRules(invokers, targetService);
    return super.handle(targetService, result, invocation, queryMap, serviceInterface);
}

@Test
public void testGetTargetInvokerByTagRulesWithPolicySceneFour() {
    // initialize the routing rule
    RuleInitializationUtils.initAZTagMatchTriggerThresholdMinAllInstancesPolicyRule();

    // Scenario 1: The downstream provider has instances that meet the requirements
    List<Object> invokers = new ArrayList<>();
    ApacheInvoker<Object> invoker1 = new ApacheInvoker<>("1.0.0", "az1");
    invokers.add(invoker1);
    ApacheInvoker<Object> invoker2 = new ApacheInvoker<>("1.0.0", "az2");
    invokers.add(invoker2);
    ApacheInvoker<Object> invoker3 = new ApacheInvoker<>("1.0.1", "az1");
    invokers.add(invoker3);
    ApacheInvoker<Object> invoker4 = new ApacheInvoker<>("1.0.1", "az2");
    invokers.add(invoker4);
    ApacheInvoker<Object> invoker5 = new ApacheInvoker<>("1.0.2", "az2");
    invokers.add(invoker5);
    Invocation invocation = new ApacheInvocation();
    Map<String, String> queryMap = new HashMap<>();
    queryMap.put("zone", "az1");
    queryMap.put("interface", "io.sermant.foo.FooTest");
    Map<String, String> parameters = new HashMap<>();
    parameters.putIfAbsent(RouterConstant.META_ZONE_KEY, "az1");
    DubboCache.INSTANCE.setParameters(parameters);
    DubboCache.INSTANCE.putApplication("io.sermant.foo.FooTest", "foo");
    List<Object> targetInvokers = (List<Object>) tagRouteHandler.handle(
            DubboCache.INSTANCE.getApplication("io.sermant.foo.FooTest"),
            invokers, invocation, queryMap, "io.sermant.foo.FooTest");
    Assert.assertEquals(5, targetInvokers.size());
    ConfigCache.getLabel(RouterConstant.DUBBO_CACHE_NAME).resetRouteRule(Collections.emptyMap());
}
@Override
public BulkWriter<T> create(FSDataOutputStream out) throws IOException {
    OrcFile.WriterOptions opts = getWriterOptions();
    opts.physicalWriter(new PhysicalWriterImpl(out, opts));

    // The path of the Writer is not used to indicate the destination file
    // in this case, since we have used a dedicated physical writer to write
    // to the given output stream directly. However, the path is used as the
    // key of the writer in the ORC memory manager, so we need to make it unique.
    Path unusedPath = new Path(UUID.randomUUID().toString());
    return new OrcBulkWriter<>(vectorizer, new WriterImpl(null, unusedPath, opts));
}
@Test
void testNotOverrideInMemoryManager(@TempDir java.nio.file.Path tmpDir) throws IOException {
    TestMemoryManager memoryManager = new TestMemoryManager();
    OrcBulkWriterFactory<Record> factory = new TestOrcBulkWriterFactory<>(
            new RecordVectorizer("struct<_col0:string,_col1:int>"), memoryManager);
    factory.create(new LocalDataOutputStream(tmpDir.resolve("file1").toFile()));
    factory.create(new LocalDataOutputStream(tmpDir.resolve("file2").toFile()));
    List<Path> addedWriterPath = memoryManager.getAddedWriterPath();
    assertThat(addedWriterPath).hasSize(2);
    assertThat(addedWriterPath.get(1)).isNotEqualTo(addedWriterPath.get(0));
}

@VisibleForTesting
DictTypeDO validateDictTypeExists(Long id) {
    if (id == null) {
        return null;
    }
    DictTypeDO dictType = dictTypeMapper.selectById(id);
    if (dictType == null) {
        throw exception(DICT_TYPE_NOT_EXISTS);
    }
    return dictType;
}
@Test
public void testValidateDictTypeExists_success() {
    // mock data
    DictTypeDO dbDictType = randomDictTypeDO();
    dictTypeMapper.insert(dbDictType); // @Sql: insert an existing record first
    // the call succeeds
    dictTypeService.validateDictTypeExists(dbDictType.getId());
}
public static WindowedValueCoderComponents getWindowedValueCoderComponents(Coder coder) {
    checkArgument(WINDOWED_VALUE_CODER_URN.equals(coder.getSpec().getUrn()));
    return new AutoValue_ModelCoders_WindowedValueCoderComponents(
            coder.getComponentCoderIds(0), coder.getComponentCoderIds(1));
}

@Test
public void windowedValueCoderComponentsNoUrn() {
    thrown.expect(IllegalArgumentException.class);
    ModelCoders.getWindowedValueCoderComponents(
            Coder.newBuilder().setSpec(FunctionSpec.getDefaultInstance()).build());
}
@Override
protected synchronized Class loadClass(String s, boolean b) throws ClassNotFoundException {
    throw new UnsupportedOperationException("I18n classloader supports only resources, not classes");
}
@Test
public void not_support_lookup_of_java_classes() throws ClassNotFoundException {
    assertThatThrownBy(() -> i18nClassloader.loadClass("java.lang.String"))
            .isInstanceOf(UnsupportedOperationException.class);
}
@Override
protected Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
    // has the class been loaded already?
    Class<?> loadedClass = findLoadedClass(name);
    if (loadedClass == null) {
        try {
            // find the class in the jar URLs given as the first constructor parameter
            loadedClass = findClass(name);
        } catch (ClassNotFoundException ignored) {
            // ignore class not found
        }
        if (loadedClass == null) {
            loadedClass = getParent().loadClass(name);
        }
        if (loadedClass == null) {
            throw new ClassNotFoundException("Could not find class " + name + " in this classloader or in the parent classloader");
        }
    }
    if (resolve) {
        resolveClass(loadedClass);
    }
    return loadedClass;
}
@Test
public void canLoadClassFromChildClassLoaderWhenNotPresentInParent() throws Exception {
    cl = new ChildFirstClassLoader(new URL[]{resourceJarUrl("deployment/sample-pojo-1.0-car.jar")},
            ClassLoader.getSystemClassLoader());
    String className = "com.sample.pojo.car.Car";
    Class<?> clazz = cl.loadClass(className);
    assertThat(clazz).isNotNull();
    assertThat(clazz.getName()).isEqualTo(className);
    assertThat(clazz.getClassLoader()).isEqualTo(cl);
}

@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    if (obj instanceof TunnelEndPoint) {
        final TunnelEndPoint that = (TunnelEndPoint) obj;
        return this.getClass() == that.getClass() && Objects.equals(this.value, that.value);
    }
    return false;
}

@Test
public void testEquals() {
    new EqualsTester()
            .addEqualityGroup(endPoint1, sameAsEndPoint1)
            .addEqualityGroup(endPoint2)
            .addEqualityGroup(endPoint3)
            .testEquals();
}

public static int computeDistance(Class<?> parent, Class<?> child) {
    int distance = -1;
    if (parent.equals(child)) {
        distance = 0;
    }
    // Search through super classes
    if (distance == -1) {
        distance = computeSuperDistance(parent, child);
    }
    // Search through interfaces (costly)
    if (distance == -1) {
        distance = computeInterfaceDistance(parent, child, new HashSet<String>(), Arrays.asList(child.getInterfaces()));
    }
    if (distance != -1) {
        distance *= DISTANCE_FACTOR;
    }
    return distance;
}

@Test
public void testDistance() {
    assertEquals(0, TypeUtil.computeDistance(ATest.class, ATest.class));
    assertEquals(100, TypeUtil.computeDistance(ATest.class, BTest.class));
    assertEquals(200, TypeUtil.computeDistance(ATest.class, CTest.class));
    assertEquals(300, TypeUtil.computeDistance(ATest.class, DTest.class));
    assertEquals(400, TypeUtil.computeDistance(Object.class, DTest.class));
    assertEquals(-1, TypeUtil.computeDistance(int.class, DTest.class));
    assertEquals(-1, TypeUtil.computeDistance(String.class, DTest.class));
    assertEquals(100, TypeUtil.computeDistance(I0Test.class, I1Test.class));
    assertEquals(100, TypeUtil.computeDistance(I0Test.class, VTest.class));
    assertEquals(200, TypeUtil.computeDistance(I0Test.class, WTest.class));
    assertEquals(100, TypeUtil.computeDistance(I1Test.class, WTest.class));
    assertEquals(100, TypeUtil.computeDistance(I0Test.class, XTest.class));
    assertEquals(200, TypeUtil.computeDistance(I0Test.class, YTest.class));
    assertEquals(300, TypeUtil.computeDistance(J0Test.class, YTest.class));
    assertEquals(400, TypeUtil.computeDistance(I0Test.class, ZTest.class));
}

public void start() {
    this.nativeCacheManager.start();
}

@Test
public final void startShouldStartTheNativeRemoteCacheManager() throws IOException {
    objectUnderTest.start();
    assertTrue("Calling start() on SpringRemoteCacheManager should start the enclosed "
            + "Infinispan RemoteCacheManager. However, it is still not running.",
            remoteCacheManager.isStarted());
}

@Override
protected List<MatchResult> match(List<String> specs) throws IOException {
    return match(new File(".").getAbsolutePath(), specs);
}

@Test
public void testMatchRelativeWildcardPath() throws Exception {
    File baseFolder = temporaryFolder.newFolder("A");
    File expectedFile1 = new File(baseFolder, "file1");
    expectedFile1.createNewFile();

    List<String> expected = ImmutableList.of(expectedFile1.getAbsolutePath());

    // This no longer works:
    // System.setProperty("user.dir", temporaryFolder.getRoot().toString());
    // There is no way to set the working directory without forking. Instead we
    // call in to the helper method that gives just about as good test coverage.
    List<MatchResult> matchResults =
            localFileSystem.match(temporaryFolder.getRoot().toString(), ImmutableList.of("A/*"));
    assertThat(
            toFilenames(matchResults),
            containsInAnyOrder(expected.toArray(new String[expected.size()])));
}

public static KTableHolder<GenericKey> build(
        final KGroupedStreamHolder groupedStream,
        final StreamAggregate aggregate,
        final RuntimeBuildContext buildContext,
        final MaterializedFactory materializedFactory) {
    return build(
            groupedStream,
            aggregate,
            buildContext,
            materializedFactory,
            new AggregateParamsFactory());
}

@Test
public void shouldBuildMaterializedWithCorrectNameForUnwindowedAggregate() {
    // Given:
    givenUnwindowedAggregate();

    // When:
    aggregate.build(planBuilder, planInfo);

    // Then:
    verify(materializedFactory).create(any(), any(), eq("agg-regate-Materialize"));
}
public NodeState getWantedState() {
    NodeState retiredState = new NodeState(node.getType(), State.RETIRED);
    // Don't let the configured retired state override explicitly set Down and Maintenance.
    if (configuredRetired && wantedState.above(retiredState)) {
        return retiredState;
    }
    return wantedState;
}
@Test
void down_wanted_state_overrides_config_retired_state() {
    ClusterFixture fixture = ClusterFixture.forFlatCluster(3)
            .markNodeAsConfigRetired(1)
            .proposeStorageNodeWantedState(1, State.DOWN);
    NodeInfo nodeInfo = fixture.cluster.getNodeInfo(new Node(NodeType.STORAGE, 1));
    assertEquals(State.DOWN, nodeInfo.getWantedState().getState());
}

@Override
@SuppressWarnings({"rawtypes", "unchecked"})
public <T extends Gauge> T gauge(String name) {
    return (T) NoopGauge.INSTANCE;
}

@Test
@SuppressWarnings("rawtypes")
public void accessingACustomGaugeRegistersAndReusesIt() {
    final MetricRegistry.MetricSupplier<Gauge> supplier = () -> gauge;
    final Gauge gauge1 = registry.gauge("thing", supplier);
    final Gauge gauge2 = registry.gauge("thing", supplier);

    assertThat(gauge1).isExactlyInstanceOf(NoopMetricRegistry.NoopGauge.class);
    assertThat(gauge2).isExactlyInstanceOf(NoopMetricRegistry.NoopGauge.class);
    assertThat(gauge1).isSameAs(gauge2);

    verify(listener, never()).onGaugeAdded("thing", gauge1);
}

@Override
public List<RemoteFileInfo> getRemoteFiles(Table table, GetRemoteFilesParams params) {
    RemoteFileInfo remoteFileInfo = new RemoteFileInfo();
    PaimonTable paimonTable = (PaimonTable) table;
    PaimonFilter filter = new PaimonFilter(paimonTable.getDbName(), paimonTable.getTableName(),
            params.getPredicate(), params.getFieldNames());
    if (!paimonSplits.containsKey(filter)) {
        ReadBuilder readBuilder = paimonTable.getNativeTable().newReadBuilder();
        int[] projected = params.getFieldNames().stream()
                .mapToInt(name -> (paimonTable.getFieldNames().indexOf(name))).toArray();
        List<Predicate> predicates = extractPredicates(paimonTable, params.getPredicate());
        List<Split> splits = readBuilder.withFilter(predicates).withProjection(projected).newScan().plan().splits();
        PaimonSplitsInfo paimonSplitsInfo = new PaimonSplitsInfo(predicates, splits);
        paimonSplits.put(filter, paimonSplitsInfo);
        List<RemoteFileDesc> remoteFileDescs = ImmutableList.of(
                PaimonRemoteFileDesc.createPamonRemoteFileDesc(paimonSplitsInfo));
        remoteFileInfo.setFiles(remoteFileDescs);
    } else {
        List<RemoteFileDesc> remoteFileDescs = ImmutableList.of(
                PaimonRemoteFileDesc.createPamonRemoteFileDesc(paimonSplits.get(filter)));
        remoteFileInfo.setFiles(remoteFileDescs);
    }
    return Lists.newArrayList(remoteFileInfo);
}

@Test
public void testGetRemoteFiles(@Mocked FileStoreTable paimonNativeTable, @Mocked ReadBuilder readBuilder)
        throws Catalog.TableNotExistException {
    new MockUp<PaimonMetadata>() {
        @Mock
        public long getTableCreateTime(String dbName, String tblName) {
            return 0L;
        }
    };
    new Expectations() {
        {
            paimonNativeCatalog.getTable((Identifier) any);
            result = paimonNativeTable;
            paimonNativeTable.newReadBuilder();
            result = readBuilder;
            readBuilder.withFilter((List<Predicate>) any).withProjection((int[]) any).newScan().plan().splits();
            result = splits;
        }
    };
    PaimonTable paimonTable = (PaimonTable) metadata.getTable("db1", "tbl1");
    List<String> requiredNames = Lists.newArrayList("f2", "dt");
    List<RemoteFileInfo> result = metadata.getRemoteFiles(paimonTable,
            GetRemoteFilesParams.newBuilder().setFieldNames(requiredNames).build());
    Assert.assertEquals(1, result.size());
    Assert.assertEquals(1, result.get(0).getFiles().size());
    PaimonRemoteFileDesc desc = (PaimonRemoteFileDesc) result.get(0).getFiles().get(0);
    Assert.assertEquals(2, desc.getPaimonSplitsInfo().getPaimonSplits().size());
}

@PostMapping
@Secured(resource = AuthConstants.CONSOLE_RESOURCE_NAME_PREFIX + "permissions", action = ActionTypes.WRITE)
public Object addPermission(@RequestParam String role, @RequestParam String resource, @RequestParam String action) {
    nacosRoleService.addPermission(role, resource, action);
    return RestResultUtils.success("add permission ok!");
}

@Test
void testAddPermission() {
    RestResult<String> result = (RestResult<String>) permissionController.addPermission("admin", "test", "test");
    verify(nacosRoleService, times(1)).addPermission(anyString(), anyString(), anyString());
    assertEquals(200, result.getCode());
}

public static <T> Values<T> of(Iterable<T> elems) {
    return new Values<>(elems, Optional.absent(), Optional.absent(), false);
}

@Test
public void testCreateDefaultOutputCoderUsingCoder() throws Exception {
    Coder<Record> coder = new RecordCoder();
    assertThat(
            p.apply(Create.of(new Record(), new Record2()).withCoder(coder)).getCoder(),
            equalTo(coder));
}

public static long calculateTotalFlinkMemoryFromComponents(Configuration config) {
    Preconditions.checkArgument(config.contains(TaskManagerOptions.TASK_HEAP_MEMORY));
    Preconditions.checkArgument(config.contains(TaskManagerOptions.TASK_OFF_HEAP_MEMORY));
    Preconditions.checkArgument(config.contains(TaskManagerOptions.NETWORK_MEMORY_MAX));
    Preconditions.checkArgument(config.contains(TaskManagerOptions.NETWORK_MEMORY_MIN));
    Preconditions.checkArgument(config.contains(TaskManagerOptions.MANAGED_MEMORY_SIZE));
    Preconditions.checkArgument(config.contains(TaskManagerOptions.FRAMEWORK_HEAP_MEMORY));
    Preconditions.checkArgument(config.contains(TaskManagerOptions.FRAMEWORK_OFF_HEAP_MEMORY));
    Preconditions.checkArgument(
            config.get(TaskManagerOptions.NETWORK_MEMORY_MAX)
                    .equals(config.get(TaskManagerOptions.NETWORK_MEMORY_MIN)));
    return config.get(TaskManagerOptions.TASK_HEAP_MEMORY)
            .add(config.get(TaskManagerOptions.TASK_OFF_HEAP_MEMORY))
            .add(config.get(TaskManagerOptions.NETWORK_MEMORY_MAX))
            .add(config.get(TaskManagerOptions.MANAGED_MEMORY_SIZE))
            .add(config.get(TaskManagerOptions.FRAMEWORK_HEAP_MEMORY))
            .add(config.get(TaskManagerOptions.FRAMEWORK_OFF_HEAP_MEMORY))
            .getBytes();
}

@Test
void testCalculateTotalFlinkMemoryWithAllFactorsBeingSet() {
    Configuration config = new Configuration();
    config.set(TaskManagerOptions.FRAMEWORK_HEAP_MEMORY, new MemorySize(1));
    config.set(TaskManagerOptions.TASK_HEAP_MEMORY, new MemorySize(2));
    config.set(TaskManagerOptions.FRAMEWORK_OFF_HEAP_MEMORY, new MemorySize(3));
    config.set(TaskManagerOptions.TASK_OFF_HEAP_MEMORY, new MemorySize(4));
    config.set(TaskManagerOptions.NETWORK_MEMORY_MAX, new MemorySize(6));
    config.set(TaskManagerOptions.NETWORK_MEMORY_MIN, new MemorySize(6));
    config.set(TaskManagerOptions.MANAGED_MEMORY_SIZE, new MemorySize(7));
    assertThat(TaskExecutorResourceUtils.calculateTotalFlinkMemoryFromComponents(config))
            .isEqualTo(23L);
}

static void checkValidCollectionName(String databaseName, String collectionName) {
    String fullCollectionName = databaseName + "." + collectionName;
    if (collectionName.length() < MIN_COLLECTION_NAME_LENGTH) {
        throw new IllegalArgumentException("Collection name cannot be empty.");
    }
    if (fullCollectionName.length() > MAX_COLLECTION_NAME_LENGTH) {
        throw new IllegalArgumentException("Collection name " + fullCollectionName
                + " cannot be longer than " + MAX_COLLECTION_NAME_LENGTH
                + " characters, including the database name and dot.");
    }
    if (ILLEGAL_COLLECTION_CHARS.matcher(collectionName).find()) {
        throw new IllegalArgumentException("Collection name " + collectionName
                + " is not a valid name. Only letters, numbers, hyphens, underscores and exclamation points are allowed.");
    }
    if (collectionName.charAt(0) != '_' && !Character.isLetter(collectionName.charAt(0))) {
        throw new IllegalArgumentException("Collection name " + collectionName
                + " must start with a letter or an underscore.");
    }
    String illegalKeyword = "system.";
    if (collectionName.startsWith(illegalKeyword)) {
        throw new IllegalArgumentException("Collection name " + collectionName
                + " cannot start with the prefix \"" + illegalKeyword + "\".");
    }
}

@Test
public void testCheckValidCollectionNameThrowsErrorWhenNameContainsNull() {
    assertThrows(
            IllegalArgumentException.class,
            () -> checkValidCollectionName("test-database", "test\0collection"));
}

@Override
protected Map<String, Object> toJsonMap(ILoggingEvent event) {
    final MapBuilder mapBuilder = new MapBuilder(timestampFormatter, customFieldNames, additionalFields, includes.size())
            .addTimestamp("timestamp", isIncluded(EventAttribute.TIMESTAMP), event.getTimeStamp())
            .add("level", isIncluded(EventAttribute.LEVEL), () -> String.valueOf(event.getLevel()))
            .add("thread", isIncluded(EventAttribute.THREAD_NAME), event::getThreadName)
            .add("marker", isIncluded(EventAttribute.MARKER) && event.getMarker() != null, () -> event.getMarker().getName())
            .add("logger", isIncluded(EventAttribute.LOGGER_NAME), event::getLoggerName)
            .add("message", isIncluded(EventAttribute.MESSAGE), event::getFormattedMessage)
            .add("context", isIncluded(EventAttribute.CONTEXT_NAME), () -> event.getLoggerContextVO().getName())
            .add("version", jsonProtocolVersion != null, jsonProtocolVersion)
            .add("exception", isIncluded(EventAttribute.EXCEPTION) && event.getThrowableProxy() != null, () -> throwableProxyConverter.convert(event));

    final boolean includeMdc = isIncluded(EventAttribute.MDC);
    if (flattenMdc) {
        filterMdc(event.getMDCPropertyMap()).forEach((k, v) -> mapBuilder.add(k, includeMdc, v));
    } else {
        mapBuilder.addMap("mdc", includeMdc, () -> filterMdc(event.getMDCPropertyMap()));
    }

    final boolean includeCallerData = isIncluded(EventAttribute.CALLER_DATA);
    final StackTraceElement[] callerData = event.getCallerData();
    if (includeCallerData && callerData.length >= 1) {
        final StackTraceElement stackTraceElement = callerData[0];
        mapBuilder.add("caller_class_name", includeCallerData, stackTraceElement.getClassName());
        mapBuilder.add("caller_method_name", includeCallerData, stackTraceElement.getMethodName());
        mapBuilder.add("caller_file_name", includeCallerData, stackTraceElement.getFileName());
        mapBuilder.addNumber("caller_line_number", includeCallerData, stackTraceElement.getLineNumber());
    }

    return mapBuilder.build();
}

@Test
void testReplaceFieldName() {
    final Map<String, String> customFieldNames = Map.of(
            "timestamp", "@timestamp",
            "message", "@message");
    Map<String, Object> map = new EventJsonLayout(jsonFormatter, timestampFormatter, throwableProxyConverter,
            DEFAULT_EVENT_ATTRIBUTES, customFieldNames, Collections.emptyMap(), Collections.emptySet(), false)
            .toJsonMap(event);
    final HashMap<String, Object> expectedFields = new HashMap<>(defaultExpectedFields);
    expectedFields.put("@timestamp", timestamp);
    expectedFields.put("@message", message);
    expectedFields.remove("timestamp");
    expectedFields.remove("message");
    assertThat(map).isEqualTo(expectedFields);
}

@SuppressWarnings("unchecked")
public <IN, OUT> AvroDatumConverter<IN, OUT> create(Class<IN> inputClass) {
    boolean isMapOnly = ((JobConf) getConf()).getNumReduceTasks() == 0;
    if (AvroKey.class.isAssignableFrom(inputClass)) {
        Schema schema;
        if (isMapOnly) {
            schema = AvroJob.getMapOutputKeySchema(getConf());
            if (null == schema) {
                schema = AvroJob.getOutputKeySchema(getConf());
            }
        } else {
            schema = AvroJob.getOutputKeySchema(getConf());
        }
        if (null == schema) {
            throw new IllegalStateException("Writer schema for output key was not set. Use AvroJob.setOutputKeySchema().");
        }
        return (AvroDatumConverter<IN, OUT>) new AvroWrapperConverter(schema);
    }
    if (AvroValue.class.isAssignableFrom(inputClass)) {
        Schema schema;
        if (isMapOnly) {
            schema = AvroJob.getMapOutputValueSchema(getConf());
            if (null == schema) {
                schema = AvroJob.getOutputValueSchema(getConf());
            }
        } else {
            schema = AvroJob.getOutputValueSchema(getConf());
        }
        if (null == schema) {
            throw new IllegalStateException("Writer schema for output value was not set. Use AvroJob.setOutputValueSchema().");
        }
        return (AvroDatumConverter<IN, OUT>) new AvroWrapperConverter(schema);
    }
    if (BooleanWritable.class.isAssignableFrom(inputClass)) {
        return (AvroDatumConverter<IN, OUT>) new BooleanWritableConverter();
    }
    if (BytesWritable.class.isAssignableFrom(inputClass)) {
        return (AvroDatumConverter<IN, OUT>) new BytesWritableConverter();
    }
    if (ByteWritable.class.isAssignableFrom(inputClass)) {
        return (AvroDatumConverter<IN, OUT>) new ByteWritableConverter();
    }
    if (DoubleWritable.class.isAssignableFrom(inputClass)) {
        return (AvroDatumConverter<IN, OUT>) new DoubleWritableConverter();
    }
    if (FloatWritable.class.isAssignableFrom(inputClass)) {
        return (AvroDatumConverter<IN, OUT>) new FloatWritableConverter();
    }
    if (IntWritable.class.isAssignableFrom(inputClass)) {
        return (AvroDatumConverter<IN, OUT>) new IntWritableConverter();
    }
    if (LongWritable.class.isAssignableFrom(inputClass)) {
        return (AvroDatumConverter<IN, OUT>) new LongWritableConverter();
    }
    if (NullWritable.class.isAssignableFrom(inputClass)) {
        return (AvroDatumConverter<IN, OUT>) new NullWritableConverter();
    }
    if (Text.class.isAssignableFrom(inputClass)) {
        return (AvroDatumConverter<IN, OUT>) new TextConverter();
    }
    throw new UnsupportedOperationException("Unsupported input type: " + inputClass.getName());
}

@Test
void convertNullWritable() {
    AvroDatumConverter<NullWritable, Object> converter = mFactory.create(NullWritable.class);
    assertNull(converter.convert(NullWritable.get()));
}
public void dropDb(String dbName, boolean force) throws MetaNotFoundException {
    Database database;
    try {
        database = getDb(dbName);
    } catch (Exception e) {
        LOG.error("Failed to access database {}", dbName, e);
        throw new MetaNotFoundException("Failed to access database " + dbName);
    }
    if (database == null) {
        throw new MetaNotFoundException("Database " + dbName + " not found");
    }
    String dbLocation = database.getLocation();
    if (Strings.isNullOrEmpty(dbLocation)) {
        throw new MetaNotFoundException("Database location is empty");
    }
    boolean deleteData = false;
    try {
        deleteData = !FileSystem.get(URI.create(dbLocation), hadoopConf)
                .listLocatedStatus(new Path(dbLocation)).hasNext();
    } catch (Exception e) {
        LOG.error("Failed to check database directory", e);
    }
    metastore.dropDb(dbName, deleteData);
}
@Test
public void testDropDb() throws MetaNotFoundException {
    class MockedTestMetaClient extends HiveMetastoreTest.MockedHiveMetaClient {
        public org.apache.hadoop.hive.metastore.api.Database getDb(String dbName) throws RuntimeException {
            if (dbName.equals("not_exist_db")) {
                throw new RuntimeException("db not_exist_db not found");
            }
            return null;
        }
    }
    HiveMetaClient client = new MockedTestMetaClient();
    HiveMetastore metastore = new HiveMetastore(client, "hive_catalog", null);
    ExecutorService executor = Executors.newFixedThreadPool(5);
    CachingHiveMetastore cachingHiveMetastore = new CachingHiveMetastore(
            metastore, executor, expireAfterWriteSec, refreshAfterWriteSec, 1000, false);
    HiveMetastoreOperations hmsOps = new HiveMetastoreOperations(cachingHiveMetastore, true,
            new Configuration(), MetastoreType.HMS, "hive_catalog");

    HiveMetastoreOperations finalHmsOps = hmsOps;
    ExceptionChecker.expectThrowsWithMsg(MetaNotFoundException.class,
            "Failed to access database not_exist_db",
            () -> finalHmsOps.dropDb("not_exist_db", true));

    ExceptionChecker.expectThrowsWithMsg(MetaNotFoundException.class,
            "Database location is empty",
            () -> this.hmsOps.dropDb("db1", true));

    class MockedTestMetaClient1 extends HiveMetastoreTest.MockedHiveMetaClient {
        public org.apache.hadoop.hive.metastore.api.Database getDb(String dbName) throws RuntimeException {
            if (dbName.equals("db1")) {
                org.apache.hadoop.hive.metastore.api.Database database =
                        new org.apache.hadoop.hive.metastore.api.Database();
                database.setName("db1");
                database.setLocationUri("locationXXX");
                return database;
            }
            return null;
        }
    }
    metastore = new HiveMetastore(new MockedTestMetaClient1(), "hive_catalog", MetastoreType.HMS);
    executor = Executors.newFixedThreadPool(5);
    cachingHiveMetastore = new CachingHiveMetastore(
            metastore, executor, expireAfterWriteSec, refreshAfterWriteSec, 1000, false);
    hmsOps = new HiveMetastoreOperations(cachingHiveMetastore, true, new Configuration(),
            MetastoreType.HMS, "hive_catalog");
    hmsOps.dropDb("db1", false);
}
public FEELFnResult<Boolean> invoke(@ParameterName("list") List list) {
    if (list == null) {
        return FEELFnResult.ofResult(false);
    }
    boolean result = false;
    for (final Object element : list) {
        if (element != null && !(element instanceof Boolean)) {
            return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "an element in the list is not a Boolean"));
        } else {
            if (element != null) {
                result |= (Boolean) element;
            }
        }
    }
    return FEELFnResult.ofResult(result);
}

@Test
void invokeListParamEmptyList() {
    FunctionTestUtil.assertResult(anyFunction.invoke(Collections.emptyList()), false);
}

public static void main(String[] args) {
    var loadBalancer1 = new LoadBalancer();
    var loadBalancer2 = new LoadBalancer();
    loadBalancer1.serverRequest(new Request("Hello"));
    loadBalancer2.serverRequest(new Request("Hello World"));
}

@Test
void shouldExecuteApplicationWithoutException() {
    assertDoesNotThrow(() -> App.main(new String[]{}));
}
private String getMessage(AuthenticationException failed) {
    String message = "Server Error";
    if (failed instanceof UsernameNotFoundException) {
        message = "用户不存在"; // "user does not exist"
    } else if (failed instanceof BadCredentialsException) {
        message = "密码错误"; // "wrong password"
    }
    return message;
}
@Test
public void getMessageTest() {
    JWTAuthenticationFilter filter = new JWTAuthenticationFilter(null);
    Assertions.assertEquals("用户不存在",
            ReflectUtil.invoke(filter, "getMessage", new UsernameNotFoundException("")));
    Assertions.assertEquals("密码错误",
            ReflectUtil.invoke(filter, "getMessage", new BadCredentialsException("")));
}

@Override
public ICardinality merge(ICardinality... estimators) throws LogLogMergeException {
    if (estimators == null) {
        return new LogLog(M);
    }
    byte[] mergedBytes = Arrays.copyOf(this.M, this.M.length);
    for (ICardinality estimator : estimators) {
        if (!(this.getClass().isInstance(estimator))) {
            throw new LogLogMergeException("Cannot merge estimators of different class");
        }
        if (estimator.sizeof() != this.sizeof()) {
            throw new LogLogMergeException("Cannot merge estimators of different sizes");
        }
        LogLog ll = (LogLog) estimator;
        for (int i = 0; i < mergedBytes.length; ++i) {
            mergedBytes[i] = (byte) Math.max(mergedBytes[i], ll.M[i]);
        }
    }
    return new LogLog(mergedBytes);
}

@Test
public void testMerge() throws CardinalityMergeException {
    int numToMerge = 5;
    int bits = 16;
    int cardinality = 1000000;

    LogLog[] loglogs = new LogLog[numToMerge];
    LogLog baseline = new LogLog(bits);
    for (int i = 0; i < numToMerge; i++) {
        loglogs[i] = new LogLog(bits);
        for (int j = 0; j < cardinality; j++) {
            double val = Math.random();
            loglogs[i].offer(val);
            baseline.offer(val);
        }
    }

    LogLog hll = loglogs[0];
    loglogs = Arrays.asList(loglogs).subList(1, loglogs.length).toArray(new LogLog[0]);
    long mergedEstimate = hll.merge(loglogs).cardinality();
    long baselineEstimate = baseline.cardinality();

    System.out.println("Baseline estimate: " + baselineEstimate);
    assertEquals(mergedEstimate, baselineEstimate);
}

public String getRepositoryName() {
    return repositoryName;
}

@Test
public void getRepositoryName() {
    assertEquals( "", element1.getRepositoryName() );
    Element element2 = new Element( NAME, TYPE, PATH, LOCAL_PROVIDER, DUMMY_STRING );
    assertEquals( DUMMY_STRING, element2.getRepositoryName() );
}

@Override
public Collection<FileSourceSplit> enumerateSplits(Path[] paths, int minDesiredSplits) throws IOException {
    final ArrayList<FileSourceSplit> splits = new ArrayList<>();

    for (Path path : paths) {
        final FileSystem fs = path.getFileSystem();
        final FileStatus status = fs.getFileStatus(path);
        addSplitsForPath(status, fs, splits);
    }

    return splits;
}

@Test
void testFilesWithNoBlockInfo() throws Exception {
    final Path testPath = new Path("testfs:///dir/file1");
    testFs = TestingFileSystem.createForFileStatus(
            "testfs",
            TestingFileSystem.TestFileStatus.forFileWithBlocks(testPath, 12345L));
    testFs.register();

    final NonSplittingRecursiveEnumerator enumerator = createEnumerator();
    final Collection<FileSourceSplit> splits =
            enumerator.enumerateSplits(new Path[] {new Path("testfs:///dir")}, 0);

    assertThat(splits).hasSize(1);
    assertSplitsEqual(
            new FileSourceSplit("ignoredId", testPath, 0L, 12345L, 0, 12345L),
            splits.iterator().next());
}

@VisibleForTesting
protected void copyResourcesFromJar(JarFile inputJar) throws IOException {
    Enumeration<JarEntry> inputJarEntries = inputJar.entries();
    // The zip spec allows multiple files with the same name; the Java zip libraries do not.
    // Keep track of the files we've already written to filter out duplicates.
    // Also, ignore the old manifest; we want to write our own.
    Set<String> previousEntryNames = new HashSet<>(ImmutableList.of(JarFile.MANIFEST_NAME));
    while (inputJarEntries.hasMoreElements()) {
        JarEntry inputJarEntry = inputJarEntries.nextElement();
        InputStream inputStream = inputJar.getInputStream(inputJarEntry);
        String entryName = inputJarEntry.getName();
        if (previousEntryNames.contains(entryName)) {
            LOG.debug("Skipping duplicated file {}", entryName);
        } else {
            JarEntry outputJarEntry = new JarEntry(inputJarEntry);
            outputStream.putNextEntry(outputJarEntry);
            LOG.trace("Copying jar entry {}", inputJarEntry);
            IOUtils.copy(inputStream, outputStream);
            previousEntryNames.add(entryName);
        }
    }
}

@Test
public void testCopyResourcesFromJar_copiesResources() throws IOException {
    List<JarEntry> entries = ImmutableList.of(new JarEntry("foo"), new JarEntry("bar"), new JarEntry("baz"));
    when(inputJar.entries()).thenReturn(Collections.enumeration(entries));
    jarCreator.copyResourcesFromJar(inputJar);
    verify(outputStream, times(3)).putNextEntry(any());
}
@Override
public boolean authenticateHttpRequest(HttpServletRequest request, HttpServletResponse response) throws Exception {
    Boolean authenticated = applyAuthProcessor(
            providers,
            provider -> {
                try {
                    return provider.authenticateHttpRequest(request, response);
                } catch (Exception e) {
                    if (e instanceof AuthenticationException) {
                        throw (AuthenticationException) e;
                    } else {
                        throw new AuthenticationException("Failed to authenticate http request");
                    }
                }
            });
    return authenticated;
}
@Test
public void testAuthenticateHttpRequest() throws Exception {
    HttpServletRequest requestAA = mock(HttpServletRequest.class);
    when(requestAA.getRemoteAddr()).thenReturn("127.0.0.1");
    when(requestAA.getRemotePort()).thenReturn(8080);
    when(requestAA.getHeader("Authorization")).thenReturn("Bearer " + expiringTokenAA);
    boolean doFilterAA = authProvider.authenticateHttpRequest(requestAA, null);
    assertTrue(doFilterAA);
    verify(requestAA).setAttribute(eq(AuthenticatedRoleAttributeName), eq(SUBJECT_A));
    verify(requestAA).setAttribute(eq(AuthenticatedDataAttributeName), isA(AuthenticationDataSource.class));

    HttpServletRequest requestAB = mock(HttpServletRequest.class);
    when(requestAB.getRemoteAddr()).thenReturn("127.0.0.1");
    when(requestAB.getRemotePort()).thenReturn(8080);
    when(requestAB.getHeader("Authorization")).thenReturn("Bearer " + expiringTokenAB);
    boolean doFilterAB = authProvider.authenticateHttpRequest(requestAB, null);
    assertTrue(doFilterAB);
    verify(requestAB).setAttribute(eq(AuthenticatedRoleAttributeName), eq(SUBJECT_B));
    verify(requestAB).setAttribute(eq(AuthenticatedDataAttributeName), isA(AuthenticationDataSource.class));

    HttpServletRequest requestBA = mock(HttpServletRequest.class);
    when(requestBA.getRemoteAddr()).thenReturn("127.0.0.1");
    when(requestBA.getRemotePort()).thenReturn(8080);
    when(requestBA.getHeader("Authorization")).thenReturn("Bearer " + expiringTokenBA);
    boolean doFilterBA = authProvider.authenticateHttpRequest(requestBA, null);
    assertTrue(doFilterBA);
    verify(requestBA).setAttribute(eq(AuthenticatedRoleAttributeName), eq(SUBJECT_A));
    verify(requestBA).setAttribute(eq(AuthenticatedDataAttributeName), isA(AuthenticationDataSource.class));

    HttpServletRequest requestBB = mock(HttpServletRequest.class);
    when(requestBB.getRemoteAddr()).thenReturn("127.0.0.1");
    when(requestBB.getRemotePort()).thenReturn(8080);
    when(requestBB.getHeader("Authorization")).thenReturn("Bearer " + expiringTokenBB);
    boolean doFilterBB = authProvider.authenticateHttpRequest(requestBB, null);
    assertTrue(doFilterBB);
    verify(requestBB).setAttribute(eq(AuthenticatedRoleAttributeName), eq(SUBJECT_B));
    verify(requestBB).setAttribute(eq(AuthenticatedDataAttributeName), isA(AuthenticationDataSource.class));
}

public static void raftReadIndexFailed() {
    RAFT_READ_INDEX_FAILED.record(1);
}

@Test
void testRaftReadIndexFailed() {
    MetricsMonitor.raftReadIndexFailed();
    MetricsMonitor.raftReadIndexFailed();
    assertEquals(2D, MetricsMonitor.getRaftReadIndexFailed().totalAmount(), 0.01);
}

@Override
public Batch toBatch() {
    return new SparkBatch(
            sparkContext, table, readConf, groupingKeyType(), taskGroups(), expectedSchema, hashCode());
}

@Test
public void testUnpartitionedOr() throws Exception {
    createUnpartitionedTable(spark, tableName);

    SparkScanBuilder builder = scanBuilder();

    YearsFunction.TimestampToYearsFunction tsToYears = new YearsFunction.TimestampToYearsFunction();
    UserDefinedScalarFunc udf1 = toUDF(tsToYears, expressions(fieldRef("ts")));
    Predicate predicate1 = new Predicate("=", expressions(udf1, intLit(2017 - 1970)));

    BucketFunction.BucketLong bucketLong = new BucketFunction.BucketLong(DataTypes.LongType);
    UserDefinedScalarFunc udf = toUDF(bucketLong, expressions(intLit(5), fieldRef("id")));
    Predicate predicate2 = new Predicate(">=", expressions(udf, intLit(2)));
    Predicate predicate = new Or(predicate1, predicate2);
    pushFilters(builder, predicate);
    Batch scan = builder.build().toBatch();

    assertThat(scan.planInputPartitions().length).isEqualTo(10);

    // NOT (years(ts) = 47 OR bucket(id, 5) >= 2)
    builder = scanBuilder();

    predicate = new Not(predicate);
    pushFilters(builder, predicate);
    scan = builder.build().toBatch();

    assertThat(scan.planInputPartitions().length).isEqualTo(10);
}

@Override
public BuiltIndex<NewAuthorizedIndex> build() {
    checkState(!getRelations().isEmpty(), "At least one relation mapping must be defined");
    return new BuiltIndex<>(this);
}

@Test
public void build_fails_if_no_relation_mapping_has_been_created() {
    NewAuthorizedIndex underTest = new NewAuthorizedIndex(someIndex, defaultSettingsConfiguration);

    assertThatThrownBy(() -> underTest.build())
            .isInstanceOf(IllegalStateException.class)
            .hasMessage("At least one relation mapping must be defined");
}

public boolean isAlwaysFalse() {
    if (conditions.isEmpty()) {
        return false;
    }
    for (ShardingCondition each : conditions) {
        if (!(each instanceof AlwaysFalseShardingCondition)) {
            return false;
        }
    }
    return true;
}

@Test
void assertIsAlwaysFalseTrue() {
    ShardingConditions shardingConditions = createSingleShardingConditions();
    assertTrue(shardingConditions.isAlwaysFalse());
}

public RequestAndSize parseRequest(ByteBuffer buffer) {
    if (isUnsupportedApiVersionsRequest()) {
        // Unsupported ApiVersion requests are treated as v0 requests and are not parsed
        ApiVersionsRequest apiVersionsRequest = new ApiVersionsRequest(new ApiVersionsRequestData(), (short) 0,
                header.apiVersion());
        return new RequestAndSize(apiVersionsRequest, 0);
    } else {
        ApiKeys apiKey = header.apiKey();
        try {
            short apiVersion = header.apiVersion();
            return AbstractRequest.parseRequest(apiKey, apiVersion, buffer);
        } catch (Throwable ex) {
            throw new InvalidRequestException("Error getting request for apiKey: " + apiKey
                    + ", apiVersion: " + header.apiVersion()
                    + ", connectionId: " + connectionId
                    + ", listenerName: " + listenerName
                    + ", principal: " + principal, ex);
        }
    }
}

@Test
public void testInvalidRequestForByteArray() throws UnknownHostException {
    short version = (short) 1; // choose a version with fixed length encoding, for simplicity
    ByteBuffer corruptBuffer = serialize(version, new SaslAuthenticateRequestData().setAuthBytes(new byte[0]));
    // corrupt the length of the bytes array
    corruptBuffer.putInt(0, Integer.MAX_VALUE);

    RequestHeader header = new RequestHeader(ApiKeys.SASL_AUTHENTICATE, version, "console-producer", 1);
    RequestContext context = new RequestContext(header, "0", InetAddress.getLocalHost(),
            KafkaPrincipal.ANONYMOUS, new ListenerName("ssl"), SecurityProtocol.SASL_SSL,
            ClientInformation.EMPTY, true);
    String msg = assertThrows(InvalidRequestException.class,
            () -> context.parseRequest(corruptBuffer)).getCause().getMessage();
    assertEquals("Error reading byte array of 2147483647 byte(s): only 0 byte(s) available", msg);
}
public void checkUnderWritableMountPoint(AlluxioURI alluxioUri)
        throws InvalidPathException, AccessControlException {
    try (LockResource r = new LockResource(mReadLock)) {
        // This will re-acquire the read lock, but that is allowed.
        String mountPoint = getMountPoint(alluxioUri);
        MountInfo mountInfo = mState.getMountTable().get(mountPoint);
        if (mountInfo.getOptions().getReadOnly()) {
            throw new AccessControlException(ExceptionMessage.MOUNT_READONLY, alluxioUri, mountPoint);
        }
    }
}

@Test
public void writableMount() throws Exception {
    String mountPath = "/mnt/foo";
    AlluxioURI alluxioUri = new AlluxioURI("alluxio://localhost:1234" + mountPath);
    addMount(alluxioUri.toString(), "hdfs://localhost:5678/foo", IdUtils.INVALID_MOUNT_ID);
    try {
        mMountTable.checkUnderWritableMountPoint(alluxioUri);
    } catch (AccessControlException e) {
        Assert.fail("Default mount point should be writable.");
    }
    try {
        String path = mountPath + "/sub/directory";
        alluxioUri = new AlluxioURI("alluxio://localhost:1234" + path);
        mMountTable.checkUnderWritableMountPoint(alluxioUri);
    } catch (AccessControlException e) {
        Assert.fail("Default mount point should be writable.");
    }
}

public static List<CredentialProvider> getProviders(Configuration conf) throws IOException {
    List<CredentialProvider> result = new ArrayList<>();
    for (String path : conf.getStringCollection(CREDENTIAL_PROVIDER_PATH)) {
        try {
            URI uri = new URI(path);
            boolean found = false;
            // Iterate serviceLoader in a synchronized block since
            // serviceLoader iterator is not thread-safe.
            synchronized (serviceLoader) {
                try {
                    if (SERVICE_LOADER_LOCKED.getAndSet(true)) {
                        throw new PathIOException(path,
                                "Recursive load of credential provider; "
                                        + "if loading a JCEKS file, this means that the filesystem connector is "
                                        + "trying to load the same file");
                    }
                    for (CredentialProviderFactory factory : serviceLoader) {
                        CredentialProvider kp = factory.createProvider(uri, conf);
                        if (kp != null) {
                            result.add(kp);
                            found = true;
                            break;
                        }
                    }
                } finally {
                    SERVICE_LOADER_LOCKED.set(false);
                }
            }
            if (!found) {
                throw new IOException("No CredentialProviderFactory for " + uri + " in " + CREDENTIAL_PROVIDER_PATH);
            }
        } catch (URISyntaxException error) {
            throw new IOException("Bad configuration of " + CREDENTIAL_PROVIDER_PATH + " at " + path, error);
        }
    }
    return result;
}

@Test
public void testUriErrors() throws Exception {
    Configuration conf = new Configuration();
    conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, "unkn@own:/x/y");
    try {
        List<CredentialProvider> providers = CredentialProviderFactory.getProviders(conf);
        assertTrue("should throw!", false);
    } catch (IOException e) {
        assertEquals("Bad configuration of " + CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH
                + " at unkn@own:/x/y", e.getMessage());
    }
}

public int doWork() {
    final long nowNs = nanoClock.nanoTime();
    trackTime(nowNs);

    int workCount = 0;
    workCount += processTimers(nowNs);
    if (!asyncClientCommandInFlight) {
        workCount += clientCommandAdapter.receive();
    }
    workCount += drainCommandQueue();
    workCount += trackStreamPositions(workCount, nowNs);
    workCount += nameResolver.doWork(cachedEpochClock.time());
    workCount += freeEndOfLifeResources(ctx.resourceFreeLimit());

    return workCount;
}

@Test
void shouldErrorOnRemoveSubscriptionOnUnknownRegistrationId() {
    final long id1 = driverProxy.addSubscription(CHANNEL_4000, STREAM_ID_1);
    driverProxy.removeSubscription(id1 + 100);

    driverConductor.doWork();
    driverConductor.doWork();

    final InOrder inOrder = inOrder(receiverProxy, mockClientProxy);
    inOrder.verify(receiverProxy).addSubscription(any(), anyInt());
    inOrder.verify(mockClientProxy).onSubscriptionReady(eq(id1), anyInt());
    inOrder.verify(mockClientProxy).onError(anyLong(), eq(UNKNOWN_SUBSCRIPTION), anyString());
    inOrder.verifyNoMoreInteractions();
    verify(mockErrorHandler).onError(any(Throwable.class));
}

public final void setStrictness(Strictness strictness) {
    Objects.requireNonNull(strictness);
    this.strictness = strictness;
}

@Test
public void testSetStrictness() {
    JsonReader reader = new JsonReader(reader("{}"));
    reader.setStrictness(Strictness.STRICT);
    assertThat(reader.getStrictness()).isEqualTo(Strictness.STRICT);
}
public static Map<Integer, Map<RowExpression, VariableReferenceExpression>> collectCSEByLevel(List<? extends RowExpression> expressions) {
    if (expressions.isEmpty()) {
        return ImmutableMap.of();
    }
    CommonSubExpressionCollector expressionCollector = new CommonSubExpressionCollector();
    expressions.forEach(expression -> expression.accept(expressionCollector, null));
    if (expressionCollector.cseByLevel.isEmpty()) {
        return ImmutableMap.of();
    }
    Map<Integer, Map<RowExpression, Integer>> cseByLevel = removeRedundantCSE(expressionCollector.cseByLevel, expressionCollector.expressionCount);
    VariableAllocator variableAllocator = new VariableAllocator();
    ImmutableMap.Builder<Integer, Map<RowExpression, VariableReferenceExpression>> commonSubExpressions = ImmutableMap.builder();
    Map<RowExpression, VariableReferenceExpression> rewriteWith = new HashMap<>();
    int startCSELevel = cseByLevel.keySet().stream().reduce(Math::min).get();
    int maxCSELevel = cseByLevel.keySet().stream().reduce(Math::max).get();
    for (int i = startCSELevel; i <= maxCSELevel; i++) {
        if (cseByLevel.containsKey(i)) {
            ExpressionRewriter rewriter = new ExpressionRewriter(rewriteWith);
            ImmutableMap.Builder<RowExpression, VariableReferenceExpression> expressionVariableMapBuilder = ImmutableMap.builder();
            for (Map.Entry<RowExpression, Integer> entry : cseByLevel.get(i).entrySet()) {
                RowExpression rewrittenExpression = entry.getKey().accept(rewriter, null);
                expressionVariableMapBuilder.put(rewrittenExpression, variableAllocator.newVariable(rewrittenExpression, "cse"));
            }
            Map<RowExpression, VariableReferenceExpression> expressionVariableMap = expressionVariableMapBuilder.build();
            commonSubExpressions.put(i, expressionVariableMap);
            rewriteWith.putAll(expressionVariableMap.entrySet().stream().collect(toImmutableMap(Map.Entry::getKey, entry -> entry.getValue())));
        }
    }
    return commonSubExpressions.build();
}

@Test
void testCollectCSEByLevel() {
    List<RowExpression> expressions = ImmutableList.of(
            rowExpression("x * 2 + y + z"),
            rowExpression("(x * 2 + y + 1) * 2"),
            rowExpression("(x * 2) + (x * 2 + y + z)"));
    Map<Integer, Map<RowExpression, VariableReferenceExpression>> cseByLevel = collectCSEByLevel(expressions);
    assertEquals(cseByLevel, ImmutableMap.of(
            3, ImmutableMap.of(rowExpression("\"add$cse\" + z"), rowExpression("\"add$cse_0\"")),
            2, ImmutableMap.of(rowExpression("\"multiply$cse\" + y"), rowExpression("\"add$cse\"")),
            1, ImmutableMap.of(rowExpression("x * 2"), rowExpression("\"multiply$cse\""))));
}

@Override
public String getName() {
    return "Dart Package Analyzer";
}

@Test
public void testDartAnalyzerGetName() {
    assertThat(dartAnalyzer.getName(), is("Dart Package Analyzer"));
}

public static boolean isSystemGroup(String group) {
    if (StringUtils.isBlank(group)) {
        return false;
    }
    String groupInLowerCase = group.toLowerCase();
    for (String prefix : SYSTEM_GROUP_PREFIX_LIST) {
        if (groupInLowerCase.startsWith(prefix)) {
            return true;
        }
    }
    return false;
}

@Test
public void testIsSystemGroup_EmptyGroup_ReturnsFalse() {
    String group = "";
    boolean result = BrokerMetricsManager.isSystemGroup(group);
    assertThat(result).isFalse();
}

public String encode(String name, String value) {
    return encode(new DefaultCookie(name, value));
}

@Test
public void illegalCharInCookieValueMakesStrictEncoderThrowsException() {
    Set<Character> illegalChars = new HashSet<Character>();
    // CTLs
    for (int i = 0x00; i <= 0x1F; i++) {
        illegalChars.add((char) i);
    }
    illegalChars.add((char) 0x7F);
    // whitespace, DQUOTE, comma, semicolon, and backslash
    for (char c : new char[] { ' ', '"', ',', ';', '\\' }) {
        illegalChars.add(c);
    }

    int exceptions = 0;
    for (char c : illegalChars) {
        try {
            ServerCookieEncoder.STRICT.encode(new DefaultCookie("name", "value" + c));
        } catch (IllegalArgumentException e) {
            exceptions++;
        }
    }

    assertEquals(illegalChars.size(), exceptions);
}

public static Set<Result> anaylze(String log) {
    Set<Result> results = new HashSet<>();
    for (Rule rule : Rule.values()) {
        Matcher matcher = rule.pattern.matcher(log);
        if (matcher.find()) {
            results.add(new Result(rule, log, matcher));
        }
    }
    return results;
}

@Test
public void outOfMemoryJVM() throws IOException {
    CrashReportAnalyzer.Result result = findResultByRule(
            CrashReportAnalyzer.anaylze(loadLog("/logs/out_of_memory.txt")),
            CrashReportAnalyzer.Rule.OUT_OF_MEMORY);
}