Columns: focal_method — string (13 to 60.9k chars); test_case — string (25 to 109k chars)
public static L4ModificationInstruction modTcpSrc(TpPort port) { checkNotNull(port, "Src TCP port cannot be null"); return new ModTransportPortInstruction(L4SubType.TCP_SRC, port); }
@Test public void testModTcpSrcMethod() { final Instruction instruction = Instructions.modTcpSrc(tpPort1); final L4ModificationInstruction.ModTransportPortInstruction modTransportPortInstruction = checkAndConvert(instruction, Instruction.Type.L4MODIFICATION, L4ModificationInstruction.ModTransportPortInstruction.class); assertThat(modTransportPortInstruction.port(), is(equalTo(tpPort1))); assertThat(modTransportPortInstruction.subtype(), is(equalTo(L4ModificationInstruction.L4SubType.TCP_SRC))); }
public Timer add(long interval, TimerHandler handler, Object... args) { if (handler == null) { return null; } return new Timer(timer.add(interval, handler, args)); }
@Test public void testAddFaultyHandler() { Timer timer = timers.add(10, null); assertThat(timer, nullValue()); }
public <T> void resolve(T resolvable) { ParamResolver resolver = this; if (ParamScope.class.isAssignableFrom(resolvable.getClass())) { ParamScope newScope = (ParamScope) resolvable; resolver = newScope.applyOver(resolver); } resolveStringLeaves(resolvable, resolver); resolveNonStringLeaves(resolvable, resolver); resolveNodes(resolvable, resolver); }
@Test public void shouldProvideContextWhenAnExceptionOccurs() { PipelineConfig pipelineConfig = PipelineConfigMother.createPipelineConfig("cruise", "dev", "ant"); pipelineConfig.setLabelTemplate("#a"); new ParamResolver(new ParamSubstitutionHandlerFactory(params(param("foo", "pavan"), param("bar", "jj"))), fieldCache).resolve(pipelineConfig); assertThat(pipelineConfig.errors().on("labelTemplate"), is("Error when processing params for '#a' used in field 'labelTemplate', # must be followed by a parameter pattern or escaped by another #")); }
@Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; // This handles a tombstone message if (value == null) { return SchemaAndValue.NULL; } try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (config.schemasEnabled() && (!jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME) || !jsonValue.has(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); // The deserialized data should either be an envelope object containing the schema and the payload or the schema // was stripped during serialization and we need to fill in an all-encompassing schema. if (!config.schemasEnabled()) { ObjectNode envelope = JSON_NODE_FACTORY.objectNode(); envelope.set(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME, null); envelope.set(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME, jsonValue); jsonValue = envelope; } Schema schema = asConnectSchema(jsonValue.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME)); return new SchemaAndValue( schema, convertToConnect(schema, jsonValue.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME), config) ); }
@Test public void decimalToConnect() { Schema schema = Decimal.schema(2); BigDecimal reference = new BigDecimal(new BigInteger("156"), 2); // Payload is base64 encoded byte[]{0, -100}, which is the two's complement encoding of 156. String msg = "{ \"schema\": { \"type\": \"bytes\", \"name\": \"org.apache.kafka.connect.data.Decimal\", \"version\": 1, \"parameters\": { \"scale\": \"2\" } }, \"payload\": \"AJw=\" }"; SchemaAndValue schemaAndValue = converter.toConnectData(TOPIC, msg.getBytes()); BigDecimal converted = (BigDecimal) schemaAndValue.value(); assertEquals(schema, schemaAndValue.schema()); assertEquals(reference, converted); }
public void isNoneOf( @Nullable Object first, @Nullable Object second, @Nullable Object @Nullable ... rest) { isNotIn(accumulate(first, second, rest)); }
@Test public void isNoneOfNull() { assertThat((String) null).isNoneOf("a", "b", "c"); }
public Optional<TransactionReceipt> getTransactionReceipt() { return Optional.ofNullable(transactionReceipt); }
@Test public void testTransactionFailedWithRevertReason() throws Exception { TransactionReceipt transactionReceipt = createFailedTransactionReceipt(); prepareCall(OWNER_REVERT_MSG_HASH); TransactionException thrown = assertThrows( TransactionException.class, () -> { prepareTransaction(transactionReceipt); contract.performTransaction( new Address(BigInteger.TEN), new Uint256(BigInteger.ONE)) .send(); }); assertEquals( String.format( "Transaction %s has failed with status: %s. Gas used: 1. Revert reason: '%s'.", TRANSACTION_HASH, TXN_FAIL_STATUS, OWNER_REVERT_MSG_STR), thrown.getMessage()); assertEquals(transactionReceipt, thrown.getTransactionReceipt().get()); }
public static FieldScope ignoringFieldDescriptors( FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) { return FieldScopeImpl.createIgnoringFieldDescriptors(asList(firstFieldDescriptor, rest)); }
@Test public void testIgnoreFieldsAtDifferentLevels() { // Ignore all 'o_int' fields, in different ways. Message message = parse( "o_int: 1 r_string: \"foo\" o_sub_test_message: { o_int: 2 " + "o_sub_sub_test_message: { o_int: 3 r_string: \"bar\" } }"); // Even though o_int is ignored, message presence is not. So these all fail. Message diffMessage1 = parse("r_string: \"baz\""); Message diffMessage2 = parse("r_string: \"foo\""); Message diffMessage3 = parse("r_string: \"foo\" o_sub_test_message: {}"); Message diffMessage4 = parse("r_string: \"foo\" o_sub_test_message: { o_sub_sub_test_message: {} }"); // All of these messages are equivalent, because all o_int are ignored. Message eqMessage1 = parse( "o_int: 111 r_string: \"foo\" o_sub_test_message: { o_int: 222 " + "o_sub_sub_test_message: { o_int: 333 r_string: \"bar\" } }"); Message eqMessage2 = parse( "o_int: 1 r_string: \"foo\" o_sub_test_message: { o_int: 2 " + "o_sub_sub_test_message: { o_int: 3 r_string: \"bar\" } }"); Message eqMessage3 = parse( "r_string: \"foo\" o_sub_test_message: { " + "o_sub_sub_test_message: { r_string: \"bar\" } }"); Message eqMessage4 = parse( "o_int: 333 r_string: \"foo\" o_sub_test_message: { o_int: 111 " + "o_sub_sub_test_message: { o_int: 222 r_string: \"bar\" } }"); FieldDescriptor top = getFieldDescriptor("o_int"); FieldDescriptor middle = getFieldDescriptor("o_sub_test_message").getMessageType().findFieldByName("o_int"); FieldDescriptor bottom = getFieldDescriptor("o_sub_test_message") .getMessageType() .findFieldByName("o_sub_sub_test_message") .getMessageType() .findFieldByName("o_int"); ImmutableMap<String, FieldScope> fieldScopes = ImmutableMap.of( "BASIC", FieldScopes.ignoringFieldDescriptors(top, middle, bottom), "CHAINED", FieldScopes.ignoringFieldDescriptors(top) .ignoringFieldDescriptors(middle) .ignoringFieldDescriptors(bottom), "REPEATED", FieldScopes.ignoringFieldDescriptors(top, middle) .ignoringFieldDescriptors(middle, bottom)); for (String scopeName : fieldScopes.keySet()) { String msg = "FieldScope(" + scopeName + ")"; FieldScope scope = fieldScopes.get(scopeName); expectThatWithMessage(msg, diffMessage1).withPartialScope(scope).isNotEqualTo(message); expectThatWithMessage(msg, diffMessage2).withPartialScope(scope).isNotEqualTo(message); expectThatWithMessage(msg, diffMessage3).withPartialScope(scope).isNotEqualTo(message); expectThatWithMessage(msg, diffMessage4).withPartialScope(scope).isNotEqualTo(message); expectThatWithMessage(msg, eqMessage1).withPartialScope(scope).isEqualTo(message); expectThatWithMessage(msg, eqMessage2).withPartialScope(scope).isEqualTo(message); expectThatWithMessage(msg, eqMessage3).withPartialScope(scope).isEqualTo(message); expectThatWithMessage(msg, eqMessage4).withPartialScope(scope).isEqualTo(message); } }
@Override public OrganizedImports organizeImports(List<Import> imports) { OrganizedImports organized = new OrganizedImports(); // Group into static and non-static. Map<Boolean, List<Import>> partitionedByStatic = imports.stream().collect(Collectors.partitioningBy(Import::isStatic)); for (Boolean key : order.groupOrder()) { organizePartition(organized, partitionedByStatic.get(key)); } return organized; }
@Test public void staticFirstOrdering() { AndroidImportOrganizer organizer = new AndroidImportOrganizer(StaticOrder.STATIC_FIRST); ImportOrganizer.OrganizedImports organized = organizer.organizeImports(IMPORTS); assertThat(organized.asImportBlock()) .isEqualTo( "import static android.foo.bar;\n" + "\n" + "import static com.android.blah.blah;\n" + "\n" + "import static net.wilma.flintstone;\n" + "\n" + "import static unknown.fred.flintstone;\n" + "\n" + "import static java.ping.pong;\n" + "\n" + "import static javax.pong.ping;\n" + "\n" + "import android.foo;\n" + "\n" + "import com.android.blah;\n" + "\n" + "import net.wilma;\n" + "\n" + "import unknown.barney;\n" + "import unknown.fred;\n" + "\n" + "import java.ping;\n" + "\n" + "import javax.pong;\n"); }
@Override public boolean add(PipelineConfig pipelineConfig) { verifyUniqueName(pipelineConfig); PipelineConfigs part = this.getFirstEditablePartOrNull(); if (part == null) throw bomb("No editable configuration sources"); return part.add(pipelineConfig); }
@Test public void shouldAddPipelineAtIndex_WhenWouldLandInEditablePart() { PipelineConfig pipeline0 = PipelineConfigMother.pipelineConfig("pipeline0"); PipelineConfig pipeline1 = PipelineConfigMother.pipelineConfig("pipeline1"); PipelineConfig pipeline3 = PipelineConfigMother.pipelineConfig("pipeline3"); PipelineConfig pipeline5 = PipelineConfigMother.pipelineConfig("pipeline5"); PipelineConfig pipeline2 = PipelineConfigMother.pipelineConfig("pipeline2"); PipelineConfig pipeline4 = PipelineConfigMother.pipelineConfig("pipeline4"); BasicPipelineConfigs pipelineConfigsMiddle = new BasicPipelineConfigs(pipeline3); pipelineConfigsMiddle.setOrigin(new FileConfigOrigin()); BasicPipelineConfigs bottom = new BasicPipelineConfigs(pipeline0, pipeline1, pipeline2); BasicPipelineConfigs top = new BasicPipelineConfigs(pipeline4, pipeline5); bottom.setOrigin(new RepoConfigOrigin()); top.setOrigin(new RepoConfigOrigin()); PipelineConfigs group = new MergePipelineConfigs( bottom, pipelineConfigsMiddle, top); PipelineConfig p1 = PipelineConfigMother.pipelineConfig("pipelineToInsert"); group.add(3, p1); assertThat(group, hasItem(p1)); assertThat(pipelineConfigsMiddle, hasItem(p1)); }
@Override protected LinkedHashMap<String, Callable<? extends ChannelHandler>> getChildChannelHandlers(MessageInput input) { final LinkedHashMap<String, Callable<? extends ChannelHandler>> handlers = new LinkedHashMap<>(); final CodecAggregator aggregator = getAggregator(); handlers.put("channel-registration", () -> new ChannelRegistrationHandler(childChannels)); handlers.put("traffic-counter", () -> throughputCounter); handlers.put("connection-counter", () -> connectionCounter); if (tlsEnable) { LOG.info("Enabled TLS for input {}. key-file=\"{}\" cert-file=\"{}\"", input.toIdentifier(), tlsKeyFile, tlsCertFile); handlers.put("tls", getSslHandlerCallable(input)); } handlers.putAll(getCustomChildChannelHandlers(input)); if (aggregator != null) { LOG.debug("Adding codec aggregator {} to channel pipeline", aggregator); handlers.put("codec-aggregator", () -> new ByteBufMessageAggregationHandler(aggregator, localRegistry)); } handlers.put("rawmessage-handler", () -> new RawMessageHandler(input)); handlers.put("exception-logger", () -> new ExceptionLoggingChannelHandler(input, LOG, this.tcpKeepalive)); return handlers; }
@Test public void getChildChannelHandlersFailsIfTempDirIsNotWritable() throws IOException { final File tmpDir = temporaryFolder.newFolder(); assumeTrue(tmpDir.setWritable(false)); assumeFalse(tmpDir.canWrite()); System.setProperty("java.io.tmpdir", tmpDir.getAbsolutePath()); final Configuration configuration = new Configuration(ImmutableMap.of( "bind_address", "localhost", "port", 12345, "tls_enable", true) ); final AbstractTcpTransport transport = new AbstractTcpTransport( configuration, throughputCounter, localRegistry, eventLoopGroup, eventLoopGroupFactory, nettyTransportConfiguration, tlsConfiguration) {}; expectedException.expect(IllegalStateException.class); expectedException.expectMessage("Couldn't write to temporary directory: " + tmpDir.getAbsolutePath()); transport.getChildChannelHandlers(input); }
@Override public Map<String, String> generationCodes(Long tableId) { // Validate that the table exists CodegenTableDO table = codegenTableMapper.selectById(tableId); if (table == null) { throw exception(CODEGEN_TABLE_NOT_EXISTS); } List<CodegenColumnDO> columns = codegenColumnMapper.selectListByTableId(tableId); if (CollUtil.isEmpty(columns)) { throw exception(CODEGEN_COLUMN_NOT_EXISTS); } // If this is a master-sub table, load the corresponding sub-table information List<CodegenTableDO> subTables = null; List<List<CodegenColumnDO>> subColumnsList = null; if (CodegenTemplateTypeEnum.isMaster(table.getTemplateType())) { // Validate that sub-tables exist subTables = codegenTableMapper.selectListByTemplateTypeAndMasterTableId( CodegenTemplateTypeEnum.SUB.getType(), tableId); if (CollUtil.isEmpty(subTables)) { throw exception(CODEGEN_MASTER_GENERATION_FAIL_NO_SUB_TABLE); } // Validate that each sub-table's join column exists subColumnsList = new ArrayList<>(); for (CodegenTableDO subTable : subTables) { List<CodegenColumnDO> subColumns = codegenColumnMapper.selectListByTableId(subTable.getId()); if (CollUtil.findOne(subColumns, column -> column.getId().equals(subTable.getSubJoinColumnId())) == null) { throw exception(CODEGEN_SUB_COLUMN_NOT_EXISTS, subTable.getId()); } subColumnsList.add(subColumns); } } // Run code generation return codegenEngine.execute(table, columns, subTables, subColumnsList); }
@Test public void testGenerationCodes_tableNotExists() { assertServiceException(() -> codegenService.generationCodes(randomLongId()), CODEGEN_TABLE_NOT_EXISTS); }
@Override public Set<String> getConfigNames() { return SOURCE.keySet(); }
@Test public void getConfigNames() { final Set<String> configNames = source.getConfigNames(); Assert.assertFalse(configNames.isEmpty()); }
public static Properties updateSplitSchema(Properties splitSchema, List<HiveColumnHandle> columns) { requireNonNull(splitSchema, "splitSchema is null"); requireNonNull(columns, "columns is null"); // clone split properties for update so as not to affect the original one Properties updatedSchema = new Properties(); updatedSchema.putAll(splitSchema); updatedSchema.setProperty(LIST_COLUMNS, buildColumns(columns)); updatedSchema.setProperty(LIST_COLUMN_TYPES, buildColumnTypes(columns)); ThriftTable thriftTable = parseThriftDdl(splitSchema.getProperty(SERIALIZATION_DDL)); updatedSchema.setProperty(SERIALIZATION_DDL, thriftTableToDdl(pruneThriftTable(thriftTable, columns))); return updatedSchema; }
@Test(expectedExceptions = NullPointerException.class) public void shouldThrowNullPointerExceptionWhenSchemaIsNull() { updateSplitSchema(null, ImmutableList.of()); }
public Optional<Measure> toMeasure(@Nullable MeasureDto measureDto, Metric metric) { requireNonNull(metric); if (measureDto == null) { return Optional.empty(); } Double value = measureDto.getValue(); String data = measureDto.getData(); switch (metric.getType().getValueType()) { case INT: return toIntegerMeasure(measureDto, value, data); case LONG: return toLongMeasure(measureDto, value, data); case DOUBLE: return toDoubleMeasure(measureDto, value, data); case BOOLEAN: return toBooleanMeasure(measureDto, value, data); case STRING: return toStringMeasure(measureDto, data); case LEVEL: return toLevelMeasure(measureDto, data); case NO_VALUE: return toNoValueMeasure(measureDto); default: throw new IllegalArgumentException("Unsupported Measure.ValueType " + metric.getType().getValueType()); } }
@Test public void toMeasure_returns_no_value_if_dto_has_no_value_for_Long_Metric() { Optional<Measure> measure = underTest.toMeasure(EMPTY_MEASURE_DTO, SOME_LONG_METRIC); assertThat(measure).isPresent(); assertThat(measure.get().getValueType()).isEqualTo(Measure.ValueType.NO_VALUE); }
@Override public Mono<byte[]> readPublicKey() { return Mono.just(keyPair) .map(KeyPair::getPublic) .map(PublicKey::getEncoded); }
@Test void shouldReadPublicKey() throws IOException { var realPubKeyBytes = Files.readAllBytes(tempDir.resolve("pat_id_rsa.pub")); StepVerifier.create(service.readPublicKey()) .assertNext(bytes -> assertArrayEquals(realPubKeyBytes, bytes)) .verifyComplete(); }
public boolean canViewAndEditTemplate(CaseInsensitiveString username, List<Role> roles) { for (PipelineTemplateConfig templateConfig : this) { if (canUserEditTemplate(templateConfig, username, roles)) { return true; } } return false; }
@Test public void shouldReturnTrueIfUserCanViewAndEditAtLeastOneTemplate() throws Exception { CaseInsensitiveString templateAdmin = new CaseInsensitiveString("template-admin"); TemplatesConfig templates = configForUserWhoCanViewATemplate(); templates.add(PipelineTemplateConfigMother.createTemplate("template200", new Authorization(new AdminsConfig(new AdminUser(templateAdmin))), StageConfigMother.manualStage("stage-name"))); assertThat(templates.canViewAndEditTemplate(templateAdmin, null), is(true)); }
@Override public synchronized DefaultConnectClient get( final Optional<String> ksqlAuthHeader, final List<Entry<String, String>> incomingRequestHeaders, final Optional<KsqlPrincipal> userPrincipal ) { if (defaultConnectAuthHeader == null) { defaultConnectAuthHeader = buildDefaultAuthHeader(); } final Map<String, Object> configWithPrefixOverrides = ksqlConfig.valuesWithPrefixOverride(KsqlConfig.KSQL_CONNECT_PREFIX); return new DefaultConnectClient( ksqlConfig.getString(KsqlConfig.CONNECT_URL_PROPERTY), buildAuthHeader(ksqlAuthHeader, incomingRequestHeaders), requestHeadersExtension .map(extension -> extension.getHeaders(userPrincipal)) .orElse(Collections.emptyMap()), Optional.ofNullable(newSslContext(configWithPrefixOverrides)), shouldVerifySslHostname(configWithPrefixOverrides), ksqlConfig.getLong(KsqlConfig.CONNECT_REQUEST_TIMEOUT_MS) ); }
@Test public void shouldBuildWithoutAuthHeader() { // When: final DefaultConnectClient connectClient = connectClientFactory.get(Optional.empty(), Collections.emptyList(), Optional.empty()); // Then: assertThat(connectClient.getRequestHeaders(), is(EMPTY_HEADERS)); }
@Override public Object getDateValue(final ResultSet resultSet, final int columnIndex) throws SQLException { return resultSet.getDate(columnIndex); }
@Test void assertGetDateValue() throws SQLException { when(resultSet.getDate(1)).thenReturn(new Date(0L)); assertThat(dialectResultSetMapper.getDateValue(resultSet, 1), is(new Date(0L))); }
public FilterAggregationBuilder buildTopAggregation(String topAggregationName, TopAggregationDefinition<?> topAggregation, Consumer<BoolQueryBuilder> extraFilters, Consumer<FilterAggregationBuilder> subAggregations) { BoolQueryBuilder filter = filterComputer.getTopAggregationFilter(topAggregation) .orElseGet(QueryBuilders::boolQuery); // optionally add extra filter(s) extraFilters.accept(filter); FilterAggregationBuilder res = AggregationBuilders.filter(topAggregationName, filter); subAggregations.accept(res); checkState( !res.getSubAggregations().isEmpty(), "no sub-aggregation has been added to top-aggregation %s", topAggregationName); return res; }
@Test public void buildTopAggregation_adds_subAggregation_from_lambda_parameter() { SimpleFieldTopAggregationDefinition topAggregation = new SimpleFieldTopAggregationDefinition("bar", false); AggregationBuilder[] subAggs = IntStream.range(0, 1 + new Random().nextInt(12)) .mapToObj(i -> AggregationBuilders.min("subAgg_" + i)) .toArray(AggregationBuilder[]::new); String topAggregationName = randomAlphabetic(10); AggregationBuilder aggregationBuilder = underTest.buildTopAggregation(topAggregationName, topAggregation, NO_EXTRA_FILTER, t -> Arrays.stream(subAggs).forEach(t::subAggregation)); assertThat(aggregationBuilder.getName()).isEqualTo(topAggregationName); assertThat(aggregationBuilder.getSubAggregations()).hasSize(subAggs.length); assertThat(aggregationBuilder.getSubAggregations()).containsExactlyInAnyOrder(subAggs); }
boolean isCollection( BeanInjectionInfo.Property property ) { if ( property == null ) { // not sure if this is necessary return false; } BeanLevelInfo beanLevelInfo = getFinalPath( property ); return ( beanLevelInfo != null ) && isCollection( beanLevelInfo ); }
@Test public void isCollection_BeanLevelInfo() { BeanInjector bi = new BeanInjector( null ); BeanLevelInfo bli_list = new BeanLevelInfo(); bli_list.dim = BeanLevelInfo.DIMENSION.LIST; assertTrue( bi.isCollection( bli_list )); BeanLevelInfo bli_array = new BeanLevelInfo(); bli_array.dim = BeanLevelInfo.DIMENSION.ARRAY; assertTrue( bi.isCollection( bli_array )); BeanLevelInfo bli_none = new BeanLevelInfo(); bli_none.dim = BeanLevelInfo.DIMENSION.NONE; assertFalse( bi.isCollection( bli_none )); }
public void callTrack(JSONObject eventObject) { JSONObject jsonObject = new JSONObject(); try { jsonObject.put("eventJSON", eventObject); if ("$AppStart".equals(eventObject.optString("event"))) { if (mFunctionListener == null) { cacheData = jsonObject; new Handler(Looper.getMainLooper()).postDelayed(new Runnable() { @Override public void run() { cacheData = null; } }, 2 * 1000); return; } } call("trackEvent",jsonObject); } catch (JSONException e) { SALog.printStackTrace(e); } }
@Test public void callTrack() { SensorsDataAPI sensorsDataAPI = SAHelper.initSensors(mApplication); sensorsDataAPI.addFunctionListener(new SAFunctionListener() { @Override public void call(String function, JSONObject args) { Assert.assertEquals("trackEvent", function); } }); sensorsDataAPI.track("AppTest"); }
public void sendMessage(M message, MessageHeaders headers) { this.sendMessage(responseTopic, message, headers); }
@Test public void testMarshallingVerstrekkingAanAfnemer() throws IOException { VerstrekkingAanAfnemer verstrekkingAanAfnemer = new VerstrekkingAanAfnemer(); verstrekkingAanAfnemer.setDatumtijdstempelDigilevering(parseTime("2018-02-02T11:59:04.170+01:00")); verstrekkingAanAfnemer.setDatumtijdstempelLV(parseTime("2017-11-27T14:33:05.010+01:00")); verstrekkingAanAfnemer.setKenmerkDigilevering("SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS"); verstrekkingAanAfnemer.setKenmerkLV("SSSSSSSSSSSSSSSSSSSSSSSSSSSSS"); verstrekkingAanAfnemer.setVersieBerichttype("3.10"); GeversioneerdType abonnement = new GeversioneerdType(); abonnement.setNaam(""); abonnement.setVersie(""); verstrekkingAanAfnemer.setAbonnement(abonnement); verstrekkingAanAfnemer.setBasisregistratie("BRP"); GeversioneerdType gebeurtenissoort = new GeversioneerdType(); gebeurtenissoort.setNaam("Gv01"); gebeurtenissoort.setVersie("1.0"); verstrekkingAanAfnemer.setGebeurtenissoort(gebeurtenissoort); VerstrekkingInhoudType inhoud = new VerstrekkingInhoudType(); Gv01 gv01 = new Gv01(); gv01.setANummer("SSSSSSSSSS"); gv01.setRandomKey("00000000"); Container categorie08 = new Container(); categorie08.setNummer("08"); Element element081110 = new Element(); element081110.setNummer("PPPP"); element081110.setValue("PPPPPPPPPPPPPPPPPPPP"); categorie08.getElement().add(element081110); Element element081115 = new Element(); element081115.setNummer("PPPP"); element081115.setValue("PPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPP"); categorie08.getElement().add(element081115); Element element088410 = new Element(); element088410.setNummer("PPPP"); element088410.setValue(""); categorie08.getElement().add(element088410); gv01.getCategorie().add(categorie08); Container categorie58 = new Container(); categorie58.setNummer("58"); Element element581110 = new Element(); element581110.setNummer("PPPP"); element581110.setValue("PPPPPPPPPPPPPPPPPPPPP"); categorie58.getElement().add(element581110); Element element581115 = new Element(); element581115.setNummer("PPPP"); element581115.setValue("PPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPP"); categorie58.getElement().add(element581115); Element element588410 = new Element(); element588410.setNummer("PPPP"); element588410.setValue("O"); categorie58.getElement().add(element588410); gv01.getCategorie().add(categorie58); inhoud.setGv01(gv01); verstrekkingAanAfnemer.setGebeurtenisinhoud(inhoud); Path xmlFile = Paths.get("src","test","resources", "verstrekkingAanAfnemer.xml"); String xmlMessage = Files.readString(xmlFile); verstrekkingAanAfnemerSender.sendMessage(verstrekkingAanAfnemer, new MessageHeaders(new HashMap<>())); ArgumentCaptor<String> stringArgumentCaptor = ArgumentCaptor.forClass(String.class); Mockito.verify(jmsTemplateMock).convertAndSend(nullable(String.class), stringArgumentCaptor.capture(), any(MessagePostProcessor.class)); String sentXML = stringArgumentCaptor.getValue(); assertThat(sentXML, is(xmlMessage)); }
@GET @Path("/{connector}/tasks") @Operation(summary = "List all tasks and their configurations for the specified connector") public List<TaskInfo> getTaskConfigs(final @PathParam("connector") String connector) throws Throwable { FutureCallback<List<TaskInfo>> cb = new FutureCallback<>(); herder.taskConfigs(connector, cb); return requestHandler.completeRequest(cb); }
@Test public void testGetConnectorTaskConfigs() throws Throwable { final ArgumentCaptor<Callback<List<TaskInfo>>> cb = ArgumentCaptor.forClass(Callback.class); expectAndCallbackResult(cb, TASK_INFOS).when(herder).taskConfigs(eq(CONNECTOR_NAME), cb.capture()); List<TaskInfo> taskInfos = connectorsResource.getTaskConfigs(CONNECTOR_NAME); assertEquals(TASK_INFOS, taskInfos); }
public DropTypeCommand create(final DropType statement) { final String typeName = statement.getTypeName(); final boolean ifExists = statement.getIfExists(); if (!ifExists && !metaStore.resolveType(typeName).isPresent()) { throw new KsqlException("Type " + typeName + " does not exist."); } return new DropTypeCommand(typeName); }
@Test public void shouldCreateDropTypeForExistingTypeAndIfExistsSet() { // Given: final DropType dropType = new DropType(Optional.empty(), EXISTING_TYPE, true); // When: final DropTypeCommand cmd = factory.create(dropType); // Then: assertThat(cmd.getTypeName(), equalTo(EXISTING_TYPE)); }
static Method getGetter(final Class<?> clazz, final String propertyName) { final String getterName = "get" + Character.toUpperCase(propertyName.charAt(0)) + propertyName.substring(1); final String iserName = "is" + Character.toUpperCase(propertyName.charAt(0)) + propertyName.substring(1); try { return clazz.getMethod(getterName, NO_ARGS); } catch (NoSuchMethodException e) { // ignore for now - might be a boolean property } try { return clazz.getMethod(iserName, NO_ARGS); } catch (NoSuchMethodException e) { final String className = clazz.getName(); throw SarLogger.ROOT_LOGGER.propertyMethodNotFound("Get", propertyName, className); } }
@Test(expected = IllegalStateException.class) public void doNotFindGetterWithArgument() throws Exception { ReflectionUtils.getGetter(Foo.class, "c"); fail("Should have thrown exception - getC is not a getter"); }
@Override public int getOriginalPort() { try { return getOriginalPort(getContext(), getHeaders(), getPort()); } catch (URISyntaxException e) { throw new IllegalArgumentException(e); } }
@Test void getOriginalPort_respectsProxyProtocol() throws URISyntaxException { SessionContext context = new SessionContext(); context.set( CommonContextKeys.PROXY_PROTOCOL_DESTINATION_ADDRESS, new InetSocketAddress(InetAddresses.forString("1.1.1.1"), 443)); Headers headers = new Headers(); headers.add("X-Forwarded-Port", "6000"); assertEquals(443, HttpRequestMessageImpl.getOriginalPort(context, headers, 9999)); }
protected static Map<String, String> appendParameters( Map<String, String> parameters, Map<String, String> appendParameters) { if (parameters == null) { parameters = new HashMap<>(); } parameters.putAll(appendParameters); return parameters; }
@Test void appendParameter2() { Map<String, String> source = new HashMap<>(); source.put("default.num", "one1"); source.put("num", "ONE1"); Map<String, String> parameters = new HashMap<>(); parameters.put("default.num", "one"); parameters.put("num", "ONE"); source = AbstractBuilder.appendParameters(source, parameters); Assertions.assertTrue(source.containsKey("default.num")); Assertions.assertEquals("ONE", source.get("num")); }
@Override public Num calculate(BarSeries series, Position position) { if (position == null || position.getEntry() == null || position.getExit() == null) { return series.zero(); } Returns returns = new Returns(series, position, Returns.ReturnType.LOG); return calculateES(returns, confidence); }
@Test public void calculateWithBuyAndHold() { series = new MockBarSeries(numFunction, 100d, 99d); Position position = new Position(Trade.buyAt(0, series), Trade.sellAt(1, series)); AnalysisCriterion varCriterion = getCriterion(); assertNumEquals(numOf(Math.log(99d / 100)), varCriterion.calculate(series, position)); }
@SuppressWarnings("unchecked") public static <S, F> S visit(final SqlType type, final SqlTypeWalker.Visitor<S, F> visitor) { final BiFunction<SqlTypeWalker.Visitor<?, ?>, SqlType, Object> handler = HANDLER .get(type.baseType()); if (handler == null) { throw new UnsupportedOperationException("Unsupported schema type: " + type.baseType()); } return (S) handler.apply(visitor, type); }
@Test public void shouldVisitAll() { // Given: visitor = new Visitor<String, Integer>() { @Override public String visitType(final SqlType type) { return "Expected"; } }; allTypes().forEach(type -> { // When: final String result = SqlTypeWalker.visit(type, visitor); // Then: assertThat(result, is("Expected")); }); }
@Override public void handleRequest(RestRequest request, RequestContext requestContext, Callback<RestResponse> callback) { //This code path cannot accept content types or accept types that contain //multipart/related. This is because these types of requests will usually have very large payloads and therefore //would degrade server performance since RestRequest reads everything into memory. if (!isMultipart(request, requestContext, callback)) { _restRestLiServer.handleRequest(request, requestContext, callback); } }
@Test(dataProvider = TestConstants.RESTLI_PROTOCOL_1_2_PREFIX + "protocolVersions") public void testPostProcessingException(final ProtocolVersion protocolVersion, final String errorResponseHeaderName, final RestOrStream restOrStream) throws Exception { //request for nested projection within string field will generate error final StatusCollectionResource statusResource = getMockResource(StatusCollectionResource.class); EasyMock.expect(statusResource.get(eq(1L))).andReturn(buildStatusRecord()).once(); replay(statusResource); Callback<RestResponse> restResponseCallback = new Callback<RestResponse>() { @Override public void onSuccess(RestResponse restResponse) { fail(); } @Override public void onError(Throwable e) { assertTrue(e instanceof RestException); RestException restException = (RestException) e; RestResponse restResponse = restException.getResponse(); try { assertEquals(restResponse.getStatus(), 500); assertTrue(restResponse.getEntity().length() > 0); assertEquals(restResponse.getHeader(errorResponseHeaderName), RestConstants.HEADER_VALUE_ERROR); EasyMock.verify(statusResource); EasyMock.reset(statusResource); } catch (Exception e2) { fail(e2.toString()); } } }; if (restOrStream == RestOrStream.REST) { RestRequest request = new RestRequestBuilder(new URI("/statuses/1?fields=text:(invalid)")) .setHeader(RestConstants.HEADER_RESTLI_PROTOCOL_VERSION, protocolVersion.toString()).build(); _server.handleRequest(request, new RequestContext(), restResponseCallback); } else { StreamRequest streamRequest = new StreamRequestBuilder(new URI("/statuses/1?fields=text:(invalid)")) .setHeader(RestConstants.HEADER_RESTLI_PROTOCOL_VERSION, protocolVersion.toString()) .build(EntityStreams.emptyStream()); Callback<StreamResponse> callback = new Callback<StreamResponse>() { @Override public void onSuccess(StreamResponse streamResponse) { fail(); } @Override public void onError(Throwable e) { Messages.toRestException((StreamException) e, new Callback<RestException>() { @Override public void onError(Throwable e) { Assert.fail(); } @Override public void onSuccess(RestException result) { restResponseCallback.onError(result); } }); } }; _server.handleRequest(streamRequest, new RequestContext(), callback); } }
public static FieldScope fromSetFields(Message message) { return fromSetFields( message, AnyUtils.defaultTypeRegistry(), AnyUtils.defaultExtensionRegistry()); }
@Test public void testFromSetFields() { Message scopeMessage = parse( "o_int: 1 r_string: \"x\" o_test_message: { o_int: 1 } " + "r_test_message: { r_string: \"x\" } r_test_message: { o_int: 1 } " + "o_sub_test_message: { o_test_message: { o_int: 1 } }"); // 1 = compared, [2, 3] = ignored, 4 = compared and fails Message message = parse( "o_int: 1 r_string: \"1\" o_test_message: {o_int: 1 r_string: \"2\" } " + "r_test_message: { o_int: 1 r_string: \"1\" } " + "r_test_message: { o_int: 1 r_string: \"1\" } " + "o_sub_test_message: { o_int: 2 o_test_message: { o_int: 1 r_string: \"2\" } }"); Message diffMessage = parse( "o_int: 4 r_string: \"4\" o_test_message: {o_int: 4 r_string: \"3\" } " + "r_test_message: { o_int: 4 r_string: \"4\" } " + "r_test_message: { o_int: 4 r_string: \"4\" }" + "o_sub_test_message: { r_string: \"3\" o_int: 3 " + "o_test_message: { o_int: 4 r_string: \"3\" } }"); Message eqMessage = parse( "o_int: 1 r_string: \"1\" o_test_message: {o_int: 1 r_string: \"3\" } " + "r_test_message: { o_int: 1 r_string: \"1\" } " + "r_test_message: { o_int: 1 r_string: \"1\" }" + "o_sub_test_message: { o_int: 3 o_test_message: { o_int: 1 r_string: \"3\" } }"); expectThat(diffMessage).isNotEqualTo(message); expectThat(eqMessage).isNotEqualTo(message); expectThat(diffMessage) .withPartialScope(FieldScopes.fromSetFields(scopeMessage)) .isNotEqualTo(message); expectThat(eqMessage) .withPartialScope(FieldScopes.fromSetFields(scopeMessage)) .isEqualTo(message); expectFailureWhenTesting().that(diffMessage).isEqualTo(message); expectIsEqualToFailed(); expectThatFailure().hasMessageThat().contains("1 -> 4"); expectThatFailure().hasMessageThat().contains("\"1\" -> \"4\""); expectThatFailure().hasMessageThat().contains("2 -> 3"); expectThatFailure().hasMessageThat().contains("\"2\" -> \"3\""); expectFailureWhenTesting() .that(diffMessage) .withPartialScope(FieldScopes.fromSetFields(scopeMessage)) .isEqualTo(message); expectIsEqualToFailed(); expectThatFailure().hasMessageThat().contains("1 -> 4"); expectThatFailure().hasMessageThat().contains("\"1\" -> \"4\""); expectThatFailure().hasMessageThat().doesNotContain("2 -> 3"); expectThatFailure().hasMessageThat().doesNotContain("\"2\" -> \"3\""); expectFailureWhenTesting() .that(eqMessage) .withPartialScope(FieldScopes.fromSetFields(scopeMessage)) .isNotEqualTo(message); expectIsNotEqualToFailed(); expectThatFailure().hasMessageThat().contains("ignored: o_test_message.r_string"); expectThatFailure().hasMessageThat().contains("ignored: o_sub_test_message.o_int"); expectThatFailure() .hasMessageThat() .contains("ignored: o_sub_test_message.o_test_message.r_string"); }
@Override public Thread newThread(Runnable r) { String threadName = name + id.getAndIncrement(); Thread thread = new Thread(r, threadName); thread.setDaemon(true); return thread; }
@Test void test() { NameThreadFactory threadFactory = new NameThreadFactory("test"); Thread t1 = threadFactory.newThread(() -> { }); Thread t2 = threadFactory.newThread(() -> { }); assertEquals("test.0", t1.getName()); assertEquals("test.1", t2.getName()); }
public static String getName(Object obj) { Objects.requireNonNull(obj, "obj"); return obj.getClass().getName(); }
@Test void testGetName() { final String name = "java.lang.Integer"; Integer val = 1; assertEquals(name, ClassUtils.getName(val)); assertEquals(name, ClassUtils.getName(Integer.class)); assertEquals(name, ClassUtils.getCanonicalName(val)); assertEquals(name, ClassUtils.getCanonicalName(Integer.class)); assertEquals("Integer", ClassUtils.getSimpleName(val)); assertEquals("Integer", ClassUtils.getSimpleName(Integer.class)); }
public static List<Group> enumerateFrom(Group root) { List<Group> leaves = new ArrayList<>(); visitNode(root, leaves); return leaves; }
@Test void rootGroupCountedAsLeafWhenNoChildren() { Group g = new Group(0, "donkeykong"); List<Group> leaves = LeafGroups.enumerateFrom(g); assertThat(leaves.size(), is(1)); assertThat(leaves.get(0).getName(), is("donkeykong")); }
public int getPartition(K key, V value, int numReduceTasks) { int h = SEED ^ key.hashCode(); h ^= (h >>> 20) ^ (h >>> 12); h = h ^ (h >>> 7) ^ (h >>> 4); return (h & Integer.MAX_VALUE) % numReduceTasks; }
@Test public void testPatterns() { int[] results = new int[PARTITIONS]; RehashPartitioner<IntWritable, NullWritable> p = new RehashPartitioner<IntWritable, NullWritable>(); /* test sequence 4, 8, 12, ... 128 */ for (int i = 0; i < END; i += STEP) { results[p.getPartition(new IntWritable(i), null, PARTITIONS)]++; } int badbuckets = 0; Integer min = Collections.min(Arrays.asList(ArrayUtils.toObject(results))); Integer max = Collections.max(Arrays.asList(ArrayUtils.toObject(results))); Integer avg = (int) Math.round((max + min) / 2.0); System.out.println("Dumping buckets distribution: min=" + min + " avg=" + avg + " max=" + max); for (int i = 0; i < PARTITIONS; i++) { double var = (results[i] - avg) / (double) (avg); System.out.println("bucket " + i + " " + results[i] + " items, variance " + var); if (Math.abs(var) > MAX_ERROR) badbuckets++; } System.out.println(badbuckets + " of " + PARTITIONS + " are too small or large buckets"); assertTrue("too many overflow buckets", badbuckets < PARTITIONS * MAX_BADBUCKETS); }
@VisibleForTesting Object evaluate(final GenericRow row) { return term.getValue(new TermEvaluationContext(row)); }
@Test public void shouldEvaluateCastToString() { // Given: final Expression cast1 = new Cast( new IntegerLiteral(10), new Type(SqlPrimitiveType.of("STRING")) ); final Expression cast2 = new Cast( new LongLiteral(1234L), new Type(SqlPrimitiveType.of("STRING")) ); final Expression cast3 = new Cast( new DoubleLiteral(12.5), new Type(SqlPrimitiveType.of("STRING")) ); final Expression cast4 = new Cast( new DecimalLiteral(new BigDecimal("4567.5")), new Type(SqlPrimitiveType.of("STRING")) ); // When: InterpretedExpression interpreter1 = interpreter(cast1); InterpretedExpression interpreter2 = interpreter(cast2); InterpretedExpression interpreter3 = interpreter(cast3); InterpretedExpression interpreter4 = interpreter(cast4); // Then: assertThat(interpreter1.evaluate(ROW), is("10")); assertThat(interpreter2.evaluate(ROW), is("1234")); assertThat(interpreter3.evaluate(ROW), is("12.5")); assertThat(interpreter4.evaluate(ROW), is("4567.5")); }
@Override public List<String> splitAndEvaluate() { try (ReflectContext context = new ReflectContext(JAVA_CLASSPATH)) { if (Strings.isNullOrEmpty(inlineExpression)) { return Collections.emptyList(); } return flatten(evaluate(context, GroovyUtils.split(handlePlaceHolder(inlineExpression)))); } }
@Test void assertEvaluateForComplex() { List<String> expected = createInlineExpressionParser("t_${['new','old']}_order_${1..2}, t_config").splitAndEvaluate(); assertThat(expected.size(), is(5)); assertThat(expected, hasItems("t_new_order_1", "t_new_order_2", "t_old_order_1", "t_old_order_2", "t_config")); }
public static Serializable decode(final ByteBuf byteBuf) { int valueType = byteBuf.readUnsignedByte() & 0xff; StringBuilder result = new StringBuilder(); decodeValue(valueType, 1, byteBuf, result); return result.toString(); }
@Test void assertDecodeSmallJsonObjectWithUInt16() { List<JsonEntry> jsonEntries = new LinkedList<>(); jsonEntries.add(new JsonEntry(JsonValueTypes.UINT16, "key1", 0x00007fff)); jsonEntries.add(new JsonEntry(JsonValueTypes.UINT16, "key2", 0x00008000)); ByteBuf payload = mockJsonObjectByteBuf(jsonEntries, true); String actual = (String) MySQLJsonValueDecoder.decode(payload); assertThat(actual, is("{\"key1\":32767,\"key2\":32768}")); }
@Override public MapperResult findConfigInfoAggrByPageFetchRows(MapperContext context) { final Integer startRow = context.getStartRow(); final Integer pageSize = context.getPageSize(); final String dataId = (String) context.getWhereParameter(FieldConstant.DATA_ID); final String groupId = (String) context.getWhereParameter(FieldConstant.GROUP_ID); final String tenantId = (String) context.getWhereParameter(FieldConstant.TENANT_ID); String sql = "SELECT data_id,group_id,tenant_id,datum_id,app_name,content FROM config_info_aggr WHERE data_id=? AND " + "group_id=? AND tenant_id=? ORDER BY datum_id OFFSET " + startRow + " ROWS FETCH NEXT " + pageSize + " ROWS ONLY"; List<Object> paramList = CollectionUtils.list(dataId, groupId, tenantId); return new MapperResult(sql, paramList); }
@Test void testFindConfigInfoAggrByPageFetchRows() { String dataId = "data-id"; String groupId = "group-id"; String tenantId = "tenant-id"; Integer startRow = 0; Integer pageSize = 5; MapperContext context = new MapperContext(); context.putWhereParameter(FieldConstant.DATA_ID, dataId); context.putWhereParameter(FieldConstant.GROUP_ID, groupId); context.putWhereParameter(FieldConstant.TENANT_ID, tenantId); context.setStartRow(startRow); context.setPageSize(pageSize); MapperResult mapperResult = configInfoAggrMapperByDerby.findConfigInfoAggrByPageFetchRows(context); String sql = mapperResult.getSql(); List<Object> paramList = mapperResult.getParamList(); assertEquals(sql, "SELECT data_id,group_id,tenant_id,datum_id,app_name,content FROM config_info_aggr WHERE " + "data_id=? AND group_id=? AND tenant_id=? ORDER BY datum_id OFFSET 0 ROWS FETCH NEXT 5 ROWS ONLY"); assertEquals(paramList, CollectionUtils.list(dataId, groupId, tenantId)); }
@Override public Token redeem(@NonNull String code, String redirectUri, String clientId) { var redeemed = codeRepo.remove(code).orElse(null); if (redeemed == null) { return null; } if (!validateCode(redeemed, redirectUri, clientId)) { return null; } var accessTokenTtl = Duration.ofMinutes(5); return new Token( issueAccessToken(accessTokenTtl, redeemed.clientId()), issueIdToken(redeemed.clientId(), redeemed.nonce(), redeemed.federatedIdToken()), accessTokenTtl.getSeconds()); }
@Test void redeem_idToken() throws JOSEException, ParseException { var issuer = URI.create("https://idp.example.com"); var k = genKey(); var keyStore = mock(KeyStore.class); when(keyStore.signingKey()).thenReturn(k); var codeRepo = mock(CodeRepo.class); var sut = new TokenIssuerImpl(issuer, keyStore, codeRepo); var id = UUID.randomUUID().toString(); var nonce = UUID.randomUUID().toString(); var redirectUri = URI.create("https://myapp.example.com/callback"); var clientId = "myapp"; var federatedIdToken = new IdTokenJWS( null, new IdToken( null, "tobias", null, 0, 0, 0, null, null, null, null, null, null, null, null, null, null, null, null, null)); var code = new Code( id, null, Instant.now().plusSeconds(10), redirectUri, nonce, clientId, federatedIdToken); when(codeRepo.remove(id)).thenReturn(Optional.of(code)); // when var token = sut.redeem(id, redirectUri.toString(), clientId); // then var idToken = token.idToken(); var jws = JWSObject.parse(idToken); var verifier = new ECDSAVerifier(k); jws.verify(verifier); assertIdTokenClaims(jws, nonce, issuer, clientId); }
@ScalarOperator(SUBTRACT) @SqlType(StandardTypes.DOUBLE) public static double subtract(@SqlType(StandardTypes.DOUBLE) double left, @SqlType(StandardTypes.DOUBLE) double right) { return left - right; }
@Test public void testSubtract() { assertFunction("37.7E0 - 37.7E0", DOUBLE, 37.7 - 37.7); assertFunction("37.7E0 - 17.1E0", DOUBLE, 37.7 - 17.1); assertFunction("17.1E0 - 37.7E0", DOUBLE, 17.1 - 37.7); assertFunction("17.1E0 - 17.1E0", DOUBLE, 17.1 - 17.1); }
static void dissectCatalogResize( final MutableDirectBuffer buffer, final int offset, final StringBuilder builder) { int absoluteOffset = offset; absoluteOffset += dissectLogHeader(CONTEXT, CATALOG_RESIZE, buffer, absoluteOffset, builder); final int maxEntries = buffer.getInt(absoluteOffset, LITTLE_ENDIAN); absoluteOffset += SIZE_OF_INT; final long catalogLength = buffer.getLong(absoluteOffset, LITTLE_ENDIAN); absoluteOffset += SIZE_OF_LONG; final int newMaxEntries = buffer.getInt(absoluteOffset, LITTLE_ENDIAN); absoluteOffset += SIZE_OF_INT; final long newCatalogLength = buffer.getLong(absoluteOffset, LITTLE_ENDIAN); builder.append(": ").append(maxEntries); builder.append(" entries (").append(catalogLength).append(" bytes)"); builder.append(" => ").append(newMaxEntries); builder.append(" entries (").append(newCatalogLength).append(" bytes)"); }
@Test void catalogResize() { internalEncodeLogHeader(buffer, 0, 6, 100, () -> 5_600_000_000L); buffer.putInt(LOG_HEADER_LENGTH, 24, LITTLE_ENDIAN); buffer.putLong(LOG_HEADER_LENGTH + SIZE_OF_INT, 100, LITTLE_ENDIAN); buffer.putInt(LOG_HEADER_LENGTH + SIZE_OF_INT + SIZE_OF_LONG, 777, LITTLE_ENDIAN); buffer.putLong(LOG_HEADER_LENGTH + SIZE_OF_INT * 2 + SIZE_OF_LONG, 10_000_000_000L, LITTLE_ENDIAN); dissectCatalogResize(buffer, 0, builder); assertEquals("[5.600000000] " + CONTEXT + ": " + CATALOG_RESIZE.name() + " [6/100]:" + " 24 entries (100 bytes) => 777 entries (10000000000 bytes)", builder.toString()); }
@Override public Num calculate(BarSeries series, Position position) { return getTradeCost(series, position, series.numOf(initialAmount)); }
@Test public void fixedCost() { MockBarSeries series = new MockBarSeries(numFunction, 100, 105, 110, 100, 95, 105); TradingRecord tradingRecord = new BaseTradingRecord(); Num criterion; tradingRecord.operate(0); tradingRecord.operate(1); criterion = getCriterion(1000d, 0d, 1.3d).calculate(series, tradingRecord); assertNumEquals(2.6d, criterion); tradingRecord.operate(2); tradingRecord.operate(3); criterion = getCriterion(1000d, 0d, 1.3d).calculate(series, tradingRecord); assertNumEquals(5.2d, criterion); tradingRecord.operate(0); criterion = getCriterion(1000d, 0d, 1.3d).calculate(series, tradingRecord); assertNumEquals(6.5d, criterion); }
public List<Path> getBaseDirs() { return this.baseDirs; }
@Test public void testGetBaseDirs() throws Exception { assertEquals(1, deletionTask.getBaseDirs().size()); assertEquals(baseDirs, deletionTask.getBaseDirs()); }
List<String> decorateTextWithHtml(String text, DecorationDataHolder decorationDataHolder) { return decorateTextWithHtml(text, decorationDataHolder, null, null); }
@Test public void should_allow_multiple_levels_highlighting() { String javaDocSample = "/**" + LF_END_OF_LINE + " * Creates a FormulaDecorator" + LF_END_OF_LINE + " *" + LF_END_OF_LINE + " * @param metric the metric should have an associated formula" + LF_END_OF_LINE + " * " + LF_END_OF_LINE + " * @throws IllegalArgumentException if no formula is associated to the metric" + LF_END_OF_LINE + " */" + LF_END_OF_LINE; DecorationDataHolder decorationData = new DecorationDataHolder(); decorationData.loadSyntaxHighlightingData("0,184,cppd;47,53,k;"); HtmlTextDecorator htmlTextDecorator = new HtmlTextDecorator(); List<String> htmlOutput = htmlTextDecorator.decorateTextWithHtml(javaDocSample, decorationData); assertThat(htmlOutput).containsExactly( "<span class=\"cppd\">/**</span>", "<span class=\"cppd\"> * Creates a FormulaDecorator</span>", "<span class=\"cppd\"> *</span>", "<span class=\"cppd\"> * @param <span class=\"k\">metric</span> the metric should have an associated formula</span>", "<span class=\"cppd\"> * </span>", "<span class=\"cppd\"> * @throws IllegalArgumentException if no formula is associated to the metric</span>", "<span class=\"cppd\"> */</span>", "" ); }
public String getWorkflowIdentity() { RestartConfig.RestartNode node = getCurrentNode(restartConfig); return "[" + node.getWorkflowId() + "][" + node.getInstanceId() + "]"; }
@Test public void testGetWorkflowIdentity() { RestartConfig config = RestartConfig.builder().addRestartNode("foo", 1, "bar").build(); RunRequest runRequest = RunRequest.builder() .initiator(new ManualInitiator()) .currentPolicy(RunPolicy.RESTART_FROM_INCOMPLETE) .restartConfig(config) .build(); Assert.assertEquals("[foo][1]", runRequest.getWorkflowIdentity()); Assert.assertEquals("bar", runRequest.getRestartStepId()); }
@Override public SmsSendRespDTO sendSms(Long sendLogId, String mobile, String apiTemplateId, List<KeyValue<String, Object>> templateParams) throws Throwable { // Build the request SendSmsRequest request = new SendSmsRequest(); request.setSmsSdkAppId(getSdkAppId()); request.setPhoneNumberSet(new String[]{mobile}); request.setSignName(properties.getSignature()); request.setTemplateId(apiTemplateId); request.setTemplateParamSet(ArrayUtils.toArray(templateParams, e -> String.valueOf(e.getValue()))); request.setSessionContext(JsonUtils.toJsonString(new SessionContext().setLogId(sendLogId))); // Execute the request SendSmsResponse response = client.SendSms(request); SendStatus status = response.getSendStatusSet()[0]; return new SmsSendRespDTO().setSuccess(Objects.equals(status.getCode(), API_CODE_SUCCESS)).setSerialNo(status.getSerialNo()) .setApiRequestId(response.getRequestId()).setApiCode(status.getCode()).setApiMsg(status.getMessage()); }
@Test public void testDoSendSms_success() throws Throwable { // Prepare parameters Long sendLogId = randomLongId(); String mobile = randomString(); String apiTemplateId = randomString(); List<KeyValue<String, Object>> templateParams = Lists.newArrayList( new KeyValue<>("1", 1234), new KeyValue<>("2", "login")); String requestId = randomString(); String serialNo = randomString(); // Mock the client method SendSmsResponse response = randomPojo(SendSmsResponse.class, o -> { o.setRequestId(requestId); SendStatus[] sendStatuses = new SendStatus[1]; o.setSendStatusSet(sendStatuses); SendStatus sendStatus = new SendStatus(); sendStatuses[0] = sendStatus; sendStatus.setCode(TencentSmsClient.API_CODE_SUCCESS); sendStatus.setMessage("send success"); sendStatus.setSerialNo(serialNo); }); when(client.SendSms(argThat(request -> { assertEquals(mobile, request.getPhoneNumberSet()[0]); assertEquals(properties.getSignature(), request.getSignName()); assertEquals(apiTemplateId, request.getTemplateId()); assertEquals(toJsonString(ArrayUtils.toArray(new ArrayList<>(MapUtils.convertMap(templateParams).values()), String::valueOf)), toJsonString(request.getTemplateParamSet())); assertEquals(sendLogId, ReflectUtil.getFieldValue(JsonUtils.parseObject(request.getSessionContext(), TencentSmsClient.SessionContext.class), "logId")); return true; }))).thenReturn(response); // Invoke SmsSendRespDTO result = smsClient.sendSms(sendLogId, mobile, apiTemplateId, templateParams); // Assert assertTrue(result.getSuccess()); assertEquals(response.getRequestId(), result.getApiRequestId()); assertEquals(response.getSendStatusSet()[0].getCode(), result.getApiCode()); assertEquals(response.getSendStatusSet()[0].getMessage(), result.getApiMsg()); assertEquals(response.getSendStatusSet()[0].getSerialNo(), result.getSerialNo()); }
public State currentState() { return lastState.get().state; }
@Test public void currentState() { for (State state : State.values()) { tracker.changeState(state, time.milliseconds()); assertEquals(state, tracker.currentState()); } }
public Exporter getCompatibleExporter(TransferExtension extension, DataVertical jobType) { Exporter<?, ?> exporter = getExporterOrNull(extension, jobType); if (exporter != null) { return exporter; } switch (jobType) { case MEDIA: exporter = getMediaExporter(extension); break; case PHOTOS: exporter = getPhotosExporter(extension); break; case VIDEOS: exporter = getVideosExporter(extension); break; } if (exporter == null) { return extension.getExporter(jobType); // preserve original exception } return exporter; }
@Test public void shouldConstructMediaExporterFromPhotoAndVideo() { TransferExtension ext = mock(TransferExtension.class); when(ext.getExporter(eq(PHOTOS))).thenReturn(mock(Exporter.class)); when(ext.getExporter(eq(VIDEOS))).thenReturn(mock(Exporter.class)); when(ext.getExporter(eq(MEDIA))).thenReturn(null); Exporter<?, ?> exp = compatibilityProvider.getCompatibleExporter(ext, MEDIA); assertThat(exp).isInstanceOf(MediaExporterDecorator.class); }
@Override public ExecuteResult execute(final ServiceContext serviceContext, final ConfiguredKsqlPlan plan, final boolean restoreInProgress) { try { final ExecuteResult result = EngineExecutor .create(primaryContext, serviceContext, plan.getConfig()) .execute(plan.getPlan(), restoreInProgress); return result; } catch (final KsqlStatementException e) { throw e; } catch (final KsqlException e) { // add the statement text to the KsqlException throw new KsqlStatementException( e.getMessage(), e.getMessage(), plan.getPlan().getStatementText(), e.getCause() ); } }
@Test public void shouldThrowOnTerminateAsNotExecutable() { // Given: setupKsqlEngineWithSharedRuntimeEnabled(); // When: final PersistentQueryMetadata query = (PersistentQueryMetadata) KsqlEngineTestUtil .execute( serviceContext, ksqlEngine, "create table bar as select * from test2;", ksqlConfig, Collections.emptyMap()) .get(0); final KsqlStatementException e = assertThrows( KsqlStatementException.class, () -> KsqlEngineTestUtil.execute( serviceContext, ksqlEngine, "TERMINATE " + query.getQueryId() + ";", ksqlConfig, Collections.emptyMap() ) ); // Then: assertThat(e, rawMessage(containsString( "Statement not executable"))); assertThat(e, statementText(is( "TERMINATE CTAS_BAR_0;"))); }
public ZipWriter add(boolean withSrcDir, FileFilter filter, File... files) throws IORuntimeException { for (File file : files) { // When compressing a single file, the file's parent directory must be stripped from the path String srcRootDir; try { srcRootDir = file.getCanonicalPath(); if ((false == file.isDirectory()) || withSrcDir) { // For a file, strip the entire parent directory path; if the source directory itself should be included, strip all ancestor directories and keep only this directory's name srcRootDir = file.getCanonicalFile().getParentFile().getCanonicalPath(); } } catch (IOException e) { throw new IORuntimeException(e); } _add(file, srcRootDir, filter); } return this; }
@Test @Disabled public void addTest(){ final ZipWriter writer = ZipWriter.of(FileUtil.file("d:/test/test.zip"), CharsetUtil.CHARSET_UTF_8); writer.add(new FileResource("d:/test/qr_c.png")); writer.close(); }
RegistryEndpointProvider<Optional<URL>> initializer() { return new Initializer(); }
@Test public void testInitializer_handleResponse_accepted() throws IOException, RegistryException { Mockito.when(mockResponse.getStatusCode()).thenReturn(202); // Accepted Mockito.when(mockResponse.getHeader("Location")) .thenReturn(Collections.singletonList("location")); GenericUrl requestUrl = new GenericUrl("https://someurl"); Mockito.when(mockResponse.getRequestUrl()).thenReturn(requestUrl); Assert.assertEquals( new URL("https://someurl/location"), testBlobPusher.initializer().handleResponse(mockResponse).get()); }
@Override public void truncate() { truncateToEntries(0); }
@Test public void truncate() throws IOException { try (OffsetIndex idx = new OffsetIndex(nonExistentTempFile(), 0L, 10 * 8)) { idx.truncate(); IntStream.range(1, 10).forEach(i -> idx.append(i, i)); // now check the last offset after various truncate points and validate that we can still append to the index. idx.truncateTo(12); assertEquals(new OffsetPosition(9, 9), idx.lookup(10), "Index should be unchanged by truncate past the end"); assertEquals(9, idx.lastOffset(), "9 should be the last entry in the index"); idx.append(10, 10); idx.truncateTo(10); assertEquals(new OffsetPosition(9, 9), idx.lookup(10), "Index should be unchanged by truncate at the end"); assertEquals(9, idx.lastOffset(), "9 should be the last entry in the index"); idx.append(10, 10); idx.truncateTo(9); assertEquals(new OffsetPosition(8, 8), idx.lookup(10), "Index should truncate off last entry"); assertEquals(8, idx.lastOffset(), "8 should be the last entry in the index"); idx.append(9, 9); idx.truncateTo(5); assertEquals(new OffsetPosition(4, 4), idx.lookup(10), "4 should be the last entry in the index"); assertEquals(4, idx.lastOffset(), "4 should be the last entry in the index"); idx.append(5, 5); idx.truncate(); assertEquals(0, idx.entries(), "Full truncation should leave no entries"); } }
public JRTConnectionPool updateSources(List<String> addresses) { ConfigSourceSet newSources = new ConfigSourceSet(addresses); return updateSources(newSources); }
@Test public void updateSources() { ConfigSourceSet twoSources = new ConfigSourceSet(List.of("host0", "host1")); JRTConnectionPool sourcePool = new JRTConnectionPool(twoSources); ConfigSourceSet sourcesBefore = sourcePool.getSourceSet(); // Update to the same set, should be equal sourcePool.updateSources(twoSources); assertEquals(sourcePool.getSourceSet(), sourcesBefore); // Update to new set List<String> newSources = new ArrayList<>(); newSources.add("host2"); newSources.add("host3"); sourcePool.updateSources(newSources); ConfigSourceSet newSourceSet = sourcePool.getSourceSet(); assertNotNull(newSourceSet); assertEquals(2, newSourceSet.getSources().size()); assertNotEquals(sourcesBefore, newSourceSet); assertTrue(newSourceSet.getSources().contains("host2")); assertTrue(newSourceSet.getSources().contains("host3")); // Update to new set with just one host List<String> newSources2 = new ArrayList<>(); newSources2.add("host4"); sourcePool.updateSources(newSources2); ConfigSourceSet newSourceSet2 = sourcePool.getSourceSet(); assertNotNull(newSourceSet2); assertEquals(1, newSourceSet2.getSources().size()); assertNotEquals(newSourceSet, newSourceSet2); assertTrue(newSourceSet2.getSources().contains("host4")); sourcePool.close(); }
public CeTaskMessageDto setMessage(String message) { checkArgument(message != null && !message.isEmpty(), "message can't be null nor empty"); this.message = abbreviate(message, MAX_MESSAGE_SIZE); return this; }
@Test void setMessage_fails_with_IAE_if_argument_is_null() { assertThatThrownBy(() -> underTest.setMessage(null)) .isInstanceOf(IllegalArgumentException.class) .hasMessage("message can't be null nor empty"); }
public static PostgreSQLErrorResponsePacket newInstance(final Exception cause) { Optional<ServerErrorMessage> serverErrorMessage = findServerErrorMessage(cause); return serverErrorMessage.map(PostgreSQLErrorPacketFactory::createErrorResponsePacket) .orElseGet(() -> createErrorResponsePacket(SQLExceptionTransformEngine.toSQLException(cause, DATABASE_TYPE))); }
@Test void assertPSQLExceptionWithServerErrorMessageIsNull() throws ReflectiveOperationException { PostgreSQLErrorResponsePacket actual = PostgreSQLErrorPacketFactory.newInstance(new PSQLException("psqlEx", PSQLState.UNEXPECTED_ERROR, new Exception("test"))); Map<Character, String> fields = (Map<Character, String>) Plugins.getMemberAccessor().get(PostgreSQLErrorResponsePacket.class.getDeclaredField("fields"), actual); assertThat(fields.get(PostgreSQLErrorResponsePacket.FIELD_TYPE_CODE), is(PSQLState.UNEXPECTED_ERROR.getState())); assertThat(fields.get(PostgreSQLErrorResponsePacket.FIELD_TYPE_MESSAGE), is("psqlEx")); }
@ShellMethod(key = "show restore", value = "Show details of a restore instant") public String showRestore( @ShellOption(value = {"--instant"}, help = "Restore instant") String restoreInstant, @ShellOption(value = {"--limit"}, help = "Limit #rows to be displayed", defaultValue = "10") Integer limit, @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") final String sortByField, @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") final boolean descending, @ShellOption(value = {"--headeronly"}, help = "Print Header Only", defaultValue = "false") final boolean headerOnly) { HoodieActiveTimeline activeTimeline = HoodieCLI.getTableMetaClient().getActiveTimeline(); List<HoodieInstant> matchingInstants = activeTimeline.filterCompletedInstants().filter(completed -> completed.getTimestamp().equals(restoreInstant)).getInstants(); if (matchingInstants.isEmpty()) { matchingInstants = activeTimeline.filterInflights().filter(inflight -> inflight.getTimestamp().equals(restoreInstant)).getInstants(); } // Assuming a single exact match is found in either completed or inflight instants HoodieInstant instant = matchingInstants.get(0); List<Comparable[]> outputRows = new ArrayList<>(); populateOutputFromRestoreInstant(instant, outputRows, activeTimeline); TableHeader header = createResultHeader(); return HoodiePrintHelper.print(header, new HashMap<>(), sortByField, descending, limit, headerOnly, outputRows); }
@Test public void testShowRestore() throws IOException { // get instant HoodieActiveTimeline activeTimeline = HoodieCLI.getTableMetaClient().getActiveTimeline(); Stream<HoodieInstant> restores = activeTimeline.getRestoreTimeline().filterCompletedInstants().getInstantsAsStream(); HoodieInstant instant = restores.findFirst().orElse(null); assertNotNull(instant, "The instant can not be null."); Object result = shell.evaluate(() -> "show restore --instant " + instant.getTimestamp()); assertTrue(ShellEvaluationResultUtil.isSuccess(result)); // get metadata of instant HoodieRestoreMetadata instantMetadata = TimelineMetadataUtils.deserializeAvroMetadata( activeTimeline.getInstantDetails(instant).get(), HoodieRestoreMetadata.class); // generate expected result TableHeader header = new TableHeader() .addTableHeaderField(HoodieTableHeaderFields.HEADER_INSTANT) .addTableHeaderField(HoodieTableHeaderFields.HEADER_RESTORE_INSTANT) .addTableHeaderField(HoodieTableHeaderFields.HEADER_TIME_TOKEN_MILLIS) .addTableHeaderField(HoodieTableHeaderFields.HEADER_RESTORE_STATE); List<Comparable[]> rows = new ArrayList<>(); instantMetadata.getInstantsToRollback().forEach((String rolledbackInstant) -> { Comparable[] row = new Comparable[4]; row[0] = instantMetadata.getStartRestoreTime(); row[1] = rolledbackInstant; row[2] = instantMetadata.getTimeTakenInMillis(); row[3] = HoodieInstant.State.COMPLETED.toString(); rows.add(row); }); String expected = HoodiePrintHelper.print(header, new HashMap<>(), "", false, -1, false, rows); expected = removeNonWordAndStripSpace(expected); String got = removeNonWordAndStripSpace(result.toString()); assertEquals(expected, got); }
public boolean checkIfEnabled() { try { this.gitCommand = locateDefaultGit(); MutableString stdOut = new MutableString(); this.processWrapperFactory.create(null, l -> stdOut.string = l, gitCommand, "--version").execute(); return stdOut.string != null && stdOut.string.startsWith("git version") && isCompatibleGitVersion(stdOut.string); } catch (Exception e) { LOG.debug("Failed to find git native client", e); return false; } }
@Test public void execution_on_windows_is_disabled_if_git_not_on_path() { System2 system2 = mock(System2.class); when(system2.isOsWindows()).thenReturn(true); when(system2.property("PATH")).thenReturn("C:\\some-path;C:\\some-another-path"); ProcessWrapperFactory mockFactory = mock(ProcessWrapperFactory.class); mockGitWhereOnWindows(mockFactory); NativeGitBlameCommand blameCommand = new NativeGitBlameCommand(system2, mockFactory); assertThat(blameCommand.checkIfEnabled()).isFalse(); }
public static <T> Either<String, T> resolveImportDMN(Import importElement, Collection<T> dmns, Function<T, QName> idExtractor) { final String importerDMNNamespace = ((Definitions) importElement.getParent()).getNamespace(); final String importerDMNName = ((Definitions) importElement.getParent()).getName(); final String importNamespace = importElement.getNamespace(); final String importName = importElement.getName(); final String importLocationURI = importElement.getLocationURI(); // This is optional final String importModelName = importElement.getAdditionalAttributes().get(TImport.MODELNAME_QNAME); LOGGER.debug("Resolving an Import in DMN Model with name={} and namespace={}. " + "Importing a DMN model with namespace={} name={} locationURI={}, modelName={}", importerDMNName, importerDMNNamespace, importNamespace, importName, importLocationURI, importModelName); List<T> matchingDMNList = dmns.stream() .filter(m -> idExtractor.apply(m).getNamespaceURI().equals(importNamespace)) .toList(); if (matchingDMNList.size() == 1) { T located = matchingDMNList.get(0); // Check if the located DMN Model in the NS corresponds to the import's `drools:modelName`. if (importModelName == null || idExtractor.apply(located).getLocalPart().equals(importModelName)) { LOGGER.debug("DMN Model with name={} and namespace={} successfully imported a DMN " + "with namespace={} name={} locationURI={}, modelName={}", importerDMNName, importerDMNNamespace, importNamespace, importName, importLocationURI, importModelName); return Either.ofRight(located); } else { LOGGER.error("DMN Model with name={} and namespace={} can't import a DMN with namespace={}, name={}, modelName={}, " + "located within namespace only {} but does not match for the actual modelName", importerDMNName, importerDMNNamespace, importNamespace, importName, importModelName, idExtractor.apply(located)); return Either.ofLeft(String.format( "DMN Model with name=%s and namespace=%s can't import a DMN with namespace=%s, name=%s, modelName=%s, " + "located within namespace only %s but does not match for the actual modelName", importerDMNName, importerDMNNamespace, importNamespace, importName, importModelName, idExtractor.apply(located))); } } else { List<T> usingNSandName = matchingDMNList.stream() .filter(dmn -> idExtractor.apply(dmn).getLocalPart().equals(importModelName)) .toList(); if (usingNSandName.size() == 1) { LOGGER.debug("DMN Model with name={} and namespace={} successfully imported a DMN " + "with namespace={} name={} locationURI={}, modelName={}", importerDMNName, importerDMNNamespace, importNamespace, importName, importLocationURI, importModelName); return Either.ofRight(usingNSandName.get(0)); } else if (usingNSandName.isEmpty()) { LOGGER.error("DMN Model with name={} and namespace={} failed to import a DMN with namespace={} name={} locationURI={}, modelName={}.", importerDMNName, importerDMNNamespace, importNamespace, importName, importLocationURI, importModelName); return Either.ofLeft(String.format( "DMN Model with name=%s and namespace=%s failed to import a DMN with namespace=%s name=%s locationURI=%s, modelName=%s.", importerDMNName, importerDMNNamespace, importNamespace, importName, importLocationURI, importModelName)); } else { LOGGER.error("DMN Model with name={} and namespace={} detected a collision ({} elements) trying to import a DMN with namespace={} name={} locationURI={}, modelName={}", importerDMNName, importerDMNNamespace, usingNSandName.size(), importNamespace, importName, importLocationURI, importModelName); return Either.ofLeft(String.format( "DMN Model with name=%s and namespace=%s detected a collision trying to import a DMN with %s namespace, " + "%s name and modelName %s. There are %s DMN files with the same namespace in your project. " + "Please change the DMN namespaces and make them unique to fix this issue.", importerDMNName, importerDMNNamespace, importNamespace, importName, importModelName, usingNSandName.size())); } } }
@Test void locateInNSnoModelNameWithAlias() { final Import i = makeImport("nsA", "m1", null); final List<QName> available = Arrays.asList(new QName("nsA", "m1"), new QName("nsA", "m2"), new QName("nsB", "m3")); final Either<String, QName> result = ImportDMNResolverUtil.resolveImportDMN(i, available, Function.identity()); assertThat(result.isLeft()).isTrue(); }
@Nonnull @Override public Optional<? extends INode> parse( @Nullable final String str, @Nonnull DetectionLocation detectionLocation) { if (str == null) { return Optional.empty(); } for (IMapper mapper : jcaSpecificAlgorithmMappers) { Optional<? extends INode> asset = mapper.parse(str, detectionLocation); if (asset.isPresent()) { return asset; } } return switch (str.toUpperCase().trim()) { case "PBE", "PBES2" -> Optional.of(new PasswordBasedEncryption(detectionLocation)); case "DH", "DIFFIEHELLMAN" -> Optional.of(new DH(detectionLocation)); case "RSA" -> Optional.of(new RSA(detectionLocation)); case "EC" -> Optional.of(new Algorithm(str, PublicKeyEncryption.class, detectionLocation)); default -> { final Algorithm algorithm = new Algorithm(str, Unknown.class, detectionLocation); algorithm.put(new Unknown(detectionLocation)); yield Optional.of(algorithm); } }; }
@Test void prng() { DetectionLocation testDetectionLocation = new DetectionLocation("testfile", 1, 1, List.of("test"), () -> "SSL"); JcaAlgorithmMapper jcaAlgorithmMapper = new JcaAlgorithmMapper(); Optional<? extends INode> assetOptional = jcaAlgorithmMapper.parse("SHA1PRNG", testDetectionLocation); assertThat(assetOptional).isPresent(); assertThat(assetOptional.get().is(PseudorandomNumberGenerator.class)).isTrue(); }
public void resolveAssertionConsumerService(AuthenticationRequest authenticationRequest) throws SamlValidationException { // set URL if set in authnRequest final String authnAcsURL = authenticationRequest.getAuthnRequest().getAssertionConsumerServiceURL(); if (authnAcsURL != null) { authenticationRequest.setAssertionConsumerURL(authnAcsURL); return; } // search url from metadata endpoints final Integer authnAcsIdx = authenticationRequest.getAuthnRequest().getAssertionConsumerServiceIndex(); List<Endpoint> endpoints = authenticationRequest.getConnectionEntity().getRoleDescriptors().get(0).getEndpoints(AssertionConsumerService.DEFAULT_ELEMENT_NAME); if (endpoints.isEmpty()) { throw new SamlValidationException("Authentication: Assertion Consumer Service not found in metadata"); } if (authnAcsIdx != null && endpoints.size() <= authnAcsIdx) { throw new SamlValidationException("Authentication: Assertion Consumer Index is out of bounds"); } // TODO: check if this statement is correct if (endpoints.size() == 1) { authenticationRequest.setAssertionConsumerURL(endpoints.get(0).getLocation()); return; } if(authnAcsIdx == null) { AssertionConsumerService defaultAcs = endpoints.stream() .filter(e -> e instanceof AssertionConsumerService) .map(acs -> (AssertionConsumerService) acs) .filter(IndexedEndpoint::isDefault) .findAny() .orElse(null); if (defaultAcs == null) { throw new SamlValidationException("Authentication: There is no default AssertionConsumerService"); } authenticationRequest.setAssertionConsumerURL(defaultAcs.getLocation()); return; } authenticationRequest.setAssertionConsumerURL(endpoints.get(authnAcsIdx).getLocation()); }
@Test void resolveAcsUrlWithAcsUrl() throws SamlValidationException { AuthnRequest authnRequest = OpenSAMLUtils.buildSAMLObject(AuthnRequest.class); authnRequest.setAssertionConsumerServiceURL(URL_ASSERTION_CONSUMER_SERVICE); AuthenticationRequest authenticationRequest = new AuthenticationRequest(); authenticationRequest.setAuthnRequest(authnRequest); assertionConsumerServiceUrlService.resolveAssertionConsumerService(authenticationRequest); assertEquals(URL_ASSERTION_CONSUMER_SERVICE, authenticationRequest.getAssertionConsumerURL()); }
public byte[] getKey() { return key; }
@Test public void shouldGenerateAValidAndSafeDESKey() throws Exception { DESCipherProvider desCipherProvider = new DESCipherProvider(new SystemEnvironment()); byte[] key = desCipherProvider.getKey(); assertThat(DESKeySpec.isWeak(key, 0), is(false)); }
@Override public CompletionStage<Void> setAsync(K key, V value) { return cache.putAsync(key, value); }
@Test(expected = MethodNotAvailableException.class) public void testSetAsyncWithTtl() { adapter.setAsync(42, "value", 1, TimeUnit.MILLISECONDS); }
public static DateTimeFormatter createDateTimeFormatter(String format, Mode mode) { DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder(); boolean formatContainsHourOfAMPM = false; for (Token token : tokenize(format)) { switch (token.getType()) { case DateFormat.TEXT: builder.appendLiteral(token.getText()); break; case DateFormat.DD: builder.appendValue(DAY_OF_MONTH, mode.getMinTwoPositionFieldWidth(), 2, NOT_NEGATIVE); break; case DateFormat.HH24: builder.appendValue(HOUR_OF_DAY, mode.getMinTwoPositionFieldWidth(), 2, NOT_NEGATIVE); break; case DateFormat.HH: builder.appendValue(HOUR_OF_AMPM, mode.getMinTwoPositionFieldWidth(), 2, NOT_NEGATIVE); formatContainsHourOfAMPM = true; break; case DateFormat.MI: builder.appendValue(MINUTE_OF_HOUR, mode.getMinTwoPositionFieldWidth(), 2, NOT_NEGATIVE); break; case DateFormat.MM: builder.appendValue(MONTH_OF_YEAR, mode.getMinTwoPositionFieldWidth(), 2, NOT_NEGATIVE); break; case DateFormat.SS: builder.appendValue(SECOND_OF_MINUTE, mode.getMinTwoPositionFieldWidth(), 2, NOT_NEGATIVE); break; case DateFormat.YY: builder.appendValueReduced(YEAR, 2, 2, 2000); break; case DateFormat.YYYY: builder.appendValue(YEAR, 4); break; case DateFormat.UNRECOGNIZED: default: throw new PrestoException( StandardErrorCode.INVALID_FUNCTION_ARGUMENT, String.format("Failed to tokenize string [%s] at offset [%d]", token.getText(), token.getCharPositionInLine())); } } try { // Append default values(0) for time fields(HH24, HH, MI, SS) because JSR-310 does not accept bare Date value as DateTime if (formatContainsHourOfAMPM) { // At the moment format does not allow to include AM/PM token, thus it was never possible to specify PM hours using 'HH' token in format // Keep existing behaviour by defaulting to 0(AM) for AMPM_OF_DAY if format string contains 'HH' builder.parseDefaulting(HOUR_OF_AMPM, 0) .parseDefaulting(AMPM_OF_DAY, 0); } else { builder.parseDefaulting(HOUR_OF_DAY, 0); } return builder.parseDefaulting(MINUTE_OF_HOUR, 0) .parseDefaulting(SECOND_OF_MINUTE, 0) .toFormatter(); } catch (UnsupportedOperationException e) { throw new PrestoException(INVALID_FUNCTION_ARGUMENT, e); } }
@Test public void testCreateDateTimeFormatter() { DateTimeFormatter formatter = DateFormatParser.createDateTimeFormatter("yyyy/mm/dd", FORMATTER); assertEquals(formatter.format(LocalDateTime.of(1988, 4, 8, 0, 0)), "1988/04/08"); }
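A note on the defaulting logic above: because the builder backfills the missing time fields, a bare date string parses into a full LocalDateTime at midnight. A minimal standalone sketch (plain java.time, hypothetical class name, not part of the dataset pair) mirroring that behavior:

public class DateDefaultsDemo {
    public static void main(String[] args) {
        // parseDefaulting supplies HOUR_OF_DAY/MINUTE_OF_HOUR/SECOND_OF_MINUTE,
        // so LocalDateTime.parse succeeds even though the pattern has no time tokens.
        java.time.format.DateTimeFormatter formatter = new java.time.format.DateTimeFormatterBuilder()
                .appendPattern("yyyy/MM/dd")
                .parseDefaulting(java.time.temporal.ChronoField.HOUR_OF_DAY, 0)
                .parseDefaulting(java.time.temporal.ChronoField.MINUTE_OF_HOUR, 0)
                .parseDefaulting(java.time.temporal.ChronoField.SECOND_OF_MINUTE, 0)
                .toFormatter();
        System.out.println(java.time.LocalDateTime.parse("1988/04/08", formatter)); // 1988-04-08T00:00
    }
}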
public synchronized void setSamples(final long samples) { if (samples < 0) { this.samples = 0; } else { this.samples = samples; } }
@Test public void testSetSamples() throws Throwable { MeanStatistic stat = tenFromOne.copy(); stat.setSamples(10); Assertions.assertThat(stat) .isEqualTo(tenFromTen); }
public T fromInstance(T instance) throws IOException { return fromJson(toJson(instance)); }
@Test public void testCloneViaJson() throws Throwable { KeyVal unmarshalled = serDeser.fromInstance(source); assertEquals(source, unmarshalled); }
protected void insertModelAfter(EpoxyModel<?> modelToInsert, EpoxyModel<?> modelToInsertAfter) { int modelIndex = getModelPosition(modelToInsertAfter); if (modelIndex == -1) { throw new IllegalStateException("Model is not added: " + modelToInsertAfter); } int targetIndex = modelIndex + 1; pauseModelListNotifications(); models.add(targetIndex, modelToInsert); resumeModelListNotifications(); notifyItemInserted(targetIndex); }
@Test(expected = IllegalStateException.class) public void testInsertModelAfterThrowsForInvalidModel() { testAdapter.insertModelAfter(new TestModel(), new TestModel()); }
@SuppressWarnings("unchecked") public QueryMetadataHolder handleStatement( final ServiceContext serviceContext, final Map<String, Object> configOverrides, final Map<String, Object> requestProperties, final PreparedStatement<?> statement, final Optional<Boolean> isInternalRequest, final MetricsCallbackHolder metricsCallbackHolder, final Context context, final boolean excludeTombstones ) { if (statement.getStatement() instanceof Query) { return handleQuery( serviceContext, (PreparedStatement<Query>) statement, isInternalRequest, metricsCallbackHolder, configOverrides, requestProperties, context, excludeTombstones ); } else { return QueryMetadataHolder.unhandled(); } }
@Test public void shouldRunPushQuery_error() { // Given: when(ksqlEngine.executeTransientQuery(any(), any(), anyBoolean())) .thenThrow(new RuntimeException("Error executing!")); // When: Exception e = assertThrows(RuntimeException.class, () -> queryExecutor.handleStatement( serviceContext, ImmutableMap.of(), ImmutableMap.of(), pushQuery, Optional.empty(), metricsCallbackHolder, context, false)); // Should be no metrics reported for push queries metricsCallbackHolder.reportMetrics(200, 1000L, 5000L, 20000L); // Then: assertThat(e.getMessage(), is("Error executing!")); verifyNoMoreInteractions(pullQueryExecutorMetrics); }
public int length() { return length; }
@Test public static void testLength() { for (int i = 1; i <= 10; i++) { Bitmap bitmap = new Bitmap(i * 8); assertEquals(bitmap.length(), i * 8); } }
@Override public final SSLEngine newEngine(ByteBufAllocator alloc) { SSLEngine engine = ctx.newEngine(alloc); initEngine(engine); return engine; }
@Test public void testInitEngineOnNewEngine() throws Exception { SslContext delegating = newDelegatingSslContext(); SSLEngine engine = delegating.newEngine(UnpooledByteBufAllocator.DEFAULT); assertArrayEquals(EXPECTED_PROTOCOLS, engine.getEnabledProtocols()); engine = delegating.newEngine(UnpooledByteBufAllocator.DEFAULT, "localhost", 9090); assertArrayEquals(EXPECTED_PROTOCOLS, engine.getEnabledProtocols()); }
public static String buildFromParamsMap(String paramsRule, Multimap<String, String> paramsMap) { if (paramsMap != null && !paramsMap.isEmpty()) { Multimap<String, String> criteriaMap = TreeMultimap.create(paramsMap); // Just appends sorted entries, separating them with ?. StringBuilder result = new StringBuilder(); for (Map.Entry<String, String> criteria : criteriaMap.entries()) { /* * Check that the criteria is embedded in the rule. Simply checking the word boundary with \b is not enough, as hyphens are * valid in params (according to RFC 3986) but not included in a word boundary - so the string "word-ext" matches ".*\\bword\\b.*". * We need to tweak it a bit to prevent matching when there's a - before or after the criteria we're looking for (see * https://stackoverflow.com/questions/32380375/hyphen-dash-to-be-included-in-regex-word-boundary-b) */ if (paramsRule.matches(".*(^|[^-])\\b" + criteria.getKey() + "\\b([^-]|$).*")) { result.append("?").append(criteria.getKey()).append("=").append(criteria.getValue()); } } return result.toString(); } return ""; }
@Test void testBuildFromParamsMap() { Multimap<String, String> paramsMap = ArrayListMultimap.create(); paramsMap.put("page", "1"); paramsMap.put("limit", "20"); paramsMap.put("limitation", "20"); paramsMap.put("status", "available"); // Only 1 parameter should be taken into account according to rules. String dispatchCriteria = DispatchCriteriaHelper.buildFromParamsMap("page", paramsMap); assertEquals("?page=1", dispatchCriteria); // 2 parameters should be considered and sorted according to rules. dispatchCriteria = DispatchCriteriaHelper.buildFromParamsMap("page && limit", paramsMap); assertEquals("?limit=20?page=1", dispatchCriteria); // 2 parameters should be considered and sorted according to rules with no inclusion of limit. dispatchCriteria = DispatchCriteriaHelper.buildFromParamsMap("page && limitation", paramsMap); assertEquals("?limitation=20?page=1", dispatchCriteria); }
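The hyphen-aware boundary in buildFromParamsMap is the subtle part: \b alone treats '-' as a word boundary, so "page" would match inside "page-size". A minimal standalone sketch (hypothetical class name) demonstrating the difference:

public class WordBoundaryDemo {
    public static void main(String[] args) {
        String plain = ".*\\bpage\\b.*";
        String tweaked = ".*(^|[^-])\\bpage\\b([^-]|$).*";
        System.out.println("page-size".matches(plain));       // true: false positive, '-' counts as a boundary
        System.out.println("page-size".matches(tweaked));     // false: the [^-] guards reject hyphen-adjacent hits
        System.out.println("page && limit".matches(tweaked)); // true: genuine standalone occurrence
    }
}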
@VisibleForTesting ExportResult<PhotosContainerResource> exportOneDrivePhotos( TokensAndUrlAuthData authData, Optional<IdOnlyContainerResource> albumData, Optional<PaginationData> paginationData, UUID jobId) throws IOException { Optional<String> albumId = Optional.empty(); if (albumData.isPresent()) { albumId = Optional.of(albumData.get().getId()); } Optional<String> paginationUrl = getDrivePaginationToken(paginationData); MicrosoftDriveItemsResponse driveItemsResponse; if (paginationData.isPresent() || albumData.isPresent()) { driveItemsResponse = getOrCreatePhotosInterface(authData).getDriveItems(albumId, paginationUrl); } else { driveItemsResponse = getOrCreatePhotosInterface(authData) .getDriveItemsFromSpecialFolder(MicrosoftSpecialFolder.FolderType.photos); } PaginationData nextPageData = setNextPageToken(driveItemsResponse); ContinuationData continuationData = new ContinuationData(nextPageData); PhotosContainerResource containerResource; MicrosoftDriveItem[] driveItems = driveItemsResponse.getDriveItems(); List<PhotoAlbum> albums = new ArrayList<>(); List<PhotoModel> photos = new ArrayList<>(); if (driveItems != null && driveItems.length > 0) { for (MicrosoftDriveItem driveItem : driveItems) { PhotoAlbum album = tryConvertDriveItemToPhotoAlbum(driveItem, jobId); if (album != null) { albums.add(album); continuationData.addContainerResource(new IdOnlyContainerResource(driveItem.id)); } PhotoModel photo = tryConvertDriveItemToPhotoModel(albumId, driveItem, jobId); if (photo != null) { photos.add(photo); } } } ExportResult.ResultType result = nextPageData == null ? ExportResult.ResultType.END : ExportResult.ResultType.CONTINUE; containerResource = new PhotosContainerResource(albums, photos); return new ExportResult<>(result, containerResource, continuationData); }
@Test public void exportPhotoWithoutNextPage() throws IOException { // Setup when(driveItemsResponse.getNextPageLink()).thenReturn(null); MicrosoftDriveItem photoItem = setUpSinglePhoto(IMAGE_URI, PHOTO_ID); when(driveItemsResponse.getDriveItems()).thenReturn(new MicrosoftDriveItem[] {photoItem}); when(driveItemsResponse.getNextPageLink()).thenReturn(null); StringPaginationToken inputPaginationToken = new StringPaginationToken(DRIVE_TOKEN_PREFIX + DRIVE_PAGE_URL); IdOnlyContainerResource idOnlyContainerResource = new IdOnlyContainerResource(FOLDER_ID); // Run ExportResult<PhotosContainerResource> result = microsoftPhotosExporter.exportOneDrivePhotos( null, Optional.of(idOnlyContainerResource), Optional.of(inputPaginationToken), uuid); // Verify method calls verify(photosInterface).getDriveItems(Optional.of(FOLDER_ID), Optional.of(DRIVE_PAGE_URL)); verify(driveItemsResponse).getDriveItems(); // Verify next pagination token is absent ContinuationData continuationData = result.getContinuationData(); StringPaginationToken paginationToken = (StringPaginationToken) continuationData.getPaginationData(); assertThat(paginationToken).isEqualTo(null); // Verify no albums are exported Collection<PhotoAlbum> actualAlbums = result.getExportedData().getAlbums(); assertThat(actualAlbums).isEmpty(); // Verify one photo (in an album) should be exported Collection<PhotoModel> actualPhotos = result.getExportedData().getPhotos(); assertThat(actualPhotos.stream().map(PhotoModel::getFetchableUrl).collect(Collectors.toList())) .containsExactly(IMAGE_URI); assertThat(actualPhotos.stream().map(PhotoModel::getAlbumId).collect(Collectors.toList())) .containsExactly(FOLDER_ID); assertThat(actualPhotos.stream().map(PhotoModel::getTitle).collect(Collectors.toList())) .containsExactly(FILENAME); // Verify there are no containers ready for sub-processing List<ContainerResource> actualResources = continuationData.getContainerResources(); assertThat(actualResources).isEmpty(); }
public double findPutMessageEntireTimePX(double px) { Map<Long, LongAdder> lastBuckets = this.lastBuckets; long start = System.currentTimeMillis(); double result = 0.0; long totalRequest = lastBuckets.values().stream().mapToLong(LongAdder::longValue).sum(); long pxIndex = (long) (totalRequest * px); long passCount = 0; List<Long> bucketValue = new ArrayList<>(lastBuckets.keySet()); for (int i = 0; i < bucketValue.size(); i++) { long count = lastBuckets.get(bucketValue.get(i)).longValue(); if (pxIndex <= passCount + count) { long relativeIndex = pxIndex - passCount; if (i == 0) { result = count == 0 ? 0 : bucketValue.get(i) * relativeIndex / (double)count; } else { long lastBucket = bucketValue.get(i - 1); result = lastBucket + (count == 0 ? 0 : (bucketValue.get(i) - lastBucket) * relativeIndex / (double)count); } break; } else { passCount += count; } } log.info("findPutMessageEntireTimePX {}={}ms cost {}ms", px, String.format("%.2f", result), System.currentTimeMillis() - start); return result; }
@Test public void findPutMessageEntireTimePXTest() throws InvocationTargetException, NoSuchMethodException, IllegalAccessException { final StoreStatsService storeStatsService = new StoreStatsService(); for (int i = 1; i <= 1000; i++) { for (int j = 0; j < i; j++) { storeStatsService.incPutMessageEntireTime(i); } } Method method = StoreStatsService.class.getDeclaredMethod("resetPutMessageTimeBuckets"); method.setAccessible(true); method.invoke(storeStatsService); }
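Since the test above exercises the buckets without asserting a percentile value, a worked example (hypothetical bucket contents) of the interpolation in findPutMessageEntireTimePX may help:

public class PercentileBucketDemo {
    public static void main(String[] args) {
        // Buckets map an upper bound in ms to a request count: {10ms: 100, 50ms: 200, 100ms: 700}.
        long totalRequest = 100 + 200 + 700;        // 1000 requests
        long pxIndex = (long) (totalRequest * 0.5); // p50 -> the 500th request
        long passCount = 100 + 200;                 // the first two buckets cover 300 requests
        long relativeIndex = pxIndex - passCount;   // 200 requests into the 50-100ms bucket
        double result = 50 + (100 - 50) * relativeIndex / 700.0;
        System.out.println(result); // ~64.29 ms
    }
}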
@Override public KeyValueSegment getOrCreateSegmentIfLive(final long segmentId, final ProcessorContext context, final long streamTime) { final KeyValueSegment segment = super.getOrCreateSegmentIfLive(segmentId, context, streamTime); cleanupExpiredSegments(streamTime); return segment; }
@Test public void shouldCleanupSegmentsThatHaveExpired() { final KeyValueSegment segment1 = segments.getOrCreateSegmentIfLive(0, context, -1L); final KeyValueSegment segment2 = segments.getOrCreateSegmentIfLive(1, context, -1L); final KeyValueSegment segment3 = segments.getOrCreateSegmentIfLive(7, context, SEGMENT_INTERVAL * 7L); assertFalse(segment1.isOpen()); assertFalse(segment2.isOpen()); assertTrue(segment3.isOpen()); assertFalse(new File(context.stateDir(), "test/test.0").exists()); assertFalse(new File(context.stateDir(), "test/test." + SEGMENT_INTERVAL).exists()); assertTrue(new File(context.stateDir(), "test/test." + 7 * SEGMENT_INTERVAL).exists()); }
@Override public InputStream read(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException { try { final StoregateApiClient client = session.getClient(); final HttpUriRequest request = new HttpGet(String.format("%s/v4.2/download/files/%s?stream=true", client.getBasePath(), fileid.getFileId(file))); if(status.isAppend()) { final HttpRange range = HttpRange.withStatus(status); final String header; if(TransferStatus.UNKNOWN_LENGTH == range.getEnd()) { header = String.format("bytes=%d-", range.getStart()); } else { header = String.format("bytes=%d-%d", range.getStart(), range.getEnd()); } if(log.isDebugEnabled()) { log.debug(String.format("Add range header %s for file %s", header, file)); } request.addHeader(new BasicHeader(HttpHeaders.RANGE, header)); // Disable compression request.addHeader(new BasicHeader(HttpHeaders.ACCEPT_ENCODING, "identity")); } final HttpResponse response = client.getClient().execute(request); switch(response.getStatusLine().getStatusCode()) { case HttpStatus.SC_OK: case HttpStatus.SC_PARTIAL_CONTENT: return new HttpMethodReleaseInputStream(response); case HttpStatus.SC_NOT_FOUND: fileid.cache(file, null); // Break through default: throw new DefaultHttpResponseExceptionMappingService().map("Download {0} failed", new HttpResponseException( response.getStatusLine().getStatusCode(), response.getStatusLine().getReasonPhrase()), file); } } catch(IOException e) { throw new DefaultIOExceptionMappingService().map("Download {0} failed", e, file); } }
@Test(expected = NotfoundException.class) public void testReadNotFound() throws Exception { final TransferStatus status = new TransferStatus(); final StoregateIdProvider nodeid = new StoregateIdProvider(session); final Path room = new StoregateDirectoryFeature(session, nodeid).mkdir( new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory, Path.Type.volume)), new TransferStatus()); try { new StoregateReadFeature(session, nodeid).read(new Path(room, "nosuchname", EnumSet.of(Path.Type.file)), status, new DisabledConnectionCallback()); } finally { new StoregateDeleteFeature(session, nodeid).delete(Collections.singletonList(room), new DisabledLoginCallback(), new Delete.DisabledCallback()); } }
@Operation(summary = "Update single session") @PutMapping(value = "/iapi/saml/ad_sessions/{id}", produces = "application/json") @ResponseBody public AdSession update(@PathVariable("id") String id, @RequestBody Map<String, Object> body) throws AdException, AdValidationException { AdSession adSession = adService.getAdSession(id); return adService.updateAdSession(adSession, body); }
@Test public void updateSession() throws AdException, AdValidationException { AdSession adSession2 = new AdSession(); adSession2.setAuthenticationLevel(20); HashMap<String, Object> body = new HashMap<>(); body.put("authentication_level", 20); body.put("authentication_status", "anyString"); body.put("bsn", "anyString"); when(adServiceMock.getAdSession(anyString())).thenReturn(adSession); when(adServiceMock.updateAdSession(any(AdSession.class), any(HashMap.class))).thenReturn(adSession2); AdSession result = sessionController.update("httpSessionId", body); verify(adServiceMock, times(1)).getAdSession(anyString()); assertEquals(result.getAuthenticationLevel(), adSession2.getAuthenticationLevel()); }
public static Rectangle getExtent(OGCGeometry ogcGeometry) { return getExtent(ogcGeometry, 0.0); }
@Test public void testGetExtent() { assertGetExtent( "POINT (-23.4 12.2)", new Rectangle(-23.4, 12.2, -23.4, 12.2)); assertGetExtent( "LINESTRING (-75.9375 23.6359, -75.9375 23.6364)", new Rectangle(-75.9375, 23.6359, -75.9375, 23.6364)); assertGetExtent( "GEOMETRYCOLLECTION (" + " LINESTRING (-75.9375 23.6359, -75.9375 23.6364)," + " MULTIPOLYGON (((-75.9375 23.45520, -75.9371 23.4554, -75.9375 23.46023325, -75.9375 23.45520)))" + ")", new Rectangle(-75.9375, 23.4552, -75.9371, 23.6364)); }
@Override public void onTaskFinished(TaskAttachment attachment) { if (attachment instanceof BrokerPendingTaskAttachment) { onPendingTaskFinished((BrokerPendingTaskAttachment) attachment); } else if (attachment instanceof BrokerLoadingTaskAttachment) { onLoadingTaskFinished((BrokerLoadingTaskAttachment) attachment); } }
@Test public void testLoadingTaskOnFinishedWithErrorNum(@Injectable BrokerLoadingTaskAttachment attachment1, @Injectable BrokerLoadingTaskAttachment attachment2, @Injectable LoadTask loadTask1, @Injectable LoadTask loadTask2, @Mocked GlobalStateMgr globalStateMgr) { BrokerLoadJob brokerLoadJob = new BrokerLoadJob(); Deencapsulation.setField(brokerLoadJob, "state", JobState.LOADING); Map<Long, LoadTask> idToTasks = Maps.newHashMap(); idToTasks.put(1L, loadTask1); idToTasks.put(2L, loadTask2); Deencapsulation.setField(brokerLoadJob, "idToTasks", idToTasks); new Expectations() { { attachment1.getCounter(BrokerLoadJob.DPP_NORMAL_ALL); minTimes = 0; result = 10; attachment2.getCounter(BrokerLoadJob.DPP_NORMAL_ALL); minTimes = 0; result = 20; attachment1.getCounter(BrokerLoadJob.DPP_ABNORMAL_ALL); minTimes = 0; result = 1; attachment2.getCounter(BrokerLoadJob.DPP_ABNORMAL_ALL); minTimes = 0; result = 2; attachment1.getTaskId(); minTimes = 0; result = 1L; attachment2.getTaskId(); minTimes = 0; result = 2L; } }; brokerLoadJob.onTaskFinished(attachment1); brokerLoadJob.onTaskFinished(attachment2); Set<Long> finishedTaskIds = Deencapsulation.getField(brokerLoadJob, "finishedTaskIds"); Assert.assertEquals(2, finishedTaskIds.size()); EtlStatus loadingStatus = Deencapsulation.getField(brokerLoadJob, "loadingStatus"); Assert.assertEquals("30", loadingStatus.getCounters().get(BrokerLoadJob.DPP_NORMAL_ALL)); Assert.assertEquals("3", loadingStatus.getCounters().get(BrokerLoadJob.DPP_ABNORMAL_ALL)); int progress = Deencapsulation.getField(brokerLoadJob, "progress"); Assert.assertEquals(99, progress); Assert.assertEquals(JobState.CANCELLED, Deencapsulation.getField(brokerLoadJob, "state")); }
public static void onNewIntent(Object activity, Intent intent) { if (!isTrackPushEnabled()) return; try { if (activity instanceof Activity) { PushProcess.getInstance().onNotificationClick((Activity) activity, intent); SALog.i(TAG, "onNewIntent"); } } catch (Exception e) { SALog.printStackTrace(e); } }
@Test public void onNewIntent() { SchemeActivity activity = Robolectric.setupActivity(SchemeActivity.class); PushAutoTrackHelper.onNewIntent(activity, activity.getIntent()); }
public boolean isRetryAnotherBrokerWhenNotStoreOK() { return retryAnotherBrokerWhenNotStoreOK; }
@Test public void assertIsRetryAnotherBrokerWhenNotStoreOK() { assertFalse(producer.isRetryAnotherBrokerWhenNotStoreOK()); }
@Operation(summary = "queryTaskListPaging", description = "QUERY_TASK_INSTANCE_LIST_PAGING_NOTES") @Parameters({ @Parameter(name = "processInstanceId", description = "PROCESS_INSTANCE_ID", schema = @Schema(implementation = int.class, example = "100")), @Parameter(name = "processInstanceName", description = "PROCESS_INSTANCE_NAME", schema = @Schema(implementation = String.class)), @Parameter(name = "searchVal", description = "SEARCH_VAL", schema = @Schema(implementation = String.class)), @Parameter(name = "taskName", description = "TASK_NAME", schema = @Schema(implementation = String.class)), @Parameter(name = "taskCode", description = "TASK_CODE", schema = @Schema(implementation = Long.class)), @Parameter(name = "executorName", description = "EXECUTOR_NAME", schema = @Schema(implementation = String.class)), @Parameter(name = "stateType", description = "EXECUTION_STATUS", schema = @Schema(implementation = TaskExecutionStatus.class)), @Parameter(name = "host", description = "HOST", schema = @Schema(implementation = String.class)), @Parameter(name = "startDate", description = "START_DATE", schema = @Schema(implementation = String.class)), @Parameter(name = "endDate", description = "END_DATE", schema = @Schema(implementation = String.class)), @Parameter(name = "taskExecuteType", description = "TASK_EXECUTE_TYPE", schema = @Schema(implementation = TaskExecuteType.class, example = "STREAM")), @Parameter(name = "pageNo", description = "PAGE_NO", required = true, schema = @Schema(implementation = int.class, example = "1")), @Parameter(name = "pageSize", description = "PAGE_SIZE", required = true, schema = @Schema(implementation = int.class, example = "20")), }) @GetMapping() @ResponseStatus(HttpStatus.OK) @ApiException(QUERY_TASK_LIST_PAGING_ERROR) public Result queryTaskListPaging(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @RequestParam(value = "processInstanceId", required = false, defaultValue = "0") Integer processInstanceId, @RequestParam(value = "processInstanceName", required = false) String processInstanceName, @RequestParam(value = "processDefinitionName", required = false) String processDefinitionName, @RequestParam(value = "searchVal", required = false) String searchVal, @RequestParam(value = "taskName", required = false) String taskName, @RequestParam(value = "taskCode", required = false) Long taskCode, @RequestParam(value = "executorName", required = false) String executorName, @RequestParam(value = "stateType", required = false) TaskExecutionStatus stateType, @RequestParam(value = "host", required = false) String host, @RequestParam(value = "startDate", required = false) String startTime, @RequestParam(value = "endDate", required = false) String endTime, @RequestParam(value = "taskExecuteType", required = false, defaultValue = "BATCH") TaskExecuteType taskExecuteType, @RequestParam("pageNo") Integer pageNo, @RequestParam("pageSize") Integer pageSize) { checkPageParams(pageNo, pageSize); searchVal = ParameterUtils.handleEscapes(searchVal); return taskInstanceService.queryTaskListPaging( loginUser, projectCode, processInstanceId, processInstanceName, processDefinitionName, taskName, taskCode, executorName, startTime, endTime, searchVal, stateType, host, taskExecuteType, pageNo, pageSize); }
@Test public void testQueryTaskListPaging() { Result result = new Result(); Integer pageNo = 1; Integer pageSize = 20; PageInfo pageInfo = new PageInfo<TaskInstance>(pageNo, pageSize); result.setData(pageInfo); result.setCode(Status.SUCCESS.getCode()); result.setMsg(Status.SUCCESS.getMsg()); when(taskInstanceService.queryTaskListPaging(any(), eq(1L), eq(1), eq(""), eq(""), eq(""), any(), eq(""), any(), any(), eq(""), Mockito.any(), eq("192.168.xx.xx"), eq(TaskExecuteType.BATCH), any(), any())) .thenReturn(result); Result taskResult = taskInstanceController.queryTaskListPaging(null, 1L, 1, "", "", "", "", 1L, "", TaskExecutionStatus.SUCCESS, "192.168.xx.xx", "2020-01-01 00:00:00", "2020-01-02 00:00:00", TaskExecuteType.BATCH, pageNo, pageSize); Assertions.assertEquals(Integer.valueOf(Status.SUCCESS.getCode()), taskResult.getCode()); }
public static BIP38PrivateKey fromBase58(Network network, String base58) throws AddressFormatException { byte[] versionAndDataBytes = Base58.decodeChecked(base58); int version = versionAndDataBytes[0] & 0xFF; byte[] bytes = Arrays.copyOfRange(versionAndDataBytes, 1, versionAndDataBytes.length); if (version != 0x01) throw new AddressFormatException.InvalidPrefix("Mismatched version number: " + version); if (bytes.length != 38) throw new AddressFormatException.InvalidDataLength("Wrong number of bytes: " + bytes.length); boolean hasLotAndSequence = (bytes[1] & 0x04) != 0; // bit 2 boolean compressed = (bytes[1] & 0x20) != 0; // bit 5 if ((bytes[1] & 0x01) != 0) // bit 0 throw new AddressFormatException("Bit 0x01 reserved for future use."); if ((bytes[1] & 0x02) != 0) // bit 1 throw new AddressFormatException("Bit 0x02 reserved for future use."); if ((bytes[1] & 0x08) != 0) // bit 3 throw new AddressFormatException("Bit 0x08 reserved for future use."); if ((bytes[1] & 0x10) != 0) // bit 4 throw new AddressFormatException("Bit 0x10 reserved for future use."); final int byte0 = bytes[0] & 0xff; final boolean ecMultiply; if (byte0 == 0x42) { // Non-EC-multiplied key if ((bytes[1] & 0xc0) != 0xc0) // bits 6+7 throw new AddressFormatException("Bits 0x40 and 0x80 must be set for non-EC-multiplied keys."); ecMultiply = false; if (hasLotAndSequence) throw new AddressFormatException("Non-EC-multiplied keys cannot have lot/sequence."); } else if (byte0 == 0x43) { // EC-multiplied key if ((bytes[1] & 0xc0) != 0x00) // bits 6+7 throw new AddressFormatException("Bits 0x40 and 0x80 must be cleared for EC-multiplied keys."); ecMultiply = true; } else { throw new AddressFormatException("Second byte must by 0x42 or 0x43."); } byte[] addressHash = Arrays.copyOfRange(bytes, 2, 6); byte[] content = Arrays.copyOfRange(bytes, 6, 38); return new BIP38PrivateKey(network, bytes, ecMultiply, compressed, hasLotAndSequence, addressHash, content); }
@Test(expected = AddressFormatException.InvalidDataLength.class) public void fromBase58_invalidLength() { String base58 = Base58.encodeChecked(1, new byte[16]); BIP38PrivateKey.fromBase58((Network) null, base58); }
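As a standalone illustration of the flag-byte checks in fromBase58 (values hypothetical, class name invented for the sketch):

public class Bip38FlagByteDemo {
    public static void main(String[] args) {
        byte flagByte = (byte) 0xE0; // 0xC0 (bits 6+7, required for non-EC-multiplied keys) | 0x20 (compressed)
        System.out.println((flagByte & 0x04) != 0);    // false: no lot/sequence (bit 2)
        System.out.println((flagByte & 0x20) != 0);    // true: compressed key (bit 5)
        System.out.println((flagByte & 0xC0) == 0xC0); // true: valid for a 0x42 (non-EC-multiplied) key
    }
}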
boolean tearDown() { return future.cancel(true); }
@Test @SuppressWarnings("unchecked") public void tearDownTriggersCancellation() throws Exception { when(mockExecutorService.scheduleAtFixedRate(any(Runnable.class), eq(0L), eq(1L), eq(TimeUnit.SECONDS))). thenReturn(mockFuture); when(mockFuture.cancel(true)).thenReturn(true); AsyncHealthCheckDecorator asyncDecorator = new AsyncHealthCheckDecorator(new DefaultAsyncHealthCheck(), mockExecutorService); asyncDecorator.tearDown(); verify(mockExecutorService, times(1)).scheduleAtFixedRate(any(Runnable.class), eq(0L), eq(1L), eq(TimeUnit.SECONDS)); verify(mockFuture, times(1)).cancel(eq(true)); }
public static String formatBytes(long bytes) { if (bytes < 0) { return String.valueOf(bytes); } double asDouble = (double) bytes; int ordinal = (int) Math.floor(Math.log(asDouble) / Math.log(1024.0)); double scale = Math.pow(1024.0, ordinal); double scaled = asDouble / scale; String formatted = TWO_DIGIT_FORMAT.format(scaled); try { return formatted + " " + BYTE_SCALE_SUFFIXES[ordinal]; } catch (IndexOutOfBoundsException e) { //huge number? return String.valueOf(asDouble); } }
@Test public void testFormatBytes() { assertEquals("-1", formatBytes(-1)); assertEquals("1023 B", formatBytes(1023)); assertEquals("1 KB", formatBytes(1024)); assertEquals("1024 KB", formatBytes((1024 * 1024) - 1)); assertEquals("1 MB", formatBytes(1024 * 1024)); assertEquals("1.1 MB", formatBytes((long) (1.1 * 1024 * 1024))); assertEquals("10 MB", formatBytes(10 * 1024 * 1024)); }
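The "1024 KB" expectation for (1024 * 1024) - 1 follows directly from the floored logarithm in formatBytes; a minimal sketch (hypothetical class name) of that edge case:

public class FormatBytesEdgeCaseDemo {
    public static void main(String[] args) {
        double asDouble = (1024.0 * 1024.0) - 1;
        int ordinal = (int) Math.floor(Math.log(asDouble) / Math.log(1024.0)); // 1, just below 2
        double scaled = asDouble / Math.pow(1024.0, ordinal);
        System.out.println(ordinal + " " + scaled); // 1 1023.999..., which formats as "1024 KB"
    }
}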
public void processIssuesByBatch(DbSession dbSession, Set<String> issueKeysSnapshot, Consumer<List<IssueDto>> listConsumer, Predicate<? super IssueDto> filter) { boolean hasMoreIssues = !issueKeysSnapshot.isEmpty(); long offset = 0; List<IssueDto> issueDtos = new ArrayList<>(); while (hasMoreIssues) { Set<String> page = paginate(issueKeysSnapshot, offset); List<IssueDto> nextOpenIssues = nextOpenIssues(dbSession, page) .stream() .filter(filter) .toList(); issueDtos.addAll(nextOpenIssues); offset += page.size(); hasMoreIssues = offset < issueKeysSnapshot.size(); } listConsumer.accept(issueDtos); }
@Test public void processIssuesByBatch_givenNoIssuesReturnedByDatabase_noIssuesConsumed() { var pullActionIssuesRetriever = new PullActionIssuesRetriever(dbClient, queryParams); when(issueDao.selectByBranch(any(), any(), any())) .thenReturn(List.of()); List<IssueDto> returnedDtos = new ArrayList<>(); Consumer<List<IssueDto>> listConsumer = returnedDtos::addAll; pullActionIssuesRetriever.processIssuesByBatch(dbClient.openSession(true), Set.of(), listConsumer, issueDto -> true); assertThat(returnedDtos).isEmpty(); }
@Override public FlinkPod decorateFlinkPod(FlinkPod flinkPod) { final Pod mountedPod = decoratePod(flinkPod.getPodWithoutMainContainer()); final Container mountedMainContainer = new ContainerBuilder(flinkPod.getMainContainer()) .addNewVolumeMount() .withName(FLINK_CONF_VOLUME) .withMountPath(kubernetesComponentConf.getFlinkConfDirInPod()) .endVolumeMount() .build(); return new FlinkPod.Builder(flinkPod) .withPod(mountedPod) .withMainContainer(mountedMainContainer) .build(); }
@Test void testDecoratedFlinkContainer() { final Container resultMainContainer = flinkConfMountDecorator.decorateFlinkPod(baseFlinkPod).getMainContainer(); assertThat(resultMainContainer.getVolumeMounts()).hasSize(1); final VolumeMount volumeMount = resultMainContainer.getVolumeMounts().get(0); assertThat(volumeMount.getName()).isEqualTo(Constants.FLINK_CONF_VOLUME); assertThat(volumeMount.getMountPath()).isEqualTo(FLINK_CONF_DIR_IN_POD); }
public static Slice unscaledDecimal() { return Slices.allocate(UNSCALED_DECIMAL_128_SLICE_LENGTH); }
@Test public void testRescaleOverflows() { assertRescaleOverflows(unscaledDecimal(1), 38); }
@VisibleForTesting public Supplier<PageProjection> compileProjection( SqlFunctionProperties sqlFunctionProperties, RowExpression projection, Optional<String> classNameSuffix) { return compileProjection(sqlFunctionProperties, emptyMap(), projection, classNameSuffix); }
@Test public void testCache() { PageFunctionCompiler cacheCompiler = new PageFunctionCompiler(createTestMetadataManager(), 100); assertSame( cacheCompiler.compileProjection(SESSION.getSqlFunctionProperties(), ADD_10_EXPRESSION, Optional.empty()), cacheCompiler.compileProjection(SESSION.getSqlFunctionProperties(), ADD_10_EXPRESSION, Optional.empty())); assertSame( cacheCompiler.compileProjection(SESSION.getSqlFunctionProperties(), ADD_10_EXPRESSION, Optional.of("hint")), cacheCompiler.compileProjection(SESSION.getSqlFunctionProperties(), ADD_10_EXPRESSION, Optional.of("hint"))); assertSame( cacheCompiler.compileProjection(SESSION.getSqlFunctionProperties(), ADD_10_EXPRESSION, Optional.of("hint")), cacheCompiler.compileProjection(SESSION.getSqlFunctionProperties(), ADD_10_EXPRESSION, Optional.of("hint2"))); assertSame( cacheCompiler.compileProjection(SESSION.getSqlFunctionProperties(), ADD_10_EXPRESSION, Optional.empty()), cacheCompiler.compileProjection(SESSION.getSqlFunctionProperties(), ADD_10_EXPRESSION, Optional.of("hint2"))); PageFunctionCompiler noCacheCompiler = new PageFunctionCompiler(createTestMetadataManager(), 0); assertNotSame( noCacheCompiler.compileProjection(SESSION.getSqlFunctionProperties(), ADD_10_EXPRESSION, Optional.empty()), noCacheCompiler.compileProjection(SESSION.getSqlFunctionProperties(), ADD_10_EXPRESSION, Optional.empty())); assertNotSame( noCacheCompiler.compileProjection(SESSION.getSqlFunctionProperties(), ADD_10_EXPRESSION, Optional.of("hint")), noCacheCompiler.compileProjection(SESSION.getSqlFunctionProperties(), ADD_10_EXPRESSION, Optional.of("hint"))); assertNotSame( noCacheCompiler.compileProjection(SESSION.getSqlFunctionProperties(), ADD_10_EXPRESSION, Optional.of("hint")), noCacheCompiler.compileProjection(SESSION.getSqlFunctionProperties(), ADD_10_EXPRESSION, Optional.of("hint2"))); assertNotSame( noCacheCompiler.compileProjection(SESSION.getSqlFunctionProperties(), ADD_10_EXPRESSION, Optional.empty()), noCacheCompiler.compileProjection(SESSION.getSqlFunctionProperties(), ADD_10_EXPRESSION, Optional.of("hint2"))); }
public static UserInfo map(SecurityContext context) { Authentication authentication = context.getAuthentication(); if (authentication instanceof JwtAuthenticationToken jwtToken) { Jwt jwt = jwtToken.getToken(); String[] microcksGroups = new String[] {}; if (jwt.hasClaim(MICROCKS_GROUPS_TOKEN_CLAIM)) { microcksGroups = jwt.getClaimAsStringList(MICROCKS_GROUPS_TOKEN_CLAIM).toArray(String[]::new); } // Create and return UserInfo. UserInfo userInfo = new UserInfo(jwt.getClaimAsString(NAME), jwt.getClaimAsString(PREFERRED_USERNAME), jwt.getClaimAsString(GIVEN_NAME), jwt.getClaimAsString(FAMILY_NAME), jwt.getClaimAsString(EMAIL), authentication.getAuthorities().stream() .map(grantedAuthority -> grantedAuthority.getAuthority().replace("ROLE_", "")) .toArray(String[]::new), microcksGroups); log.debug("Current user is: {}", userInfo); return userInfo; } return null; }
@Test void testMap() { // UserInfo mapper to test. KeycloakTokenToUserInfoMapper mapper = new KeycloakTokenToUserInfoMapper(); // Prepare a Security Context. MicrocksJwtConverter converter = new MicrocksJwtConverter(); Jwt jwt = null; try { JWT parsedJwt = JWTParser.parse(jwtBearer); jwt = MicrocksJwtConverterTest.createJwt(jwtBearer, parsedJwt); } catch (Exception e) { fail("Parsing Jwt bearer should not fail"); } // Convert and assert granted authorities. JwtAuthenticationToken authenticationToken = converter.convert(jwt); SecurityContext context = new SecurityContextImpl(authenticationToken); // Execute and assert user data. UserInfo userInfo = mapper.map(context); assertEquals("Pastry Manager", userInfo.getName()); assertEquals("pastry-manager", userInfo.getUsername()); assertEquals("Pastry", userInfo.getGivenName()); assertEquals("Manager", userInfo.getFamilyName()); assertNull(userInfo.getEmail()); assertEquals(2, userInfo.getRoles().length); assertTrue(Arrays.stream(userInfo.getRoles()).toList().contains("user")); assertTrue(Arrays.stream(userInfo.getRoles()).toList().contains("manager")); assertEquals(1, userInfo.getGroups().length); assertTrue(Arrays.stream(userInfo.getGroups()).toList().contains("/microcks/manager/pastry")); }
@GetInitialRestriction public OffsetRange initialRestriction(@Element KafkaSourceDescriptor kafkaSourceDescriptor) { Map<String, Object> updatedConsumerConfig = overrideBootstrapServersConfig(consumerConfig, kafkaSourceDescriptor); TopicPartition partition = kafkaSourceDescriptor.getTopicPartition(); LOG.info("Creating Kafka consumer for initial restriction for {}", partition); try (Consumer<byte[], byte[]> offsetConsumer = consumerFactoryFn.apply(updatedConsumerConfig)) { ConsumerSpEL.evaluateAssign(offsetConsumer, ImmutableList.of(partition)); long startOffset; @Nullable Instant startReadTime = kafkaSourceDescriptor.getStartReadTime(); if (kafkaSourceDescriptor.getStartReadOffset() != null) { startOffset = kafkaSourceDescriptor.getStartReadOffset(); } else if (startReadTime != null) { startOffset = ConsumerSpEL.offsetForTime(offsetConsumer, partition, startReadTime); } else { startOffset = offsetConsumer.position(partition); } long endOffset = Long.MAX_VALUE; @Nullable Instant stopReadTime = kafkaSourceDescriptor.getStopReadTime(); if (kafkaSourceDescriptor.getStopReadOffset() != null) { endOffset = kafkaSourceDescriptor.getStopReadOffset(); } else if (stopReadTime != null) { endOffset = ConsumerSpEL.offsetForTime(offsetConsumer, partition, stopReadTime); } Lineage.getSources() .add( "kafka", ImmutableList.of( (String) updatedConsumerConfig.get(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG), MoreObjects.firstNonNull(kafkaSourceDescriptor.getTopic(), partition.topic()))); return new OffsetRange(startOffset, endOffset); } }
@Test public void testInitialRestrictionWhenHasStartOffset() throws Exception { long expectedStartOffset = 10L; consumer.setStartOffsetForTime(15L, Instant.now()); consumer.setCurrentPos(5L); OffsetRange result = dofnInstance.initialRestriction( KafkaSourceDescriptor.of( topicPartition, expectedStartOffset, null, null, null, ImmutableList.of())); assertEquals(new OffsetRange(expectedStartOffset, Long.MAX_VALUE), result); }
void resolveSelectors(EngineDiscoveryRequest request, CucumberEngineDescriptor engineDescriptor) { Predicate<String> packageFilter = buildPackageFilter(request); resolve(request, engineDescriptor, packageFilter); filter(engineDescriptor, packageFilter); pruneTree(engineDescriptor); }
@Test void resolveRequestWithClasspathResourceSelector() { DiscoverySelector resource = selectClasspathResource("io/cucumber/junit/platform/engine/single.feature"); EngineDiscoveryRequest discoveryRequest = new SelectorRequest(resource); resolver.resolveSelectors(discoveryRequest, testDescriptor); assertEquals(1, testDescriptor.getChildren().size()); }
public static <T> RetryOperator<T> of(Retry retry) { return new RetryOperator<>(retry); }
@Test public void retryOnResultUsingMono() { RetryConfig config = RetryConfig.<String>custom() .retryOnResult("retry"::equals) .waitDuration(Duration.ofMillis(10)) .maxAttempts(3).build(); Retry retry = Retry.of("testName", config); given(helloWorldService.returnHelloWorld()) .willReturn("retry") .willReturn("success"); StepVerifier.create(Mono.fromCallable(helloWorldService::returnHelloWorld) .transformDeferred(RetryOperator.of(retry))) .expectSubscription() .expectNext("success") .expectComplete() .verify(Duration.ofSeconds(1)); then(helloWorldService).should(times(2)).returnHelloWorld(); Retry.Metrics metrics = retry.getMetrics(); assertThat(metrics.getNumberOfFailedCallsWithoutRetryAttempt()).isZero(); assertThat(metrics.getNumberOfSuccessfulCallsWithRetryAttempt()).isEqualTo(1); }
public MetricDto setKey(String key) { this.kee = checkMetricKey(key); return this; }
@Test void fail_if_key_longer_than_64_characters() { String a65 = repeat("a", 65); assertThatThrownBy(() -> underTest.setKey(a65)) .isInstanceOf(IllegalArgumentException.class) .hasMessage("Metric key length (65) is longer than the maximum authorized (64). '" + a65 + "' was provided."); }
@Override public byte[] serialize() { int length; if (this.dataOffset == 0) { this.dataOffset = 5; // default header length } length = this.dataOffset << 2; byte[] payloadData = null; if (this.payload != null) { this.payload.setParent(this); payloadData = this.payload.serialize(); length += payloadData.length; } final byte[] data = new byte[length]; final ByteBuffer bb = ByteBuffer.wrap(data); bb.putShort((short) (this.sourcePort & 0xffff)); bb.putShort((short) (this.destinationPort & 0xffff)); bb.putInt(this.sequence); bb.putInt(this.acknowledge); bb.putShort((short) (this.flags | this.dataOffset << 12)); bb.putShort(this.windowSize); bb.putShort(this.checksum); bb.putShort(this.urgentPointer); if (this.dataOffset > 5) { int padding; bb.put(this.options); padding = (this.dataOffset << 2) - 20 - this.options.length; for (int i = 0; i < padding; i++) { bb.put((byte) 0); } } if (payloadData != null) { bb.put(payloadData); } if (this.parent != null && this.parent instanceof IPv4) { ((IPv4) this.parent).setProtocol(IPv4.PROTOCOL_TCP); } // compute checksum if needed if (this.checksum == 0) { bb.rewind(); int accumulation = 0; // compute pseudo header mac if (this.parent != null) { if (this.parent instanceof IPv4) { final IPv4 ipv4 = (IPv4) this.parent; accumulation += (ipv4.getSourceAddress() >> 16 & 0xffff) + (ipv4.getSourceAddress() & 0xffff); accumulation += (ipv4.getDestinationAddress() >> 16 & 0xffff) + (ipv4.getDestinationAddress() & 0xffff); accumulation += ipv4.getProtocol() & 0xff; accumulation += length & 0xffff; } else if (this.parent instanceof IPv6) { final IPv6 ipv6 = (IPv6) this.parent; final int bbLength = Ip6Address.BYTE_LENGTH * 2 // IPv6 src, dst + 2 // nextHeader (with padding) + 4; // length final ByteBuffer bbChecksum = ByteBuffer.allocate(bbLength); bbChecksum.put(ipv6.getSourceAddress()); bbChecksum.put(ipv6.getDestinationAddress()); bbChecksum.put((byte) 0); // padding bbChecksum.put(ipv6.getNextHeader()); bbChecksum.putInt(length); bbChecksum.rewind(); for (int i = 0; i < bbLength / 2; ++i) { accumulation += 0xffff & bbChecksum.getShort(); } } } for (int i = 0; i < length / 2; ++i) { accumulation += 0xffff & bb.getShort(); } // pad to an even number of shorts if (length % 2 > 0) { accumulation += (bb.get() & 0xff) << 8; } accumulation = (accumulation >> 16 & 0xffff) + (accumulation & 0xffff); this.checksum = (short) (~accumulation & 0xffff); bb.putShort(16, this.checksum); } return data; }
@Test public void testSerialize() { TCP tcp = new TCP(); tcp.setSourcePort(0x50); tcp.setDestinationPort(0x60); tcp.setSequence(0x10); tcp.setAcknowledge(0x20); tcp.setDataOffset((byte) 0x5); tcp.setFlags((short) 0x2); tcp.setWindowSize((short) 0x1000); tcp.setUrgentPointer((short) 0x1); tcp.setParent(ipv4); assertArrayEquals(bytePacketTCP4, tcp.serialize()); tcp.resetChecksum(); tcp.setParent(ipv6); assertArrayEquals(bytePacketTCP6, tcp.serialize()); }
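The checksum section of serialize() is the classic Internet one's-complement sum; a self-contained sketch (helper and class names hypothetical, not part of TCP.java) of the fold-and-invert step. Note that, like the source, it folds the carry only once; a fully general implementation folds until no carry remains.

public class OnesComplementChecksumDemo {
    // Sum 16-bit words into an int accumulator, fold the carry back in, then invert.
    static short checksum(short[] words) {
        int accumulation = 0;
        for (short w : words) {
            accumulation += w & 0xffff;
        }
        accumulation = (accumulation >> 16 & 0xffff) + (accumulation & 0xffff);
        return (short) (~accumulation & 0xffff);
    }

    public static void main(String[] args) {
        // 0xFFFF + 0xFFFF = 0x1FFFE; folding gives 0xFFFF, inverting gives 0x0000.
        System.out.println(Integer.toHexString(checksum(new short[] {(short) 0xFFFF, (short) 0xFFFF}) & 0xffff)); // 0
    }
}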
public void create(int nodes, int expectedShortcuts) { if (nodeCount >= 0) throw new IllegalStateException("CHStorage can only be created once"); if (nodes < 0) throw new IllegalStateException("CHStorage must be created with a non-negative number of nodes"); nodesCH.create((long) nodes * nodeCHEntryBytes); nodeCount = nodes; for (int node = 0; node < nodes; node++) setLastShortcut(toNodePointer(node), -1); shortcuts.create((long) expectedShortcuts * shortcutEntryBytes); }
@Test public void testLargeNodeA() { int nodeA = Integer.MAX_VALUE; RAMIntDataAccess access = new RAMIntDataAccess("", "", false, -1); access.create(1000); access.setInt(0, nodeA << 1 | 1 & PrepareEncoder.getScFwdDir()); assertTrue(access.getInt(0) < 0); assertEquals(Integer.MAX_VALUE, access.getInt(0) >>> 1); }
@Override public void execute(EventNotificationContext ctx) throws EventNotificationException { final TeamsEventNotificationConfig config = (TeamsEventNotificationConfig) ctx.notificationConfig(); LOG.debug("TeamsEventNotification backlog size in method execute is [{}]", config.backlogSize()); try { TeamsMessage teamsMessage = createTeamsMessage(ctx, config); requestClient.send(objectMapperProvider.getForTimeZone(config.timeZone()).writeValueAsString(teamsMessage), config.webhookUrl()); } catch (TemporaryEventNotificationException exp) { //scheduler needs to retry a TemporaryEventNotificationException throw exp; } catch (PermanentEventNotificationException exp) { String errorMessage = String.format(Locale.ROOT, "Error sending the TeamsEventNotification :: %s", exp.getMessage()); final Notification systemNotification = notificationService.buildNow() .addNode(nodeId.getNodeId()) .addType(Notification.Type.GENERIC) .addSeverity(Notification.Severity.URGENT) .addDetail("title", "TeamsEventNotification Failed") .addDetail("description", errorMessage); notificationService.publishIfFirst(systemNotification); throw exp; } catch (Exception exp) { throw new EventNotificationException("There was an exception triggering the TeamsEventNotification", exp); } }
@Test(expected = EventNotificationException.class)
public void executeWithInvalidWebhookUrl() throws EventNotificationException {
    givenGoodNotificationService();
    givenTeamsClientThrowsPermException();
    // when execute is called with an invalid webhook URL, we expect an event notification exception
    teamsEventNotification.execute(eventNotificationContext);
}
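execute() separates retryable from terminal failures by exception type: temporary errors are rethrown untouched so the scheduler retries, while permanent ones raise a system notification before propagating. A small sketch of that classification pattern with hypothetical exception, sender, and callback names; it is not the Graylog API itself.

// Hypothetical types sketching the temporary/permanent split used above.
class TemporaryDeliveryException extends Exception { }
class PermanentDeliveryException extends Exception { }

final class DeliverySketch {
    interface Sender {
        void send() throws Exception;
    }

    static void sendWithClassification(Sender sender, Runnable alertOnTerminalFailure)
            throws Exception {
        try {
            sender.send();
        } catch (TemporaryDeliveryException e) {
            throw e;                       // rethrow: the caller's scheduler retries
        } catch (PermanentDeliveryException e) {
            alertOnTerminalFailure.run();  // surface the failure once, then give up
            throw e;
        }
    }
}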
@Override
public boolean isFinished() {
    return finishing && outputPage == null;
}
@Test(dataProvider = "hashEnabledValues")
public void testProbeSideNulls(boolean hashEnabled) {
    DriverContext driverContext = taskContext.addPipelineContext(0, true, true, false).addDriverContext();

    // build
    OperatorContext operatorContext = driverContext.addOperatorContext(0, new PlanNodeId("test"), ValuesOperator.class.getSimpleName());
    List<Type> buildTypes = ImmutableList.of(BIGINT);
    RowPagesBuilder rowPagesBuilder = rowPagesBuilder(hashEnabled, Ints.asList(0), buildTypes);
    Operator buildOperator = new ValuesOperator(operatorContext, rowPagesBuilder
            .row(0L)
            .row(1L)
            .row(3L)
            .build());
    SetBuilderOperatorFactory setBuilderOperatorFactory = new SetBuilderOperatorFactory(
            1,
            new PlanNodeId("test"),
            buildTypes.get(0),
            0,
            rowPagesBuilder.getHashChannel(),
            10,
            new JoinCompiler(createTestMetadataManager()));
    Operator setBuilderOperator = setBuilderOperatorFactory.createOperator(driverContext);

    Driver driver = Driver.createDriver(driverContext, buildOperator, setBuilderOperator);
    while (!driver.isFinished()) {
        driver.process();
    }

    // probe
    List<Type> probeTypes = ImmutableList.of(BIGINT);
    RowPagesBuilder rowPagesBuilderProbe = rowPagesBuilder(hashEnabled, Ints.asList(0), probeTypes);
    List<Page> probeInput = rowPagesBuilderProbe
            .row(0L)
            .row((Object) null)
            .row(1L)
            .row(2L)
            .build();
    Optional<Integer> probeHashChannel = hashEnabled ? Optional.of(probeTypes.size()) : Optional.empty();
    HashSemiJoinOperatorFactory joinOperatorFactory = new HashSemiJoinOperatorFactory(
            2,
            new PlanNodeId("test"),
            setBuilderOperatorFactory.getSetProvider(),
            rowPagesBuilderProbe.getTypes(),
            0,
            probeHashChannel);

    // expected
    MaterializedResult expected = resultBuilder(driverContext.getSession(), concat(probeTypes, ImmutableList.of(BOOLEAN)))
            .row(0L, true)
            .row(null, null)
            .row(1L, true)
            .row(2L, false)
            .build();

    OperatorAssertion.assertOperatorEquals(joinOperatorFactory, driverContext, probeInput, expected, hashEnabled, ImmutableList.of(probeTypes.size()));
}
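The expected row(null, null) encodes SQL's three-valued IN semantics: a NULL probe value matches nothing, yet cannot be declared absent either, so the semi-join output is NULL rather than false. A tiny standalone sketch of that rule for a build side without nulls; the names are hypothetical and unrelated to Presto's operators.

import java.util.Set;

final class SemiJoinNullSketch {
    // Hypothetical evaluation of "probe IN (build set)" under SQL null semantics.
    static Boolean semiJoinMatch(Long probe, Set<Long> buildSide) {
        if (probe == null) {
            return null;  // unknown: NULL compares as neither true nor false
        }
        return buildSide.contains(probe);
    }

    public static void main(String[] args) {
        Set<Long> build = Set.of(0L, 1L, 3L);
        System.out.println(semiJoinMatch(0L, build));    // true
        System.out.println(semiJoinMatch(null, build));  // null
        System.out.println(semiJoinMatch(2L, build));    // false
    }
}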
protected SuppressionRules rules() {
    return rules;
}
@Test
public void addAnnotationRule() {
    final String key1 = "key1", key2 = "key2";
    final String value1 = "value1";

    Map<String, String> annotation = new HashMap<>();
    annotation.put(key1, value1);
    cfg.annotation(annotation);

    configEvent(NetworkConfigEvent.Type.CONFIG_ADDED);

    assertAfter(EVENT_MS, () -> {
        assertTrue(provider.rules().getSuppressedAnnotation().containsKey(key1));
        assertEquals(value1, provider.rules().getSuppressedAnnotation().get(key1));
        assertFalse(provider.rules().getSuppressedAnnotation().containsKey(key2));
    });
}
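The config event propagates an annotation map into the provider's suppression rules; an entity is then suppressed when its own annotations contain every configured key/value pair. A toy sketch of that matching check; the class and method names are hypothetical, not ONOS's SuppressionRules API.

import java.util.Map;

final class SuppressionSketch {
    // Hypothetical check: suppressed when every configured annotation
    // key/value pair also appears in the entity's annotations.
    static boolean isSuppressed(Map<String, String> suppressedAnnotation, Map<String, String> entityAnnotations) {
        return suppressedAnnotation.entrySet().stream()
                .allMatch(e -> e.getValue().equals(entityAnnotations.get(e.getKey())));
    }

    public static void main(String[] args) {
        Map<String, String> rule = Map.of("key1", "value1");
        System.out.println(isSuppressed(rule, Map.of("key1", "value1", "other", "x")));  // true
        System.out.println(isSuppressed(rule, Map.of("key2", "value2")));                // false
    }
}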
@WorkerThread
@Override
public Unit call() throws IOException, StreamNotFoundException, ShellNotRunningException, IllegalArgumentException {
    OutputStream outputStream;
    File destFile = null;

    switch (fileAbstraction.scheme) {
        case CONTENT:
            Objects.requireNonNull(fileAbstraction.uri);
            if (fileAbstraction.uri.getAuthority().equals(context.get().getPackageName())) {
                DocumentFile documentFile = DocumentFile.fromSingleUri(AppConfig.getInstance(), fileAbstraction.uri);
                if (documentFile != null && documentFile.exists() && documentFile.canWrite()) {
                    outputStream = contentResolver.openOutputStream(fileAbstraction.uri, "wt");
                } else {
                    destFile = FileUtils.fromContentUri(fileAbstraction.uri);
                    outputStream = openFile(destFile, context.get());
                }
            } else {
                outputStream = contentResolver.openOutputStream(fileAbstraction.uri, "wt");
            }
            break;
        case FILE:
            final HybridFileParcelable hybridFileParcelable = fileAbstraction.hybridFileParcelable;
            Objects.requireNonNull(hybridFileParcelable);

            Context context = this.context.get();
            if (context == null) {
                return null;
            }
            outputStream = openFile(hybridFileParcelable.getFile(), context);
            destFile = fileAbstraction.hybridFileParcelable.getFile();
            break;
        default:
            throw new IllegalArgumentException("The scheme for '" + fileAbstraction.scheme + "' cannot be processed!");
    }

    Objects.requireNonNull(outputStream);
    outputStream.write(dataToSave.getBytes());
    outputStream.close();

    if (cachedFile != null && cachedFile.exists() && destFile != null) {
        // cat cache content to original file and delete cache file
        ConcatenateFileCommand.INSTANCE.concatenateFile(cachedFile.getPath(), destFile.getPath());
        cachedFile.delete();
    }
    return Unit.INSTANCE;
}
@Test
public void testWriteFileOverwriting() throws IOException, StreamNotFoundException, ShellNotRunningException {
    File file = new File(Environment.getExternalStorageDirectory(), "test.txt");
    IoUtils.copy(new StringReader("Dummy test content"), new FileWriter(file), 1024);
    Uri uri = Uri.fromFile(file);
    Context ctx = ApplicationProvider.getApplicationContext();
    ContentResolver cr = ctx.getContentResolver();
    WriteTextFileCallable task = new WriteTextFileCallable(ctx, cr, new EditableFileAbstraction(ctx, uri), contents, null, false);
    task.call();
    String verify = IoUtils.readFully(new FileInputStream(file));
    assertEquals(contents, verify);
}
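The "wt" mode passed to openOutputStream requests write-plus-truncate: replacing a file with shorter text must not leave a stale tail of the old content. A plain-Java analogue of that overwrite-and-verify round trip using java.nio.file, where writeString truncates by default; this only illustrates the truncation behaviour, not the Android content-provider path.

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;

final class OverwriteSketch {
    public static void main(String[] args) throws IOException {
        Path file = Files.createTempFile("overwrite", ".txt");
        Files.writeString(file, "Dummy test content");  // longer original content

        // Files.writeString truncates before writing, mirroring "wt" mode:
        // the shorter replacement leaves no trailing bytes behind.
        Files.writeString(file, "short");

        String verify = Files.readString(file, StandardCharsets.UTF_8);
        assert "short".equals(verify);
        System.out.println(verify);
    }
}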
@Nonnull
@Override
public ProgressState call() {
    progTracker.reset();
    stateMachineStep();
    return progTracker.toProgressState();
}
@Test
public void when_doneItemOnInput_then_eventuallyDone() {
    // When
    init(singletonList(DONE_ITEM));

    // Then
    assertEquals(DONE, sst.call());
}
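call() runs a single state-machine step and reports progress, leaving it to a cooperative driver to decide whether to invoke it again; DONE is reached once the sentinel item has been consumed. A minimal sketch of that tasklet loop with a hypothetical two-state enum; it is not the actual ProgressState type used above.

import java.util.ArrayDeque;
import java.util.Deque;

final class TaskletSketch {
    enum Progress { MADE_PROGRESS, DONE }

    static final String DONE_ITEM = "DONE";
    final Deque<String> input = new ArrayDeque<>();

    // Hypothetical step: drain one item per call, report DONE on the sentinel.
    Progress call() {
        String item = input.poll();
        if (DONE_ITEM.equals(item)) {
            return Progress.DONE;
        }
        return Progress.MADE_PROGRESS;
    }

    public static void main(String[] args) {
        TaskletSketch sst = new TaskletSketch();
        sst.input.add("a");
        sst.input.add(DONE_ITEM);
        // A cooperative driver keeps stepping until the tasklet reports DONE.
        while (sst.call() != Progress.DONE) { }
        System.out.println("done");
    }
}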