Columns: focal_method (string, lengths 13 to 60.9k), test_case (string, lengths 25 to 109k)
public boolean isRegisteredUser(@Nonnull final JID user, final boolean checkRemoteDomains) { if (xmppServer.isLocal(user)) { try { getUser(user.getNode()); return true; } catch (final UserNotFoundException e) { return false; } } else if (!checkRemoteDomains) { return false; } else { // Look up in the cache using the full JID Boolean isRegistered = remoteUsersCache.get(user.toString()); if (isRegistered == null) { // Check if the bare JID of the user is cached isRegistered = remoteUsersCache.get(user.toBareJID()); if (isRegistered == null) { // No information is cached so check user identity and cache it // A disco#info is going to be sent to the bare JID of the user. This packet // is going to be handled by the remote server. final IQ iq = new IQ(IQ.Type.get); iq.setFrom(xmppServer.getServerInfo().getXMPPDomain()); iq.setTo(user.toBareJID()); iq.setChildElement("query", "http://jabber.org/protocol/disco#info"); final Semaphore completionSemaphore = new Semaphore(0); // Send the disco#info request to the remote server. final IQRouter iqRouter = xmppServer.getIQRouter(); final long timeoutInMillis = REMOTE_DISCO_INFO_TIMEOUT.getValue().toMillis(); iqRouter.addIQResultListener(iq.getID(), new IQResultListener() { @Override public void receivedAnswer(final IQ packet) { final JID from = packet.getFrom(); // Assume that the user is not a registered user Boolean isRegistered = Boolean.FALSE; // Analyze the disco result packet if (IQ.Type.result == packet.getType()) { final Element child = packet.getChildElement(); if (child != null) { for (final Iterator it = child.elementIterator("identity"); it.hasNext();) { final Element identity = (Element) it.next(); final String accountType = identity.attributeValue("type"); if ("registered".equals(accountType) || "admin".equals(accountType)) { isRegistered = Boolean.TRUE; break; } } } } // Update cache of remote registered users remoteUsersCache.put(from.toBareJID(), isRegistered); completionSemaphore.release(); } @Override public void answerTimeout(final String packetId) { Log.warn("The result from the disco#info request was never received. request: {}", iq); completionSemaphore.release(); } }, timeoutInMillis); // Send the request iqRouter.route(iq); // Wait for the response try { completionSemaphore.tryAcquire(timeoutInMillis, TimeUnit.MILLISECONDS); } catch (final InterruptedException e) { Thread.currentThread().interrupt(); Log.warn("Interrupted whilst waiting for response from remote server", e); } isRegistered = remoteUsersCache.computeIfAbsent(user.toBareJID(), ignored -> Boolean.FALSE); } } return isRegistered; } }
@Test public void isRegisteredUserTrueWillReturnFalseNoAnswer() { final boolean result = userManager.isRegisteredUser(new JID(USER_ID, REMOTE_XMPP_DOMAIN, null), true); assertThat(result, is(false)); verify(iqRouter).route(any()); }
@Override public void login(final LoginCallback prompt, final CancelCallback cancel) throws BackgroundException { authorizationService.validate(); }
@Test(expected = LoginCanceledException.class) public void testConnectMissingKey() throws Exception { session.close(); session.getHost().getCredentials().setOauth(OAuthTokens.EMPTY); session.login(new DisabledLoginCallback() { @Override public Credentials prompt(final Host bookmark, final String username, final String title, final String reason, final LoginOptions options) throws LoginCanceledException { assertEquals("OAuth2 Authentication", title); throw new LoginCanceledException(); } }, null); }
public void generate() throws IOException { packageNameByTypes.clear(); generatePackageInfo(); generateTypeStubs(); generateMessageHeaderStub(); for (final List<Token> tokens : ir.messages()) { final Token msgToken = tokens.get(0); final List<Token> messageBody = getMessageBody(tokens); final boolean hasVarData = -1 != findSignal(messageBody, Signal.BEGIN_VAR_DATA); int i = 0; final List<Token> fields = new ArrayList<>(); i = collectFields(messageBody, i, fields); final List<Token> groups = new ArrayList<>(); i = collectGroups(messageBody, i, groups); final List<Token> varData = new ArrayList<>(); collectVarData(messageBody, i, varData); final String decoderClassName = formatClassName(decoderName(msgToken.name())); final String decoderStateClassName = decoderClassName + "#CodecStates"; final FieldPrecedenceModel decoderPrecedenceModel = precedenceChecks.createDecoderModel( decoderStateClassName, tokens); generateDecoder(decoderClassName, msgToken, fields, groups, varData, hasVarData, decoderPrecedenceModel); final String encoderClassName = formatClassName(encoderName(msgToken.name())); final String encoderStateClassName = encoderClassName + "#CodecStates"; final FieldPrecedenceModel encoderPrecedenceModel = precedenceChecks.createEncoderModel( encoderStateClassName, tokens); generateEncoder(encoderClassName, msgToken, fields, groups, varData, hasVarData, encoderPrecedenceModel); } }
@Test void shouldGeneratePutCharSequence() throws Exception { final UnsafeBuffer buffer = new UnsafeBuffer(new byte[4096]); generator().generate(); final Object encoder = wrap(buffer, compileCarEncoder().getConstructor().newInstance()); final Object decoder = getCarDecoder(buffer, encoder); set(encoder, "vehicleCode", CharSequence.class, "R11"); assertThat(get(decoder, "vehicleCode"), is("R11")); set(encoder, "vehicleCode", CharSequence.class, ""); assertThat(get(decoder, "vehicleCode"), is("")); set(encoder, "vehicleCode", CharSequence.class, "R11R12"); assertThat(get(decoder, "vehicleCode"), is("R11R12")); }
List<DataflowPackage> stageClasspathElements( Collection<StagedFile> classpathElements, String stagingPath, CreateOptions createOptions) { return stageClasspathElements(classpathElements, stagingPath, DEFAULT_SLEEPER, createOptions); }
@Test public void testPackageUploadWithExplicitPackageName() throws Exception { Pipe pipe = Pipe.open(); File tmpFile = makeFileWithContents("file.txt", "This is a test!"); final String overriddenName = "alias.txt"; when(mockGcsUtil.getObjects(anyListOf(GcsPath.class))) .thenReturn( ImmutableList.of( StorageObjectOrIOException.create(new FileNotFoundException("some/path")))); when(mockGcsUtil.create(any(GcsPath.class), any(GcsUtil.CreateOptions.class))) .thenReturn(pipe.sink()); List<DataflowPackage> targets = defaultPackageUtil.stageClasspathElements( ImmutableList.of(makeStagedFile(tmpFile.getAbsolutePath(), overriddenName)), STAGING_PATH, createOptions); DataflowPackage target = Iterables.getOnlyElement(targets); verify(mockGcsUtil).getObjects(anyListOf(GcsPath.class)); verify(mockGcsUtil).create(any(GcsPath.class), any(GcsUtil.CreateOptions.class)); verifyNoMoreInteractions(mockGcsUtil); assertThat(target.getName(), equalTo(overriddenName)); assertThat(target.getLocation(), RegexMatcher.matches(STAGING_PATH + "alias.txt")); }
static Range consolidateRanges(List<RangeNode> ranges) { boolean consistent = true; Range result = new RangeImpl(); for (RangeNode r : ranges) { Comparable lowValue = null; if (r.getStart() instanceof NumberNode startNode) { lowValue = startNode.getValue(); } else if (r.getStart() instanceof AtLiteralNode atLiteralNode) { Object evaluated = MapperHelper.evaluateAtLiteralNode(atLiteralNode); lowValue = evaluated instanceof Comparable<?> ? (Comparable) evaluated : null; } if (lowValue != null) { if (result.getLowEndPoint() == null) { result = new RangeImpl(Range.RangeBoundary.valueOf(r.getLowerBound().name()), lowValue, result.getHighEndPoint(), result.getHighBoundary()); } else { consistent = false; } } Comparable highValue = null; if (r.getEnd() instanceof NumberNode endNode) { highValue = endNode.getValue(); } else if (r.getEnd() instanceof AtLiteralNode atLiteralNode) { Object evaluated = MapperHelper.evaluateAtLiteralNode(atLiteralNode); highValue = evaluated instanceof Comparable<?> ? (Comparable) evaluated : null; } if (highValue != null) { if (result.getHighEndPoint() == null) { result = new RangeImpl(result.getLowBoundary(), result.getLowEndPoint(), highValue, Range.RangeBoundary.valueOf(r.getUpperBound().name())); } else { consistent = false; } } } return consistent ? result : null; }
@Test void consolidateRangesInvalidRepeatedUB() { Range lowRange = new RangeImpl(Range.RangeBoundary.CLOSED, null, 50, Range.RangeBoundary.CLOSED); Range highRange = new RangeImpl(Range.RangeBoundary.CLOSED, null, 100, Range.RangeBoundary.CLOSED); List<RangeNode> ranges = getRangeNodes(lowRange, highRange); Range result = RangeNodeSchemaMapper.consolidateRanges(ranges); assertThat(result).isNull(); }
public static <K, V> Write<K, V> write() { return new AutoValue_CdapIO_Write.Builder<K, V>().build(); }
@Test public void testWriteObjectCreationFailsIfPluginConfigIsNull() { assertThrows( IllegalArgumentException.class, () -> CdapIO.<String, String>write().withPluginConfig(null)); }
public T send() throws IOException { return web3jService.send(this, responseType); }
@Test public void testDbGetHex() throws Exception { web3j.dbGetHex("testDB", "myKey").send(); verifyResult( "{\"jsonrpc\":\"2.0\",\"method\":\"db_getHex\"," + "\"params\":[\"testDB\",\"myKey\"],\"id\":1}"); }
public void deserialize() throws KettleException { String xml = rep.getStepAttributeString( idStep, REPO_TAG ); requireNonNull( MetaXmlSerializer.deserialize( xml ) ) .to( stepMetaInterface ); }
@Test public void testDeserialize() throws KettleException { StepMetaPropsTest.FooMeta blankMeta = new StepMetaPropsTest.FooMeta(); String serialized = serialize( from( stepMeta ) ); doReturn( serialized ).when( repo ).getStepAttributeString( stepId, "step-xml" ); RepoSerializer .builder() .repo( repo ) .stepId( stepId ) .stepMeta( blankMeta ) .deserialize(); // blankMeta hydrated from the RepoSerializer should be the same as the serialized stepMeta assertThat( stepMeta, equalTo( blankMeta ) ); }
@Override public <VO, VR> KStream<K, VR> join(final KStream<K, VO> otherStream, final ValueJoiner<? super V, ? super VO, ? extends VR> joiner, final JoinWindows windows) { return join(otherStream, toValueJoinerWithKey(joiner), windows); }
@Test public void shouldNotAllowNullValueJoinerOnTableJoinWithJoiner() { final NullPointerException exception = assertThrows( NullPointerException.class, () -> testStream.join(testTable, (ValueJoiner<? super String, ? super String, ?>) null, Joined.as("name"))); assertThat(exception.getMessage(), equalTo("joiner can't be null")); }
@Override protected File getFile(HandlerRequest<EmptyRequestBody> handlerRequest) { if (logDir == null) { return null; } // wrapping around another File instantiation is a simple way to remove any path information // - we're // solely interested in the filename String filename = new File(handlerRequest.getPathParameter(LogFileNamePathParameter.class)).getName(); return new File(logDir, filename); }
@Test void testGetJobManagerCustomLogsExistingButForbiddenFile() throws Exception { File actualFile = testInstance.getFile( createHandlerRequest(String.format("../%s", FORBIDDEN_FILENAME))); assertThat(actualFile).isNotNull().doesNotExist(); }
public static void main(String[] args) throws IOException, ClassNotFoundException { // Write V1 var fishV1 = new RainbowFish("Zed", 10, 11, 12); LOGGER.info("fishV1 name={} age={} length={} weight={}", fishV1.getName(), fishV1.getAge(), fishV1.getLengthMeters(), fishV1.getWeightTons()); RainbowFishSerializer.writeV1(fishV1, "fish1.out"); // Read V1 var deserializedRainbowFishV1 = RainbowFishSerializer.readV1("fish1.out"); LOGGER.info("deserializedFishV1 name={} age={} length={} weight={}", deserializedRainbowFishV1.getName(), deserializedRainbowFishV1.getAge(), deserializedRainbowFishV1.getLengthMeters(), deserializedRainbowFishV1.getWeightTons()); // Write V2 var fishV2 = new RainbowFishV2("Scar", 5, 12, 15, true, true, true); LOGGER.info( "fishV2 name={} age={} length={} weight={} sleeping={} hungry={} angry={}", fishV2.getName(), fishV2.getAge(), fishV2.getLengthMeters(), fishV2.getWeightTons(), fishV2.isHungry(), fishV2.isAngry(), fishV2.isSleeping()); RainbowFishSerializer.writeV2(fishV2, "fish2.out"); // Read V2 with V1 method var deserializedFishV2 = RainbowFishSerializer.readV1("fish2.out"); LOGGER.info("deserializedFishV2 name={} age={} length={} weight={}", deserializedFishV2.getName(), deserializedFishV2.getAge(), deserializedFishV2.getLengthMeters(), deserializedFishV2.getWeightTons()); }
@Test void shouldExecuteWithoutException() { assertDoesNotThrow(() -> App.main(new String[]{})); }
@Override public Long clusterCountKeysInSlot(int slot) { RedisClusterNode node = clusterGetNodeForSlot(slot); MasterSlaveEntry entry = executorService.getConnectionManager().getEntry(new InetSocketAddress(node.getHost(), node.getPort())); RFuture<Long> f = executorService.readAsync(entry, StringCodec.INSTANCE, RedisCommands.CLUSTER_COUNTKEYSINSLOT, slot); return syncFuture(f); }
@Test public void testClusterCountKeysInSlot() { Long t = connection.clusterCountKeysInSlot(1); assertThat(t).isZero(); }
@Udf(description = "Converts a number of milliseconds since 1970-01-01 00:00:00 UTC/GMT into the" + " string representation of the timestamp in the given format. Single quotes in the" + " timestamp format can be escaped with '', for example: 'yyyy-MM-dd''T''HH:mm:ssX'." + " The system default time zone is used when no time zone is explicitly provided." + " The format pattern should be in the format expected" + " by java.time.format.DateTimeFormatter") public String timestampToString( @UdfParameter( description = "Milliseconds since" + " January 1, 1970, 00:00:00 UTC/GMT.") final long epochMilli, @UdfParameter( description = "The format pattern should be in the format expected by" + " java.time.format.DateTimeFormatter.") final String formatPattern) { if (formatPattern == null) { return null; } try { final Timestamp timestamp = new Timestamp(epochMilli); final DateTimeFormatter formatter = formatters.get(formatPattern); return timestamp.toInstant() .atZone(ZoneId.systemDefault()) .format(formatter); } catch (final ExecutionException | RuntimeException e) { throw new KsqlFunctionException("Failed to format timestamp " + epochMilli + " with formatter '" + formatPattern + "': " + e.getMessage(), e); } }
@Test public void shouldThrowIfInvalidTimeZone() { // When: final KsqlException e = assertThrows( KsqlFunctionException.class, () -> udf.timestampToString(1638360611123L, "yyyy-MM-dd HH:mm:ss.SSS", "PST") ); // Then: assertThat(e.getMessage(), containsString("Unknown time-zone ID: PST")); }
public boolean readData() throws KettleException { // Clear the information // clear(); File file = new File( getKettleLocalRepositoriesFile() ); if ( !file.exists() || !file.isFile() ) { if ( log.isDetailed() ) { log.logDetailed( BaseMessages.getString( PKG, "RepositoryMeta.Log.NoRepositoryFileInLocalDirectory", file.getAbsolutePath() ) ); } file = new File( getKettleUserRepositoriesFile() ); if ( !file.exists() || !file.isFile() ) { return true; // nothing to read! } } if ( log.isBasic() ) { log.logBasic( BaseMessages.getString( PKG, "RepositoryMeta.Log.ReadingXMLFile", file.getAbsoluteFile() ) ); } try { // Check and open XML document DocumentBuilderFactory dbf = XMLParserFactoryProducer.createSecureDocBuilderFactory(); DocumentBuilder db = dbf.newDocumentBuilder(); Document doc; try { doc = db.parse( file ); } catch ( FileNotFoundException ef ) { try ( InputStream is = getClass().getResourceAsStream( "/org/pentaho/di/repository/repositories.xml" ) ) { if ( is != null ) { doc = db.parse( is ); } else { throw new KettleException( BaseMessages.getString( PKG, "RepositoryMeta.Error.OpeningFile", file.getAbsoluteFile() ), ef ); } } } parseRepositoriesDoc( doc ); } catch ( Exception e ) { throw new KettleException( BaseMessages.getString( PKG, "RepositoryMeta.Error.ReadingInfo" ), e ); } return true; }
@Test public void testReadData() throws Exception { LogChannel log = mock( LogChannel.class ); doReturn( getClass().getResource( "repositories.xml" ).getPath() ).when( repoMeta ).getKettleUserRepositoriesFile(); doReturn( log ).when( repoMeta ).newLogChannel(); repoMeta.readData(); String repositoriesXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" + Const.CR + "<repositories>" + Const.CR + " <connection>" + Const.CR + " <name>local postgres</name>" + Const.CR + " <server>localhost</server>" + Const.CR + " <type>POSTGRESQL</type>" + Const.CR + " <access>Native</access>" + Const.CR + " <database>hibernate</database>" + Const.CR + " <port>5432</port>" + Const.CR + " <username>auser</username>" + Const.CR + " <password>Encrypted 2be98afc86aa7f285bb18bd63c99dbdde</password>" + Const.CR + " <servername/>" + Const.CR + " <data_tablespace/>" + Const.CR + " <index_tablespace/>" + Const.CR + " <attributes>" + Const.CR + " <attribute><code>FORCE_IDENTIFIERS_TO_LOWERCASE</code><attribute>N</attribute></attribute>" + Const.CR + " <attribute><code>FORCE_IDENTIFIERS_TO_UPPERCASE</code><attribute>N</attribute></attribute>" + Const.CR + " <attribute><code>IS_CLUSTERED</code><attribute>N</attribute></attribute>" + Const.CR + " <attribute><code>MSSQL_DOUBLE_DECIMAL_SEPARATOR</code><attribute>N</attribute></attribute>" + Const.CR + " <attribute><code>PORT_NUMBER</code><attribute>5432</attribute></attribute>" + Const.CR + " <attribute><code>PRESERVE_RESERVED_WORD_CASE</code><attribute>N</attribute></attribute>" + Const.CR + " <attribute><code>QUOTE_ALL_FIELDS</code><attribute>N</attribute></attribute>" + Const.CR + " <attribute><code>SUPPORTS_BOOLEAN_DATA_TYPE</code><attribute>Y</attribute></attribute>" + Const.CR + " <attribute><code>SUPPORTS_TIMESTAMP_DATA_TYPE</code><attribute>Y</attribute></attribute>" + Const.CR + " <attribute><code>USE_POOLING</code><attribute>N</attribute></attribute>" + Const.CR + " </attributes>" + Const.CR + " </connection>" + Const.CR + " <repository> <id>KettleFileRepository</id>" + Const.CR + " <name>Test Repository</name>" + Const.CR + " <description>Test Repository Description</description>" + Const.CR + " <is_default>false</is_default>" + Const.CR + " <base_directory>test-repository</base_directory>" + Const.CR + " <read_only>N</read_only>" + Const.CR + " <hides_hidden_files>N</hides_hidden_files>" + Const.CR + " </repository> </repositories>" + Const.CR; assertEquals( repositoriesXml, repoMeta.getXML() ); RepositoriesMeta clone = repoMeta.clone(); assertEquals( repositoriesXml, repoMeta.getXML() ); assertNotSame( clone, repoMeta ); assertEquals( 1, repoMeta.nrRepositories() ); RepositoryMeta repository = repoMeta.getRepository( 0 ); assertEquals( "Test Repository", repository.getName() ); assertEquals( "Test Repository Description", repository.getDescription() ); assertEquals( " <repository> <id>KettleFileRepository</id>" + Const.CR + " <name>Test Repository</name>" + Const.CR + " <description>Test Repository Description</description>" + Const.CR + " <is_default>false</is_default>" + Const.CR + " <base_directory>test-repository</base_directory>" + Const.CR + " <read_only>N</read_only>" + Const.CR + " <hides_hidden_files>N</hides_hidden_files>" + Const.CR + " </repository>", repository.getXML() ); assertSame( repository, repoMeta.searchRepository( "Test Repository" ) ); assertSame( repository, repoMeta.findRepositoryById( "KettleFileRepository" ) ); assertSame( repository, repoMeta.findRepository( "Test Repository" ) ); assertNull( repoMeta.findRepository( "not found" ) ); assertNull( repoMeta.findRepositoryById( "not found" ) ); assertEquals( 0, repoMeta.indexOfRepository( repository ) ); repoMeta.removeRepository( 0 ); assertEquals( 0, repoMeta.nrRepositories() ); assertNull( repoMeta.searchRepository( "Test Repository" ) ); repoMeta.addRepository( 0, repository ); assertEquals( 1, repoMeta.nrRepositories() ); repoMeta.removeRepository( 1 ); assertEquals( 1, repoMeta.nrRepositories() ); assertEquals( 1, repoMeta.nrDatabases() ); assertEquals( "local postgres", repoMeta.getDatabase( 0 ).getName() ); DatabaseMeta searchDatabase = repoMeta.searchDatabase( "local postgres" ); assertSame( searchDatabase, repoMeta.getDatabase( 0 ) ); assertEquals( 0, repoMeta.indexOfDatabase( searchDatabase ) ); repoMeta.removeDatabase( 0 ); assertEquals( 0, repoMeta.nrDatabases() ); assertNull( repoMeta.searchDatabase( "local postgres" ) ); repoMeta.addDatabase( 0, searchDatabase ); assertEquals( 1, repoMeta.nrDatabases() ); repoMeta.removeDatabase( 1 ); assertEquals( 1, repoMeta.nrDatabases() ); assertEquals( "Unable to read repository with id [junk]. RepositoryMeta is not available.", repoMeta.getErrorMessage() ); }
@Nonnull public static List<IndexIterationPointer> normalizePointers(@Nonnull List<IndexIterationPointer> result, boolean descending) { if (result.size() <= 1) { // single pointer, nothing to do return result; } // without the same ordering of pointers order of results would be unspecified assert result.stream().allMatch(r -> r.isDescending() == descending) : "All iteration pointers must have the same direction"; // order of ranges is critical for preserving ordering of the results Collections.sort(result, descending ? POINTER_COMPARATOR_REVERSED : POINTER_COMPARATOR); // loop until we processed the last remaining pair // // do the normalization in place without extra shifts in the array // we write normalized pointers from the beginning int writeIdx = 0; IndexIterationPointer currentMerged = result.get(0); for (int nextPointerIdx = 1; nextPointerIdx < result.size(); nextPointerIdx++) { // compare current pointer with next one and merge if they overlap // otherwise go to next pointer // pointers might be ordered in descending way but util methods expect ascending order of arguments IndexIterationPointer next = result.get(nextPointerIdx); if (!descending && overlapsOrdered(currentMerged, next, OrderedIndexStore.SPECIAL_AWARE_COMPARATOR)) { // merge overlapping ranges currentMerged = union(currentMerged, next, OrderedIndexStore.SPECIAL_AWARE_COMPARATOR); } else if (descending && overlapsOrdered(next, currentMerged, OrderedIndexStore.SPECIAL_AWARE_COMPARATOR)) { // merge overlapping ranges currentMerged = union(next, currentMerged, OrderedIndexStore.SPECIAL_AWARE_COMPARATOR); } else { // write current pointer and advance result.set(writeIdx++, currentMerged); currentMerged = next; } } // write last remaining pointer result.set(writeIdx++, currentMerged); return result.subList(0, writeIdx); }
@Test void normalizePointersMerge() { assertThat(normalizePointers(arrayListOf( pointer(singleton(5)), pointer(singleton(5))), false)) .as("Should merge overlapping ranges") .containsExactly(pointer(singleton(5))); assertThat(normalizePointers(arrayListOf( pointer(singleton(5)), pointer(singleton(6))), false)) .as("Should not merge non overlapping ranges") .containsExactly(pointer(singleton(5)), pointer(singleton(6))); assertThat(normalizePointers(arrayListOf( pointer(singleton(6), true), pointer(singleton(5), true)), true)) .as("Should not merge non overlapping desc ranges") .containsExactly(pointer(singleton(6), true), pointer(singleton(5), true)); assertThat(normalizePointers(arrayListOf( pointer(lessThan(2)), pointer(lessThan(5))), false)) .as("Should merge ranges in correct order") .containsExactly(pointer(lessThan(5))); assertThat(normalizePointers(arrayListOf( pointer(lessThan(5)), pointer(lessThan(2))), false)) .as("Should merge ranges in wrong order") .containsExactly(pointer(lessThan(5))); }
@Override public Enumerable<Object> execute(final ShardingSphereTable table, final ScanExecutorContext scanContext) { String databaseName = executorContext.getDatabaseName(); String schemaName = executorContext.getSchemaName(); DatabaseType databaseType = optimizerContext.getParserContext(databaseName).getDatabaseType(); if (new SystemDatabase(databaseType).getSystemSchemas().contains(schemaName)) { return createMemoryEnumerable(databaseName, schemaName, table, databaseType); } QueryContext queryContext = createQueryContext(federationContext.getMetaData(), scanContext, databaseType, federationContext.getQueryContext().isUseCache()); ExecutionContext executionContext = new KernelProcessor().generateExecutionContext(queryContext, globalRuleMetaData, executorContext.getProps(), new ConnectionContext(Collections::emptySet)); if (federationContext.isPreview()) { federationContext.getPreviewExecutionUnits().addAll(executionContext.getExecutionUnits()); return createEmptyEnumerable(); } return createJDBCEnumerable(queryContext, federationContext.getMetaData().getDatabase(databaseName), executionContext); }
@Test void assertExecuteWithStatistics() { OptimizerContext optimizerContext = mock(OptimizerContext.class, RETURNS_DEEP_STUBS); when(optimizerContext.getParserContext(any()).getDatabaseType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "PostgreSQL")); SQLFederationExecutorContext executorContext = mock(SQLFederationExecutorContext.class); when(executorContext.getDatabaseName()).thenReturn("db"); when(executorContext.getSchemaName()).thenReturn("pg_catalog"); ShardingSphereStatistics statistics = mock(ShardingSphereStatistics.class, RETURNS_DEEP_STUBS); ShardingSphereDatabaseData databaseData = mock(ShardingSphereDatabaseData.class, RETURNS_DEEP_STUBS); when(statistics.getDatabase("db")).thenReturn(databaseData); ShardingSphereSchemaData schemaData = mock(ShardingSphereSchemaData.class, RETURNS_DEEP_STUBS); when(databaseData.getSchema("pg_catalog")).thenReturn(schemaData); ShardingSphereTableData tableData = mock(ShardingSphereTableData.class); when(tableData.getRows()).thenReturn(Collections.singletonList(new ShardingSphereRowData(Collections.singletonList(1)))); when(schemaData.getTable("test")).thenReturn(tableData); ShardingSphereTable table = mock(ShardingSphereTable.class, RETURNS_DEEP_STUBS); when(table.getName()).thenReturn("test"); when(table.getColumns().values()).thenReturn(Collections.singleton(new ShardingSphereColumn("id", Types.INTEGER, true, false, false, false, true, false))); Enumerable<Object> enumerable = new EnumerableScanExecutor(null, null, null, optimizerContext, executorContext, null, null, statistics) .execute(table, mock(ScanExecutorContext.class)); try (Enumerator<Object> actual = enumerable.enumerator()) { actual.moveNext(); Object row = actual.current(); assertThat(row, instanceOf(Object[].class)); assertThat(((Object[]) row)[0], is(1)); } }
public static double of(int[] truth, int[] prediction) { if (truth.length != prediction.length) { throw new IllegalArgumentException(String.format("The vector sizes don't match: %d != %d.", truth.length, prediction.length)); } ConfusionMatrix confusion = ConfusionMatrix.of(truth, prediction); int[][] matrix = confusion.matrix; if (matrix.length != 2 || matrix[0].length != 2) { throw new IllegalArgumentException("MCC can only be applied to binary classification: " + confusion); } int tp = matrix[1][1]; int tn = matrix[0][0]; int fp = matrix[0][1]; int fn = matrix[1][0]; int numerator = (tp * tn - fp * fn); double denominator = Math.sqrt(tp + fp) * Math.sqrt(tp + fn) * Math.sqrt(tn + fp) * Math.sqrt(tn + fn); return numerator / denominator; }
@Test public void test() { System.out.println("MCC"); int[] truth = { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }; int[] prediction = { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }; double expResult = 0.83068; double result = MatthewsCorrelation.of(truth, prediction); assertEquals(expResult, result, 1E-5); }
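For reference, the expected value 0.83068 in the test above follows directly from the confusion matrix the two arrays encode: of the 30 positive labels, 25 are predicted 1 and 5 are predicted 0; of the 70 negative labels, 2 are predicted 1 and 68 are predicted 0. A minimal standalone check of the arithmetic (the class name is illustrative, not part of the library):

public class MccCheck {
    public static void main(String[] args) {
        // Counts read off the test arrays: tp/fn from the 30 positives, fp/tn from the 70 negatives.
        int tp = 25, fn = 5, fp = 2, tn = 68;
        double numerator = (double) tp * tn - (double) fp * fn; // 25 * 68 - 2 * 5 = 1690
        double denominator = Math.sqrt(tp + fp) * Math.sqrt(tp + fn)
                * Math.sqrt(tn + fp) * Math.sqrt(tn + fn); // sqrt(27 * 30 * 70 * 73), about 2034.478
        System.out.println(numerator / denominator); // about 0.830678, within the test's 1E-5 tolerance of 0.83068
    }
}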
@Override public boolean isWarProject() { String packaging = project.getPackaging(); return "war".equals(packaging) || "gwt-app".equals(packaging); }
@Test public void testIsWarProject_gwtAppPackagingIsWar() { when(mockMavenProject.getPackaging()).thenReturn("gwt-app"); assertThat(mavenProjectProperties.isWarProject()).isTrue(); }
public void addComponentEnvVariables(Map<String, String> env, Properties properties, boolean custom) { Set<String> toRemove = new HashSet<>(); env.forEach((k, v) -> { if (custom) { toRemove.add(k); String ck = "camel.component." + k.substring(16).toLowerCase(Locale.US).replace('_', '-'); ck = ck.replaceFirst("-", "."); properties.put(ck, v); } else { Optional<String> e = componentEnvNames.stream().filter(k::startsWith).findFirst(); if (e.isPresent()) { toRemove.add(k); String cname = "camel.component." + e.get().substring(16).toLowerCase(Locale.US).replace('_', '-'); String option = k.substring(cname.length() + 1).toLowerCase(Locale.US).replace('_', '-'); properties.put(cname + "." + option, v); } } }); toRemove.forEach(env::remove); }
@Test public void testAddComponentEnvVariables() { Map<String, String> env = MainHelper.filterEnvVariables(new String[] { "CAMEL_COMPONENT_" }); env.put("CAMEL_COMPONENT_AWS2_S3_ACCESS_KEY", "mysecretkey"); Properties prop = new OrderedProperties(); helper.addComponentEnvVariables(env, prop, false); Assertions.assertEquals(0, env.size()); Assertions.assertEquals(1, prop.size()); Assertions.assertEquals("mysecretkey", prop.getProperty("camel.component.aws2-s3.access-key")); }
public static String escape(String string) { return EscapeUtil.escapeHtml4(string); }
@Test public void escapeTest(){ final String a = "<>"; final String escape = XmlUtil.escape(a); Console.log(escape); }
@Override public int size() { return 0; }
@Test public void testForEachObject() { Set<Integer> results = new HashSet<>(); es.forEach((Consumer<? super Integer>) results::add); assertEquals(0, results.size()); }
@Operation(summary = "queryUnauthorizedProject", description = "QUERY_UNAUTHORIZED_PROJECT_NOTES") @Parameters({ @Parameter(name = "userId", description = "USER_ID", schema = @Schema(implementation = int.class, example = "100", required = true)) }) @GetMapping(value = "/unauth-project") @ResponseStatus(HttpStatus.OK) @ApiException(QUERY_UNAUTHORIZED_PROJECT_ERROR) public ProjectListResponse queryUnauthorizedProject(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("userId") Integer userId) { Result result = projectService.queryUnauthorizedProject(loginUser, userId); return new ProjectListResponse(result); }
@Test public void testQueryUnauthorizedProject() { Result result = new Result(); putMsg(result, Status.SUCCESS); Mockito.when(projectService.queryUnauthorizedProject(user, 2)).thenReturn(result); ProjectListResponse response = projectV2Controller.queryUnauthorizedProject(user, 2); Assertions.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue()); }
static ArgumentParser argParser() { ArgumentParser parser = ArgumentParsers .newArgumentParser("producer-performance") .defaultHelp(true) .description("This tool is used to verify the producer performance. To enable transactions, " + "you can specify a transaction id or set a transaction duration using --transaction-duration-ms. " + "There are three ways to specify the transaction id: set transaction.id=<id> via --producer-props, " + "set transaction.id=<id> in the config file via --producer.config, or use --transaction-id <id>."); MutuallyExclusiveGroup payloadOptions = parser .addMutuallyExclusiveGroup() .required(true) .description("either --record-size or --payload-file must be specified but not both."); parser.addArgument("--topic") .action(store()) .required(true) .type(String.class) .metavar("TOPIC") .help("produce messages to this topic"); parser.addArgument("--num-records") .action(store()) .required(true) .type(Long.class) .metavar("NUM-RECORDS") .dest("numRecords") .help("number of messages to produce"); payloadOptions.addArgument("--record-size") .action(store()) .required(false) .type(Integer.class) .metavar("RECORD-SIZE") .dest("recordSize") .help("message size in bytes. Note that you must provide exactly one of --record-size or --payload-file " + "or --payload-monotonic."); payloadOptions.addArgument("--payload-file") .action(store()) .required(false) .type(String.class) .metavar("PAYLOAD-FILE") .dest("payloadFile") .help("file to read the message payloads from. This works only for UTF-8 encoded text files. " + "Payloads will be read from this file and a payload will be randomly selected when sending messages. " + "Note that you must provide exactly one of --record-size or --payload-file or --payload-monotonic."); payloadOptions.addArgument("--payload-monotonic") .action(storeTrue()) .type(Boolean.class) .metavar("PAYLOAD-MONOTONIC") .dest("payloadMonotonic") .help("payload is monotonically increasing integer. Note that you must provide exactly one of --record-size " + "or --payload-file or --payload-monotonic."); parser.addArgument("--payload-delimiter") .action(store()) .required(false) .type(String.class) .metavar("PAYLOAD-DELIMITER") .dest("payloadDelimiter") .setDefault("\\n") .help("provides delimiter to be used when --payload-file is provided. " + "Defaults to new line. " + "Note that this parameter will be ignored if --payload-file is not provided."); parser.addArgument("--throughput") .action(store()) .required(true) .type(Double.class) .metavar("THROUGHPUT") .help("throttle maximum message throughput to *approximately* THROUGHPUT messages/sec. Set this to -1 to disable throttling."); parser.addArgument("--producer-props") .nargs("+") .required(false) .metavar("PROP-NAME=PROP-VALUE") .type(String.class) .dest("producerConfig") .help("kafka producer related configuration properties like bootstrap.servers,client.id etc. " + "These configs take precedence over those passed via --producer.config."); parser.addArgument("--producer.config") .action(store()) .required(false) .type(String.class) .metavar("CONFIG-FILE") .dest("producerConfigFile") .help("producer config properties file."); parser.addArgument("--print-metrics") .action(storeTrue()) .type(Boolean.class) .metavar("PRINT-METRICS") .dest("printMetrics") .help("print out metrics at the end of the test."); parser.addArgument("--transactional-id") .action(store()) .required(false) .type(String.class) .metavar("TRANSACTIONAL-ID") .dest("transactionalId") .help("The transactional id to use. This config takes precedence over the transactional.id " + "specified via --producer.config or --producer-props. Note that if the transactional id " + "is not specified while --transaction-duration-ms is provided, the default value for the " + "transactional id will be performance-producer- followed by a random uuid."); parser.addArgument("--transaction-duration-ms") .action(store()) .required(false) .type(Long.class) .metavar("TRANSACTION-DURATION") .dest("transactionDurationMs") .help("The max age of each transaction. The commitTransaction will be called after this time has elapsed. " + "The value should be greater than 0. If the transactional id is specified via --producer-props, " + "--producer.config, or --transactional-id but --transaction-duration-ms is not specified, " + "the default value will be 3000."); return parser; }
@Test public void testUnexpectedArg() { String[] args = new String[] { "--test", "test", "--topic", "Hello-Kafka", "--num-records", "5", "--throughput", "100", "--record-size", "100", "--producer-props", "bootstrap.servers=localhost:9000"}; ArgumentParser parser = ProducerPerformance.argParser(); ArgumentParserException thrown = assertThrows(ArgumentParserException.class, () -> parser.parseArgs(args)); assertEquals("unrecognized arguments: '--test'", thrown.getMessage()); }
@Override public long get() { return complete(asyncCounter.get()); }
@Test(expected = StorageException.Interrupted.class) public void testInterrupted() { AtomicCounterWithErrors atomicCounter = new AtomicCounterWithErrors(); atomicCounter.setErrorState(TestingCompletableFutures.ErrorState.INTERRUPTED_EXCEPTION); DefaultAtomicCounter counter = new DefaultAtomicCounter(atomicCounter, 1000); counter.get(); }
public CacheStats plus(CacheStats other) { return CacheStats.of( saturatedAdd(hitCount, other.hitCount), saturatedAdd(missCount, other.missCount), saturatedAdd(loadSuccessCount, other.loadSuccessCount), saturatedAdd(loadFailureCount, other.loadFailureCount), saturatedAdd(totalLoadTime, other.totalLoadTime), saturatedAdd(evictionCount, other.evictionCount), saturatedAdd(evictionWeight, other.evictionWeight)); }
@Test public void plus() { var one = CacheStats.of(11, 13, 15, 13, 11, 9, 18); var two = CacheStats.of(53, 47, 41, 39, 37, 35, 70); var sum = two.plus(one); checkStats(sum, 124, 64, 64.0 / 124, 60, 60.0 / 124, 56, 52, 52.0 / 108, 56 + 52, 48, 48.0 / (56 + 52), 44, 88); assertThat(sum).isEqualTo(one.plus(two)); }
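The notable property of plus() is the saturatedAdd: counters clamp at Long.MAX_VALUE instead of wrapping negative on overflow. A minimal sketch of that behavior, assuming Caffeine's seven-argument CacheStats.of(hitCount, missCount, loadSuccessCount, loadFailureCount, totalLoadTime, evictionCount, evictionWeight) factory that the test also uses:

import com.github.benmanes.caffeine.cache.stats.CacheStats;

public class PlusSaturationCheck {
    public static void main(String[] args) {
        CacheStats nearMax = CacheStats.of(Long.MAX_VALUE, 0, 0, 0, 0, 0, 0);
        CacheStats one = CacheStats.of(1, 0, 0, 0, 0, 0, 0);
        // saturatedAdd clamps the hit count at Long.MAX_VALUE instead of overflowing it
        System.out.println(nearMax.plus(one).hitCount() == Long.MAX_VALUE); // true
    }
}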
public static String matchOrigin(HttpServerExchange exchange, Collection<String> allowedOrigins) throws Exception { HeaderMap headers = exchange.getRequestHeaders(); String[] origins = headers.get(Headers.ORIGIN).toArray(); if(logger.isTraceEnabled()) logger.trace("origins from the request header = " + Arrays.toString(origins) + " allowedOrigins = " + allowedOrigins); if (allowedOrigins != null && !allowedOrigins.isEmpty()) { for (String allowedOrigin : allowedOrigins) { for (String origin : origins) { if (allowedOrigin.equalsIgnoreCase(sanitizeDefaultPort(origin))) { return allowedOrigin; } } } } String allowedOrigin = defaultOrigin(exchange); if(logger.isTraceEnabled()) logger.trace("allowedOrigin from the exchange = " + allowedOrigin); for (String origin : origins) { if (allowedOrigin.equalsIgnoreCase(sanitizeDefaultPort(origin))) { return allowedOrigin; } } logger.debug("Request rejected due to HOST/ORIGIN mis-match."); ResponseCodeHandler.HANDLE_403.handleRequest(exchange); return null; }
@Test public void testMatchOrigin() throws Exception { HeaderMap headerMap = new HeaderMap(); headerMap.add(HOST, "localhost:80"); headerMap.add(ORIGIN, "http://localhost"); HttpServerExchange exchange = new HttpServerExchange(null, headerMap, new HeaderMap(), 10); exchange.setRequestScheme("http"); exchange.setRequestMethod(HttpString.EMPTY); Collection<String> allowedOrigins = null; assertThat(CorsUtil.matchOrigin(exchange, allowedOrigins), is("http://localhost")); allowedOrigins = Collections.singletonList("http://www.example.com:9990"); //Default origin assertThat(CorsUtil.matchOrigin(exchange, allowedOrigins), is("http://localhost")); headerMap.clear(); headerMap.add(HOST, "localhost:80"); headerMap.add(ORIGIN, "http://www.example.com:9990"); assertThat(CorsUtil.matchOrigin(exchange, allowedOrigins), is("http://www.example.com:9990")); headerMap.clear(); headerMap.add(HOST, "localhost:80"); headerMap.add(ORIGIN, "http://www.example.com"); assertThat(CorsUtil.matchOrigin(exchange, allowedOrigins), is(nullValue())); headerMap.addAll(ORIGIN, Arrays.asList("http://localhost:7080", "http://www.example.com:9990", "http://localhost")); allowedOrigins = Arrays.asList("http://localhost", "http://www.example.com:9990"); assertThat(CorsUtil.matchOrigin(exchange, allowedOrigins), is("http://localhost")); }
@Override public DirectoryTimestamp getDirectoryTimestamp() { return DirectoryTimestamp.implicit; }
@Test public void testFeatures() { assertEquals(Protocol.Case.sensitive, new SFTPProtocol().getCaseSensitivity()); assertEquals(Protocol.DirectoryTimestamp.implicit, new SFTPProtocol().getDirectoryTimestamp()); }
public final void parseVersion(String version) { versionParts = new ArrayList<>(); if (version != null) { final Pattern rx = Pattern .compile("(\\d+[a-z]{1,3}$|[a-z]{1,3}[_-]?\\d+|\\d+|(rc|release|snapshot|beta|alpha)$)", Pattern.CASE_INSENSITIVE); final Matcher matcher = rx.matcher(version.toLowerCase()); while (matcher.find()) { versionParts.add(matcher.group()); } if (versionParts.isEmpty()) { versionParts.add(version); } } }
@Test public void testParseVersion() { String version = "1.2r1"; DependencyVersion instance = new DependencyVersion(); instance.parseVersion(version); List<String> parts = instance.getVersionParts(); assertEquals(3, parts.size()); assertEquals("1", parts.get(0)); assertEquals("2", parts.get(1)); assertEquals("r1", parts.get(2)); instance.parseVersion("x6.0"); parts = instance.getVersionParts(); assertEquals(2, parts.size()); assertEquals("x6", parts.get(0)); assertEquals("0", parts.get(1)); // TODO(code review): should this be here/do something? //assertEquals("0", parts.get(2)); }
private CompletableFuture<Boolean> verifyTxnOwnership(TxnID txnID) { assert ctx.executor().inEventLoop(); return service.pulsar().getTransactionMetadataStoreService() .verifyTxnOwnership(txnID, getPrincipal()) .thenComposeAsync(isOwner -> { if (isOwner) { return CompletableFuture.completedFuture(true); } if (service.isAuthenticationEnabled() && service.isAuthorizationEnabled()) { return isSuperUser(); } else { return CompletableFuture.completedFuture(false); } }, ctx.executor()); }
@Test(timeOut = 30000) public void sendEndTxnOnSubscription() throws Exception { final TransactionMetadataStoreService txnStore = mock(TransactionMetadataStoreService.class); when(txnStore.getTxnMeta(any())).thenReturn(CompletableFuture.completedFuture(mock(TxnMeta.class))); when(txnStore.verifyTxnOwnership(any(), any())).thenReturn(CompletableFuture.completedFuture(true)); when(txnStore.endTransaction(any(TxnID.class), anyInt(), anyBoolean())) .thenReturn(CompletableFuture.completedFuture(null)); when(pulsar.getTransactionMetadataStoreService()).thenReturn(txnStore); svcConfig.setTransactionCoordinatorEnabled(true); resetChannel(); setChannelConnected(); Topic topic = mock(Topic.class); final org.apache.pulsar.broker.service.Subscription sub = mock(org.apache.pulsar.broker.service.Subscription.class); doReturn(sub).when(topic).getSubscription(any()); doReturn(CompletableFuture.completedFuture(null)) .when(sub).endTxn(anyLong(), anyLong(), anyInt(), anyLong()); doReturn(CompletableFuture.completedFuture(Optional.of(topic))).when(brokerService) .getTopicIfExists(any(String.class)); ByteBuf clientCommand = Commands.newEndTxnOnSubscription(89L, 1L, 12L, successTopicName, successSubName, TxnAction.COMMIT, 1L); channel.writeInbound(clientCommand); CommandEndTxnOnSubscriptionResponse response = (CommandEndTxnOnSubscriptionResponse) getResponse(); assertEquals(response.getRequestId(), 89L); assertEquals(response.getTxnidLeastBits(), 1L); assertEquals(response.getTxnidMostBits(), 12L); assertFalse(response.hasError()); assertFalse(response.hasMessage()); channel.finish(); }
@Override public TypeSerializerSchemaCompatibility<T> resolveSchemaCompatibility( TypeSerializerSnapshot<T> oldSerializerSnapshot) { if (!(oldSerializerSnapshot instanceof AvroSerializerSnapshot)) { return TypeSerializerSchemaCompatibility.incompatible(); } AvroSerializerSnapshot<?> oldAvroSerializerSnapshot = (AvroSerializerSnapshot<?>) oldSerializerSnapshot; return resolveSchemaCompatibility(oldAvroSerializerSnapshot.schema, schema); }
@Test void anAvroSpecificRecordIsCompatibleAfterARoundTrip() throws IOException { // user is an avro generated test object. AvroSerializer<User> serializer = new AvroSerializer<>(User.class); AvroSerializerSnapshot<User> restored = roundTrip(serializer.snapshotConfiguration()); assertThat(serializer.snapshotConfiguration().resolveSchemaCompatibility(restored)) .is(isCompatibleAsIs()); }
@Subscribe public void onChatMessage(ChatMessage chatMessage) { if (chatMessage.getType() != ChatMessageType.TRADE && chatMessage.getType() != ChatMessageType.GAMEMESSAGE && chatMessage.getType() != ChatMessageType.SPAM && chatMessage.getType() != ChatMessageType.FRIENDSCHATNOTIFICATION) { return; } String message = chatMessage.getMessage(); Matcher matcher = KILLCOUNT_PATTERN.matcher(message); if (matcher.find()) { final String boss = matcher.group("boss"); final int kc = Integer.parseInt(matcher.group("kc")); final String pre = matcher.group("pre"); final String post = matcher.group("post"); if (Strings.isNullOrEmpty(pre) && Strings.isNullOrEmpty(post)) { unsetKc(boss); return; } String renamedBoss = KILLCOUNT_RENAMES .getOrDefault(boss, boss) // The config service doesn't support keys with colons in them .replace(":", ""); if (boss != renamedBoss) { // Unset old TOB kc unsetKc(boss); unsetPb(boss); unsetKc(boss.replace(":", ".")); unsetPb(boss.replace(":", ".")); // Unset old story mode unsetKc("Theatre of Blood Story Mode"); unsetPb("Theatre of Blood Story Mode"); } setKc(renamedBoss, kc); // We either already have the pb, or need to remember the boss for the upcoming pb if (lastPb > -1) { log.debug("Got out-of-order personal best for {}: {}", renamedBoss, lastPb); if (renamedBoss.contains("Theatre of Blood")) { // TOB team size isn't sent in the kill message, but can be computed from varbits int tobTeamSize = tobTeamSize(); lastTeamSize = tobTeamSize == 1 ? "Solo" : (tobTeamSize + " players"); } else if (renamedBoss.contains("Tombs of Amascut")) { // TOA team size isn't sent in the kill message, but can be computed from varbits int toaTeamSize = toaTeamSize(); lastTeamSize = toaTeamSize == 1 ? "Solo" : (toaTeamSize + " players"); } final double pb = getPb(renamedBoss); // If a raid with a team size, only update the pb if it is lower than the existing pb // so that the pb is the overall lowest of any team size if (lastTeamSize == null || pb == 0 || lastPb < pb) { log.debug("Setting overall pb (old: {})", pb); setPb(renamedBoss, lastPb); } if (lastTeamSize != null) { log.debug("Setting team size pb: {}", lastTeamSize); setPb(renamedBoss + " " + lastTeamSize, lastPb); } lastPb = -1; lastTeamSize = null; } else { lastBossKill = renamedBoss; lastBossTime = client.getTickCount(); } return; } matcher = DUEL_ARENA_WINS_PATTERN.matcher(message); if (matcher.find()) { final int oldWins = getKc("Duel Arena Wins"); final int wins = matcher.group(2).equals("one") ? 1 : Integer.parseInt(matcher.group(2).replace(",", "")); final String result = matcher.group(1); int winningStreak = getKc("Duel Arena Win Streak"); int losingStreak = getKc("Duel Arena Lose Streak"); if (result.equals("won") && wins > oldWins) { losingStreak = 0; winningStreak += 1; } else if (result.equals("were defeated")) { losingStreak += 1; winningStreak = 0; } else { log.warn("unrecognized duel streak chat message: {}", message); } setKc("Duel Arena Wins", wins); setKc("Duel Arena Win Streak", winningStreak); setKc("Duel Arena Lose Streak", losingStreak); } matcher = DUEL_ARENA_LOSSES_PATTERN.matcher(message); if (matcher.find()) { int losses = matcher.group(1).equals("one") ? 1 : Integer.parseInt(matcher.group(1).replace(",", "")); setKc("Duel Arena Losses", losses); } matcher = KILL_DURATION_PATTERN.matcher(message); if (matcher.find()) { matchPb(matcher); } matcher = NEW_PB_PATTERN.matcher(message); if (matcher.find()) { matchPb(matcher); } matcher = RAIDS_PB_PATTERN.matcher(message); if (matcher.find()) { matchPb(matcher); } matcher = RAIDS_DURATION_PATTERN.matcher(message); if (matcher.find()) { matchPb(matcher); } matcher = HS_PB_PATTERN.matcher(message); if (matcher.find()) { int floor = Integer.parseInt(matcher.group("floor")); String floortime = matcher.group("floortime"); String floorpb = matcher.group("floorpb"); String otime = matcher.group("otime"); String opb = matcher.group("opb"); String pb = MoreObjects.firstNonNull(floorpb, floortime); setPb("Hallowed Sepulchre Floor " + floor, timeStringToSeconds(pb)); if (otime != null) { pb = MoreObjects.firstNonNull(opb, otime); setPb("Hallowed Sepulchre", timeStringToSeconds(pb)); } } matcher = HS_KC_FLOOR_PATTERN.matcher(message); if (matcher.find()) { int floor = Integer.parseInt(matcher.group(1)); int kc = Integer.parseInt(matcher.group(2).replaceAll(",", "")); setKc("Hallowed Sepulchre Floor " + floor, kc); } matcher = HS_KC_GHC_PATTERN.matcher(message); if (matcher.find()) { int kc = Integer.parseInt(matcher.group(1).replaceAll(",", "")); setKc("Hallowed Sepulchre", kc); } matcher = HUNTER_RUMOUR_KC_PATTERN.matcher(message); if (matcher.find()) { int kc = Integer.parseInt(matcher.group(1).replaceAll(",", "")); setKc("Hunter Rumours", kc); } if (lastBossKill != null && lastBossTime != client.getTickCount()) { lastBossKill = null; lastBossTime = -1; } matcher = COLLECTION_LOG_ITEM_PATTERN.matcher(message); if (matcher.find()) { String item = matcher.group(1); int petId = findPet(item); if (petId != -1) { final List<Integer> petList = new ArrayList<>(getPetList()); if (!petList.contains(petId)) { log.debug("New pet added: {}/{}", item, petId); petList.add(petId); setPetList(petList); } } } matcher = GUARDIANS_OF_THE_RIFT_PATTERN.matcher(message); if (matcher.find()) { int kc = Integer.parseInt(matcher.group(1)); setKc("Guardians of the Rift", kc); } }
@Test public void testHsFloorNoPb() { ChatMessage chatMessage = new ChatMessage(null, GAMEMESSAGE, "", "Floor 1 time: <col=ff0000>1:19</col>. Personal best: 0:28", null, 0); chatCommandsPlugin.onChatMessage(chatMessage); verify(configManager).setRSProfileConfiguration("personalbest", "hallowed sepulchre floor 1", 28.0); // Precise times chatMessage = new ChatMessage(null, GAMEMESSAGE, "", "Floor 1 time: <col=ff0000>1:19.20</col>. Personal best: 0:28.40", null, 0); chatCommandsPlugin.onChatMessage(chatMessage); verify(configManager).setRSProfileConfiguration("personalbest", "hallowed sepulchre floor 1", 28.4); }
public static int indexOf(ByteBuf needle, ByteBuf haystack) { if (haystack == null || needle == null) { return -1; } if (needle.readableBytes() > haystack.readableBytes()) { return -1; } int n = haystack.readableBytes(); int m = needle.readableBytes(); if (m == 0) { return 0; } // When the needle has only one byte that can be read, // the ByteBuf.indexOf() can be used if (m == 1) { return haystack.indexOf(haystack.readerIndex(), haystack.writerIndex(), needle.getByte(needle.readerIndex())); } int i; int j = 0; int aStartIndex = needle.readerIndex(); int bStartIndex = haystack.readerIndex(); long suffixes = maxSuf(needle, m, aStartIndex, true); long prefixes = maxSuf(needle, m, aStartIndex, false); int ell = Math.max((int) (suffixes >> 32), (int) (prefixes >> 32)); int per = Math.max((int) suffixes, (int) prefixes); int memory; int length = Math.min(m - per, ell + 1); if (equals(needle, aStartIndex, needle, aStartIndex + per, length)) { memory = -1; while (j <= n - m) { i = Math.max(ell, memory) + 1; while (i < m && needle.getByte(i + aStartIndex) == haystack.getByte(i + j + bStartIndex)) { ++i; } if (i > n) { return -1; } if (i >= m) { i = ell; while (i > memory && needle.getByte(i + aStartIndex) == haystack.getByte(i + j + bStartIndex)) { --i; } if (i <= memory) { return j + bStartIndex; } j += per; memory = m - per - 1; } else { j += i - ell; memory = -1; } } } else { per = Math.max(ell + 1, m - ell - 1) + 1; while (j <= n - m) { i = ell + 1; while (i < m && needle.getByte(i + aStartIndex) == haystack.getByte(i + j + bStartIndex)) { ++i; } if (i > n) { return -1; } if (i >= m) { i = ell; while (i >= 0 && needle.getByte(i + aStartIndex) == haystack.getByte(i + j + bStartIndex)) { --i; } if (i < 0) { return j + bStartIndex; } j += per; } else { j += i - ell; } } } return -1; }
@Test public void testIndexOf() { ByteBuf haystack = Unpooled.copiedBuffer("abc123", CharsetUtil.UTF_8); assertEquals(0, ByteBufUtil.indexOf(Unpooled.copiedBuffer("a", CharsetUtil.UTF_8), haystack)); assertEquals(1, ByteBufUtil.indexOf(Unpooled.copiedBuffer("bc".getBytes(CharsetUtil.UTF_8)), haystack)); assertEquals(2, ByteBufUtil.indexOf(Unpooled.copiedBuffer("c".getBytes(CharsetUtil.UTF_8)), haystack)); assertEquals(0, ByteBufUtil.indexOf(Unpooled.copiedBuffer("abc12".getBytes(CharsetUtil.UTF_8)), haystack)); assertEquals(-1, ByteBufUtil.indexOf(Unpooled.copiedBuffer("abcdef".getBytes(CharsetUtil.UTF_8)), haystack)); assertEquals(-1, ByteBufUtil.indexOf(Unpooled.copiedBuffer("abc12x".getBytes(CharsetUtil.UTF_8)), haystack)); assertEquals(-1, ByteBufUtil.indexOf(Unpooled.copiedBuffer("abc123def".getBytes(CharsetUtil.UTF_8)), haystack)); final ByteBuf needle = Unpooled.copiedBuffer("abc12", CharsetUtil.UTF_8); haystack.readerIndex(1); needle.readerIndex(1); assertEquals(1, ByteBufUtil.indexOf(needle, haystack)); haystack.readerIndex(2); needle.readerIndex(3); assertEquals(3, ByteBufUtil.indexOf(needle, haystack)); haystack.readerIndex(1); needle.readerIndex(2); assertEquals(2, ByteBufUtil.indexOf(needle, haystack)); haystack.release(); haystack = new WrappedByteBuf(Unpooled.copiedBuffer("abc123", CharsetUtil.UTF_8)); assertEquals(0, ByteBufUtil.indexOf(Unpooled.copiedBuffer("a", CharsetUtil.UTF_8), haystack)); assertEquals(1, ByteBufUtil.indexOf(Unpooled.copiedBuffer("bc".getBytes(CharsetUtil.UTF_8)), haystack)); assertEquals(-1, ByteBufUtil.indexOf(Unpooled.copiedBuffer("abcdef".getBytes(CharsetUtil.UTF_8)), haystack)); haystack.release(); haystack = Unpooled.copiedBuffer("123aab123", CharsetUtil.UTF_8); assertEquals(3, ByteBufUtil.indexOf(Unpooled.copiedBuffer("aab", CharsetUtil.UTF_8), haystack)); haystack.release(); needle.release(); }
public LatLong avgLatLong() { return LatLong.quickAvgLatLong(point1.latLong(), point2.latLong()); }
@Test public void testAvgLatLong() { Point p1 = Point.builder().latLong(33.63143, -84.33913).time(EPOCH).build(); Point p2 = Point.builder().latLong(33.64143, -84.43913).time(EPOCH).build(); PointPair pair = PointPair.of(p1, p2); LatLong actual = pair.avgLatLong(); LatLong expected = LatLong.of(33.63643, -84.38913); assertTrue(expected.distanceTo(actual).isLessThan(Distance.ofNauticalMiles(0.00001))); }
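The expected LatLong in the test is simply the arithmetic midpoint of the two fixes: (33.63143 + 33.64143) / 2 = 33.63643 and (-84.33913 + (-84.43913)) / 2 = -84.38913, so the distance-based assertion is only absorbing floating-point rounding, not any geodesic correction.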
public static URL parseURL(String address, Map<String, String> defaults) { if (StringUtils.isEmpty(address)) { throw new IllegalArgumentException("Address is not allowed to be empty, please re-enter."); } String url; if (address.contains("://") || address.contains(URL_PARAM_STARTING_SYMBOL)) { url = address; } else { String[] addresses = COMMA_SPLIT_PATTERN.split(address); url = addresses[0]; if (addresses.length > 1) { StringBuilder backup = new StringBuilder(); for (int i = 1; i < addresses.length; i++) { if (i > 1) { backup.append(','); } backup.append(addresses[i]); } url += URL_PARAM_STARTING_SYMBOL + RemotingConstants.BACKUP_KEY + "=" + backup.toString(); } } String defaultProtocol = defaults == null ? null : defaults.get(PROTOCOL_KEY); if (StringUtils.isEmpty(defaultProtocol)) { defaultProtocol = DUBBO_PROTOCOL; } String defaultUsername = defaults == null ? null : defaults.get(USERNAME_KEY); String defaultPassword = defaults == null ? null : defaults.get(PASSWORD_KEY); int defaultPort = StringUtils.parseInteger(defaults == null ? null : defaults.get(PORT_KEY)); String defaultPath = defaults == null ? null : defaults.get(PATH_KEY); Map<String, String> defaultParameters = defaults == null ? null : new HashMap<>(defaults); if (defaultParameters != null) { defaultParameters.remove(PROTOCOL_KEY); defaultParameters.remove(USERNAME_KEY); defaultParameters.remove(PASSWORD_KEY); defaultParameters.remove(HOST_KEY); defaultParameters.remove(PORT_KEY); defaultParameters.remove(PATH_KEY); } URL u = URL.cacheableValueOf(url); boolean changed = false; String protocol = u.getProtocol(); String username = u.getUsername(); String password = u.getPassword(); String host = u.getHost(); int port = u.getPort(); String path = u.getPath(); Map<String, String> parameters = new HashMap<>(u.getParameters()); if (StringUtils.isEmpty(protocol)) { changed = true; protocol = defaultProtocol; } if (StringUtils.isEmpty(username) && StringUtils.isNotEmpty(defaultUsername)) { changed = true; username = defaultUsername; } if (StringUtils.isEmpty(password) && StringUtils.isNotEmpty(defaultPassword)) { changed = true; password = defaultPassword; } /*if (u.isAnyHost() || u.isLocalHost()) { changed = true; host = NetUtils.getLocalHost(); }*/ if (port <= 0) { if (defaultPort > 0) { changed = true; port = defaultPort; } else { changed = true; port = 9090; } } if (StringUtils.isEmpty(path)) { if (StringUtils.isNotEmpty(defaultPath)) { changed = true; path = defaultPath; } } if (defaultParameters != null && defaultParameters.size() > 0) { for (Map.Entry<String, String> entry : defaultParameters.entrySet()) { String key = entry.getKey(); String defaultValue = entry.getValue(); if (StringUtils.isNotEmpty(defaultValue)) { String value = parameters.get(key); if (StringUtils.isEmpty(value)) { changed = true; parameters.put(key, defaultValue); } } } } if (changed) { u = new ServiceConfigURL(protocol, username, password, host, port, path, parameters); } return u; }
@Test void testParseUrl2() { String address = "192.168.0.1"; String backupAddress1 = "192.168.0.2"; String backupAddress2 = "192.168.0.3"; Map<String, String> parameters = new HashMap<String, String>(); parameters.put("username", "root"); parameters.put("password", "alibaba"); parameters.put("port", "10000"); parameters.put("protocol", "dubbo"); URL url = UrlUtils.parseURL(address + "," + backupAddress1 + "," + backupAddress2, parameters); assertEquals("192.168.0.1:10000", url.getAddress()); assertEquals("root", url.getUsername()); assertEquals("alibaba", url.getPassword()); assertEquals(10000, url.getPort()); assertEquals("dubbo", url.getProtocol()); assertEquals("192.168.0.2" + "," + "192.168.0.3", url.getParameter("backup")); }
@Override public Set<Link> getIngressLinks(ConnectPoint dst) { return filter(links.values(), link -> dst.equals(link.dst())); }
@Test public final void testGetIngressLinks() { final ConnectPoint d1P1 = new ConnectPoint(DID1, P1); final ConnectPoint d2P2 = new ConnectPoint(DID2, P2); LinkKey linkId1 = LinkKey.linkKey(d1P1, d2P2); LinkKey linkId2 = LinkKey.linkKey(d2P2, d1P1); LinkKey linkId3 = LinkKey.linkKey(new ConnectPoint(DID1, P2), new ConnectPoint(DID2, P3)); putLink(linkId1, DIRECT); putLink(linkId2, DIRECT); putLink(linkId3, DIRECT); /* DID1,P1 => DID2,P2 */ /* DID2,P2 => DID1,P1 */ /* DID1,P2 => DID2,P3 */ Set<Link> links1 = linkStore.getIngressLinks(d2P2); assertEquals(1, links1.size()); assertLink(linkId1, DIRECT, links1.iterator().next()); Set<Link> links2 = linkStore.getIngressLinks(d1P1); assertEquals(1, links2.size()); assertLink(linkId2, DIRECT, links2.iterator().next()); }
@SuppressWarnings("unchecked") public <V> V run(String callableName, RetryOperation<V> operation) { int attempt = 1; while (true) { try { return operation.run(); } catch (Exception e) { if (attempt >= maxAttempts || !retryPredicate.test(e)) { throwIfUnchecked(e); throw new RuntimeException(e); } QueryException qe = (QueryException) e; attempt++; int delayMillis = (int) min(minBackoffDelay.toMillis() * pow(scaleFactor, attempt - 1), maxBackoffDelay.toMillis()); int jitterMillis = ThreadLocalRandom.current().nextInt(max(1, (int) (delayMillis * 0.1))); log.debug( "Failed on executing %s with attempt %d. Retry after %sms. Cause: %s", callableName, attempt - 1, delayMillis, qe.getMessage()); try { MILLISECONDS.sleep(delayMillis + jitterMillis); } catch (InterruptedException ie) { currentThread().interrupt(); throw new RuntimeException(ie); } } } }
@Test public void testSuccess() { assertEquals( retryDriver.run("test", new MockOperation(5, RETRYABLE_EXCEPTION)), Integer.valueOf(5)); }
public static Range<Comparable<?>> safeIntersection(final Range<Comparable<?>> range, final Range<Comparable<?>> connectedRange) { try { return range.intersection(connectedRange); } catch (final ClassCastException ex) { Class<?> clazz = getRangeTargetNumericType(range, connectedRange); if (null == clazz) { throw ex; } Range<Comparable<?>> newRange = createTargetNumericTypeRange(range, clazz); Range<Comparable<?>> newConnectedRange = createTargetNumericTypeRange(connectedRange, clazz); return newRange.intersection(newConnectedRange); } }
@Test void assertSafeIntersectionForDouble() { Range<Comparable<?>> range = Range.closed(1242.114, 31474836.12); Range<Comparable<?>> connectedRange = Range.downTo(567.34F, BoundType.OPEN); Range<Comparable<?>> newRange = SafeNumberOperationUtils.safeIntersection(range, connectedRange); assertThat(newRange.lowerEndpoint(), is(1242.114)); assertThat(newRange.lowerBoundType(), is(BoundType.CLOSED)); assertThat(newRange.upperEndpoint(), is(31474836.12)); assertThat(newRange.upperBoundType(), is(BoundType.CLOSED)); }
public static void loadPropertiesFile(final String filenameOrUrl) { loadPropertiesFile(PropertyAction.REPLACE, filenameOrUrl); }
@Test void shouldDoNothingToSystemPropsWhenLoadingFileWhichDoesNotExist() { final int originalSystemPropSize = System.getProperties().size(); loadPropertiesFile("$unknown-file$"); assertEquals(originalSystemPropSize, System.getProperties().size()); }
public static <K, C, V, T> V computeIfAbsent(Map<K, V> target, K key, BiFunction<C, T, V> mappingFunction, C param1, T param2) { Objects.requireNonNull(target, "target"); Objects.requireNonNull(key, "key"); Objects.requireNonNull(mappingFunction, "mappingFunction"); Objects.requireNonNull(param1, "param1"); Objects.requireNonNull(param2, "param2"); V val = target.get(key); if (val == null) { V ret = mappingFunction.apply(param1, param2); target.put(key, ret); return ret; } return val; }
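A note on the computeIfAbsent overload above: unlike java.util.Map#computeIfAbsent, the get and the put are two separate steps here, so under concurrency two callers may both invoke the mapping function and the last put wins. A minimal single-threaded usage sketch (key and parameter values are hypothetical; assumes the usual java.util and java.util.function imports):
Map<String, String> target = new HashMap<>();
BiFunction<String, String, String> fn = (a, b) -> a + b;
String first = MapUtil.computeIfAbsent(target, "key", fn, "param1", "param2"); /* misses, applies fn, stores and returns "param1param2" */
String second = MapUtil.computeIfAbsent(target, "key", fn, "param1", "param2"); /* returns the stored value without re-invoking fn */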
@Test public void computeIfAbsentNotExistTargetTest() { BiFunction<String, String, String> mappingFunction = (a, b) -> a + b; try { MapUtil.computeIfAbsent(null, "key", mappingFunction, "param1", "param2"); } catch (Exception e) { if (e instanceof NullPointerException) { Assert.isTrue(Objects.equals("target", e.getMessage())); } } }
@Override public boolean isReturnEntityRequested() { String returnEntityValue = getParameter(RestConstants.RETURN_ENTITY_PARAM); if (returnEntityValue == null) { /* Default to true for backward compatibility so that existing clients can receive entity without using parameter */ return true; } return ArgumentUtils.parseReturnEntityParameter(returnEntityValue); }
@Test(dataProvider = "returnEntityParameterData") public void testReturnEntityParameter(String uri, boolean expectReturnEntity, boolean expectException) throws RestLiSyntaxException { final ResourceContextImpl context = new ResourceContextImpl(new PathKeysImpl(), new RestRequestBuilder(URI.create(uri)) .setHeader(RestConstants.HEADER_RESTLI_PROTOCOL_VERSION, AllProtocolVersions.LATEST_PROTOCOL_VERSION.toString()) .build(), new RequestContext()); try { final boolean returnEntity = context.isReturnEntityRequested(); if (expectException) { Assert.fail("Exception should be thrown for URI: " + uri); } Assert.assertEquals(returnEntity, expectReturnEntity, "Resource context was wrong about whether the URI \"" + uri + "\" indicates that the entity should be returned."); } catch (RestLiServiceException e) { if (!expectException) { Assert.fail("Exception should not be thrown for URI: " + uri); } } }
public static String encodeUrlWithoutPadding(byte[] bytes) { return getInstance().internalEncodeUrlWithoutPadding(bytes); }
@Test public void testEncodeUrlWithoutPadding() { final String helloWorldEncoded = "SGVsbG8gV29ybGQ"; final String helloWorldTwoLinesEncoded = "SGVsbG8gV29ybGQNCk5ldyBMaW5lMg"; final String helloWorldTwoLinesAndNewLineEncoded = "SGVsbG8gV29ybGQNClNlY29uZCBMaW5lDQo"; final String helloWorldDifferentCharsEncoded = "YDEyMzQ1Njc4OTAtPX4hQCMkJV4mKigpXyvRkdCBIuKEljs6P3F3ZXJ0eXVpb3B" + "bXWFzZGZnaGprbDsnenhjdmJubSwuL1FXRVJUWVVJT1B7fXxBU0RGR0hKS0w6WlhDVkJOTTw-P9C50YTRj9GG0YvRh9GD0LLRgdC" + "60LDQvNC10L_QuNC90YDRgtCz0L7RjNGI0LvQsdGJ0LTRjtC30LYu0YXRjdGKXNCZ0KTQr9Cm0KvQp9Cj0JLQodCa0JDQnNCV0J_" + "QmNCd0KDQotCT0J7QrNCo0JvQkdCp0JTQrtCX0JYs0KXQrdCqLw0JDCcICg"; final String str = "MIICdgIBADANBgkqhkiG9w0BAQEFAASCAmAwggJcAgEAAoGBAMPQ5BCMxlUq2TYy" + "iRIoEUsz6HGTJhHuasS2nx1Se4Co3lxwxyubVdFj8AuhHNJSmJvjlpbTsGOjLZpr" + "HyDEDdJmf1Fensh1MhUnBZ4a7uLrZrKzFHHJdamX9pxapB89vLeHlCot9hVXdrZH" + "nNtg6FdmRKH_8gbs8iDyIayFvzYDAgMBAAECgYA-c9MpTBy9cQsR9BAvkEPjvkx2" + "XL4ZnfbDgpNA4Nuu7yzsQrPjPomiXMNkkiAFHH67yVxwAlgRjyuuQlgNNTpKvyQt" + "XcHxffnU0820VmE23M-L7jg2TlB3-rUnEDmDvCoyjlwGDR6lNb7t7Fgg2iR-iaov" + "0iVzz-l9w0slRlyGsQJBAPWXW2m3NmFgqfDxtw8fsKC2y8o17_cnPjozRGtWb8LQ" + "g3VCb8kbOFHOYNGazq3M7-wD1qILF2h_HecgK9eQrZ0CQQDMHXoJMfKKbrFrTKgE" + "zyggO1gtuT5OXYeFewMEb5AbDI2FfSc2YP7SHij8iQ2HdukBrbTmi6qxh3HmIR58" + "I_AfAkEA0Y9vr0tombsUB8cZv0v5OYoBZvCTbMANtzfb4AOHpiKqqbohDOevLQ7_" + "SpvgVCmVaDz2PptcRAyEBZ5MCssneQJAB2pmvaDH7Ambfod5bztLfOhLCtY5EkXJ" + "n6rZcDbRaHorRhdG7m3VtDKOUKZ2DF7glkQGV33phKukErVPUzlHBwJAScD9TqaG" + "wJ3juUsVtujV23SnH43iMggXT7m82STpPGam1hPfmqu2Z0niePFo927ogQ7H1EMJ" + "UHgqXmuvk2X_Ww"; final String allBytesStr = "gIGCg4SFhoeIiYqLjI2Oj5CRkpOUlZaXmJmam5ydnp-goaKjpKWmp6ipqqusra6vsLGys7S1tre4ubq7vL2" + "-v8DBwsPExcbHyMnKy8zNzs_Q0dLT1NXW19jZ2tvc3d7f4OHi4-Tl5ufo6err7O3u7_Dx8vP09fb3-Pn6-_z9_v8AAQIDBAUGBwg" + "JCgsMDQ4PEBESExQVFhcYGRobHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2Nzg5Ojs8PT4_QEFCQ0RFRkdISUpLTE1OT1BRUlN" + "UVVZXWFlaW1xdXl9gYWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXp7fH1-fw"; assertEquals(helloWorldEncoded, java8Base64.internalEncodeUrlWithoutPadding(helloWorldBytes)); assertEquals(helloWorldTwoLinesEncoded, java8Base64.internalEncodeUrlWithoutPadding(helloWorldTwoLinesBytes)); assertEquals(helloWorldTwoLinesAndNewLineEncoded, java8Base64.internalEncodeUrlWithoutPadding(helloWorldTwoLinesAndNewLineBytes)); assertEquals(helloWorldDifferentCharsEncoded, java8Base64.internalEncodeUrlWithoutPadding(helloWorldDifferentCharsBytes)); assertEquals(str, java8Base64.internalEncodeUrlWithoutPadding(bytes)); assertEquals(allBytesStr, java8Base64.internalEncodeUrlWithoutPadding(allBytes)); assertEquals(helloWorldEncoded, commonsCodecBase64.internalEncodeUrlWithoutPadding(helloWorldBytes)); assertEquals(helloWorldTwoLinesEncoded, commonsCodecBase64.internalEncodeUrlWithoutPadding(helloWorldTwoLinesBytes)); assertEquals(helloWorldTwoLinesAndNewLineEncoded, commonsCodecBase64.internalEncodeUrlWithoutPadding(helloWorldTwoLinesAndNewLineBytes)); assertEquals(helloWorldDifferentCharsEncoded, commonsCodecBase64.internalEncodeUrlWithoutPadding(helloWorldDifferentCharsBytes)); assertEquals(str, commonsCodecBase64.internalEncodeUrlWithoutPadding(bytes)); assertEquals(allBytesStr, commonsCodecBase64.internalEncodeUrlWithoutPadding(allBytes)); assertEquals(helloWorldEncoded, jaxbBase64.internalEncodeUrlWithoutPadding(helloWorldBytes)); assertEquals(helloWorldTwoLinesEncoded, jaxbBase64.internalEncodeUrlWithoutPadding(helloWorldTwoLinesBytes)); assertEquals(helloWorldTwoLinesAndNewLineEncoded, jaxbBase64.internalEncodeUrlWithoutPadding(helloWorldTwoLinesAndNewLineBytes)); assertEquals(helloWorldDifferentCharsEncoded, jaxbBase64.internalEncodeUrlWithoutPadding(helloWorldDifferentCharsBytes)); assertEquals(str, jaxbBase64.internalEncodeUrlWithoutPadding(bytes)); assertEquals(allBytesStr, jaxbBase64.internalEncodeUrlWithoutPadding(allBytes)); assertEquals(helloWorldEncoded, jaxb230Base64.internalEncodeUrlWithoutPadding(helloWorldBytes)); assertEquals(helloWorldTwoLinesEncoded, jaxb230Base64.internalEncodeUrlWithoutPadding(helloWorldTwoLinesBytes)); assertEquals(helloWorldTwoLinesAndNewLineEncoded, jaxb230Base64.internalEncodeUrlWithoutPadding(helloWorldTwoLinesAndNewLineBytes)); assertEquals(helloWorldDifferentCharsEncoded, jaxb230Base64.internalEncodeUrlWithoutPadding(helloWorldDifferentCharsBytes)); assertEquals(str, jaxb230Base64.internalEncodeUrlWithoutPadding(bytes)); assertEquals(allBytesStr, jaxb230Base64.internalEncodeUrlWithoutPadding(allBytes)); }
public static boolean electionWasClean(int newLeader, int[] isr) { return newLeader == NO_LEADER || Replicas.contains(isr, newLeader); }
@Test public void testElectionWasClean() { assertTrue(PartitionRegistration.electionWasClean(1, new int[]{1, 2})); assertFalse(PartitionRegistration.electionWasClean(1, new int[]{0, 2})); assertFalse(PartitionRegistration.electionWasClean(1, new int[]{})); assertTrue(PartitionRegistration.electionWasClean(3, new int[]{1, 2, 3, 4, 5, 6})); }
public X509Certificate sign(PrivateKey caPrivateKey, X509Certificate caCertificate, PKCS10CertificationRequest csr, RenewalPolicy renewalPolicy) throws Exception { Instant validFrom = Instant.now(clock); var validUntil = validFrom.plus(renewalPolicy.parsedCertificateLifetime()); return sign(caPrivateKey, caCertificate, csr, validFrom, validUntil); }
@Test void testSigningCertWithTwoHoursLifetime() throws Exception { var result = sign("PT2H"); assertThat(result).isNotNull(); assertThat(result.getNotAfter()).isEqualTo(fixedInstant.plus(2, ChronoUnit.HOURS)); }
public static String getBoxedClassName(ParameterField parameterField) { return parameterField.getDataType() == null ? Object.class.getName() : getBoxedClassName(parameterField.getDataType()); }
@Test void getBoxedClassNameByDataTypes() { List<DataType> dataTypes = getDataTypes(); dataTypes.forEach(dataType -> { String retrieved = org.kie.pmml.compiler.api.utils.ModelUtils.getBoxedClassName(dataType); commonVerifyEventuallyBoxedClassName(retrieved, dataType); }); }
public static String toJson(UpdateRequirement updateRequirement) { return toJson(updateRequirement, false); }
@Test public void testAssertViewUUIDToJson() { String uuid = "2cc52516-5e73-41f2-b139-545d41a4e151"; String expected = String.format("{\"type\":\"assert-view-uuid\",\"uuid\":\"%s\"}", uuid); UpdateRequirement actual = new UpdateRequirement.AssertViewUUID(uuid); assertThat(UpdateRequirementParser.toJson(actual)) .as("AssertViewUUID should convert to the correct JSON value") .isEqualTo(expected); }
public void assignStates() { checkStateMappingCompleteness(allowNonRestoredState, operatorStates, tasks); Map<OperatorID, OperatorState> localOperators = new HashMap<>(operatorStates); /* find the states of all operators belonging to this task and compute additional information in first pass */ for (ExecutionJobVertex executionJobVertex : tasks) { List<OperatorIDPair> operatorIDPairs = executionJobVertex.getOperatorIDs(); Map<OperatorID, OperatorState> operatorStates = CollectionUtil.newHashMapWithExpectedSize(operatorIDPairs.size()); for (OperatorIDPair operatorIDPair : operatorIDPairs) { OperatorID operatorID = operatorIDPair .getUserDefinedOperatorID() .filter(localOperators::containsKey) .orElse(operatorIDPair.getGeneratedOperatorID()); OperatorState operatorState = localOperators.remove(operatorID); if (operatorState == null) { operatorState = new OperatorState( operatorID, executionJobVertex.getParallelism(), executionJobVertex.getMaxParallelism()); } operatorStates.put(operatorIDPair.getGeneratedOperatorID(), operatorState); } final TaskStateAssignment stateAssignment = new TaskStateAssignment( executionJobVertex, operatorStates, consumerAssignment, vertexAssignments); vertexAssignments.put(executionJobVertex, stateAssignment); for (final IntermediateResult producedDataSet : executionJobVertex.getInputs()) { consumerAssignment.put(producedDataSet.getId(), stateAssignment); } } /* repartition state */ for (TaskStateAssignment stateAssignment : vertexAssignments.values()) { if (stateAssignment.hasNonFinishedState /* FLINK-31963: We need to run repartitioning for stateless operators that have upstream output or downstream input states. */ || stateAssignment.hasUpstreamOutputStates() || stateAssignment.hasDownstreamInputStates()) { assignAttemptState(stateAssignment); } } /* actually assign the state */ for (TaskStateAssignment stateAssignment : vertexAssignments.values()) { /* If upstream has output states or downstream has input states, even the empty task state should be assigned for the current task in order to notify this task that the old states will send to it which likely should be filtered. */ if (stateAssignment.hasNonFinishedState || stateAssignment.isFullyFinished || stateAssignment.hasUpstreamOutputStates() || stateAssignment.hasDownstreamInputStates()) { assignTaskStateToExecutionJobVertices(stateAssignment); } } }
@Test void testChannelStateAssignmentDownscaling() throws JobException, JobExecutionException { List<OperatorID> operatorIds = buildOperatorIds(2); Map<OperatorID, OperatorState> states = buildOperatorStates(operatorIds, 3); Map<OperatorID, ExecutionJobVertex> vertices = buildVertices(operatorIds, 2, RANGE, ROUND_ROBIN); new StateAssignmentOperation(0, new HashSet<>(vertices.values()), states, false) .assignStates(); for (OperatorID operatorId : operatorIds) { /* input is range partitioned, so there is an overlap */ assertState( vertices, operatorId, states, 0, OperatorSubtaskState::getInputChannelState, 0, 1); assertState( vertices, operatorId, states, 1, OperatorSubtaskState::getInputChannelState, 1, 2); /* output is round robin redistributed */ assertState( vertices, operatorId, states, 0, OperatorSubtaskState::getResultSubpartitionState, 0, 2); assertState( vertices, operatorId, states, 1, OperatorSubtaskState::getResultSubpartitionState, 1); } assertThat( getAssignedState(vertices.get(operatorIds.get(0)), operatorIds.get(0), 0) .getOutputRescalingDescriptor()) .isEqualTo( rescalingDescriptor(to(0, 2), array(mappings(to(0, 1), to(1, 2))), set())); assertThat( getAssignedState(vertices.get(operatorIds.get(0)), operatorIds.get(0), 1) .getOutputRescalingDescriptor()) .isEqualTo(rescalingDescriptor(to(1), array(mappings(to(0, 1), to(1, 2))), set())); assertThat( getAssignedState(vertices.get(operatorIds.get(1)), operatorIds.get(1), 0) .getInputRescalingDescriptor()) .isEqualTo(rescalingDescriptor(to(0, 1), array(mappings(to(0, 2), to(1))), set(1))); assertThat( getAssignedState(vertices.get(operatorIds.get(1)), operatorIds.get(1), 1) .getInputRescalingDescriptor()) .isEqualTo(rescalingDescriptor(to(1, 2), array(mappings(to(0, 2), to(1))), set(1))); }
Double calculateMedian(List<Double> durationEntries) { if (durationEntries.isEmpty()) { return 0.0; } Collections.sort(durationEntries); int middle = durationEntries.size() / 2; if (durationEntries.size() % 2 == 1) { return durationEntries.get(middle); } else { double total = durationEntries.get(middle - 1) + durationEntries.get(middle); return total / 2; } }
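One behavioral detail of calculateMedian worth flagging: Collections.sort reorders the supplied list in place, so the caller's durationEntries is mutated as a side effect. A brief sketch of both branches (usageFormatter is assumed to be constructed as in the test below):
List<Double> odd = new ArrayList<>(Arrays.asList(3.0, 1.0, 2.0));
usageFormatter.calculateMedian(odd); /* returns 2.0, the middle element; odd is now sorted to [1.0, 2.0, 3.0] */
List<Double> even = new ArrayList<>(Arrays.asList(2.0, 9.0));
usageFormatter.calculateMedian(even); /* even count: mean of the two middle values, (2.0 + 9.0) / 2 = 5.5 */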
@Test void calculateMedianOf() { OutputStream out = new ByteArrayOutputStream(); UsageFormatter usageFormatter = new UsageFormatter(out); Double result = usageFormatter.calculateMedian(asList(2.0, 9.0)); assertThat(result, is(closeTo(5.5, EPSILON))); }
@Override public void close() { close(Duration.ofMillis(Long.MAX_VALUE)); }
@Test public void testInterceptorConstructClose() { try { Properties props = new Properties(); /* test with client ID assigned by KafkaProducer */ props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999"); props.setProperty(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG, MockProducerInterceptor.class.getName()); props.setProperty(MockProducerInterceptor.APPEND_STRING_PROP, "something"); KafkaProducer<String, String> producer = new KafkaProducer<>( props, new StringSerializer(), new StringSerializer()); assertEquals(1, MockProducerInterceptor.INIT_COUNT.get()); assertEquals(0, MockProducerInterceptor.CLOSE_COUNT.get()); /* Cluster metadata will only be updated on calling onSend. */ assertNull(MockProducerInterceptor.CLUSTER_META.get()); producer.close(); assertEquals(1, MockProducerInterceptor.INIT_COUNT.get()); assertEquals(1, MockProducerInterceptor.CLOSE_COUNT.get()); } finally { /* cleanup since we are using mutable static variables in MockProducerInterceptor */ MockProducerInterceptor.resetCounters(); } }
@Override public Addresses loadAddresses(ClientConnectionProcessListenerRegistry listenerRunner) { List<String> configuredAddresses = networkConfig.getAddresses(); if (configuredAddresses.isEmpty()) { configuredAddresses.add("127.0.0.1"); } Addresses addresses = new Addresses(); List<Address> allAddresses = new ArrayList<>(); for (String address : configuredAddresses) { Addresses socketAddresses = AddressHelper.getSocketAddresses(address, listenerRunner); addresses.addAll(socketAddresses); } allAddresses.addAll(addresses.primary()); allAddresses.addAll(addresses.secondary()); listenerRunner.onPossibleAddressesCollected(allAddresses); return addresses; }
@Test public void whenMix() throws UnknownHostException { ClientNetworkConfig config = new ClientNetworkConfig(); config.addAddress("10.0.0.1:5701"); config.addAddress("10.0.0.1:5702"); config.addAddress("10.0.0.2"); DefaultAddressProvider provider = new DefaultAddressProvider(config, () -> false); Addresses addresses = provider.loadAddresses(createConnectionProcessListenerRunner()); assertPrimary(addresses, new Address("10.0.0.1", 5701), new Address("10.0.0.1", 5702), new Address("10.0.0.2", 5701)); assertSecondary(addresses, new Address("10.0.0.2", 5702), new Address("10.0.0.2", 5703)); }
@Override public String getName() { return getLogger().getName(); }
@Test public void getName() throws Exception { Logger loggerFromClass = new MiddlewareLoggerImpl(MiddlewareLoggerImplTest.class); Logger logger = new MiddlewareLoggerImpl(MiddlewareLoggerImplTest.class.getCanonicalName()); Assert.assertEquals(loggerFromClass.getName(), logger.getName()); String appName = "app"; if (logger.isDebugEnabled()) { logger.debug("debug"); logger.debug("debug {}", "1"); logger.debug("debug {} {} {}", "1", "2", "3"); logger.debug("debug", new RuntimeException("runtime")); } if (logger.isDebugEnabled(appName)) { logger.debugWithApp(appName, "debug"); logger.debugWithApp(appName, "debug {}", "1"); logger.debugWithApp(appName, "debug {} {} {}", "1", "2", "3"); logger.debugWithApp(appName, "debug", new RuntimeException("runtime")); } if (logger.isInfoEnabled()) { logger.info("info"); logger.info("info {}", "1"); logger.info("info {} {} {}", "1", "2", "3"); logger.info("info", new RuntimeException("runtime")); } if (logger.isInfoEnabled(appName)) { logger.infoWithApp(appName, "info"); logger.infoWithApp(appName, "info {}", "1"); logger.infoWithApp(appName, "info {} {} {}", "1", "2", "3"); logger.infoWithApp(appName, "info", new RuntimeException("runtime")); } if (logger.isWarnEnabled()) { logger.warn("warn"); logger.warn("warn {}", "1"); logger.warn("warn {} {} {}", "1", "2", "3"); logger.warn("warn", new RuntimeException("runtime")); } if (logger.isWarnEnabled(appName)) { logger.warnWithApp(appName, "warn"); logger.warnWithApp(appName, "warn {}", "1"); logger.warnWithApp(appName, "warn {} {} {}", "1", "2", "3"); logger.warnWithApp(appName, "warn", new RuntimeException("runtime")); } if (logger.isErrorEnabled()) { logger.error("error"); logger.error("error {}", "1"); logger.error("error {} {} {}", "1", "2", "3"); logger.error("error", new RuntimeException("runtime")); } if (logger.isErrorEnabled(appName)) { logger.errorWithApp(appName, "error"); logger.errorWithApp(appName, "error {}", "1"); logger.errorWithApp(appName, "error {} {} {}", "1", "2", "3"); logger.errorWithApp(appName, "error", new RuntimeException("runtime")); } }
public static List<Transformation<?>> optimize(List<Transformation<?>> transformations) { final Map<Transformation<?>, Set<Transformation<?>>> outputMap = buildOutputMap(transformations); final LinkedHashSet<Transformation<?>> chainedTransformations = new LinkedHashSet<>(); final Set<Transformation<?>> alreadyTransformed = Sets.newIdentityHashSet(); final Queue<Transformation<?>> toTransformQueue = Queues.newArrayDeque(transformations); while (!toTransformQueue.isEmpty()) { final Transformation<?> transformation = toTransformQueue.poll(); if (!alreadyTransformed.contains(transformation)) { alreadyTransformed.add(transformation); final ChainInfo chainInfo = chainWithInputIfPossible(transformation, outputMap); chainedTransformations.add(chainInfo.newTransformation); chainedTransformations.removeAll(chainInfo.oldTransformations); alreadyTransformed.addAll(chainInfo.oldTransformations); /* Add the chained transformation and its inputs to the to-optimize list */ toTransformQueue.add(chainInfo.newTransformation); toTransformQueue.addAll(chainInfo.newTransformation.getInputs()); } } return new ArrayList<>(chainedTransformations); }
@Test void testTransformationWithMultipleOutputs() { ExternalPythonProcessOperator<?, ?> processOperator1 = createProcessOperator("f1", Types.STRING(), Types.LONG()); ExternalPythonProcessOperator<?, ?> processOperator2 = createProcessOperator("f2", Types.STRING(), Types.LONG()); ExternalPythonProcessOperator<?, ?> processOperator3 = createProcessOperator("f3", Types.LONG(), Types.INT()); Transformation<?> sourceTransformation = mock(SourceTransformation.class); Transformation<?> processTransformation1 = new OneInputTransformation( sourceTransformation, "process", processOperator1, processOperator1.getProducedType(), 2); Transformation<?> processTransformation2 = new OneInputTransformation( processTransformation1, "process", processOperator2, processOperator2.getProducedType(), 2); Transformation<?> processTransformation3 = new OneInputTransformation( processTransformation1, "process", processOperator3, processOperator3.getProducedType(), 2); List<Transformation<?>> transformations = new ArrayList<>(); transformations.add(processTransformation2); transformations.add(processTransformation3); List<Transformation<?>> optimized = PythonOperatorChainingOptimizer.optimize(transformations); /* no chaining optimization occurred */ assertThat(optimized).hasSize(4); }
@Override public ScheduledFuture<?> schedule(Runnable command, long delay, TimeUnit unit) { return delegate.schedule(command, delay, unit); }
@Test public void schedule() { underTest.schedule(runnable, delay, SECONDS); verify(executorService).schedule(runnable, delay, SECONDS); }
@CanIgnoreReturnValue public final Ordered containsAtLeast( @Nullable Object k0, @Nullable Object v0, @Nullable Object... rest) { return containsAtLeastEntriesIn(accumulateMap("containsAtLeast", k0, v0, rest)); }
@Test public void containsAtLeastWrongValue_sameToStringForValues() { expectFailureWhenTestingThat(ImmutableMap.of("jan", 1L, "feb", 2L, "mar", 3L)) .containsAtLeast("jan", 1, "feb", 2); assertFailureKeys( "keys with wrong values", "for key", "expected value", "but got value", "for key", "expected value", "but got value", "---", "expected to contain at least", "but was"); assertFailureValueIndexed("for key", 0, "jan"); assertFailureValueIndexed("expected value", 0, "1 (java.lang.Integer)"); assertFailureValueIndexed("but got value", 0, "1 (java.lang.Long)"); assertFailureValueIndexed("for key", 1, "feb"); assertFailureValueIndexed("expected value", 1, "2 (java.lang.Integer)"); assertFailureValueIndexed("but got value", 1, "2 (java.lang.Long)"); }
@Override public LoggingConfiguration getConfiguration(final Path file) throws BackgroundException { final Path bucket = containerService.getContainer(file); if(bucket.isRoot()) { return LoggingConfiguration.empty(); } try { final Storage.Buckets.Get request = session.getClient().buckets().get(bucket.getName()); if(containerService.getContainer(file).attributes().getCustom().containsKey(GoogleStorageAttributesFinderFeature.KEY_REQUESTER_PAYS)) { request.setUserProject(session.getHost().getCredentials().getUsername()); } final Bucket.Logging status = request.execute().getLogging(); if(null == status) { return LoggingConfiguration.empty(); } final LoggingConfiguration configuration = new LoggingConfiguration( status.getLogObjectPrefix() != null, status.getLogBucket()); try { configuration.setContainers(new GoogleStorageBucketListService(session).list( new Path(String.valueOf(Path.DELIMITER), EnumSet.of(Path.Type.volume, Path.Type.directory)), new DisabledListProgressListener()).toList()); } catch(AccessDeniedException | InteroperabilityException e) { log.warn(String.format("Failure listing buckets. %s", e.getMessage())); } return configuration; } catch(IOException e) { try { throw new GoogleStorageExceptionMappingService().map("Failure to read attributes of {0}", e, bucket); } catch(AccessDeniedException | InteroperabilityException l) { log.warn(String.format("Missing permission to read logging configuration for %s %s", bucket.getName(), e.getMessage())); return LoggingConfiguration.empty(); } } }
@Test(expected = NotfoundException.class) public void testReadNotFound() throws Exception { new GoogleStorageLoggingFeature(session).getConfiguration( new Path(new AsciiRandomStringService().random().toLowerCase(Locale.ROOT), EnumSet.of(Path.Type.directory)) ); }
static int determineOperatorReservoirSize(int operatorParallelism, int numPartitions) { int coordinatorReservoirSize = determineCoordinatorReservoirSize(numPartitions); int totalOperatorSamples = coordinatorReservoirSize * OPERATOR_OVER_SAMPLE_RATIO; return (int) Math.ceil((double) totalOperatorSamples / operatorParallelism); }
@Test public void testOperatorReservoirSize() { assertThat(SketchUtil.determineOperatorReservoirSize(5, 3)) .isEqualTo((10_002 * SketchUtil.OPERATOR_OVER_SAMPLE_RATIO) / 5); assertThat(SketchUtil.determineOperatorReservoirSize(123, 123)) .isEqualTo((123_00 * SketchUtil.OPERATOR_OVER_SAMPLE_RATIO) / 123); assertThat(SketchUtil.determineOperatorReservoirSize(256, 123)) .isEqualTo( (int) Math.ceil((double) (123_00 * SketchUtil.OPERATOR_OVER_SAMPLE_RATIO) / 256)); assertThat(SketchUtil.determineOperatorReservoirSize(5_120, 10_123)) .isEqualTo( (int) Math.ceil((double) (992_054 * SketchUtil.OPERATOR_OVER_SAMPLE_RATIO) / 5_120)); }
public Map<TopicPartition, Long> endOffsets(Collection<TopicPartition> partitions, Timer timer) { return beginningOrEndOffset(partitions, ListOffsetsRequest.LATEST_TIMESTAMP, timer); }
@Test public void testEndOffsets() { buildFetcher(); assignFromUser(singleton(tp0)); client.prepareResponse(listOffsetResponse(tp0, Errors.NONE, ListOffsetsRequest.LATEST_TIMESTAMP, 5L)); assertEquals(singletonMap(tp0, 5L), offsetFetcher.endOffsets(singleton(tp0), time.timer(5000L))); }
public static String collectPath(String... pathParts) { final StringBuilder sb = new StringBuilder(); for (String item : pathParts) { if (StringUtils.isBlank(item)) { continue; } final String path = trimPath(item); if (StringUtils.isNotBlank(path)) { sb.append(SLASH).append(path); } } return sb.length() > 0 ? sb.toString() : String.valueOf(SLASH); }
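collectPath above skips blank segments and re-roots the result at '/', falling back to a bare "/" when nothing survives trimming. A hedged illustration (assuming trimPath strips the surrounding slashes from each part, which the joined output below relies on):
PathUtils.collectPath("api/", "", "/v2/pets/"); /* "/api/v2/pets" under that assumption */
PathUtils.collectPath(""); /* "/", matching the empty-path test below */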
@Test(description = "not fail when passed path is empty") public void testEmptyCollectedPath() { final String path = PathUtils.collectPath(""); assertEquals(path, "/"); }
public String getReference(Reference reference) { StringWriter writer = new StringWriter(); try { getWriter(writer).writeReference(reference); } catch (IOException e) { throw new AssertionError("Unexpected IOException"); } return writer.toString(); }
@Test public void testReference() throws IOException { TestDexFormatter formatter = new TestDexFormatter(); Assert.assertEquals( "reference", formatter.getReference(mock(Reference.class))); }
public static void logSQL(final QueryContext queryContext, final boolean showSimple, final ExecutionContext executionContext) { log("Logic SQL: {}", queryContext.getSql()); if (showSimple) { logSimpleMode(executionContext.getExecutionUnits()); } else { logNormalMode(executionContext.getExecutionUnits()); } }
@Test void assertLogNormalSQLWithoutParameter() { SQLLogger.logSQL(queryContext, false, new ExecutionContext(queryContext, executionUnits, mock(RouteContext.class))); assertThat(appenderList.size(), is(4)); assertTrue(appenderList.stream().allMatch(loggingEvent -> Level.INFO == loggingEvent.getLevel())); assertThat(appenderList.get(0).getFormattedMessage(), is("Logic SQL: SELECT * FROM t_user")); assertThat(appenderList.get(1).getFormattedMessage(), is("Actual SQL: db1 ::: SELECT * FROM t_user")); assertThat(appenderList.get(2).getFormattedMessage(), is("Actual SQL: db2 ::: SELECT * FROM t_user")); assertThat(appenderList.get(3).getFormattedMessage(), is("Actual SQL: db3 ::: SELECT * FROM t_user")); }
public static boolean canLoad(String filePath) { if (filePath == null) { return false; } try (DataInputStream dis = new DataInputStream(Files.newInputStream(Paths.get(filePath)))) { int magic = dis.readInt(); return magic == CLASS_MAGIC || magic == BTraceProbePersisted.MAGIC; } catch (IOException ignored) { } return false; }
@Test void canLoadNonExistingFile() { assertFalse(BTraceProbeFactory.canLoad("!invalid path")); }
public synchronized void setLevel(Level newLevel) { if (level == newLevel) { /* nothing to do */ return; } if (newLevel == null && isRootLogger()) { throw new IllegalArgumentException("The level of the root logger cannot be set to null"); } level = newLevel; if (newLevel == null) { effectiveLevelInt = parent.effectiveLevelInt; newLevel = parent.getEffectiveLevel(); } else { effectiveLevelInt = newLevel.levelInt; } if (childrenList != null) { int len = childrenList.size(); for (int i = 0; i < len; i++) { Logger child = (Logger) childrenList.get(i); /* tell child to handle parent levelInt change */ child.handleParentLevelChange(effectiveLevelInt); } } /* inform listeners */ loggerContext.fireOnLevelChange(this, newLevel); }
@Test public void testEnabledX_Off() throws Exception { root.setLevel(Level.OFF); checkLevelThreshold(loggerTest, Level.OFF); }
public PullRequestFilesProducer(GitHubEndpoint endpoint) throws Exception { super(endpoint); Registry registry = endpoint.getCamelContext().getRegistry(); Object service = registry.lookupByName(GitHubConstants.GITHUB_PULL_REQUEST_SERVICE); if (service != null) { LOG.debug("Using PullRequestService found in registry {}", service.getClass().getCanonicalName()); pullRequestService = (PullRequestService) service; } else { pullRequestService = new PullRequestService(); } initService(pullRequestService); }
@Test public void testPullRequestFilesProducer() { PullRequest pullRequest = pullRequestService.addPullRequest("testPullRequestFilesProducer"); latestPullRequestNumber = pullRequest.getNumber(); CommitFile file = new CommitFile(); file.setFilename("testfile"); List<CommitFile> commitFiles = new ArrayList<>(); commitFiles.add(file); pullRequestService.setFiles(latestPullRequestNumber, commitFiles); Endpoint filesProducerEndpoint = getMandatoryEndpoint("direct:validPullRequest"); Exchange exchange = filesProducerEndpoint.createExchange(); Exchange resp = template.send(filesProducerEndpoint, exchange); assertEquals(commitFiles, resp.getMessage().getBody()); }
public Release findLatestActiveRelease(Namespace namespace) { return findLatestActiveRelease(namespace.getAppId(), namespace.getClusterName(), namespace.getNamespaceName()); }
@Test public void testFindRelease() throws Exception { String someAppId = "1"; String someClusterName = "someClusterName"; String someNamespaceName = "someNamespaceName"; long someReleaseId = 1; String someReleaseKey = "someKey"; String someValidConfiguration = "{\"apollo.bar\": \"foo\"}"; Release someRelease = MockBeanFactory.mockRelease(someReleaseId, someReleaseKey, someAppId, someClusterName, someNamespaceName, someValidConfiguration); when(releaseRepository.findFirstByAppIdAndClusterNameAndNamespaceNameAndIsAbandonedFalseOrderByIdDesc(someAppId, someClusterName, someNamespaceName)) .thenReturn(someRelease); Release result = releaseService.findLatestActiveRelease(someAppId, someClusterName, someNamespaceName); verify(releaseRepository, times(1)) .findFirstByAppIdAndClusterNameAndNamespaceNameAndIsAbandonedFalseOrderByIdDesc(someAppId, someClusterName, someNamespaceName); assertEquals(someAppId, result.getAppId()); assertEquals(someClusterName, result.getClusterName()); assertEquals(someReleaseId, result.getId()); assertEquals(someReleaseKey, result.getReleaseKey()); assertEquals(someValidConfiguration, result.getConfigurations()); }
@Override public ConnectResponse<ConnectorInfo> create( final String connector, final Map<String, String> config ) { try { final Map<String, String> maskedConfig = QueryMask.getMaskedConnectConfig(config); LOG.debug("Issuing create request to Kafka Connect at URI {} with name {} and config {}", connectUri, connector, maskedConfig); final ConnectResponse<ConnectorInfo> connectResponse = withRetries(() -> Request .post(resolveUri(CONNECTORS)) .setHeaders(requestHeaders) .responseTimeout(Timeout.ofMilliseconds(requestTimeoutMs)) .connectTimeout(Timeout.ofMilliseconds(requestTimeoutMs)) .bodyString( MAPPER.writeValueAsString( ImmutableMap.of( "name", connector, "config", config)), ContentType.APPLICATION_JSON ) .execute(httpClient) .handleResponse( createHandler(HttpStatus.SC_CREATED, new TypeReference<ConnectorInfo>() {}, Function.identity()))); connectResponse.error() .ifPresent(error -> LOG.warn("Did not CREATE connector {}: {}", connector, error)); return connectResponse; } catch (final Exception e) { throw new KsqlServerException(e); } }
@Test public void testCreateWithError() throws JsonProcessingException { /* Given: */ WireMock.stubFor( WireMock.post(WireMock.urlEqualTo(pathPrefix + "/connectors")) .withHeader(AUTHORIZATION.toString(), new EqualToPattern(AUTH_HEADER)) .withHeader(CUSTOM_HEADER_NAME, new EqualToPattern(CUSTOM_HEADER_VALUE)) .willReturn(WireMock.aResponse() .withStatus(HttpStatus.SC_INTERNAL_SERVER_ERROR) .withBody("Oh no!")) ); /* When: */ final ConnectResponse<ConnectorInfo> response = client.create("foo", ImmutableMap.of()); /* Then: */ assertThat("Expected no datum!", !response.datum().isPresent()); assertThat(response.error(), OptionalMatchers.of(is("Oh no!"))); }
public static Index withRelations(String name) { return new Index(name, true); }
@Test public void withRelations_index_name_can_not_contain_underscore_except__all_keyword() { /* doesn't fail */ Index.withRelations("_all"); assertThatThrownBy(() -> Index.withRelations("_")) .isInstanceOf(IllegalArgumentException.class) .hasMessage("Index name must be lower-case letters or '_all': _"); }
@Override public Map<String, ColumnHandle> getColumnHandles(ConnectorSession session, ConnectorTableHandle tableHandle) { ExampleTableHandle exampleTableHandle = (ExampleTableHandle) tableHandle; checkArgument(exampleTableHandle.getConnectorId().equals(connectorId), "tableHandle is not for this connector"); ExampleTable table = exampleClient.getTable(exampleTableHandle.getSchemaName(), exampleTableHandle.getTableName()); if (table == null) { throw new TableNotFoundException(exampleTableHandle.toSchemaTableName()); } ImmutableMap.Builder<String, ColumnHandle> columnHandles = ImmutableMap.builder(); int index = 0; for (ColumnMetadata column : table.getColumnsMetadata()) { columnHandles.put(column.getName(), new ExampleColumnHandle(connectorId, column.getName(), column.getType(), index)); index++; } return columnHandles.build(); }
@Test public void testGetColumnHandles() { /* known table */ assertEquals(metadata.getColumnHandles(SESSION, NUMBERS_TABLE_HANDLE), ImmutableMap.of( "text", new ExampleColumnHandle(CONNECTOR_ID, "text", createUnboundedVarcharType(), 0), "value", new ExampleColumnHandle(CONNECTOR_ID, "value", BIGINT, 1))); /* unknown table */ try { metadata.getColumnHandles(SESSION, new ExampleTableHandle(CONNECTOR_ID, "unknown", "unknown")); fail("Expected getColumnHandle of unknown table to throw a TableNotFoundException"); } catch (TableNotFoundException expected) { } try { metadata.getColumnHandles(SESSION, new ExampleTableHandle(CONNECTOR_ID, "example", "unknown")); fail("Expected getColumnHandle of unknown table to throw a TableNotFoundException"); } catch (TableNotFoundException expected) { } }
@Override public void delete(PageId pageId) throws IOException, PageNotFoundException { Callable<Void> callable = () -> { mPageStore.delete(pageId); return null; }; try { mTimeLimter.callWithTimeout(callable, mTimeoutMs, TimeUnit.MILLISECONDS); } catch (InterruptedException e) { /* Task got cancelled by others, interrupt the current thread and then throw a runtime ex to make the higher level stop. */ Thread.currentThread().interrupt(); throw new RuntimeException(e); } catch (TimeoutException e) { Metrics.STORE_DELETE_TIMEOUT.inc(); throw new IOException(e); } catch (RejectedExecutionException e) { Metrics.STORE_THREADS_REJECTED.inc(); throw new IOException(e); } catch (Throwable t) { Throwables.propagateIfPossible(t, IOException.class, PageNotFoundException.class); throw new IOException(t); } }
@Test public void delete() throws Exception { mPageStore.put(PAGE_ID, PAGE); mTimeBoundPageStore.delete(PAGE_ID); assertThrows(PageNotFoundException.class, () -> mPageStore.get(PAGE_ID, 0, PAGE.length, new ByteArrayTargetBuffer(mBuf, 0))); }
@Override public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException { if(directory.isRoot()) { final AttributedList<Path> list = new AttributedList<>(); for(RootFolder root : session.roots()) { switch(root.getRootFolderType()) { case 0: /* My Files */ case 1: /* Common */ list.add(new Path(directory, PathNormalizer.name(root.getName()), EnumSet.of(Path.Type.directory, Path.Type.volume), attributes.toAttributes(root))); break; } listener.chunk(directory, list); } return list; } else { try { final AttributedList<Path> children = new AttributedList<>(); int pageIndex = 0; int fileCount = 0; FileContents files; do { files = new FilesApi(this.session.getClient()).filesGetById(URIEncoder.encode(fileid.getFileId(directory)), pageIndex, chunksize, "Name asc", 0, /* All */ true, false, false); for(File f : files.getFiles()) { final PathAttributes attrs = attributes.toAttributes(f); final EnumSet<Path.Type> type = (f.getFlags() & 1) == 1 ? EnumSet.of(Path.Type.directory) : EnumSet.of(Path.Type.file); children.add(new Path(directory, f.getName(), type, attrs)); } pageIndex++; fileCount += files.getFiles().size(); listener.chunk(directory, children); } while(fileCount < files.getTotalRowCount()); return children; } catch(ApiException e) { throw new StoregateExceptionMappingService(fileid).map("Listing directory {0} failed", e, directory); } } }
@Test public void testListRoot() throws Exception { final StoregateIdProvider nodeid = new StoregateIdProvider(session); final Path directory = new Path("/", EnumSet.of(AbstractPath.Type.directory, Path.Type.volume)); final AttributedList<Path> list = new StoregateListService(session, nodeid).list( directory, new DisabledListProgressListener()); assertNotSame(AttributedList.emptyList(), list); assertFalse(list.isEmpty()); assertEquals(2, list.size()); assertNotNull(list.find(new SimplePathPredicate(new Path("/Common files", EnumSet.of(Path.Type.directory, Path.Type.volume))))); assertNotNull(list.find(new SimplePathPredicate(new Path("/My files", EnumSet.of(Path.Type.directory, Path.Type.volume))))); for(Path f : list) { assertSame(directory, f.getParent()); assertFalse(f.getName().contains(String.valueOf(Path.DELIMITER))); assertTrue(f.attributes().getModificationDate() > 0); assertTrue(f.attributes().getCreationDate() > 0); assertNotNull(nodeid.getFileId(new Path(f).withAttributes(PathAttributes.EMPTY))); assertEquals(f.attributes(), new StoregateAttributesFinderFeature(session, nodeid).find(f)); } }
public String toString() { return getFilePaths() == null || getFilePaths().length == 0 ? "File Input (unknown file)" : "File Input (" + Arrays.toString(this.getFilePaths()) + ')'; }
@Test void testToStringWithoutPathSet() { final DummyFileInputFormat format = new DummyFileInputFormat(); assertThat(format.toString()) .as("The toString() should be correct.") .isEqualTo("File Input (unknown file)"); }
@Override public String telnet(Channel channel, String message) { if (StringUtils.isEmpty(message)) { return "Please input service name, eg: \r\ncd XxxService\r\ncd com.xxx.XxxService"; } StringBuilder buf = new StringBuilder(); if ("/".equals(message) || "..".equals(message)) { String service = (String) channel.getAttribute(SERVICE_KEY); channel.removeAttribute(SERVICE_KEY); buf.append("Cancelled default service ").append(service).append('.'); } else { boolean found = false; for (Exporter<?> exporter : DubboProtocol.getDubboProtocol().getExporters()) { if (message.equals(exporter.getInvoker().getInterface().getSimpleName()) || message.equals(exporter.getInvoker().getInterface().getName()) || message.equals(exporter.getInvoker().getUrl().getPath())) { found = true; break; } } if (found) { channel.setAttribute(SERVICE_KEY, message); buf.append("Used the ") .append(message) .append(" as default.\r\nYou can cancel default service by command: cd /"); } else { buf.append("No such service ").append(message); } } return buf.toString(); }
@Test void testChangePath() throws RemotingException { ExtensionLoader.getExtensionLoader(Protocol.class) .getExtension(DubboProtocol.NAME) .export(mockInvoker); String result = change.telnet(mockChannel, "demo"); assertEquals("Used the demo as default.\r\nYou can cancel default service by command: cd /", result); }
@Override public void execute(Context context) { editionProvider.get().ifPresent(edition -> { if (!edition.equals(EditionProvider.Edition.COMMUNITY)) { return; } Map<String, Integer> filesPerLanguage = reportReader.readMetadata().getNotAnalyzedFilesByLanguageMap() .entrySet() .stream() .filter(entry -> entry.getValue() > 0) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); if (filesPerLanguage.isEmpty()) { return; } ceTaskMessages.add(constructMessage(filesPerLanguage)); computeMeasures(filesPerLanguage); }); }
@Test public void do_nothing_SQ_community_edition_if_cpp_files_in_report_is_zero() { when(editionProvider.get()).thenReturn(Optional.of(EditionProvider.Edition.COMMUNITY)); ScannerReport.AnalysisWarning warning1 = ScannerReport.AnalysisWarning.newBuilder().setText("warning 1").build(); ScannerReport.AnalysisWarning warning2 = ScannerReport.AnalysisWarning.newBuilder().setText("warning 2").build(); ImmutableList<ScannerReport.AnalysisWarning> warnings = of(warning1, warning2); reportReader.setAnalysisWarnings(warnings); reportReader.setMetadata(ScannerReport.Metadata.newBuilder().putNotAnalyzedFilesByLanguage("C++", 0).build()); underTest.execute(new TestComputationStepContext()); verify(ceTaskMessages, never()).add(any()); assertThat(measureRepository.getAddedRawMeasure(PROJECT_REF, UNANALYZED_C_KEY)).isEmpty(); assertThat(measureRepository.getAddedRawMeasure(PROJECT_REF, UNANALYZED_CPP_KEY)).isEmpty(); }
@Override public Collection<String> split(String in) { String text = in.replaceAll("\\r?\\n", " "); return Arrays.asList(StringUtils.split(text)); }
@Test public void testSplit() { Collection<String> split = wordSplitStrategy.split("Hello World\n Foo Bar"); assertEquals(4, split.size()); assertEquals("Bar", new ArrayList<>(split).get(3)); }
@Override public <UK, UV> MapState<UK, UV> getMapState(MapStateDescriptor<UK, UV> stateProperties) { KeyedStateStore keyedStateStore = checkPreconditionsAndGetKeyedStateStore(stateProperties); stateProperties.initializeSerializerUnlessSet(this::createSerializer); return keyedStateStore.getMapState(stateProperties); }
@Test void testMapStateReturnsEmptyMapByDefault() throws Exception { StreamingRuntimeContext context = createMapOperatorRuntimeContext(); MapStateDescriptor<Integer, String> descr = new MapStateDescriptor<>("name", Integer.class, String.class); MapState<Integer, String> state = context.getMapState(descr); Iterable<Map.Entry<Integer, String>> value = state.entries(); assertThat(value).isNotNull(); assertThat(value.iterator()).isExhausted(); }
public String transform() throws ScanException { StringBuilder stringBuilder = new StringBuilder(); compileNode(node, stringBuilder, new Stack<Node>()); return stringBuilder.toString(); }
@Test public void definedAsEmpty() throws ScanException { propertyContainer0.putProperty("empty", ""); String input = "a=${empty}"; Node node = makeNode(input); NodeToStringTransformer nodeToStringTransformer = new NodeToStringTransformer(node, propertyContainer0); Assertions.assertEquals("a=", nodeToStringTransformer.transform()); }
@Override public DynamicMessage parse(final InputStream inputStream) { try { return DynamicMessage.newBuilder(messageDescriptor) .mergeFrom(inputStream, ExtensionRegistryLite.getEmptyRegistry()) .build(); } catch (IOException e) { throw new ShenyuGrpcException("Unable to merge from the supplied input stream", e); } }
@Test public void testParseThrowException() { InputStream inputStream = new ByteArrayInputStream("test".getBytes()); assertThrows(RuntimeException.class, () -> dynamicMessageMarshaller.parse(inputStream)); }
@Override public ConnectResponse<ConnectorStateInfo> status(final String connector) { try { LOG.debug("Issuing status request to Kafka Connect at URI {} with name {}", connectUri, connector); final ConnectResponse<ConnectorStateInfo> connectResponse = withRetries(() -> Request .get(resolveUri(CONNECTORS + "/" + connector + STATUS)) .setHeaders(requestHeaders) .responseTimeout(Timeout.ofMilliseconds(requestTimeoutMs)) .connectTimeout(Timeout.ofMilliseconds(requestTimeoutMs)) .execute(httpClient) .handleResponse( createHandler(HttpStatus.SC_OK, new TypeReference<ConnectorStateInfo>() {}, Function.identity()))); connectResponse.error() .ifPresent(error -> LOG.warn("Could not query status of connector {}: {}", connector, error)); return connectResponse; } catch (final Exception e) { throw new KsqlServerException(e); } }
@Test public void testStatus() throws JsonProcessingException { /* Given: */ WireMock.stubFor( WireMock.get(WireMock.urlEqualTo(pathPrefix + "/connectors/foo/status")) .withHeader(AUTHORIZATION.toString(), new EqualToPattern(AUTH_HEADER)) .withHeader(CUSTOM_HEADER_NAME, new EqualToPattern(CUSTOM_HEADER_VALUE)) .willReturn(WireMock.aResponse() .withStatus(HttpStatus.SC_OK) .withBody(MAPPER.writeValueAsString(SAMPLE_STATUS))) ); /* When: */ final ConnectResponse<ConnectorStateInfo> response = client.status("foo"); /* Then: */ final ConnectorStateInfo connectorStateInfo = response.datum().orElseThrow(AssertionError::new); /* equals is not implemented on ConnectorStateInfo */ assertThat(connectorStateInfo.name(), is(SAMPLE_STATUS.name())); assertThat(connectorStateInfo.type(), is(SAMPLE_STATUS.type())); assertThat(connectorStateInfo.connector().state(), is(SAMPLE_STATUS.connector().state())); assertThat(connectorStateInfo.connector().workerId(), is(SAMPLE_STATUS.connector().workerId())); assertThat(connectorStateInfo.connector().trace(), is(SAMPLE_STATUS.connector().trace())); assertThat(connectorStateInfo.tasks().size(), is(SAMPLE_STATUS.tasks().size())); assertThat(connectorStateInfo.tasks().get(0).id(), is(SAMPLE_STATUS.tasks().get(0).id())); assertThat("Expected no error!", !response.error().isPresent()); }
public void parse(InputStream stream, ContentHandler handler, Metadata metadata, ParseContext context) throws IOException, SAXException, TikaException { /* set OfficeParserConfig if the user hasn't specified one */ configure(context); /* Have the OOXML file processed */ OOXMLExtractorFactory.parse(stream, handler, metadata, context); }
@Test public void testEmbeddedPDF() throws Exception { Metadata metadata = new Metadata(); StringWriter sw = new StringWriter(); SAXTransformerFactory factory = (SAXTransformerFactory) SAXTransformerFactory.newInstance(); TransformerHandler handler = factory.newTransformerHandler(); handler.getTransformer().setOutputProperty(OutputKeys.METHOD, "xml"); handler.getTransformer().setOutputProperty(OutputKeys.INDENT, "no"); handler.setResult(new StreamResult(sw)); try (InputStream input = getResourceAsStream( "/test-documents/testWORD_embedded_pdf.docx")) { new OOXMLParser().parse(input, handler, metadata, new ParseContext()); } String xml = sw.toString(); int i = xml.indexOf("Here is the pdf file:"); int j = xml.indexOf("<div class=\"embedded\" id=\"rId5\"/>"); int k = xml.indexOf("Bye Bye"); int l = xml.indexOf("<div class=\"embedded\" id=\"rId6\"/>"); int m = xml.indexOf("Bye for real."); assertTrue(i != -1); assertTrue(j != -1); assertTrue(k != -1); assertTrue(l != -1); assertTrue(m != -1); assertTrue(i < j); assertTrue(j < k); assertTrue(k < l); assertTrue(l < m); }
public static String u8(long v) { char[] result = new char[16]; for (int i = 0; i < 16; i++) { result[15 - i] = Character.forDigit((int) v & 0x0f, 16); v >>= 4; } return new String(result); }
@Test public void testU8() { Assert.assertEquals("0000000000000000", Hex.u8(0L)); Assert.assertEquals("0000016b5086c128", Hex.u8(1560424137000L)); Assert.assertEquals("000462d53c8abac0", Hex.u8(1234567890123456L)); }
@Override public boolean delete() throws FileSystemException { return requireResolvedFileObject().delete(); }
@Test public void testDelegatesDelete() throws FileSystemException { fileObject.delete(); verify( resolvedFileObject, times( 1 ) ).delete(); }
public void addAll(PartitionIdSet other) { bitSet.or(other.bitSet); resetSize(); }
@Test public void test_addAll_fromPartitionIdSet() { partitionIdSet.addAll(listOf(0, 1, 2, 3, 4)); PartitionIdSet other = new PartitionIdSet(271); other.addAll(partitionIdSet); assertContents(other); }
@Override public Serde<GenericKey> create( final FormatInfo format, final PersistenceSchema schema, final KsqlConfig ksqlConfig, final Supplier<SchemaRegistryClient> schemaRegistryClientFactory, final String loggerNamePrefix, final ProcessingLogContext processingLogContext, final Optional<TrackedCallback> tracker ) { return createInner( format, schema, ksqlConfig, schemaRegistryClientFactory, loggerNamePrefix, processingLogContext, tracker ); }
@Test public void shouldWrapInLoggingSerdeWindowed() { /* When: */ factory .create(format, TIMED_WND, schema, config, srClientFactory, LOGGER_PREFIX, processingLogCxt, Optional.empty()); /* Then: */ verify(innerFactory).wrapInLoggingSerde(any(), eq(LOGGER_PREFIX), eq(processingLogCxt), eq(Optional.of(queryId))); }
@Override public Collection<ThreadPoolPlugin> getAllEnableThreadPoolPlugins() { return enableThreadPoolPlugins.values(); }
@Test public void testGetAllEnableThreadPoolPlugins() { GlobalThreadPoolPluginManager manager = new DefaultGlobalThreadPoolPluginManager(); manager.enableThreadPoolPlugin(new TestPlugin("1")); manager.enableThreadPoolPlugin(new TestPlugin("2")); Assert.assertEquals(2, manager.getAllEnableThreadPoolPlugins().size()); }
public static UThrow create(UExpression expression) { return new AutoValue_UThrow(expression); }
@Test public void serialization() { SerializableTester.reserializeAndAssert( UThrow.create(UNewClass.create(UClassIdent.create("java.lang.IllegalArgumentException")))); }
public long getTimeout() { return timeout; }
@Test @DirtiesContext public void testCreateEndpointDefaultNoTimeout() throws Exception { ExecEndpoint e = createExecEndpoint("exec:test"); assertEquals(ExecEndpoint.NO_TIMEOUT, e.getTimeout()); }
@VisibleForTesting static OpenAPI createDocumentation( String title, DocumentingRestEndpoint restEndpoint, RestAPIVersion apiVersion) { final OpenAPI openApi = new OpenAPI(); /* eagerly initialize some data-structures to simplify operations later on */ openApi.setPaths(new io.swagger.v3.oas.models.Paths()); openApi.setComponents(new Components()); setInfo(openApi, title, apiVersion); List<MessageHeaders> specs = restEndpoint.getSpecs().stream() .filter(spec -> spec.getSupportedAPIVersions().contains(apiVersion)) .filter(ApiSpecGeneratorUtils::shouldBeDocumented) .collect(Collectors.toList()); final Set<String> usedOperationIds = new HashSet<>(); specs.forEach(spec -> add(spec, openApi, usedOperationIds)); final List<Schema> asyncOperationSchemas = collectAsyncOperationResultVariants(specs); /* this adds the schema for every JSON object */ openApi.components( new Components().schemas(new HashMap<>(modelConverterContext.getDefinedModels()))); injectAsyncOperationResultSchema(openApi, asyncOperationSchemas); overrideIdSchemas(openApi); overrideSerializeThrowableSchema(openApi); sortProperties(openApi); sortSchemas(openApi); return openApi; }
@Test void testModelNameClashByTopLevelClassesDetected() { assertThatThrownBy( () -> OpenApiSpecGenerator.createDocumentation( "title", DocumentingRestEndpoint.forRestHandlerSpecifications( new TestTopLevelNameClashingMessageHeaders1(), new TestTopLevelNameClashingMessageHeaders2()), RuntimeRestAPIVersion.V0)) .isInstanceOf(IllegalStateException.class) .hasMessageContaining("clash"); }
private static LongStream range(long start, long end, long step) { return start > end ? LongStream.empty() : LongStream.iterate(start, n -> n + step).limit(1 + (end - start) / step); }
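The private range helper above is end-inclusive, sizing the stream as 1 + (end - start) / step, whereas java.util.stream.LongStream.range excludes its end. A quick worked check of the arithmetic:
/* start=0, end=10, step=3: limit = 1 + (10 - 0) / 3 = 4, yielding 0, 3, 6, 9 */
/* start=0, end=9, step=3: limit = 1 + 9 / 3 = 4 as well, so an end that lands exactly on a step is emitted */
/* start > end short-circuits to LongStream.empty() */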
@Test public void when_receiveRandomTimestamps_then_emitAscending() { /* Given */ final List<Long> timestampsToAdd = LongStream.range(0, 100).boxed().collect(toList()); shuffle(timestampsToAdd); ArrayList<Object> inbox = new ArrayList<>(); for (long ts : timestampsToAdd) { inbox.add(event(ts, 1)); } for (long i = 0; i <= 105; i++) { inbox.add(wm(i)); } List<Object> expectedOutbox = new ArrayList<>(); expectedOutbox.addAll(Arrays.asList( outboxFrame(0, 1), wm(0), outboxFrame(1, 2), wm(1), outboxFrame(2, 3), wm(2), outboxFrame(3, 4), wm(3) )); for (long ts = 4; ts < 100; ts++) { expectedOutbox.add(outboxFrame(ts, 4)); expectedOutbox.add(wm(ts)); } expectedOutbox.addAll(Arrays.asList( outboxFrame(100, 3), wm(100), outboxFrame(101, 2), wm(101), outboxFrame(102, 1), wm(102), wm(103), wm(104), wm(105) )); verifyProcessor(supplier) .disableCompleteCall() .disableLogging() .input(inbox) .expectOutput(expectedOutbox); }
public static byte[] decodeBase64(byte[] base64Data) { return new Base64().decode(base64Data); }
@Test void testDecodeNullOrEmpty() { byte[] b1 = Base64.decodeBase64(null); assertNull(b1); byte[] b2 = Base64.decodeBase64(new byte[] {}); assertEquals(0, b2.length); }
@Override public boolean isVisualizedAutoTrackEnabled() { return false; }
@Test public void isVisualizedAutoTrackEnabled() { Assert.assertFalse(mSensorsAPI.isVisualizedAutoTrackEnabled()); }
@Override protected void close() { /* Turn off nacos heartbeat transmission */ ReflectUtils.invokeMethod(target, "shutdown", null, null); LOGGER.warning("Nacos heartbeat has been closed by user."); }
@Test public void close() throws NoSuchMethodException { final NacosRpcClientHealthInterceptor interceptor = new NacosRpcClientHealthInterceptor(); final RpcClient rpcClient = Mockito.mock(RpcClient.class); interceptor.doBefore(buildContext(rpcClient)); interceptor.close(); Mockito.verify(rpcClient, Mockito.times(1)).shutdown(); }
@Override public void addChildren(Deque<Expression> expressions) { if (expression != null) { expression.addChildren(expressions); } }
@Test public void addChildren() { @SuppressWarnings("unchecked") Deque<Expression> expressions = mock(Deque.class); test.addChildren(expressions); verify(expr).addChildren(expressions); verifyNoMoreInteractions(expr); }
public static boolean equals(FlatRecordTraversalObjectNode left, FlatRecordTraversalObjectNode right) { if (left == null && right == null) { return true; } if (left == null || right == null) { return false; } if (!left.getSchema().getName().equals(right.getSchema().getName())) { return false; } extractCommonObjectSchema(left, right); return compare(left, right); }
@Test
public void shouldUseExactFlagToConsiderExtraFieldsInEquality_usingReferences() {
    TypeState1 typeState1 = new TypeState1();
    typeState1.longField = 1L;
    typeState1.stringField = "A";
    typeState1.doubleField = 1.0;
    typeState1.basicIntField = 1;
    typeState1.basicIntFieldOnlyInTypeState1 = 1; // This field exists only in TypeState1; equality compares the common schema, so it is ignored.

    writer1.reset();
    mapper1.writeFlat(typeState1, writer1);
    FlatRecord rec1 = writer1.generateFlatRecord();

    TypeState2 typeState2 = new TypeState2();
    typeState2.longField = 1L;
    typeState2.stringField = "A";
    typeState2.doubleField = 1.0;
    typeState2.basicIntField = 1;

    writer2.reset();
    mapper2.writeFlat(typeState2, writer2);
    FlatRecord rec2 = writer2.generateFlatRecord();

    FlatRecordTraversalObjectNode leftNode = new FlatRecordTraversalObjectNode(rec1);
    FlatRecordTraversalObjectNode rightNode = new FlatRecordTraversalObjectNode(rec2);

    assertThat(FlatRecordTraversalObjectNodeEquality.equals(leftNode, rightNode)).isTrue();
    assertThat(FlatRecordTraversalObjectNodeEquality.equals(rightNode, leftNode)).isTrue();
}
@Override public AuthenticationResult authenticate(final ChannelHandlerContext context, final PacketPayload payload) { if (SSL_REQUEST_PAYLOAD_LENGTH == payload.getByteBuf().markReaderIndex().readInt() && SSL_REQUEST_CODE == payload.getByteBuf().readInt()) { if (ProxySSLContext.getInstance().isSSLEnabled()) { SslHandler sslHandler = new SslHandler(ProxySSLContext.getInstance().newSSLEngine(context.alloc()), true); context.pipeline().addFirst(SslHandler.class.getSimpleName(), sslHandler); context.writeAndFlush(new PostgreSQLSSLWillingPacket()); } else { context.writeAndFlush(new PostgreSQLSSLUnwillingPacket()); } return AuthenticationResultBuilder.continued(); } payload.getByteBuf().resetReaderIndex(); AuthorityRule rule = ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData().getGlobalRuleMetaData().getSingleRule(AuthorityRule.class); return startupMessageReceived ? processPasswordMessage(context, (PostgreSQLPacketPayload) payload, rule) : processStartupMessage(context, (PostgreSQLPacketPayload) payload, rule); }
@Test void assertSSLUnwilling() { ByteBuf byteBuf = createByteBuf(8, 8); byteBuf.writeInt(8); byteBuf.writeInt(80877103); PacketPayload payload = new PostgreSQLPacketPayload(byteBuf, StandardCharsets.UTF_8); ChannelHandlerContext context = mock(ChannelHandlerContext.class); AuthenticationResult actual = new PostgreSQLAuthenticationEngine().authenticate(context, payload); verify(context).writeAndFlush(any(PostgreSQLSSLUnwillingPacket.class)); assertFalse(actual.isFinished()); }
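For reference, the two integers the test writes form a PostgreSQL SSLRequest message: a 4-byte length of 8 followed by the protocol-defined magic code 80877103 (0x04D2162F). A minimal sketch of building that payload; the helper name is ours, and Netty types are fully qualified in place of imports:

static io.netty.buffer.ByteBuf buildSslRequest() {
    io.netty.buffer.ByteBuf buf = io.netty.buffer.Unpooled.buffer(8);
    buf.writeInt(8);        // message length, including the length field itself
    buf.writeInt(80877103); // SSLRequest code (0x04D2162F) from the PostgreSQL wire protocol
    return buf;
}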
private void updateField( Object[] r ) throws Exception {
    // Loop through fields
    for ( int i = 0; i < data.getFieldnr(); i++ ) {
        // Convert the user-entered replacement value using the target field's metadata
        ValueMetaInterface targetValueMeta = data.getOutputRowMeta().getValueMeta( data.getFieldnrs()[i] );
        ValueMetaInterface sourceValueMeta = data.getConvertRowMeta().getValueMeta( data.getFieldnrs()[i] );
        if ( !Utils.isEmpty( meta.getField( i ).getReplaceMask() ) ) {
            sourceValueMeta.setConversionMask( meta.getField( i ).getReplaceMask() );
        }
        sourceValueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL );
        r[data.getFieldnrs()[i]] = targetValueMeta.convertData( sourceValueMeta, data.getRealReplaceByValues()[i] );
        targetValueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL );
    }
}
@Test public void testUpdateField() throws Exception { SetValueConstant step = new SetValueConstant( smh.stepMeta, smh.stepDataInterface, 0, smh.transMeta, smh.trans ); ValueMetaInterface valueMeta = new ValueMetaString( "Field1" ); valueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_BINARY_STRING ); RowMeta rowMeta = new RowMeta(); rowMeta.addValueMeta( valueMeta ); SetValueConstantMeta.Field field = new SetValueConstantMeta.Field(); field.setFieldName( "Field Name" ); field.setEmptyString( true ); field.setReplaceMask( "Replace Mask" ); field.setReplaceValue( "Replace Value" ); doReturn( Collections.singletonList( field ) ).when( smh.initStepMetaInterface ).getFields(); doReturn( field ).when( smh.initStepMetaInterface ).getField( 0 ); doReturn( rowMeta ).when( smh.initStepDataInterface ).getConvertRowMeta(); doReturn( rowMeta ).when( smh.initStepDataInterface ).getOutputRowMeta(); doReturn( 1 ).when( smh.initStepDataInterface ).getFieldnr(); doReturn( new int[] { 0 } ).when( smh.initStepDataInterface ).getFieldnrs(); doReturn( new String[] { "foo" } ).when( smh.initStepDataInterface ).getRealReplaceByValues(); step.init( smh.initStepMetaInterface, smh.initStepDataInterface ); Method m = SetValueConstant.class.getDeclaredMethod( "updateField", Object[].class ); m.setAccessible( true ); Object[] row = new Object[] { null }; m.invoke( step, new Object[] { row } ); Assert.assertEquals( "foo", valueMeta.getString( row[0] ) ); }
@Override public void trackInstallation(String eventName, JSONObject properties, boolean disableCallback) { }
@Test public void trackInstallation() { mSensorsAPI.setTrackEventCallBack(new SensorsDataTrackEventCallBack() { @Override public boolean onTrackEvent(String eventName, JSONObject eventProperties) { Assert.fail(); return false; } }); mSensorsAPI.trackInstallation("AppInstall"); }
@Override public EntitySuggestionResponse suggest(final String collection, final String valueColumn, final String query, final int page, final int perPage, final Subject subject) { final MongoCollection<Document> mongoCollection = mongoConnection.getMongoDatabase().getCollection(collection); final var bsonFilter = !Strings.isNullOrEmpty(query) ? Filters.regex(valueColumn, query, "i") : Filters.empty(); final var resultWithoutPagination = mongoCollection .find(bsonFilter) .projection(Projections.include(valueColumn)) .sort(Sorts.ascending(valueColumn)); final var userCanReadAllEntities = permissionsUtils.hasAllPermission(subject) || permissionsUtils.hasReadPermissionForWholeCollection(subject, collection); final var skip = (page - 1) * perPage; final var checkPermission = permissionsUtils.createPermissionCheck(subject, collection); final var documents = userCanReadAllEntities ? mongoPaginate(resultWithoutPagination, perPage, skip) : paginateWithPermissionCheck(resultWithoutPagination, perPage, skip, checkPermission); final List<EntitySuggestion> suggestions = documents .map(doc -> new EntitySuggestion( doc.getObjectId(ID_FIELD).toString(), doc.getString(valueColumn) ) ) .toList(); final long total = userCanReadAllEntities ? mongoCollection.countDocuments(bsonFilter) : MongoUtils.stream(mongoCollection.find(bsonFilter).projection(Projections.include(ID_FIELD))).filter(checkPermission).count(); return new EntitySuggestionResponse(suggestions, PaginatedList.PaginationInfo.create((int) total, suggestions.size(), page, perPage)); }
@Test void checksPermissionsForEachDocumentWhenUserDoesNotHavePermissionForWholeCollection() { doReturn(false).when(entityPermissionsUtils).hasAllPermission(subject); doReturn(false).when(entityPermissionsUtils).hasReadPermissionForWholeCollection(subject, "dashboards"); final Collection<String> permittedIds = List.of("5a82f5974b900a7a97caa1e5", "5a82f5974b900a7a97caa1e7"); doReturn((Predicate<Document>) document -> permittedIds.contains(document.getObjectId(EntityPermissionsUtils.ID_FIELD).toString())) .when(entityPermissionsUtils) .createPermissionCheck(subject, "dashboards"); final var result = toTest.suggest("dashboards", "title", "", 1, 10, subject); assertThat(result.pagination().count()).isEqualTo(2); final var suggestions = result.suggestions(); assertThat(suggestions).hasSize(2); assertThat(suggestions.stream().map(EntitySuggestion::id).toList()) .containsExactlyInAnyOrder("5a82f5974b900a7a97caa1e5", "5a82f5974b900a7a97caa1e7"); assertThat(suggestions.stream().map(EntitySuggestion::value).toList()) .containsExactlyInAnyOrder("Test", "Test 3"); }
@Override
public void doRun() {
    if (isServerInPreflightMode.get()) {
        // Don't trigger CSRs automatically during preflight; skip this run until preflight has finished or been skipped.
        LOG.debug("Datanode still in preflight mode, skipping cert renewal task");
        return;
    }
    // Consult the configured renewal policy and trigger a renewal if the current certificate needs replacing.
    getRenewalPolicy()
            .filter(this::needsNewCertificate)
            .ifPresent(renewalPolicy -> {
                switch (renewalPolicy.mode()) {
                    case AUTOMATIC -> automaticRenewal();
                    case MANUAL -> manualRenewal();
                }
            });
}
@Test void testExpiringSoon() throws Exception { final DatanodeKeystore datanodeKeystore = datanodeKeystore(Duration.ofMinutes(1)); final CsrRequester csrRequester = Mockito.mock(CsrRequester.class); final DataNodeCertRenewalPeriodical periodical = new DataNodeCertRenewalPeriodical(datanodeKeystore, autoRenewalPolicy("PT1M"), csrRequester, () -> false); periodical.doRun(); Mockito.verify(csrRequester, Mockito.times(1)).triggerCertificateSigningRequest(); }
public static <T extends Type> Type decodeIndexedValue( String rawInput, TypeReference<T> typeReference) { return decoder.decodeEventParameter(rawInput, typeReference); }
@Test public void testDecodeIndexedDynamicBytesValue() { DynamicBytes bytes = new DynamicBytes(new byte[] {1, 2, 3, 4, 5}); String encoded = TypeEncoder.encodeDynamicBytes(bytes); String hash = Hash.sha3(encoded); assertEquals( FunctionReturnDecoder.decodeIndexedValue( hash, new TypeReference<DynamicBytes>() {}), (new Bytes32(Numeric.hexStringToByteArray(hash)))); }
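The same rule applies to every indexed dynamic type: the log topic carries a keccak-256 digest rather than the value itself, so only the 32-byte hash can be decoded back, never the original content. An illustrative variant for an indexed string, mirroring the construction of the test above (the test name is ours):

@Test
public void testDecodeIndexedStringValueReturnsHash() {
    String topic = Hash.sha3(TypeEncoder.encode(new Utf8String("hello")));
    assertEquals(
            new Bytes32(Numeric.hexStringToByteArray(topic)),
            FunctionReturnDecoder.decodeIndexedValue(topic, new TypeReference<Utf8String>() {}));
}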
public static int decodeInt(InputStream stream) throws IOException { long r = decodeLong(stream); if (r < 0 || r >= 1L << 32) { throw new IOException("varint overflow " + r); } return (int) r; }
@Test public void endOfFileThrowsException() throws Exception { ByteArrayInputStream inStream = new ByteArrayInputStream(new byte[0]); thrown.expect(EOFException.class); VarInt.decodeInt(inStream); }
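As a worked example of the unsigned-varint format decodeInt consumes: each byte carries 7 payload bits with the high bit flagging continuation, so 300 (binary 100101100) encodes to 0xAC 0x02, and the overflow check rejects anything outside the unsigned 32-bit range. An illustrative sketch (the test name is ours):

@Test
public void decodesMultiByteVarint() throws Exception {
    // low 7 bits 0x2C with the continuation bit set -> 0xAC, then 0x02 for the remaining bits
    ByteArrayInputStream in = new ByteArrayInputStream(new byte[] {(byte) 0xAC, 0x02});
    assertEquals(300, VarInt.decodeInt(in));
}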
public static ListenableFuture<EntityFieldsData> findAsync(TbContext ctx, EntityId originatorId) { switch (originatorId.getEntityType()) { // TODO: use EntityServiceRegistry case TENANT: return toEntityFieldsDataAsync(ctx.getTenantService().findTenantByIdAsync(ctx.getTenantId(), (TenantId) originatorId), EntityFieldsData::new, ctx); case CUSTOMER: return toEntityFieldsDataAsync(ctx.getCustomerService().findCustomerByIdAsync(ctx.getTenantId(), (CustomerId) originatorId), EntityFieldsData::new, ctx); case USER: return toEntityFieldsDataAsync(ctx.getUserService().findUserByIdAsync(ctx.getTenantId(), (UserId) originatorId), EntityFieldsData::new, ctx); case ASSET: return toEntityFieldsDataAsync(ctx.getAssetService().findAssetByIdAsync(ctx.getTenantId(), (AssetId) originatorId), EntityFieldsData::new, ctx); case DEVICE: return toEntityFieldsDataAsync(Futures.immediateFuture(ctx.getDeviceService().findDeviceById(ctx.getTenantId(), (DeviceId) originatorId)), EntityFieldsData::new, ctx); case ALARM: return toEntityFieldsDataAsync(ctx.getAlarmService().findAlarmByIdAsync(ctx.getTenantId(), (AlarmId) originatorId), EntityFieldsData::new, ctx); case RULE_CHAIN: return toEntityFieldsDataAsync(ctx.getRuleChainService().findRuleChainByIdAsync(ctx.getTenantId(), (RuleChainId) originatorId), EntityFieldsData::new, ctx); case ENTITY_VIEW: return toEntityFieldsDataAsync(ctx.getEntityViewService().findEntityViewByIdAsync(ctx.getTenantId(), (EntityViewId) originatorId), EntityFieldsData::new, ctx); case EDGE: return toEntityFieldsDataAsync(ctx.getEdgeService().findEdgeByIdAsync(ctx.getTenantId(), (EdgeId) originatorId), EntityFieldsData::new, ctx); default: return Futures.immediateFailedFuture(new TbNodeException("Unexpected originator EntityType: " + originatorId.getEntityType())); } }
@Test public void givenUnsupportedEntityTypes_whenFindAsync_thenException() { for (var entityType : EntityType.values()) { if (!SUPPORTED_ENTITY_TYPES.contains(entityType)) { var entityId = EntityIdFactory.getByTypeAndUuid(entityType, RANDOM_UUID); var expectedExceptionMsg = "org.thingsboard.rule.engine.api.TbNodeException: Unexpected originator EntityType: " + entityType; var exception = assertThrows(ExecutionException.class, () -> EntitiesFieldsAsyncLoader.findAsync(ctxMock, entityId).get()); assertInstanceOf(TbNodeException.class, exception.getCause()); assertThat(exception.getMessage()).isEqualTo(expectedExceptionMsg); } } }
@Override public DataSourceProvenance getProvenance() { return new DemoLabelDataSourceProvenance(this); }
@Test public void testInterlockingCrescents() { // Check zero samples throws assertThrows(PropertyException.class, () -> new InterlockingCrescentsDataSource(0)); // Check valid parameters work InterlockingCrescentsDataSource source = new InterlockingCrescentsDataSource(200); assertEquals(200, source.examples.size()); Dataset<Label> dataset = new MutableDataset<>(source); Map<String, Long> map = new HashMap<>(); dataset.getOutputInfo().outputCountsIterable().forEach((p) -> map.put(p.getA(), p.getB())); assertEquals(100, map.get("X")); assertEquals(100, map.get("O")); Helpers.testProvenanceMarshalling(source.getProvenance()); }