Dataset columns: focal_method (string, length 13 to 60.9k) and test_case (string, length 25 to 109k).
long size() { return directories.size(); }
@Test public void testDeletePerfectCache() throws Throwable { // run a larger scale test. Also use the ordering we'd expect for a sorted // listing, which we implement by sorting the paths List<CopyListingFileStatus> statusList = buildStatusList(); // cache is bigger than the status list tracker = new DeletedDirTracker(statusList.size()); AtomicInteger deletedFiles = new AtomicInteger(0); AtomicInteger deletedDirs = new AtomicInteger(0); deletePaths(statusList, deletedFiles, deletedDirs); assertEquals(0, deletedFiles.get()); }
@Override public T build(ConfigurationSourceProvider provider, String path) throws IOException, ConfigurationException { try (InputStream input = provider.open(requireNonNull(path))) { final JsonNode node = mapper.readTree(createParser(input)); if (node == null) { throw ConfigurationParsingException .builder("Configuration at " + path + " must not be empty") .build(path); } return build(node, path); } catch (JsonParseException e) { throw ConfigurationParsingException .builder("Malformed " + formatName) .setCause(e) .setLocation(e.getLocation()) .setDetail(e.getMessage()) .build(path); } }
@Test void throwsAnExceptionOnUnexpectedArrayOverride() { System.setProperty("dw.servers.port", "9000"); assertThatIllegalArgumentException() .isThrownBy(() -> factory.build(configurationSourceProvider, validFile)) .withMessageContaining("target is an array but no index specified"); }
public String getClusterName() { if (StringUtils.isNotEmpty(contextPath)) { return contextPath.substring(1); } return null; }
@Test public void testGetClusterName() { assertEquals(upstreamInstance.getClusterName(), "henyuContextPath"); upstreamInstance.setContextPath(""); assertNull(upstreamInstance.getClusterName()); }
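A hedged companion for the non-empty branch (setter usage mirrors the test above; the context value is illustrative):
upstreamInstance.setContextPath("/clusterA");
assertEquals("clusterA", upstreamInstance.getClusterName()); // substring(1) strips the leading '/'
upstreamInstance.setContextPath(null);
assertNull(upstreamInstance.getClusterName()); // isNotEmpty(null) is false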
boolean isPublicAndStatic() { int modifiers = mainMethod.getModifiers(); return isPublic(modifiers) && isStatic(modifiers); }
@Test public void testPublicAndStaticForMain() throws NoSuchMethodException { Method method = MainMethodFinderTest.class.getDeclaredMethod("main", String[].class); MainMethodFinder mainMethodFinder = new MainMethodFinder(); mainMethodFinder.mainMethod = method; boolean publicAndStatic = mainMethodFinder.isPublicAndStatic(); assertTrue(publicAndStatic); }
public static boolean isIPv4Host(String host) { return StringUtils.isNotBlank(host) && IPV4_PATTERN.matcher(host).matches(); }
@Test public void isIPv4Host() throws Exception { /* Illustrative body for the originally empty test; assumes a static import of the focal method and conventional IPV4_PATTERN semantics. */ assertTrue(isIPv4Host("127.0.0.1")); assertFalse(isIPv4Host(null)); assertFalse(isIPv4Host(" ")); assertFalse(isIPv4Host("not.an.ip")); }
public Searcher searcher() { return new Searcher(); }
@Test void require_that_search_for_simple_conjunctions_work() { ConjunctionIndexBuilder builder = new ConjunctionIndexBuilder(); IndexableFeatureConjunction c1 = indexableConj( conj( feature("a").inSet("1"), feature("b").inSet("2"))); IndexableFeatureConjunction c2 = indexableConj( conj( feature("a").inSet("1"), feature("b").inSet("2"), feature("c").inSet("3"))); IndexableFeatureConjunction c3 = indexableConj( conj( feature("a").inSet("1"), feature("b").inSet("5"))); builder.indexConjunction(c1); builder.indexConjunction(c2); builder.indexConjunction(c3); ConjunctionIndex index = builder.build(); ConjunctionIndex.Searcher searcher = index.searcher(); PredicateQuery query = new PredicateQuery(); query.addFeature("a", "1"); query.addFeature("b", "2"); assertHitsEquals(searcher.search(query), c1); query.addFeature("c", "3"); assertHitsEquals(searcher.search(query), c1, c2); query.addFeature("b", "5"); assertHitsEquals(searcher.search(query), c1, c2, c3); }
@Override public synchronized boolean onReportingPeriodEnd() { firstEventReceived = false; return true; }
@Test public void testOnReportingPeriodEnd() { assertTrue(strategy.onActivity(), "First call of onActivity() should return true."); assertTrue(strategy.onReportingPeriodEnd(), "onReportingPeriodEnd() should always return true."); assertTrue(strategy.onActivity(), "onActivity() should return true after onReportingPeriodEnd() for the next reporting period"); assertTrue(strategy.onReportingPeriodEnd(), "onReportingPeriodEnd() should always return true."); }
public void addHeader(String name, String value) { parent.headers().add(name, value); }
@Test void testAddHeader() { HttpRequest request = newRequest(URI.create("http://localhost:8080/echo"), HttpRequest.Method.GET, HttpRequest.Version.HTTP_1_1); DiscFilterResponse response = new DiscFilterResponse(HttpResponse.newInstance(HttpResponse.Status.OK)); response.addHeader("header1", "value1"); assertEquals(response.getHeader("header1"), "value1"); }
public static DateTime date() { return new DateTime(); }
@Test public void calendarTest() { final Date date = DateUtil.date(); final Calendar c = DateUtil.calendar(date); assertEquals(DateUtil.date(c), date); }
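A minimal hedged check of the no-arg factory itself (DateTime is assumed to extend java.util.Date, as the Date assignment in the test implies):
final Date before = new Date();
final DateTime now = DateUtil.date(); // wraps the current instant
assertFalse(now.before(before)); // a just-created DateTime never precedes an earlier Date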
protected String addDatetimeToFilename( String filename, boolean addDate, String datePattern, boolean addTime, String timePattern, boolean specifyFormat, String datetimeFormat ) { if ( Utils.isEmpty( filename ) ) { return null; } // Replace possible environment variables... String realfilename = environmentSubstitute( filename ); String filenameNoExtension = FilenameUtils.removeExtension( realfilename ); String extension = FilenameUtils.getExtension( realfilename ); // If an extension exists, add the corresponding dot before if ( !StringUtil.isEmpty( extension ) ) { extension = '.' + extension; } final SimpleDateFormat sdf = new SimpleDateFormat(); Date now = new Date(); if ( specifyFormat && !Utils.isEmpty( datetimeFormat ) ) { sdf.applyPattern( datetimeFormat ); String dt = sdf.format( now ); filenameNoExtension += dt; } else { if ( addDate && null != datePattern ) { sdf.applyPattern( datePattern ); String d = sdf.format( now ); filenameNoExtension += '_' + d; } if ( addTime && null != timePattern ) { sdf.applyPattern( timePattern ); String t = sdf.format( now ); filenameNoExtension += '_' + t; } } return filenameNoExtension + extension; }
@Test public void testAddDatetimeToFilename_ZipWithDotsInFolderWithDots() { JobEntryBase jobEntryBase = new JobEntryBase(); String fullFilename; String filename = "/folder.with.dots/zip.with.dots.in.folder.with.dots"; String regexFilename = regexDotEscape( filename ); // add nothing fullFilename = jobEntryBase.addDatetimeToFilename( filename + EXTENSION, false, null, false, null, false, null ); assertNotNull( fullFilename ); assertTrue( Pattern.matches( regexFilename + REGEX_EXTENSION, fullFilename ) ); // add date fullFilename = jobEntryBase.addDatetimeToFilename( filename + EXTENSION, true, "yyyyMMdd", false, null, false, null ); assertNotNull( fullFilename ); assertTrue( Pattern.matches( regexFilename + DATE_PATTERN + REGEX_EXTENSION, fullFilename ) ); fullFilename = jobEntryBase.addDatetimeToFilename( filename + EXTENSION, true, null, false, null, false, null ); assertNotNull( fullFilename ); assertEquals( filename + EXTENSION, fullFilename ); // add time fullFilename = jobEntryBase.addDatetimeToFilename( filename + EXTENSION, false, null, true, "HHmmssSSS", false, null ); assertNotNull( fullFilename ); assertTrue( Pattern.matches( regexFilename + TIME_PATTERN + REGEX_EXTENSION, fullFilename ) ); fullFilename = jobEntryBase.addDatetimeToFilename( filename + EXTENSION, false, null, true, null, false, null ); assertNotNull( fullFilename ); assertEquals( filename + EXTENSION, fullFilename ); // add date and time fullFilename = jobEntryBase.addDatetimeToFilename( filename + EXTENSION, true, "yyyyMMdd", true, "HHmmssSSS", false, null ); assertNotNull( fullFilename ); assertTrue( Pattern.matches( regexFilename + DATE_PATTERN + TIME_PATTERN + REGEX_EXTENSION, fullFilename ) ); fullFilename = jobEntryBase.addDatetimeToFilename( filename + EXTENSION, true, null, true, "HHmmssSSS", false, null ); assertNotNull( fullFilename ); assertTrue( Pattern.matches( regexFilename + TIME_PATTERN + REGEX_EXTENSION, fullFilename ) ); fullFilename = jobEntryBase.addDatetimeToFilename( filename + EXTENSION, true, "yyyyMMdd", true, null, false, null ); assertNotNull( fullFilename ); assertTrue( Pattern.matches( regexFilename + DATE_PATTERN + REGEX_EXTENSION, fullFilename ) ); fullFilename = jobEntryBase.addDatetimeToFilename( filename + EXTENSION, true, null, true, null, false, null ); assertNotNull( fullFilename ); assertTrue( Pattern.matches( regexFilename + REGEX_EXTENSION, fullFilename ) ); // add datetime fullFilename = jobEntryBase .addDatetimeToFilename( filename + EXTENSION, false, null, false, null, true, "(yyyyMMdd_HHmmssSSS)" ); assertNotNull( fullFilename ); assertTrue( Pattern.matches( regexFilename + DATE_TIME_PATTERN + REGEX_EXTENSION, fullFilename ) ); fullFilename = jobEntryBase.addDatetimeToFilename( filename + EXTENSION, false, null, false, null, true, null ); assertNotNull( fullFilename ); assertEquals( filename + EXTENSION, fullFilename ); }
public HollowHashIndexResult findMatches(Object... query) { if (hashStateVolatile == null) { throw new IllegalStateException(this + " wasn't initialized"); } int hashCode = 0; for(int i=0;i<query.length;i++) { if(query[i] == null) throw new IllegalArgumentException("querying by null unsupported; i=" + i); hashCode ^= HashCodes.hashInt(keyHashCode(query[i], i)); } HollowHashIndexResult result; HollowHashIndexState hashState; do { result = null; hashState = hashStateVolatile; long bucket = hashCode & hashState.getMatchHashMask(); long hashBucketBit = bucket * hashState.getBitsPerMatchHashEntry(); boolean bucketIsEmpty = hashState.getMatchHashTable().getElementValue(hashBucketBit, hashState.getBitsPerTraverserField()[0]) == 0; while (!bucketIsEmpty) { if (matchIsEqual(hashState.getMatchHashTable(), hashBucketBit, query)) { int selectSize = (int) hashState.getMatchHashTable().getElementValue(hashBucketBit + hashState.getBitsPerMatchHashKey(), hashState.getBitsPerSelectTableSize()); long selectBucketPointer = hashState.getMatchHashTable().getElementValue(hashBucketBit + hashState.getBitsPerMatchHashKey() + hashState.getBitsPerSelectTableSize(), hashState.getBitsPerSelectTablePointer()); result = new HollowHashIndexResult(hashState, selectBucketPointer, selectSize); break; } bucket = (bucket + 1) & hashState.getMatchHashMask(); hashBucketBit = bucket * hashState.getBitsPerMatchHashEntry(); bucketIsEmpty = hashState.getMatchHashTable().getElementValue(hashBucketBit, hashState.getBitsPerTraverserField()[0]) == 0; } } while (hashState != hashStateVolatile); return result; }
@Test public void testIndexingListTypeField() throws Exception { mapper.add(new TypeList("A", "B", "C", "D", "A", "B", "C", "D")); mapper.add(new TypeList("B", "C", "D", "E")); mapper.add(new TypeList("X", "Y", "Z")); mapper.add(new TypeList()); roundTripSnapshot(); HollowHashIndex index = new HollowHashIndex(readStateEngine, "TypeList", "", "data.element.value"); Assert.assertNull(index.findMatches("M")); Assert.assertNull(index.findMatches("")); assertIteratorContainsAll(index.findMatches("A").iterator(), 0); assertIteratorContainsAll(index.findMatches("B").iterator(), 0, 1); assertIteratorContainsAll(index.findMatches("X").iterator(), 2); }
public void broadcastEvent(AbstractEvent event) throws IOException { broadcastEvent(event, false); }
@TestTemplate void testBroadcastEventBufferReferenceCounting() throws Exception { int bufferSize = 32 * 1024; int numSubpartitions = 2; ResultPartition partition = createResultPartition(bufferSize, numSubpartitions); RecordWriter<?> writer = createRecordWriter(partition); writer.broadcastEvent(EndOfPartitionEvent.INSTANCE); // get references to buffer consumers (copies from the original event buffer consumer) Buffer[] buffers = new Buffer[numSubpartitions]; // process all collected events (recycles the buffer) for (int i = 0; i < numSubpartitions; i++) { assertThat(partition.getNumberOfQueuedBuffers(i)).isOne(); ResultSubpartitionView view = partition.createSubpartitionView( new ResultSubpartitionIndexSet(i), new NoOpBufferAvailablityListener()); buffers[i] = view.getNextBuffer().buffer(); assertThat(parseBuffer(buffers[i], i).isEvent()).isTrue(); } for (int i = 0; i < numSubpartitions; ++i) { assertThat(buffers[i].isRecycled()).isTrue(); } }
@Override public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException { final String prefix = containerService.isContainer(directory) ? StringUtils.EMPTY : containerService.getKey(directory) + Path.DELIMITER; return this.list(directory, listener, prefix); }
@Test public void testListDotInKey() throws Exception { final Path container = new Path("test.cyberduck.ch", EnumSet.of(Path.Type.directory, Path.Type.volume)); container.attributes().setRegion("IAD"); final Path placeholder = new SwiftDirectoryFeature(session).mkdir(new Path(container, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus()); final Path test = new SwiftTouchFeature(session, new SwiftRegionService(session)).touch( new Path(placeholder, String.format("%s..", new AlphanumericRandomStringService().random()), EnumSet.of(Path.Type.file)), new TransferStatus()); final AttributedList<Path> list = new SwiftObjectListService(session).list(placeholder, new DisabledListProgressListener()); assertEquals(1, list.size()); assertTrue(list.contains(test)); new SwiftDeleteFeature(session).delete(Arrays.asList(test, placeholder), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
public void execute() throws DdlException { Map<String, UserVariable> clonedUserVars = new ConcurrentHashMap<>(); boolean hasUserVar = stmt.getSetListItems().stream().anyMatch(var -> var instanceof UserVariable); boolean executeSuccess = true; if (hasUserVar) { clonedUserVars.putAll(ctx.getUserVariables()); ctx.modifyUserVariablesCopyInWrite(clonedUserVars); } try { for (SetListItem var : stmt.getSetListItems()) { setVariablesOfAllType(var); } } catch (Throwable e) { if (hasUserVar) { executeSuccess = false; } throw e; } finally { //If the set sql contains more than one user variable, //the atomicity of the modification of this set of variables must be ensured. if (hasUserVar) { ctx.resetUserVariableCopyInWrite(); if (executeSuccess) { ctx.modifyUserVariables(clonedUserVars); } } } }
@Test public void test1SetSessionAndGlobal() throws Exception { ConnectContext ctx = starRocksAssert.getCtx(); ctxToRoot(); DDLStmtExecutor.execute(UtFrameUtils.parseStmtWithNewParser( "grant operate on system to testUser", ctx), ctx); ctxToTestUser(); String globalSQL = "set global query_timeout = 10"; SetStmt stmt = (SetStmt) UtFrameUtils.parseStmtWithNewParser(globalSQL, ctx); SetExecutor executor = new SetExecutor(ctx, stmt); executor.execute(); Assert.assertEquals(null, ctx.getModifiedSessionVariables()); Assert.assertEquals(10, ctx.sessionVariable.getQueryTimeoutS()); String sessionSQL = "set query_timeout = 9"; stmt = (SetStmt) UtFrameUtils.parseStmtWithNewParser(sessionSQL, ctx); executor = new SetExecutor(ctx, stmt); executor.execute(); Assert.assertEquals(1, ctx.getModifiedSessionVariables().getSetListItems().size()); Assert.assertEquals(9, ctx.sessionVariable.getQueryTimeoutS()); ctx.modifyUserVariable(new UserVariable("test_b", new IntLiteral(1), true, NodePosition.ZERO)); String userVarSql = "set @a = 10"; stmt = (SetStmt) UtFrameUtils.parseStmtWithNewParser(userVarSql, ctx); executor = new SetExecutor(ctx, stmt); executor.execute(); Assert.assertEquals(2, ctx.getModifiedSessionVariables().getSetListItems().size()); Assert.assertEquals("10", ctx.getModifiedSessionVariables().getSetListItems().get(1).toSql()); ctx.getUserVariables().remove("test_b"); }
@Nullable protected String findWebJarResourcePath(String pathStr) { Path path = Paths.get(pathStr); if (path.getNameCount() < 2) return null; String version = swaggerUiConfigProperties.getVersion(); if (version == null) return null; Path first = path.getName(0); Path rest = path.subpath(1, path.getNameCount()); return first.resolve(version).resolve(rest).toString(); }
@Test void returnNullWhenPathIsSameAsWebjar() { String path = "swagger-ui"; String actual = abstractSwaggerResourceResolver.findWebJarResourcePath(path); assertTrue(Objects.isNull(actual)); }
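For contrast, a hedged positive-path sketch; the version value is hypothetical and would need to be stubbed on swaggerUiConfigProperties:
// Illustrative: assumes swaggerUiConfigProperties.getVersion() returns "4.18.2".
String resolved = abstractSwaggerResourceResolver.findWebJarResourcePath("swagger-ui/index.html");
assertEquals("swagger-ui" + File.separator + "4.18.2" + File.separator + "index.html", resolved);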
private static ByteBuf copiedBufferAscii(CharSequence string) { boolean release = true; // Mimic the same behavior as other copiedBuffer implementations. ByteBuf buffer = ALLOC.heapBuffer(string.length()); try { ByteBufUtil.writeAscii(buffer, string); release = false; return buffer; } finally { if (release) { buffer.release(); } } }
@Test public void testCopiedBufferAscii() { testCopiedBufferCharSequence("Some US_ASCII", CharsetUtil.US_ASCII); }
public static void saveHandler(Object handler) { final HttpAsyncContext httpAsyncContext = getOrCreateContext(); httpAsyncContext.setHandler(handler); }
@Test public void saveHandler() { final Object handler = new Object(); HttpAsyncUtils.saveHandler(handler); Assert.assertEquals(handler, HttpAsyncUtils.getOrCreateContext().getHandler()); }
protected SerializerFactory getSerializerFactory(boolean multipleClassLoader, boolean generic) { if (generic) { return multipleClassLoader ? new GenericMultipleClassLoaderSofaSerializerFactory() : new GenericSingleClassLoaderSofaSerializerFactory(); } else { return multipleClassLoader ? new MultipleClassLoaderSofaSerializerFactory() : new SingleClassLoaderSofaSerializerFactory(); } }
@Test public void getSerializerFactory() { Assert.assertEquals(SingleClassLoaderSofaSerializerFactory.class, serializer.getSerializerFactory(false, false).getClass()); Assert.assertEquals(MultipleClassLoaderSofaSerializerFactory.class, serializer.getSerializerFactory(true, false).getClass()); Assert.assertEquals(GenericSingleClassLoaderSofaSerializerFactory.class, serializer.getSerializerFactory(false, true).getClass()); Assert.assertEquals(GenericMultipleClassLoaderSofaSerializerFactory.class, serializer.getSerializerFactory(true, true).getClass()); }
@Override public boolean hasOrdinal() { switch (super.getVersion()) { case DEFAULT_VERSION: return true; default: return true; } }
@Test public void testHasOrdinal() { assertTrue(verDefault.hasOrdinal()); assertTrue(verCurrent.hasOrdinal()); }
@Override public <V> MultiLabel generateOutput(V label) { if (label instanceof Collection) { Collection<?> c = (Collection<?>) label; List<Pair<String,Boolean>> dimensions = new ArrayList<>(); for (Object o : c) { dimensions.add(MultiLabel.parseElement(o.toString())); } return MultiLabel.createFromPairList(dimensions); } return MultiLabel.parseString(label.toString()); }
@Test public void testGenerateOutput_null() { MultiLabelFactory factory = new MultiLabelFactory(); assertThrows(NullPointerException.class, () -> factory.generateOutput(null)); assertThrows(NullPointerException.class, () -> factory.generateOutput(new HashSet<>(Arrays.asList("a", null, "b")))); }
@Override public EncodedMessage transform(ActiveMQMessage message) throws Exception { if (message == null) { return null; } long messageFormat = 0; Header header = null; Properties properties = null; Map<Symbol, Object> daMap = null; Map<Symbol, Object> maMap = null; Map<String,Object> apMap = null; Map<Object, Object> footerMap = null; Section body = convertBody(message); if (message.isPersistent()) { if (header == null) { header = new Header(); } header.setDurable(true); } byte priority = message.getPriority(); if (priority != Message.DEFAULT_PRIORITY) { if (header == null) { header = new Header(); } header.setPriority(UnsignedByte.valueOf(priority)); } String type = message.getType(); if (type != null) { if (properties == null) { properties = new Properties(); } properties.setSubject(type); } MessageId messageId = message.getMessageId(); if (messageId != null) { if (properties == null) { properties = new Properties(); } properties.setMessageId(getOriginalMessageId(message)); } ActiveMQDestination destination = message.getDestination(); if (destination != null) { if (properties == null) { properties = new Properties(); } properties.setTo(destination.getQualifiedName()); if (maMap == null) { maMap = new HashMap<>(); } maMap.put(JMS_DEST_TYPE_MSG_ANNOTATION, destinationType(destination)); } ActiveMQDestination replyTo = message.getReplyTo(); if (replyTo != null) { if (properties == null) { properties = new Properties(); } properties.setReplyTo(replyTo.getQualifiedName()); if (maMap == null) { maMap = new HashMap<>(); } maMap.put(JMS_REPLY_TO_TYPE_MSG_ANNOTATION, destinationType(replyTo)); } String correlationId = message.getCorrelationId(); if (correlationId != null) { if (properties == null) { properties = new Properties(); } try { properties.setCorrelationId(AMQPMessageIdHelper.INSTANCE.toIdObject(correlationId)); } catch (AmqpProtocolException e) { properties.setCorrelationId(correlationId); } } long expiration = message.getExpiration(); if (expiration != 0) { long ttl = expiration - System.currentTimeMillis(); if (ttl < 0) { ttl = 1; } if (header == null) { header = new Header(); } header.setTtl(new UnsignedInteger((int) ttl)); if (properties == null) { properties = new Properties(); } properties.setAbsoluteExpiryTime(new Date(expiration)); } long timeStamp = message.getTimestamp(); if (timeStamp != 0) { if (properties == null) { properties = new Properties(); } properties.setCreationTime(new Date(timeStamp)); } // JMSX Message Properties int deliveryCount = message.getRedeliveryCounter(); if (deliveryCount > 0) { if (header == null) { header = new Header(); } header.setDeliveryCount(UnsignedInteger.valueOf(deliveryCount)); } String userId = message.getUserID(); if (userId != null) { if (properties == null) { properties = new Properties(); } properties.setUserId(new Binary(userId.getBytes(StandardCharsets.UTF_8))); } String groupId = message.getGroupID(); if (groupId != null) { if (properties == null) { properties = new Properties(); } properties.setGroupId(groupId); } int groupSequence = message.getGroupSequence(); if (groupSequence > 0) { if (properties == null) { properties = new Properties(); } properties.setGroupSequence(UnsignedInteger.valueOf(groupSequence)); } final Map<String, Object> entries; try { entries = message.getProperties(); } catch (IOException e) { throw JMSExceptionSupport.create(e); } for (Map.Entry<String, Object> entry : entries.entrySet()) { String key = entry.getKey(); Object value = entry.getValue(); if (key.startsWith(JMS_AMQP_PREFIX)) { if (key.startsWith(NATIVE, JMS_AMQP_PREFIX_LENGTH)) { // skip transformer appended properties continue; } else if (key.startsWith(ORIGINAL_ENCODING, JMS_AMQP_PREFIX_LENGTH)) { // skip transformer appended properties continue; } else if (key.startsWith(MESSAGE_FORMAT, JMS_AMQP_PREFIX_LENGTH)) { messageFormat = (long) TypeConversionSupport.convert(entry.getValue(), Long.class); continue; } else if (key.startsWith(HEADER, JMS_AMQP_PREFIX_LENGTH)) { if (header == null) { header = new Header(); } continue; } else if (key.startsWith(PROPERTIES, JMS_AMQP_PREFIX_LENGTH)) { if (properties == null) { properties = new Properties(); } continue; } else if (key.startsWith(MESSAGE_ANNOTATION_PREFIX, JMS_AMQP_PREFIX_LENGTH)) { if (maMap == null) { maMap = new HashMap<>(); } String name = key.substring(JMS_AMQP_MESSAGE_ANNOTATION_PREFIX.length()); maMap.put(Symbol.valueOf(name), value); continue; } else if (key.startsWith(FIRST_ACQUIRER, JMS_AMQP_PREFIX_LENGTH)) { if (header == null) { header = new Header(); } header.setFirstAcquirer((boolean) TypeConversionSupport.convert(value, Boolean.class)); continue; } else if (key.startsWith(CONTENT_TYPE, JMS_AMQP_PREFIX_LENGTH)) { if (properties == null) { properties = new Properties(); } properties.setContentType(Symbol.getSymbol((String) TypeConversionSupport.convert(value, String.class))); continue; } else if (key.startsWith(CONTENT_ENCODING, JMS_AMQP_PREFIX_LENGTH)) { if (properties == null) { properties = new Properties(); } properties.setContentEncoding(Symbol.getSymbol((String) TypeConversionSupport.convert(value, String.class))); continue; } else if (key.startsWith(REPLYTO_GROUP_ID, JMS_AMQP_PREFIX_LENGTH)) { if (properties == null) { properties = new Properties(); } properties.setReplyToGroupId((String) TypeConversionSupport.convert(value, String.class)); continue; } else if (key.startsWith(DELIVERY_ANNOTATION_PREFIX, JMS_AMQP_PREFIX_LENGTH)) { if (daMap == null) { daMap = new HashMap<>(); } String name = key.substring(JMS_AMQP_DELIVERY_ANNOTATION_PREFIX.length()); daMap.put(Symbol.valueOf(name), value); continue; } else if (key.startsWith(FOOTER_PREFIX, JMS_AMQP_PREFIX_LENGTH)) { if (footerMap == null) { footerMap = new HashMap<>(); } String name = key.substring(JMS_AMQP_FOOTER_PREFIX.length()); footerMap.put(Symbol.valueOf(name), value); continue; } } else if (key.startsWith(AMQ_SCHEDULED_MESSAGE_PREFIX )) { // strip off the scheduled message properties continue; } // The property didn't map into any other slot so we store it in the // Application Properties section of the message. if (apMap == null) { apMap = new HashMap<>(); } apMap.put(key, value); int messageType = message.getDataStructureType(); if (messageType == CommandTypes.ACTIVEMQ_MESSAGE) { // Type of command to recognize advisory message Object data = message.getDataStructure(); if(data != null) { apMap.put("ActiveMqDataStructureType", data.getClass().getSimpleName()); } } } final AmqpWritableBuffer buffer = new AmqpWritableBuffer(); encoder.setByteBuffer(buffer); if (header != null) { encoder.writeObject(header); } if (daMap != null) { encoder.writeObject(new DeliveryAnnotations(daMap)); } if (maMap != null) { encoder.writeObject(new MessageAnnotations(maMap)); } if (properties != null) { encoder.writeObject(properties); } if (apMap != null) { encoder.writeObject(new ApplicationProperties(apMap)); } if (body != null) { encoder.writeObject(body); } if (footerMap != null) { encoder.writeObject(new Footer(footerMap)); } return new EncodedMessage(messageFormat, buffer.getArray(), 0, buffer.getArrayLength()); }
@Test public void testConvertCompressedMapMessageToAmqpMessage() throws Exception { ActiveMQMapMessage outbound = createMapMessage(true); outbound.setString("property-1", "string"); outbound.setInt("property-2", 1); outbound.setBoolean("property-3", true); outbound.onSend(); outbound.storeContent(); JMSMappingOutboundTransformer transformer = new JMSMappingOutboundTransformer(); EncodedMessage encoded = transformer.transform(outbound); assertNotNull(encoded); Message amqp = encoded.decode(); assertNotNull(amqp.getBody()); assertTrue(amqp.getBody() instanceof AmqpValue); assertTrue(((AmqpValue) amqp.getBody()).getValue() instanceof Map); @SuppressWarnings("unchecked") Map<Object, Object> amqpMap = (Map<Object, Object>) ((AmqpValue) amqp.getBody()).getValue(); assertEquals(3, amqpMap.size()); assertTrue("string".equals(amqpMap.get("property-1"))); }
ProducerIdsBlock nextProducerBlock() { return nextProducerBlock.get(); }
@Test public void testGenerateProducerIds() { for (int i = 0; i < 100; i++) { generateProducerIds(producerIdControlManager, i % 4, 100); } assertEquals(new ProducerIdsBlock(3, 100000, 1000), producerIdControlManager.nextProducerBlock()); }
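The expected block follows from simple arithmetic, assuming each loop iteration claims one block of 1000 ids (the default ProducerIdsBlock size): 100 iterations advance the next block start to 100 × 1000 = 100000, and the block's broker id 3 matches the final iteration's i % 4 = 99 % 4 = 3.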
@Override public Path move(final Path file, final Path renamed, final TransferStatus status, final Delete.Callback delete, final ConnectionCallback callback) throws BackgroundException { try { final StoregateApiClient client = session.getClient(); final MoveFileRequest move = new MoveFileRequest() .name(renamed.getName()) .parentID(fileid.getFileId(renamed.getParent())) .mode(1); // Overwrite final HttpEntityEnclosingRequestBase request; request = new HttpPost(String.format("%s/v4.2/files/%s/move", client.getBasePath(), fileid.getFileId(file))); if(status.getLockId() != null) { request.addHeader("X-Lock-Id", status.getLockId().toString()); } request.setEntity(new StringEntity(new JSON().getContext(move.getClass()).writeValueAsString(move), ContentType.create("application/json", StandardCharsets.UTF_8.name()))); request.addHeader(HTTP.CONTENT_TYPE, MEDIA_TYPE); final HttpResponse response = client.getClient().execute(request); try { switch(response.getStatusLine().getStatusCode()) { case HttpStatus.SC_NO_CONTENT: final PathAttributes attr = new PathAttributes(file.attributes()); fileid.cache(file, null); fileid.cache(renamed, attr.getFileId()); return renamed.withAttributes(attr); default: throw new StoregateExceptionMappingService(fileid).map("Cannot rename {0}", new ApiException(response.getStatusLine().getStatusCode(), response.getStatusLine().getReasonPhrase()), file); } } finally { EntityUtils.consume(response.getEntity()); } } catch(IOException e) { throw new DefaultIOExceptionMappingService().map("Cannot rename {0}", e, file); } }
@Test public void testMoveWithLock() throws Exception { final StoregateIdProvider nodeid = new StoregateIdProvider(session); final Path room = new StoregateDirectoryFeature(session, nodeid).mkdir( new Path(String.format("/My files/%s", new AlphanumericRandomStringService().random()), EnumSet.of(Path.Type.directory, Path.Type.volume)), new TransferStatus()); final Path test = new StoregateTouchFeature(session, nodeid).touch(new Path(room, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus()); final String fileid = test.attributes().getFileId(); final String lockId = new StoregateLockFeature(session, nodeid).lock(test); final Path target = new Path(room, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); assertEquals(fileid, new StoregateMoveFeature(session, nodeid).move(test, target, new TransferStatus().withLockId(lockId), new Delete.DisabledCallback(), new DisabledConnectionCallback()).attributes().getFileId()); assertFalse(new DefaultFindFeature(session).find(test)); assertTrue(new DefaultFindFeature(session).find(target)); assertEquals(0, session.getMetrics().get(Copy.class)); new StoregateDeleteFeature(session, nodeid).delete(Collections.singletonList(room), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
@Override protected Optional<LocalDate> extractField(String s) { try { return Optional.of(LocalDate.parse(s, formatter)); } catch (DateTimeParseException e) { logger.log(Level.WARNING, e.getParsedString()); logger.log(Level.WARNING, String.format("Unable to parse date %s with formatter %s", s, formatter.toString())); return Optional.empty(); } }
@Test public void testValidBehaviour() { String isoFormat = "uuuu-MM-dd"; DateTimeFormatter isoFormatter = DateTimeFormatter.ofPattern(isoFormat, Locale.US); String isoInput = "1994-01-26"; DateExtractor isoExtractor = new DateExtractor("test-iso", "date-iso", isoFormat); LocalDate isoDate = LocalDate.parse(isoInput, isoFormatter); Optional<LocalDate> isoExtracted = isoExtractor.extractField(isoInput); assertTrue(isoExtracted.isPresent()); assertEquals(isoDate,isoExtracted.get()); String usFormat = "MM-dd-uuuu"; DateTimeFormatter usFormatter = DateTimeFormatter.ofPattern(usFormat, Locale.US); String usInput = "09-08-1966"; DateExtractor usExtractor = new DateExtractor("test-us", "date-us", usFormat); LocalDate usDate = LocalDate.parse(usInput, usFormatter); Optional<LocalDate> usExtracted = usExtractor.extractField(usInput); assertTrue(usExtracted.isPresent()); assertEquals(usDate,usExtracted.get()); String ukFormat = "dd-MM-uuuu"; DateTimeFormatter ukFormatter = DateTimeFormatter.ofPattern(ukFormat, Locale.US); String ukInput = "23-11-1963"; DateExtractor ukProc = new DateExtractor("test-uk", "date-uk", ukFormat); LocalDate ukDate = LocalDate.parse(ukInput, ukFormatter); Optional<LocalDate> ukExtracted = ukProc.extractField(ukInput); assertTrue(ukExtracted.isPresent()); assertEquals(ukDate,ukExtracted.get()); }
@ExecuteOn(TaskExecutors.IO) @Delete(uri = "{namespace}/{id}") @Operation(tags = {"Templates"}, summary = "Delete a template") @ApiResponse(responseCode = "204", description = "On success") public HttpResponse<Void> delete( @Parameter(description = "The template namespace") @PathVariable String namespace, @Parameter(description = "The template id") @PathVariable String id ) { Optional<Template> template = templateRepository.findById(tenantService.resolveTenant(), namespace, id); if (template.isPresent()) { templateRepository.delete(template.get()); return HttpResponse.status(HttpStatus.NO_CONTENT); } else { return HttpResponse.status(HttpStatus.NOT_FOUND); } }
@Test void importTemplatesWithZip() throws IOException { // create 3 templates, so we have at least 3 of them client.toBlocking().retrieve(POST("/api/v1/templates", createTemplate()), Template.class); client.toBlocking().retrieve(POST("/api/v1/templates", createTemplate()), Template.class); client.toBlocking().retrieve(POST("/api/v1/templates", createTemplate()), Template.class); int size = client.toBlocking().retrieve(HttpRequest.GET("/api/v1/templates/search?namespace=kestra.test"), Argument.of(PagedResults.class, Template.class)).getResults().size(); // extract the created templates byte[] zip = client.toBlocking().retrieve(HttpRequest.GET("/api/v1/templates/export/by-query?namespace=kestra.test"), Argument.of(byte[].class)); File temp = File.createTempFile("templates", ".zip"); Files.write(temp.toPath(), zip); // import the templates var body = MultipartBody.builder() .addPart("fileUpload", "templates.zip", temp) .build(); var response = client.toBlocking().exchange(POST("/api/v1/templates/import", body).contentType(MediaType.MULTIPART_FORM_DATA)); assertThat(response.getStatus(), is(NO_CONTENT)); temp.delete(); }
static Optional<DataField> getTargetDataField(final Model model) { DataType targetDataType = getTargetDataType(model.getMiningFunction(), model.getMathContext()); OpType targetOpType = getTargetOpType(model.getMiningFunction()); if (targetDataType == null || targetOpType == null) { return Optional.empty(); } String cleanedName = model.getModelName().replaceAll("[^A-Za-z0-9]", ""); String fieldName = String.format(TARGETFIELD_TEMPLATE, cleanedName); DataField toReturn = new DataField(); toReturn.setName(fieldName); toReturn.setOpType(targetOpType); toReturn.setDataType(targetDataType); return Optional.of(toReturn); }
@Test void getTargetDataField() throws Exception { final InputStream inputStream = getFileInputStream(NO_TARGET_FIELD_SAMPLE); final PMML pmml = org.jpmml.model.PMMLUtil.unmarshal(inputStream); final Model model = pmml.getModels().get(0); Optional<DataField> optionalDataField = KiePMMLUtil.getTargetDataField(model); assertThat(optionalDataField).isPresent(); DataField retrieved = optionalDataField.get(); String expected = String.format(TARGETFIELD_TEMPLATE, "golfing"); assertThat(retrieved.getName()).isEqualTo(expected); }
public static PCollectionRowTuple empty(Pipeline pipeline) { return new PCollectionRowTuple(pipeline); }
@Test public void testEmpty() { String tag = "collection1"; assertFalse(PCollectionRowTuple.empty(pipeline).has(tag)); }
public abstract void verify(String value);
@Test public void testBooleanAttribute() { BooleanAttribute booleanAttribute = new BooleanAttribute("bool.key", false, false); Assert.assertThrows(RuntimeException.class, () -> booleanAttribute.verify("")); Assert.assertThrows(RuntimeException.class, () -> booleanAttribute.verify("a")); Assert.assertThrows(RuntimeException.class, () -> booleanAttribute.verify(",")); Assert.assertThrows(RuntimeException.class, () -> booleanAttribute.verify("checked")); Assert.assertThrows(RuntimeException.class, () -> booleanAttribute.verify("1")); Assert.assertThrows(RuntimeException.class, () -> booleanAttribute.verify("0")); Assert.assertThrows(RuntimeException.class, () -> booleanAttribute.verify("-1")); booleanAttribute.verify("true"); booleanAttribute.verify("tRue"); booleanAttribute.verify("false"); booleanAttribute.verify("falSe"); }
public static <T> boolean isNotNullOrEmpty(Collection<T> collection) { return !isNullOrEmpty(collection); }
@Test void isNotNullOrEmptyIsFalseForEmptyArray() { assertThat(isNotNullOrEmpty(new String[]{})).isFalse(); }
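The test above exercises an array overload; a hedged companion for the Collection overload shown in the focal method (the cast disambiguates against the array variant):
assertThat(isNotNullOrEmpty((Collection<String>) null)).isFalse(); // null delegates to isNullOrEmpty
assertThat(isNotNullOrEmpty(Collections.emptyList())).isFalse();
assertThat(isNotNullOrEmpty(Collections.singletonList("a"))).isTrue();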
public T send() throws IOException { return web3jService.send(this, responseType); }
@Test public void testEthGetTransactionByBlockHashAndIndex() throws Exception { web3j.ethGetTransactionByBlockHashAndIndex( "0xe670ec64341771606e55d6b4ca35a1a6b75ee3d5145a99d05921026d1527331", BigInteger.ZERO) .send(); verifyResult( "{\"jsonrpc\":\"2.0\",\"method\":\"eth_getTransactionByBlockHashAndIndex\",\"params\":[\"0xe670ec64341771606e55d6b4ca35a1a6b75ee3d5145a99d05921026d1527331\",\"0x0\"],\"id\":1}"); }
@Override public int hashCode() { return underlying().hashCode(); }
@Test public void testHashCode() { final HashPMap<Object, Object> mock = mock(HashPMap.class); assertEquals(mock.hashCode(), new PCollectionsImmutableMap<>(mock).hashCode()); final HashPMap<Object, Object> someOtherMock = mock(HashPMap.class); assertNotEquals(mock.hashCode(), new PCollectionsImmutableMap<>(someOtherMock).hashCode()); }
static int getIndex(CharSequence name) { HeaderNameIndex entry = getEntry(name); return entry == null ? NOT_FOUND : entry.index; }
@Test public void testExistingHeaderName() { assertEquals(6, HpackStaticTable.getIndex(":scheme")); }
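A hedged negative-path companion; NOT_FOUND is the sentinel used by the focal method, and its visibility to the test is assumed:
assertEquals(HpackStaticTable.NOT_FOUND, HpackStaticTable.getIndex("x-not-in-static-table"));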
@Override public String convert(final ReadwriteSplittingRuleConfiguration ruleConfig) { if (ruleConfig.getDataSourceGroups().isEmpty()) { return ""; } StringBuilder result = new StringBuilder(ReadwriteSplittingDistSQLConstants.CREATE_READWRITE_SPLITTING_RULE); Iterator<ReadwriteSplittingDataSourceGroupRuleConfiguration> iterator = ruleConfig.getDataSourceGroups().iterator(); while (iterator.hasNext()) { appendStaticReadWriteSplittingRule(ruleConfig.getLoadBalancers(), iterator.next(), result); if (iterator.hasNext()) { result.append(DistSQLConstants.COMMA); } } result.append(DistSQLConstants.SEMI); return result.toString(); }
@Test void assertConvertWithEmptyDataSources() { ReadwriteSplittingRuleConfiguration readwriteSplittingRuleConfig = mock(ReadwriteSplittingRuleConfiguration.class); when(readwriteSplittingRuleConfig.getDataSourceGroups()).thenReturn(Collections.emptyList()); ReadwriteSplittingRuleConfigurationToDistSQLConverter readwriteSplittingRuleConfigurationToDistSQLConverter = new ReadwriteSplittingRuleConfigurationToDistSQLConverter(); assertThat(readwriteSplittingRuleConfigurationToDistSQLConverter.convert(readwriteSplittingRuleConfig), is("")); }
@Override public double p(double x) { return Math.exp(-np * Math.log(1.0 + x * x / nu) + fac) / Math.sqrt(Math.PI * nu); }
@Test public void testP() { System.out.println("p"); TDistribution instance = new TDistribution(20); instance.rand(); assertEquals(2.660085e-09, instance.p(-10.0), 1E-16); assertEquals(0.05808722, instance.p(-2.0), 1E-7); assertEquals(0.2360456, instance.p(-1.0), 1E-7); assertEquals(0.3939886, instance.p(0.0), 1E-7); assertEquals(0.2360456, instance.p(1.0), 1E-7); assertEquals(0.05808722, instance.p(2.0), 1E-7); assertEquals(2.660085e-09, instance.p(10.0), 1E-16); }
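For reference, assuming the unshown fields are np = (ν + 1)/2 and fac = lnΓ((ν + 1)/2) − lnΓ(ν/2), the focal method is the log-space form of the Student-t density:
p(x) = \frac{\Gamma((\nu+1)/2)}{\sqrt{\nu\pi}\,\Gamma(\nu/2)}\left(1 + \frac{x^2}{\nu}\right)^{-(\nu+1)/2}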
@Override public int size() { if (entries == null) { return 0; } return entries.size(); }
@Test public void testSize_whenNull() { ResultSet resultSet = new ResultSet(null, IterationType.KEY); assertEquals(0, resultSet.size()); }
public Node parse() throws ScanException { if (tokenList == null || tokenList.isEmpty()) return null; return E(); }
@Test public void withDefault() throws ScanException { Tokenizer tokenizer = new Tokenizer("${b:-c}"); Parser parser = new Parser(tokenizer.tokenize()); Node node = parser.parse(); Node witness = new Node(Node.Type.VARIABLE, new Node(Node.Type.LITERAL, "b")); witness.defaultPart = new Node(Node.Type.LITERAL, "c"); assertEquals(witness, node); }
public String generateInvalidPayloadExceptionMessage(final byte[] hl7Bytes) { if (hl7Bytes == null) { return "HL7 payload is null"; } return generateInvalidPayloadExceptionMessage(hl7Bytes, hl7Bytes.length); }
@Test public void testGenerateInvalidPayloadExceptionMessageWithLengthSmallerThanArraySize() { byte[] payload = TEST_MESSAGE.getBytes(); String message = hl7util.generateInvalidPayloadExceptionMessage(payload, 10); assertEquals("The HL7 payload terminating byte [0x7c] is incorrect - expected [0xd] {ASCII [<CR>]}", message); }
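The null branch is directly checkable too; this companion assertion uses the exact string returned by the focal method:
assertEquals("HL7 payload is null", hl7util.generateInvalidPayloadExceptionMessage(null));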
@Override public CompletableFuture<T> toCompletableFuture() { return _task.toCompletionStage().toCompletableFuture(); }
@Test public void testCreateStageFromValue() throws Exception { String testResult = "testCreateStageFromValue"; ParSeqBasedCompletionStage<String> stageFromValue = _parSeqBasedCompletionStageFactory.buildStageFromValue(testResult); Assert.assertEquals(testResult, stageFromValue.toCompletableFuture().get()); }
public boolean isRegisteredUser(@Nonnull final JID user, final boolean checkRemoteDomains) { if (xmppServer.isLocal(user)) { try { getUser(user.getNode()); return true; } catch (final UserNotFoundException e) { return false; } } else if (!checkRemoteDomains) { return false; } else { // Look up in the cache using the full JID Boolean isRegistered = remoteUsersCache.get(user.toString()); if (isRegistered == null) { // Check if the bare JID of the user is cached isRegistered = remoteUsersCache.get(user.toBareJID()); if (isRegistered == null) { // No information is cached so check user identity and cache it // A disco#info is going to be sent to the bare JID of the user. This packet // is going to be handled by the remote server. final IQ iq = new IQ(IQ.Type.get); iq.setFrom(xmppServer.getServerInfo().getXMPPDomain()); iq.setTo(user.toBareJID()); iq.setChildElement("query", "http://jabber.org/protocol/disco#info"); final Semaphore completionSemaphore = new Semaphore(0); // Send the disco#info request to the remote server. final IQRouter iqRouter = xmppServer.getIQRouter(); final long timeoutInMillis = REMOTE_DISCO_INFO_TIMEOUT.getValue().toMillis(); iqRouter.addIQResultListener(iq.getID(), new IQResultListener() { @Override public void receivedAnswer(final IQ packet) { final JID from = packet.getFrom(); // Assume that the user is not a registered user Boolean isRegistered = Boolean.FALSE; // Analyze the disco result packet if (IQ.Type.result == packet.getType()) { final Element child = packet.getChildElement(); if (child != null) { for (final Iterator it = child.elementIterator("identity"); it.hasNext();) { final Element identity = (Element) it.next(); final String accountType = identity.attributeValue("type"); if ("registered".equals(accountType) || "admin".equals(accountType)) { isRegistered = Boolean.TRUE; break; } } } } // Update cache of remote registered users remoteUsersCache.put(from.toBareJID(), isRegistered); completionSemaphore.release(); } @Override public void answerTimeout(final String packetId) { Log.warn("The result from the disco#info request was never received. request: {}", iq); completionSemaphore.release(); } }, timeoutInMillis); // Send the request iqRouter.route(iq); // Wait for the response try { completionSemaphore.tryAcquire(timeoutInMillis, TimeUnit.MILLISECONDS); } catch (final InterruptedException e) { Thread.currentThread().interrupt(); Log.warn("Interrupted whilst waiting for response from remote server", e); } isRegistered = remoteUsersCache.computeIfAbsent(user.toBareJID(), ignored -> Boolean.FALSE); } } return isRegistered; } }
@Test public void isRegisteredUserTrueWillReturnTrueForLocalUsers() { final boolean result = userManager.isRegisteredUser(new JID(USER_ID, Fixtures.XMPP_DOMAIN, null), true); assertThat(result, is(true)); }
public FEELFnResult<String> invoke(@ParameterName("string") String string) { if ( string == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "string", "cannot be null")); } else { return FEELFnResult.ofResult( string.toLowerCase() ); } }
@Test void invokeLowercaseString() { FunctionTestUtil.assertResult(stringLowerCaseFunction.invoke("teststring"), "teststring"); }
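A hedged null-input companion; FunctionTestUtil.assertResultError is assumed to mirror the assertResult helper used above:
FunctionTestUtil.assertResultError(stringLowerCaseFunction.invoke(null), InvalidParametersEvent.class);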
@Override public KsMaterializedQueryResult<WindowedRow> get( final GenericKey key, final int partition, final Range<Instant> windowStart, final Range<Instant> windowEnd, final Optional<Position> position ) { try { final ReadOnlySessionStore<GenericKey, GenericRow> store = stateStore .store(QueryableStoreTypes.sessionStore(), partition); return KsMaterializedQueryResult.rowIterator( findSession(store, key, windowStart, windowEnd).iterator()); } catch (final Exception e) { throw new MaterializationException("Failed to get value from materialized table", e); } }
@Test public void shouldReturnValueIfSessionStartsAtLowerBoundIfLowerStartBoundClosed() { // Given: final Range<Instant> startBounds = Range.closed( LOWER_INSTANT, UPPER_INSTANT ); final Instant wend = LOWER_INSTANT.plusMillis(1); givenSingleSession(LOWER_INSTANT, wend); // When: final Iterator<WindowedRow> rowIterator = table.get(A_KEY, PARTITION, startBounds, Range.all()).rowIterator; // Then: assertThat(rowIterator.hasNext(), is(true)); assertThat(rowIterator.next(), is( WindowedRow.of( SCHEMA, sessionKey(LOWER_INSTANT, wend), A_VALUE, wend.toEpochMilli() ) )); }
public static SearchTypeError parse(Query query, String searchTypeId, ElasticsearchException ex) { if (isSearchTypeAbortedError(ex)) { return new SearchTypeAbortedError(query, searchTypeId, ex); } Throwable possibleResultWindowException = ex; int attempt = 0; while (possibleResultWindowException != null && attempt < MAX_DEPTH_OF_EXCEPTION_CAUSE_ANALYSIS) { final Integer resultWindowLimit = parseResultLimit(possibleResultWindowException); if (resultWindowLimit != null) { return new ResultWindowLimitError(query, searchTypeId, resultWindowLimit); } possibleResultWindowException = possibleResultWindowException.getCause(); attempt++; } return new SearchTypeError(query, searchTypeId, ex); }
@Test void returnsResultWindowLimitError() { final ElasticsearchException elasticsearchException = new ElasticsearchException("Result window is too large, [from + size] must be less than or equal to: [42]"); final SearchTypeError error = SearchTypeErrorParser.parse(query, "searchTypeId", elasticsearchException); assertThat(error).isInstanceOf(ResultWindowLimitError.class); assertThat((ResultWindowLimitError) error) .satisfies(e -> assertEquals(42, e.getResultWindowLimit())) .satisfies(e -> assertEquals("searchTypeId", e.searchTypeId())) .satisfies(e -> assertEquals("test_query", e.queryId())); }
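The parser also walks nested causes (bounded by MAX_DEPTH_OF_EXCEPTION_CAUSE_ANALYSIS); a hedged sketch, assuming ElasticsearchException exposes a (message, cause) constructor:
final ElasticsearchException nested = new ElasticsearchException("wrapper",
    new ElasticsearchException("Result window is too large, [from + size] must be less than or equal to: [10000]"));
assertThat(SearchTypeErrorParser.parse(query, "searchTypeId", nested)).isInstanceOf(ResultWindowLimitError.class);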
public void addValueProviders(final String segmentName, final RocksDB db, final Cache cache, final Statistics statistics) { if (storeToValueProviders.isEmpty()) { logger.debug("Adding metrics recorder of task {} to metrics recording trigger", taskId); streamsMetrics.rocksDBMetricsRecordingTrigger().addMetricsRecorder(this); } else if (storeToValueProviders.containsKey(segmentName)) { throw new IllegalStateException("Value providers for store " + segmentName + " of task " + taskId + " has been already added. This is a bug in Kafka Streams. " + "Please open a bug report under https://issues.apache.org/jira/projects/KAFKA/issues"); } verifyDbAndCacheAndStatistics(segmentName, db, cache, statistics); logger.debug("Adding value providers for store {} of task {}", segmentName, taskId); storeToValueProviders.put(segmentName, new DbAndCacheAndStatistics(db, cache, statistics)); }
@Test public void shouldNotAddItselfToRecordingTriggerWhenNotEmpty() { recorder.addValueProviders(SEGMENT_STORE_NAME_1, dbToAdd1, cacheToAdd1, statisticsToAdd1); verify(recordingTrigger).addMetricsRecorder(recorder); recorder.addValueProviders(SEGMENT_STORE_NAME_2, dbToAdd2, cacheToAdd2, statisticsToAdd2); verifyNoMoreInteractions(recordingTrigger); }
@Override public String getName() { return name; }
@Test public void testGetName() { assertNull(queueConfig.getName()); }
@CanIgnoreReturnValue public final Ordered containsAtLeast( @Nullable Object k0, @Nullable Object v0, @Nullable Object... rest) { return containsAtLeastEntriesIn(accumulateMultimap(k0, v0, rest)); }
@Test public void containsAtLeastVarargRespectsDuplicatesFailure() { ImmutableListMultimap<Integer, String> actual = ImmutableListMultimap.of(3, "one", 3, "two", 4, "five", 4, "five"); expectFailureWhenTestingThat(actual).containsAtLeast(3, "one", 3, "one", 3, "one", 4, "five"); assertFailureKeys("missing", "---", "expected to contain at least", "but was"); assertFailureValue("missing", "{3=[one [2 copies]]}"); }
public static SinkConfig validateUpdate(SinkConfig existingConfig, SinkConfig newConfig) { SinkConfig mergedConfig = clone(existingConfig); if (!existingConfig.getTenant().equals(newConfig.getTenant())) { throw new IllegalArgumentException("Tenants differ"); } if (!existingConfig.getNamespace().equals(newConfig.getNamespace())) { throw new IllegalArgumentException("Namespaces differ"); } if (!existingConfig.getName().equals(newConfig.getName())) { throw new IllegalArgumentException("Sink Names differ"); } if (!StringUtils.isEmpty(newConfig.getClassName())) { mergedConfig.setClassName(newConfig.getClassName()); } if (!StringUtils.isEmpty(newConfig.getSourceSubscriptionName()) && !newConfig.getSourceSubscriptionName() .equals(existingConfig.getSourceSubscriptionName())) { throw new IllegalArgumentException("Subscription Name cannot be altered"); } if (newConfig.getInputSpecs() == null) { newConfig.setInputSpecs(new HashMap<>()); } if (mergedConfig.getInputSpecs() == null) { mergedConfig.setInputSpecs(new HashMap<>()); } if (!StringUtils.isEmpty(newConfig.getLogTopic())) { mergedConfig.setLogTopic(newConfig.getLogTopic()); } if (newConfig.getInputs() != null) { newConfig.getInputs().forEach((topicName -> { newConfig.getInputSpecs().putIfAbsent(topicName, ConsumerConfig.builder().isRegexPattern(false).build()); })); } if (newConfig.getTopicsPattern() != null && !newConfig.getTopicsPattern().isEmpty()) { newConfig.getInputSpecs().put(newConfig.getTopicsPattern(), ConsumerConfig.builder() .isRegexPattern(true) .build()); } if (newConfig.getTopicToSerdeClassName() != null) { newConfig.getTopicToSerdeClassName().forEach((topicName, serdeClassName) -> { newConfig.getInputSpecs().put(topicName, ConsumerConfig.builder() .serdeClassName(serdeClassName) .isRegexPattern(false) .build()); }); } if (newConfig.getTopicToSchemaType() != null) { newConfig.getTopicToSchemaType().forEach((topicName, schemaClassname) -> { newConfig.getInputSpecs().put(topicName, ConsumerConfig.builder() .schemaType(schemaClassname) .isRegexPattern(false) .build()); }); } if (!newConfig.getInputSpecs().isEmpty()) { SinkConfig finalMergedConfig = mergedConfig; newConfig.getInputSpecs().forEach((topicName, consumerConfig) -> { if (!existingConfig.getInputSpecs().containsKey(topicName)) { throw new IllegalArgumentException("Input Topics cannot be altered"); } if (consumerConfig.isRegexPattern() != existingConfig.getInputSpecs().get(topicName).isRegexPattern()) { throw new IllegalArgumentException( "isRegexPattern for input topic " + topicName + " cannot be altered"); } finalMergedConfig.getInputSpecs().put(topicName, consumerConfig); }); } if (newConfig.getProcessingGuarantees() != null && !newConfig.getProcessingGuarantees() .equals(existingConfig.getProcessingGuarantees())) { throw new IllegalArgumentException("Processing Guarantees cannot be altered"); } if (newConfig.getConfigs() != null) { mergedConfig.setConfigs(newConfig.getConfigs()); } if (newConfig.getSecrets() != null) { mergedConfig.setSecrets(newConfig.getSecrets()); } if (newConfig.getParallelism() != null) { mergedConfig.setParallelism(newConfig.getParallelism()); } if (newConfig.getRetainOrdering() != null && !newConfig.getRetainOrdering() .equals(existingConfig.getRetainOrdering())) { throw new IllegalArgumentException("Retain Ordering cannot be altered"); } if (newConfig.getRetainKeyOrdering() != null && !newConfig.getRetainKeyOrdering() .equals(existingConfig.getRetainKeyOrdering())) { throw new IllegalArgumentException("Retain Key Ordering cannot be altered"); } if (newConfig.getAutoAck() != null && !newConfig.getAutoAck().equals(existingConfig.getAutoAck())) { throw new IllegalArgumentException("AutoAck cannot be altered"); } if (newConfig.getResources() != null) { mergedConfig .setResources(ResourceConfigUtils.merge(existingConfig.getResources(), newConfig.getResources())); } if (newConfig.getTimeoutMs() != null) { mergedConfig.setTimeoutMs(newConfig.getTimeoutMs()); } if (newConfig.getCleanupSubscription() != null) { mergedConfig.setCleanupSubscription(newConfig.getCleanupSubscription()); } if (!StringUtils.isEmpty(newConfig.getArchive())) { mergedConfig.setArchive(newConfig.getArchive()); } if (!StringUtils.isEmpty(newConfig.getRuntimeFlags())) { mergedConfig.setRuntimeFlags(newConfig.getRuntimeFlags()); } if (!StringUtils.isEmpty(newConfig.getCustomRuntimeOptions())) { mergedConfig.setCustomRuntimeOptions(newConfig.getCustomRuntimeOptions()); } if (newConfig.getTransformFunction() != null) { mergedConfig.setTransformFunction(newConfig.getTransformFunction()); } if (newConfig.getTransformFunctionClassName() != null) { mergedConfig.setTransformFunctionClassName(newConfig.getTransformFunctionClassName()); } if (newConfig.getTransformFunctionConfig() != null) { mergedConfig.setTransformFunctionConfig(newConfig.getTransformFunctionConfig()); } return mergedConfig; }
@Test(expectedExceptions = IllegalArgumentException.class, expectedExceptionsMessageRegExp = "Input Topics cannot be altered") public void testMergeDifferentInputs() { SinkConfig sinkConfig = createSinkConfig(); SinkConfig newSinkConfig = createUpdatedSinkConfig("topicsPattern", "Different"); SinkConfigUtils.validateUpdate(sinkConfig, newSinkConfig); }
public String createRegexForUrlTemplate(String url, String placeholder) { String transformedUrl = Arrays.stream(StringUtils.splitByWholeSeparator(url, placeholder)) .map(part -> StringUtils.isBlank(part) ? part : Pattern.quote(part)) .collect(Collectors.joining(".*?")); return "^" + transformedUrl + "$"; }
@Test public void createRegexForTemplateUrl() { String url = "https://example.com/api/lookup?key=message_key&a=b&c=message_key&e=f"; String template = "https://example.com/api/lookup?key=${key}&a=b&c=${key}&e=f"; String expected = "^\\Qhttps://example.com/api/lookup?key=\\E.*?\\Q&a=b&c=\\E.*?\\Q&e=f\\E$"; String got = regexHelper.createRegexForUrlTemplate(template, "${key}"); assertThat(got).isEqualTo(expected); Pattern compiled = Pattern.compile(got, Pattern.DOTALL); assertThat(compiled.matcher(url).find()).isTrue(); }
@Override public boolean hasContent() { switch (super.getVersion()) { case DEFAULT_VERSION: return true; default: return true; } }
@Test public void testHasContent() { assertTrue(verDefault.hasContent()); assertTrue(verCurrent.hasContent()); }
public ClientFailoverConfig setClientConfigs(List<ClientConfig> clientConfigs) { clientConfigs.forEach(this::validateClientConfig); this.clientConfigs = clientConfigs; return this; }
@Test public void testSetClientConfigs_WithOffReconnectMode_ShouldThrowInvalidConfigException() { ClientConfig clientConfig1 = new ClientConfig(); ClientConfig clientConfig2 = new ClientConfig() .setConnectionStrategyConfig(new ClientConnectionStrategyConfig().setReconnectMode(OFF)); List<ClientConfig> clientConfigList = Arrays.asList(clientConfig1, clientConfig2); ClientFailoverConfig clientFailoverConfig = new ClientFailoverConfig(); assertThatThrownBy(() -> clientFailoverConfig.setClientConfigs(clientConfigList)) .isInstanceOf(InvalidConfigurationException.class) .hasMessageContaining("Reconnect mode for ClientFailoverConfig must not be OFF"); }
@Override public Serde.Deserializer deserializer(String topic, Serde.Target type) { return new Serde.Deserializer() { @SneakyThrows @Override public DeserializeResult deserialize(RecordHeaders headers, byte[] data) { try { UnknownFieldSet unknownFields = UnknownFieldSet.parseFrom(data); return new DeserializeResult(unknownFields.toString(), DeserializeResult.Type.STRING, Map.of()); } catch (Exception e) { throw new ValidationException(e.getMessage()); } } }; }
@Test void deserializeInvalidMessage() { var deserializer = serde.deserializer(DUMMY_TOPIC, Serde.Target.VALUE); assertThatThrownBy(() -> deserializer.deserialize(null, new byte[] { 1, 2, 3 })) .isInstanceOf(ValidationException.class) .hasMessageContaining("Protocol message contained an invalid tag"); }
@Deprecated public void add(Promise promise) { add((Future) promise); }
@Test public void testAddNullPromise() { assertThrows(NullPointerException.class, new Executable() { @Override public void execute() { combiner.add(null); } }); }
@Override public Mono<ListResult<CategoryVo>> list(Integer page, Integer size) { var listOptions = new ListOptions(); listOptions.setFieldSelector(FieldSelector.of( notEqual("spec.hideFromList", BooleanUtils.TRUE) )); return client.listBy(Category.class, listOptions, PageRequestImpl.of(pageNullSafe(page), sizeNullSafe(size), defaultSort()) ) .map(list -> { List<CategoryVo> categoryVos = list.get() .map(CategoryVo::from) .collect(Collectors.toList()); return new ListResult<>(list.getPage(), list.getSize(), list.getTotal(), categoryVos); }) .defaultIfEmpty(new ListResult<>(page, size, 0L, List.of())); }
@Test void list() { ListResult<Category> categories = new ListResult<>(1, 10, 3, categories().stream() .sorted(CategoryFinderImpl.defaultComparator()) .toList()); when(client.listBy(eq(Category.class), any(ListOptions.class), any(PageRequest.class))) .thenReturn(Mono.just(categories)); ListResult<CategoryVo> list = categoryFinder.list(1, 10).block(); assertThat(list.getItems()).hasSize(3); assertThat(list.get().map(categoryVo -> categoryVo.getMetadata().getName()).toList()) .isEqualTo(List.of("c3", "c2", "hello")); }
long nextRecordingId() { return nextRecordingId; }
@Test void shouldReadNextRecordingIdFromCatalogHeader() throws IOException { final long nextRecordingId = 10101010; setNextRecordingId(nextRecordingId); try (Catalog catalog = new Catalog(archiveDir, null, 0, CAPACITY, clock, null, segmentFileBuffer)) { assertEquals(nextRecordingId, catalog.nextRecordingId()); } }
public static String parsingEndpointRule(String endpointUrl) { // Even if set in the configuration file, the value from ENV takes priority. if (endpointUrl == null || !PATTERN.matcher(endpointUrl).find()) { // skip the system property lookup and read directly from the system env String endpointUrlSource = NacosClientProperties.PROTOTYPE.getProperty( PropertyKeyConst.SystemEnv.ALIBABA_ALIWARE_ENDPOINT_URL); if (StringUtils.isNotBlank(endpointUrlSource)) { endpointUrl = endpointUrlSource; } return StringUtils.isNotBlank(endpointUrl) ? endpointUrl : ""; } endpointUrl = endpointUrl.substring(endpointUrl.indexOf("${") + 2, endpointUrl.lastIndexOf("}")); int defStartOf = endpointUrl.indexOf(":"); String defaultEndpointUrl = null; if (defStartOf != -1) { defaultEndpointUrl = endpointUrl.substring(defStartOf + 1); endpointUrl = endpointUrl.substring(0, defStartOf); } String endpointUrlSource = TemplateUtils.stringBlankAndThenExecute( NacosClientProperties.PROTOTYPE.getProperty(endpointUrl), () -> NacosClientProperties.PROTOTYPE.getProperty( PropertyKeyConst.SystemEnv.ALIBABA_ALIWARE_ENDPOINT_URL)); if (StringUtils.isBlank(endpointUrlSource)) { if (StringUtils.isNotBlank(defaultEndpointUrl)) { endpointUrl = defaultEndpointUrl; } } else { endpointUrl = endpointUrlSource; } return StringUtils.isNotBlank(endpointUrl) ? endpointUrl : ""; }
@Test void testParsingEndpointRuleFromSystemWithParam() { System.setProperty(PropertyKeyConst.SystemEnv.ALIBABA_ALIWARE_ENDPOINT_URL, "alibaba_aliware_endpoint_url"); assertEquals("alibaba_aliware_endpoint_url", ParamUtil.parsingEndpointRule("${abc:xxx}")); }
public RuntimeOptionsBuilder parse(String... args) { return parse(Arrays.asList(args)); }
@Test void clobbers_line_filters_from_cli_if_tags_are_specified_in_env() { RuntimeOptions runtimeOptions = parser .parse("file:path/to.feature") .build(); RuntimeOptions options = new CucumberPropertiesParser() .parse(singletonMap(FILTER_TAGS_PROPERTY_NAME, "@should_not_be_clobbered")) .build(runtimeOptions); List<String> actual = options.getTagExpressions().stream() .map(e -> e.toString()) .collect(toList()); assertAll( () -> assertThat(actual, contains("@should_not_be_clobbered")), () -> assertThat(options.getLineFilters(), is(emptyMap())), () -> assertThat(options.getFeaturePaths(), contains(new File("path/to.feature").toURI()))); }
public static Builder forCurrentMagic(ProduceRequestData data) { return forMagic(RecordBatch.CURRENT_MAGIC_VALUE, data); }
@Test public void shouldBeFlaggedAsIdempotentWhenIdempotentRecords() { final MemoryRecords memoryRecords = MemoryRecords.withIdempotentRecords(1, Compression.NONE, 1L, (short) 1, 1, 1, simpleRecord); final ProduceRequest request = ProduceRequest.forCurrentMagic(new ProduceRequestData() .setTopicData(new ProduceRequestData.TopicProduceDataCollection(Collections.singletonList( new ProduceRequestData.TopicProduceData() .setName("topic") .setPartitionData(Collections.singletonList( new ProduceRequestData.PartitionProduceData() .setIndex(1) .setRecords(memoryRecords)))).iterator())) .setAcks((short) -1) .setTimeoutMs(10)).build(); assertTrue(RequestTestUtils.hasIdempotentRecords(request)); }
@Override public ServletStream stream() { return stream; }
@Test public void set_status() { underTest.stream().setStatus(404); verify(response).setStatus(404); }
@Override public Number getMetricValue() { long currentCount = counter.getCount(); long difference = currentCount - lastReportCount; currentReportCount = currentCount; return difference; }
@Test void testGetMetricValue() { final Counter backingCounter = new SimpleCounter(); final DCounter counter = new DCounter( backingCounter, "counter", "localhost", Collections.emptyList(), () -> 0); // sane initial state assertThat(counter.getMetricValue()).isEqualTo(0L); counter.ackReport(); assertThat(counter.getMetricValue()).isEqualTo(0L); // value is compared against initial state 0 backingCounter.inc(10); assertThat(counter.getMetricValue()).isEqualTo(10L); // last value was not acked, should still be compared against initial state 0 backingCounter.inc(10); assertThat(counter.getMetricValue()).isEqualTo(20L); // last value (20) acked, now target of comparison counter.ackReport(); assertThat(counter.getMetricValue()).isEqualTo(0L); // we now compare against the acked value backingCounter.inc(10); assertThat(counter.getMetricValue()).isEqualTo(10L); // properly handle decrements backingCounter.dec(10); assertThat(counter.getMetricValue()).isEqualTo(0L); }
@Override public String getApiUrl() { return endpoint.getApiUri(); }
@Test public void testServerGithubEnterpriseTopLevelUrl() throws Exception { // Create a server Map resp = request() .status(400) .jwtToken(token) .crumb( crumb ) .data(MapsHelper.of( "name", "My Server", "apiUrl", getApiUrl() )) .post("/organizations/jenkins/scm/github-enterprise/servers/") .build(Map.class); List errors = (List) resp.get("errors"); Assert.assertEquals(1, errors.size()); Map error1 = (Map) errors.get(0); Assert.assertEquals("apiUrl", error1.get("field")); Assert.assertEquals(GithubServerContainer.ERROR_MESSAGE_INVALID_APIURL, error1.get("message")); Assert.assertEquals("INVALID", error1.get("code")); }
@Override public Local create(final Path file) { return this.create(String.format("%s-%s", new AlphanumericRandomStringService().random(), file.getName())); }
@Test public void testCreateContainer() { final String temp = StringUtils.removeEnd(System.getProperty("java.io.tmpdir"), File.separator); final String s = System.getProperty("file.separator"); final Path file = new Path("/container", EnumSet.of(Path.Type.directory)); file.attributes().setRegion("region"); assertEquals(String.format("%s%su%s1742810335-container", temp, s, s), new FlatTemporaryFileService().create("u", file).getAbsolute()); }
static int decodeLiteral(byte tag, ByteBuf in, ByteBuf out) { in.markReaderIndex(); int length; switch(tag >> 2 & 0x3F) { case 60: if (!in.isReadable()) { return NOT_ENOUGH_INPUT; } length = in.readUnsignedByte(); break; case 61: if (in.readableBytes() < 2) { return NOT_ENOUGH_INPUT; } length = in.readUnsignedShortLE(); break; case 62: if (in.readableBytes() < 3) { return NOT_ENOUGH_INPUT; } length = in.readUnsignedMediumLE(); break; case 63: if (in.readableBytes() < 4) { return NOT_ENOUGH_INPUT; } length = in.readIntLE(); break; default: length = tag >> 2 & 0x3F; } length += 1; if (in.readableBytes() < length) { in.resetReaderIndex(); return NOT_ENOUGH_INPUT; } out.writeBytes(in, length); return length; }
@Test public void testDecodeLiteral() throws Exception { ByteBuf in = Unpooled.wrappedBuffer(new byte[] { 0x05, // preamble length 0x04 << 2, // literal tag + length 0x6e, 0x65, 0x74, 0x74, 0x79 // "netty" }); ByteBuf out = Unpooled.buffer(5); snappy.decode(in, out); // "netty" ByteBuf expected = Unpooled.wrappedBuffer(new byte[] { 0x6e, 0x65, 0x74, 0x74, 0x79 }); assertEquals(expected, out, "Literal was not decoded correctly"); in.release(); out.release(); expected.release(); }
public ScriptBuilder number(long num) { return number(chunks.size(), num); }
@Test public void testNumber() { for (int i = -100; i <= 100; i++) { Script s = new ScriptBuilder().number(i).build(); for (ScriptChunk ch : s.chunks()) { assertTrue(Integer.toString(i), ch.isShortestPossiblePushData()); } } }
@Override public boolean isDetected() { return !isEmpty(system.envVariable("FCI_BUILD_ID")); }
@Test public void isDetected_givenNoEnvVariable_dontDetectCodeMagic() { assertThat(underTest.isDetected()).isFalse(); }
@Override public void shutdown() { throw new UnsupportedOperationException(); }
@Test(expected = UnsupportedOperationException.class) public void shutdown() { client().getCluster().shutdown(); }
@Override public SmileResponse<T> handle(Request request, Response response) { byte[] bytes = readResponseBytes(response); String contentType = response.getHeader(CONTENT_TYPE); if ((contentType == null) || !MediaType.parse(contentType).is(MEDIA_TYPE_SMILE)) { return new SmileResponse<>(response.getStatusCode(), response.getHeaders(), bytes); } return new SmileResponse<>(response.getStatusCode(), response.getHeaders(), smileCodec, bytes); }
@Test public void testInvalidSmileGetValue() { byte[] invalidSmileBytes = "test".getBytes(UTF_8); SmileResponse<User> response = handler.handle(null, mockResponse(OK, MEDIA_TYPE_SMILE, invalidSmileBytes)); try { response.getValue(); fail("expected exception"); } catch (IllegalStateException e) { assertEquals(e.getMessage(), "Response does not contain a SMILE value"); assertEquals(e.getCause(), response.getException()); assertEquals(response.getSmileBytes(), invalidSmileBytes); assertEquals(response.getResponseBytes(), response.getSmileBytes()); } }
@Override public boolean add(final Integer value) { return add(value.intValue()); }
@Test public void addingAnElementTwiceDoesNothing() { assertTrue(set.add(1)); assertFalse(set.add(1)); }
public String encode(String name, String value) { return encode(new DefaultCookie(name, value)); }
@Test public void testRejectCookieValueWithSemicolon() { assertThrows(IllegalArgumentException.class, new Executable() { @Override public void execute() { ClientCookieEncoder.STRICT.encode(new DefaultCookie("myCookie", "foo;bar")); } }); }
public static <K> KStreamHolder<K> build( final KStreamHolder<K> stream, final StreamSelectKey<K> selectKey, final RuntimeBuildContext buildContext ) { return build(stream, selectKey, buildContext, PartitionByParamsFactory::build); }
@Test public void shouldReturnCorrectSchema() { // When: final KStreamHolder<GenericKey> result = StreamSelectKeyBuilder .build(stream, selectKey, buildContext, paramBuilder); // Then: assertThat(result.getSchema(), is(RESULT_SCHEMA)); }
@Override public SelType call(String methodName, SelType[] args) { if (args.length == 0 && "getDays".equals(methodName)) { return SelLong.of((long) val.getDays()); } else if (args.length == 2 && "daysBetween".equals(methodName)) { return new SelJodaDateTimeDays( Days.daysBetween( ((SelJodaDateTime) args[0]).getInternalVal(), ((SelJodaDateTime) args[1]).getInternalVal())); } throw new UnsupportedOperationException( type() + " DO NOT support calling method: " + methodName + " with args: " + Arrays.toString(args)); }
@Test(expected = UnsupportedOperationException.class) public void testInvalidCallArg() { one.call("getDays", new SelType[] {SelType.NULL}); }
public static KafkaPool fromCrd( Reconciliation reconciliation, Kafka kafka, KafkaNodePool pool, NodeIdAssignment idAssignment, Storage oldStorage, OwnerReference ownerReference, SharedEnvironmentProvider sharedEnvironmentProvider ) { ModelUtils.validateComputeResources(pool.getSpec().getResources(), "KafkaNodePool.spec.resources"); StorageUtils.validatePersistentStorage(pool.getSpec().getStorage(), "KafkaNodePool.spec.storage"); KafkaPool result = new KafkaPool(reconciliation, kafka, pool, componentName(kafka, pool), ownerReference, idAssignment, sharedEnvironmentProvider); result.gcLoggingEnabled = isGcLoggingEnabled(kafka, pool); result.jvmOptions = pool.getSpec().getJvmOptions() != null ? pool.getSpec().getJvmOptions() : kafka.getSpec().getKafka().getJvmOptions(); result.resources = pool.getSpec().getResources() != null ? pool.getSpec().getResources() : kafka.getSpec().getKafka().getResources(); result.processRoles = new HashSet<>(pool.getSpec().getRoles()); if (oldStorage != null) { Storage newStorage = pool.getSpec().getStorage(); StorageDiff diff = new StorageDiff(reconciliation, oldStorage, newStorage, idAssignment.current(), idAssignment.desired()); if (diff.issuesDetected()) { LOGGER.warnCr(reconciliation, "Only the following changes to Kafka storage are allowed: " + "changing the deleteClaim flag, " + "changing the kraftMetadata flag (but only one volume can be marked to store the KRaft metadata log at a time), " + "adding volumes to Jbod storage or removing volumes from Jbod storage, " + "each volume in Jbod storage should have a unique ID, " + "changing overrides to nodes which do not exist yet, " + "and increasing size of persistent claim volumes (depending on the volume type and used storage class)."); LOGGER.warnCr(reconciliation, "The desired Kafka storage configuration in the KafkaNodePool resource {}/{} contains changes which are not allowed. As a " + "result, all storage changes will be ignored. Use DEBUG level logging for more information " + "about the detected changes.", pool.getMetadata().getNamespace(), pool.getMetadata().getName()); Condition warning = StatusUtils.buildWarningCondition("KafkaStorage", "The desired Kafka storage configuration in the KafkaNodePool resource " + pool.getMetadata().getNamespace() + "/" + pool.getMetadata().getName() + " contains changes which are not allowed. As a " + "result, all storage changes will be ignored. Use DEBUG level logging for more information " + "about the detected changes."); result.warningConditions.add(warning); result.setStorage(oldStorage); } else { if (!VolumeUtils.kraftMetadataPath(oldStorage).equals(VolumeUtils.kraftMetadataPath(newStorage))) { // The volume for the KRaft metadata log is changing. We should log it. LOGGER.warnCr(reconciliation, "The KRaft metadata log for KafkaNodePool {}/{} will be moved from volume {} to volume {}.", pool.getMetadata().getNamespace(), pool.getMetadata().getName(), VolumeUtils.kraftMetadataPath(oldStorage), VolumeUtils.kraftMetadataPath(newStorage)); } result.setStorage(newStorage); } } else { result.setStorage(pool.getSpec().getStorage()); } // Adds the warnings about unknown or deprecated fields result.warningConditions.addAll(StatusUtils.validate(reconciliation, pool)); if (pool.getSpec().getTemplate() != null) { KafkaNodePoolTemplate template = pool.getSpec().getTemplate(); result.templatePersistentVolumeClaims = template.getPersistentVolumeClaim(); result.templatePodSet = template.getPodSet(); result.templatePod = template.getPod(); result.templatePerBrokerService = template.getPerPodService(); result.templatePerBrokerRoute = template.getPerPodRoute(); result.templatePerBrokerIngress = template.getPerPodIngress(); result.templateContainer = template.getKafkaContainer(); result.templateInitContainer = template.getInitContainer(); } else if (kafka.getSpec().getKafka().getTemplate() != null) { KafkaClusterTemplate template = kafka.getSpec().getKafka().getTemplate(); result.templatePersistentVolumeClaims = template.getPersistentVolumeClaim(); result.templatePodSet = template.getPodSet(); result.templatePod = template.getPod(); result.templatePerBrokerService = template.getPerPodService(); result.templatePerBrokerRoute = template.getPerPodRoute(); result.templatePerBrokerIngress = template.getPerPodIngress(); result.templateContainer = template.getKafkaContainer(); result.templateInitContainer = template.getInitContainer(); } return result; }
@Test public void testKafkaPoolConfigureOptionsConflict() { KafkaNodePool pool = new KafkaNodePoolBuilder(POOL) .editSpec() .withResources(new ResourceRequirementsBuilder().withRequests(Map.of("cpu", new Quantity("4"), "memory", new Quantity("16Gi"))).build()) .withNewJvmOptions() .withGcLoggingEnabled() .withXmx("4096m") .endJvmOptions() .withNewTemplate() .withNewKafkaContainer() .addToEnv(new ContainerEnvVarBuilder().withName("MY_ENV_VAR").withValue("my-env-var-value").build()) .endKafkaContainer() .endTemplate() .endSpec() .build(); Kafka kafka = new KafkaBuilder(KAFKA) .editSpec() .editKafka() .withResources(new ResourceRequirementsBuilder().withRequests(Map.of("cpu", new Quantity("6"), "memory", new Quantity("20Gi"))).build()) .withNewJvmOptions() .withGcLoggingEnabled() .withXmx("8192m") .endJvmOptions() .withNewTemplate() .withNewInitContainer() .addToEnv(new ContainerEnvVarBuilder().withName("MY_INIT_ENV_VAR").withValue("my-init-env-var-value").build()) .endInitContainer() .endTemplate() .endKafka() .endSpec() .build(); KafkaPool kp = KafkaPool.fromCrd( Reconciliation.DUMMY_RECONCILIATION, kafka, pool, new NodeIdAssignment(Set.of(10, 11, 13), Set.of(10, 11, 13), Set.of(), Set.of(), Set.of()), new JbodStorageBuilder().withVolumes(new PersistentClaimStorageBuilder().withId(0).withSize("100Gi").build()).build(), OWNER_REFERENCE, SHARED_ENV_PROVIDER ); assertThat(kp, is(notNullValue())); assertThat(kp.componentName, is(CLUSTER_NAME + "-pool")); assertThat(kp.storage, is(new JbodStorageBuilder().withVolumes(new PersistentClaimStorageBuilder().withId(0).withSize("100Gi").build()).build())); assertThat(kp.resources.getRequests(), is(Map.of("cpu", new Quantity("4"), "memory", new Quantity("16Gi")))); assertThat(kp.gcLoggingEnabled, is(true)); assertThat(kp.jvmOptions.getXmx(), is("4096m")); assertThat(kp.templateContainer.getEnv(), is(List.of(new ContainerEnvVarBuilder().withName("MY_ENV_VAR").withValue("my-env-var-value").build()))); assertThat(kp.templateInitContainer, is(nullValue())); assertThat(kp.templatePod, is(nullValue())); assertThat(kp.templatePerBrokerIngress, is(nullValue())); assertThat(kp.templatePodSet, is(nullValue())); assertThat(kp.templatePerBrokerRoute, is(nullValue())); assertThat(kp.templatePerBrokerService, is(nullValue())); assertThat(kp.templatePersistentVolumeClaims, is(nullValue())); }
@Override public FTPFile parseFTPEntry(String entry) { if(matches(entry)) { String typeStr = group(1); String usr = group(15); String grp = group(16); String filesize = group(17); String datestr = group(18) + " " + group(19); String name = group(20); String endtoken = group(21); return super.parseFTPEntry(typeStr, usr, grp, filesize, datestr, name, endtoken); } return null; }
@Test public void testParse() { FTPFile parsed; //#1213 parsed = parser.parseFTPEntry( "-rw-r--r-- FTP User 10439 Apr 20 05:29 ASCheckbox_2_0.zip" ); assertNotNull(parsed); assertEquals("ASCheckbox_2_0.zip", parsed.getName()); assertEquals(FTPFile.FILE_TYPE, parsed.getType()); assertEquals(10439, parsed.getSize()); assertEquals(Calendar.APRIL, parsed.getTimestamp().get(Calendar.MONTH)); assertEquals(20, parsed.getTimestamp().get(Calendar.DAY_OF_MONTH)); assertTrue(parsed.hasPermission(FTPFile.USER_ACCESS, FTPFile.READ_PERMISSION)); assertTrue(parsed.hasPermission(FTPFile.GROUP_ACCESS, FTPFile.READ_PERMISSION)); assertTrue(parsed.hasPermission(FTPFile.WORLD_ACCESS, FTPFile.READ_PERMISSION)); assertTrue(parsed.hasPermission(FTPFile.USER_ACCESS, FTPFile.WRITE_PERMISSION)); assertFalse(parsed.hasPermission(FTPFile.GROUP_ACCESS, FTPFile.WRITE_PERMISSION)); assertFalse(parsed.hasPermission(FTPFile.WORLD_ACCESS, FTPFile.WRITE_PERMISSION)); assertFalse(parsed.hasPermission(FTPFile.USER_ACCESS, FTPFile.EXECUTE_PERMISSION)); assertFalse(parsed.hasPermission(FTPFile.GROUP_ACCESS, FTPFile.EXECUTE_PERMISSION)); assertFalse(parsed.hasPermission(FTPFile.WORLD_ACCESS, FTPFile.EXECUTE_PERMISSION)); }
@Bean("MigrationSteps") public MigrationSteps provide(InternalMigrationStepRegistry migrationStepRegistry, DbVersion... dbVersions) { Arrays.stream(dbVersions).forEach(dbVersion -> dbVersion.addSteps(migrationStepRegistry)); return migrationStepRegistry.build(); }
@Test public void provide_calls_DbVersion_addStep_in_order() { DbVersion dbVersion1 = newMockFailingOnSecondBuildCall(); DbVersion dbVersion2 = newMockFailingOnSecondBuildCall(); DbVersion dbVersion3 = newMockFailingOnSecondBuildCall(); InOrder inOrder = inOrder(dbVersion1, dbVersion2, dbVersion3); MigrationSteps expected = mock(MigrationSteps.class); when(internalMigrationStepRegistry.build()).thenReturn(expected); assertThat(underTest.provide(internalMigrationStepRegistry, dbVersion1, dbVersion2, dbVersion3)) .isSameAs(expected); inOrder.verify(dbVersion1).addSteps(internalMigrationStepRegistry); inOrder.verify(dbVersion2).addSteps(internalMigrationStepRegistry); inOrder.verify(dbVersion3).addSteps(internalMigrationStepRegistry); inOrder.verifyNoMoreInteractions(); }
@Override public void onProcessingTime(long time) throws IOException { for (FileWriterBucket<IN> bucket : activeBuckets.values()) { bucket.onProcessingTime(time); } registerNextBucketInspectionTimer(); }
@Test void testOnProcessingTime(@TempDir java.nio.file.Path tempDir) throws Exception { Path path = new Path(tempDir.toUri()); // Create the processing time service, starting from 10. ManuallyTriggeredProcessingTimeService processingTimeService = new ManuallyTriggeredProcessingTimeService(); processingTimeService.advanceTo(10); FileWriter<String> fileWriter = createWriter( path, new FileSinkTestUtils.StringIdentityBucketAssigner(), DefaultRollingPolicy.builder() .withRolloverInterval(Duration.ofMillis(10)) .build(), new OutputFileConfig("part-", ""), processingTimeService, 5); fileWriter.initializeState(Collections.emptyList()); // Test the timer registered at time 15 on startup fileWriter.write("test1", new ContextImpl()); processingTimeService.advanceTo(15); fileWriter.write("test2", new ContextImpl()); processingTimeService.advanceTo(20); FileWriterBucket<String> test1Bucket = fileWriter.getActiveBuckets().get("test1"); assertThat(test1Bucket.getInProgressPart()) .as("The in-progress part of test1 should be rolled") .isNull(); assertThat(test1Bucket.getPendingFiles().size()).isEqualTo(1); FileWriterBucket<String> test2Bucket = fileWriter.getActiveBuckets().get("test2"); assertThat(test2Bucket.getInProgressPart()) .as("The in-progress part of test2 should not be rolled") .isNotNull(); assertThat(test2Bucket.getPendingFiles().size()).isEqualTo(0); // Close, pre-commit & clear all the pending records. processingTimeService.advanceTo(30); fileWriter.prepareCommit(); // Test timer re-registration. fileWriter.write("test1", new ContextImpl()); processingTimeService.advanceTo(35); fileWriter.write("test2", new ContextImpl()); processingTimeService.advanceTo(40); test1Bucket = fileWriter.getActiveBuckets().get("test1"); assertThat(test1Bucket.getInProgressPart()) .as("The in-progress part of test1 should be rolled") .isNull(); assertThat(test1Bucket.getPendingFiles().size()).isEqualTo(1); test2Bucket = fileWriter.getActiveBuckets().get("test2"); assertThat(test2Bucket.getInProgressPart()) .as("The in-progress part of test2 should not be rolled") .isNotNull(); assertThat(test2Bucket.getPendingFiles().size()).isEqualTo(0); }
public static URI buildExternalUri(@NotNull MultivaluedMap<String, String> httpHeaders, @NotNull URI defaultUri) { Optional<URI> externalUri = Optional.empty(); final List<String> headers = httpHeaders.get(HttpConfiguration.OVERRIDE_HEADER); if (headers != null && !headers.isEmpty()) { externalUri = headers.stream() .filter(s -> { try { if (Strings.isNullOrEmpty(s)) { return false; } final URI uri = new URI(s); if (!uri.isAbsolute()) { return true; } switch (uri.getScheme()) { case "http": case "https": return true; } return false; } catch (URISyntaxException e) { return false; } }) .map(URI::create) .findFirst(); } final URI uri = externalUri.orElse(defaultUri); // Make sure we return an URI object with a trailing slash if (!uri.toString().endsWith("/")) { return URI.create(uri.toString() + "/"); } return uri; }
@Test public void buildEndpointUriEnsuresTrailingSlash() { final MultivaluedMap<String, String> httpHeaders = new MultivaluedHashMap<>(); final URI endpointUri = URI.create("http://graylog.example.com"); final URI endpointUri2 = URI.create("http://graylog.example.com/"); assertThat(RestTools.buildExternalUri(httpHeaders, endpointUri)).isEqualTo(URI.create("http://graylog.example.com/")); assertThat(RestTools.buildExternalUri(httpHeaders, endpointUri2)).isEqualTo(URI.create("http://graylog.example.com/")); httpHeaders.putSingle(HttpConfiguration.OVERRIDE_HEADER, "http://header.example.com"); assertThat(RestTools.buildExternalUri(httpHeaders, endpointUri)).isEqualTo(URI.create("http://header.example.com/")); httpHeaders.putSingle(HttpConfiguration.OVERRIDE_HEADER, "http://header.example.com/"); assertThat(RestTools.buildExternalUri(httpHeaders, endpointUri)).isEqualTo(URI.create("http://header.example.com/")); }
@Override public void onDataReceived(@NonNull final BluetoothDevice device, @NonNull final Data data) { super.onDataReceived(device, data); if (data.size() != 1) { onInvalidDataReceived(device, data); return; } final int type = data.getIntValue(Data.FORMAT_UINT8, 0); onTemperatureTypeReceived(device, type); }
@Test public void onTemperatureTypeReceived() { final ProfileReadResponse response = new TemperatureTypeDataCallback() { @Override public void onTemperatureTypeReceived(@NonNull final BluetoothDevice device, final int type) { called = true; assertEquals("Temperature Type", HealthThermometerTypes.TYPE_EAR, type); } }; called = false; final Data data = new Data(new byte[] { 3 }); response.onDataReceived(null, data); assertTrue(response.isValid()); assertTrue(called); }
public static ClusterAllocationDiskSettings create(boolean enabled, String low, String high, String floodStage) { if (!enabled) { return ClusterAllocationDiskSettings.create(enabled, null); } return ClusterAllocationDiskSettings.create(enabled, createWatermarkSettings(low, high, floodStage)); }
@Test public void createWithoutSettingsWhenThresholdDisabled() throws Exception { ClusterAllocationDiskSettings settings = ClusterAllocationDiskSettingsFactory.create(false, "", "", ""); assertThat(settings).isInstanceOf(ClusterAllocationDiskSettings.class); assertThat(settings.ThresholdEnabled()).isFalse(); }
protected void copyAndClose( InputStream inputStream, OutputStream outputStream ) throws IOException { IOUtils.copy( inputStream, outputStream ); outputStream.flush(); IOUtils.closeQuietly( outputStream ); }
@Test public void testCopyAndClose() throws Exception { try ( MockedStatic<IOUtils> ioUtilsMockedStatic1 = mockStatic( IOUtils.class ) ) { int bytesCopied = 10; ioUtilsMockedStatic1.when( () -> IOUtils.copy( any( InputStream.class ), any( OutputStream.class ) ) ).thenReturn( bytesCopied ); // Variables InputStream inputStream = Mockito.mock( InputStream.class ); OutputStream outputStream = Mockito.mock( OutputStream.class ); // EXECUTE servlet.copyAndClose( inputStream, outputStream ); ioUtilsMockedStatic1.verify( () -> IOUtils.copy( eq( inputStream ), eq( outputStream ) ) ); ioUtilsMockedStatic1.verify( () -> IOUtils.closeQuietly( eq( outputStream ) ) ); } }
@SuppressWarnings("unchecked") @Override public MoveApplicationAcrossQueuesResponse moveApplicationAcrossQueues( MoveApplicationAcrossQueuesRequest request) throws YarnException { ApplicationId applicationId = request.getApplicationId(); UserGroupInformation callerUGI = getCallerUgi(applicationId, AuditConstants.MOVE_APP_REQUEST); RMApp application = verifyUserAccessForRMApp(applicationId, callerUGI, AuditConstants.MOVE_APP_REQUEST, ApplicationAccessType.MODIFY_APP, true); String targetQueue = request.getTargetQueue(); if (!accessToTargetQueueAllowed(callerUGI, application, targetQueue)) { RMAuditLogger.logFailure(callerUGI.getShortUserName(), AuditConstants.MOVE_APP_REQUEST, "Target queue doesn't exist or user" + " doesn't have permissions to submit to target queue: " + targetQueue, "ClientRMService", AuditConstants.UNAUTHORIZED_USER, applicationId); throw RPCUtil.getRemoteException(new AccessControlException("User " + callerUGI.getShortUserName() + " cannot submit applications to" + " target queue or the target queue doesn't exist: " + targetQueue + " while moving " + applicationId)); } // Moves are only allowed when the app is in a state where it is tracked by the scheduler. SUBMITTED is also included in this list to cover the corner case where the app may not yet be known to the scheduler while in SUBMITTED state. if (!ACTIVE_APP_STATES.contains(application.getState())) { String msg = "App in " + application.getState() + " state cannot be moved."; RMAuditLogger.logFailure(callerUGI.getShortUserName(), AuditConstants.MOVE_APP_REQUEST, "UNKNOWN", "ClientRMService", msg); throw new YarnException(msg); } try { this.rmAppManager.moveApplicationAcrossQueue( application.getApplicationId(), request.getTargetQueue()); } catch (YarnException ex) { RMAuditLogger.logFailure(callerUGI.getShortUserName(), AuditConstants.MOVE_APP_REQUEST, "UNKNOWN", "ClientRMService", ex.getMessage()); throw ex; } RMAuditLogger.logSuccess(callerUGI.getShortUserName(), AuditConstants.MOVE_APP_REQUEST, "ClientRMService" , applicationId); return recordFactory .newRecordInstance(MoveApplicationAcrossQueuesResponse.class); }
@Test public void testMoveApplicationAdminTargetQueue() throws Exception { ApplicationId applicationId = getApplicationId(1); UserGroupInformation aclUGI = UserGroupInformation.getCurrentUser(); QueueACLsManager queueAclsManager = getQueueAclManager("allowed_queue", QueueACL.ADMINISTER_QUEUE, aclUGI); ApplicationACLsManager appAclsManager = getAppAclManager(); ClientRMService rmService = createClientRMServiceForMoveApplicationRequest(applicationId, aclUGI.getShortUserName(), appAclsManager, queueAclsManager); // user is admin move to queue in acl MoveApplicationAcrossQueuesRequest moveAppRequest = MoveApplicationAcrossQueuesRequest.newInstance(applicationId, "allowed_queue"); rmService.moveApplicationAcrossQueues(moveAppRequest); // user is admin move to queue not in acl moveAppRequest = MoveApplicationAcrossQueuesRequest.newInstance( applicationId, "not_allowed"); try { rmService.moveApplicationAcrossQueues(moveAppRequest); Assert.fail("The request should fail with an AccessControlException"); } catch (YarnException rex) { Assert.assertTrue("AccessControlException is expected", rex.getCause() instanceof AccessControlException); } // ACL is owned by "moveuser", move is performed as a different user aclUGI = UserGroupInformation.createUserForTesting("moveuser", new String[]{}); queueAclsManager = getQueueAclManager("move_queue", QueueACL.ADMINISTER_QUEUE, aclUGI); appAclsManager = getAppAclManager(); ClientRMService rmService2 = createClientRMServiceForMoveApplicationRequest(applicationId, aclUGI.getShortUserName(), appAclsManager, queueAclsManager); // no access to this queue MoveApplicationAcrossQueuesRequest moveAppRequest2 = MoveApplicationAcrossQueuesRequest. newInstance(applicationId, "move_queue"); try { rmService2.moveApplicationAcrossQueues(moveAppRequest2); Assert.fail("The request should fail with an AccessControlException"); } catch (YarnException rex) { Assert.assertTrue("AccessControlException is expected", rex.getCause() instanceof AccessControlException); } // execute the move as the acl owner // access to the queue OK: user allowed in this queue aclUGI.doAs(new PrivilegedExceptionAction<Object>() { @Override public Object run() throws Exception { return rmService2.moveApplicationAcrossQueues(moveAppRequest2); } }); }
public int login(final String username, final String password) throws SQLException { var sql = "select count(*) from USERS where username=? and password=?"; ResultSet resultSet = null; try (var connection = dataSource.getConnection(); var preparedStatement = connection.prepareStatement(sql) ) { var result = 0; preparedStatement.setString(1, username); preparedStatement.setString(2, password); resultSet = preparedStatement.executeQuery(); while (resultSet.next()) { result = resultSet.getInt(1); } if (result == 1) { LOGGER.info("Login successfully!"); } else { LOGGER.info("Fail to login!"); } return result; } finally { if (resultSet != null) { resultSet.close(); } } }
@Test void loginShouldFail() throws SQLException { var dataSource = createDataSource(); var userTableModule = new UserTableModule(dataSource); var user = new User(1, "123456", "123456"); assertEquals(0, userTableModule.login(user.getUsername(), user.getPassword())); }
@Override public PathAttributes find(final Path file, final ListProgressListener listener) throws BackgroundException { if(file.isRoot()) { return PathAttributes.EMPTY; } if(containerService.isContainer(file)) { final PathAttributes attributes = new PathAttributes(); if(log.isDebugEnabled()) { log.debug(String.format("Read location for bucket %s", file)); } attributes.setRegion(new S3LocationFeature(session, session.getClient().getRegionEndpointCache()).getLocation(file).getIdentifier()); return attributes; } if(file.getType().contains(Path.Type.upload)) { final Write.Append append = new S3MultipartUploadService(session, new S3WriteFeature(session, acl), acl).append(file, new TransferStatus()); if(append.append) { return new PathAttributes().withSize(append.offset); } throw new NotfoundException(file.getAbsolute()); } try { PathAttributes attr; final Path bucket = containerService.getContainer(file); try { attr = new S3AttributesAdapter(session.getHost()).toAttributes(session.getClient().getVersionedObjectDetails( file.attributes().getVersionId(), bucket.isRoot() ? StringUtils.EMPTY : bucket.getName(), containerService.getKey(file))); } catch(ServiceException e) { switch(e.getResponseCode()) { case 405: if(log.isDebugEnabled()) { log.debug(String.format("Mark file %s as delete marker", file)); } // Only DELETE method is allowed for delete markers attr = new PathAttributes(); attr.setCustom(Collections.singletonMap(KEY_DELETE_MARKER, Boolean.TRUE.toString())); attr.setDuplicate(true); return attr; } throw new S3ExceptionMappingService().map("Failure to read attributes of {0}", e, file); } if(StringUtils.isNotBlank(attr.getVersionId())) { if(log.isDebugEnabled()) { log.debug(String.format("Determine if %s is latest version for %s", attr.getVersionId(), file)); } // Determine if latest version try { final String latest = new S3AttributesAdapter(session.getHost()).toAttributes(session.getClient().getObjectDetails( bucket.isRoot() ? StringUtils.EMPTY : bucket.getName(), containerService.getKey(file))).getVersionId(); if(null != latest) { if(log.isDebugEnabled()) { log.debug(String.format("Found later version %s for %s", latest, file)); } // Duplicate if not latest version attr.setDuplicate(!latest.equals(attr.getVersionId())); } } catch(ServiceException e) { final BackgroundException failure = new S3ExceptionMappingService().map("Failure to read attributes of {0}", e, file); if(failure instanceof NotfoundException) { attr.setDuplicate(true); } else { throw failure; } } } return attr; } catch(NotfoundException e) { if(file.isDirectory()) { if(log.isDebugEnabled()) { log.debug(String.format("Search for common prefix %s", file)); } // File may be marked as placeholder but no placeholder file exists. Check for common prefix returned. try { new S3ObjectListService(session, acl).list(file, new CancellingListProgressListener(), String.valueOf(Path.DELIMITER), 1); } catch(ListCanceledException l) { // Found common prefix return PathAttributes.EMPTY; } catch(NotfoundException n) { throw e; } // Found common prefix return PathAttributes.EMPTY; } throw e; } }
@Test public void testDeletedWithMarker() throws Exception { final Path bucket = new Path("versioning-test-eu-central-1-cyberduck", EnumSet.of(Path.Type.volume, Path.Type.directory)); final Path test = new S3TouchFeature(session, new S3AccessControlListFeature(session)).touch(new Path(bucket, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus()); assertNotNull(test.attributes().getVersionId()); assertNotEquals(PathAttributes.EMPTY, new S3AttributesFinderFeature(session, new S3AccessControlListFeature(session)).find(test)); // Add delete marker new S3DefaultDeleteFeature(session).delete(Collections.singletonList(new Path(test).withAttributes(PathAttributes.EMPTY)), new DisabledPasswordCallback(), new Delete.DisabledCallback()); assertTrue(new S3FindFeature(session, new S3AccessControlListFeature(session)).find(new Path(test))); assertFalse(new S3AttributesFinderFeature(session, new S3AccessControlListFeature(session)).find(test).getCustom().containsKey(KEY_DELETE_MARKER)); assertFalse(new S3FindFeature(session, new S3AccessControlListFeature(session)).find(new Path(test).withAttributes(PathAttributes.EMPTY))); // Test reading delete marker itself final Path marker = new S3VersionedObjectListService(session, new S3AccessControlListFeature(session)).list(bucket, new DisabledListProgressListener()).find(new SimplePathPredicate(test)); assertTrue(marker.attributes().isDuplicate()); assertTrue(marker.attributes().getCustom().containsKey(KEY_DELETE_MARKER)); assertTrue(new S3AttributesFinderFeature(session, new S3AccessControlListFeature(session)).find(marker).getCustom().containsKey(KEY_DELETE_MARKER)); assertTrue(new S3FindFeature(session, new S3AccessControlListFeature(session)).find(marker)); }
@Override public boolean containsValue(final Object value) { return containsValue(((Long)value).longValue()); }
@Test public void shouldNotContainValueForAMissingEntry() { assertFalse(map.containsValue(1L)); }
@PostMapping("") public ShenyuAdminResult createRule(@Valid @RequestBody final RuleDTO ruleDTO) { Integer createCount = ruleService.createOrUpdate(ruleDTO); return ShenyuAdminResult.success(ShenyuResultMessage.CREATE_SUCCESS, createCount); }
@Test public void testCreateRule() throws Exception { RuleConditionDTO ruleConditionDTO = RuleConditionDTO.builder() .id("888") .ruleId("666") .paramType("uri") .operator("match") .paramName("/") .paramValue("/http/order/save") .build(); List<RuleConditionDTO> conList = new ArrayList<>(); conList.add(ruleConditionDTO); RuleDTO ruleDTO = RuleDTO.builder() .id("666") .selectorId("168") .matchMode(0) .name("/http/order/save") .enabled(true) .loged(true) .matchRestful(false) .sort(1) .handle("{\"loadBalance\":\"random\",\"retry\":0,\"timeout\":3000}") .ruleConditions(conList) .build(); SpringBeanUtils.getInstance().setApplicationContext(mock(ConfigurableApplicationContext.class)); when(SpringBeanUtils.getInstance().getBean(RuleMapper.class)).thenReturn(ruleMapper); when(ruleMapper.existed(ruleDTO.getId())).thenReturn(true); when(SpringBeanUtils.getInstance().getBean(SelectorMapper.class)).thenReturn(selectorMapper); when(selectorMapper.existed(ruleDTO.getSelectorId())).thenReturn(true); given(this.ruleService.createOrUpdate(ruleDTO)).willReturn(1); this.mockMvc.perform(MockMvcRequestBuilders.post("/rule", ruleDTO) .contentType(MediaType.APPLICATION_JSON) .content(GsonUtils.getInstance().toJson(ruleDTO)) ) .andExpect(status().isOk()) .andExpect(jsonPath("$.message", is(ShenyuResultMessage.CREATE_SUCCESS))) .andReturn(); }
@Override public AppResponse process(Flow flow, MijnDigidSessionRequest request) throws FlowNotDefinedException, IOException, NoSuchAlgorithmException, SharedServiceClientException { appSession = appSessionService.getSession(request.getMijnDigidSessionId()); appAuthenticator = appAuthenticatorService.findByUserAppId(appSession.getUserAppId()); checkSwitchesEnabled(); digidClient.remoteLog("1468", Map.of(lowerUnderscore(ACCOUNT_ID), appSession.getAccountId(), lowerUnderscore(DEVICE_NAME), appAuthenticator.getDeviceName(), lowerUnderscore(HUMAN_PROCESS), "get_notifications", lowerUnderscore(APP_CODE), appAuthenticator.getAppCode())); if (!isAppSessionAuthenticated(appSession) || !isAppAuthenticatorActivated(appAuthenticator)){ return new NokResponse("no_session"); } return nsClient.getNotifications(appAuthenticator.getAccountId()); }
@Test public void appSwitchDisabledTest() throws FlowNotDefinedException, SharedServiceClientException, IOException, NoSuchAlgorithmException { //given when(appSessionService.getSession(any())).thenReturn(mockedAppSession); when(appAuthenticatorService.findByUserAppId(any())).thenReturn(mockedAppAuthenticator); when(switchService.digidAppSwitchEnabled()).thenReturn(false); assertThrows(SwitchDisabledException.class, () -> notificationsGet.process(mockedFlow, mockedRequest)); verify(digidClientMock, times(1)).remoteLog("1418", Map.of(lowerUnderscore(ACCOUNT_ID), mockedAppSession.getAccountId(), lowerUnderscore(HIDDEN), true, lowerUnderscore(HUMAN_PROCESS), "get_notifications")); }
@Override // SELECT scenario public void beforeQuery(Executor executor, MappedStatement ms, Object parameter, RowBounds rowBounds, ResultHandler resultHandler, BoundSql boundSql) { // get the data permission rules that apply to this Mapper method List<DataPermissionRule> rules = ruleFactory.getDataPermissionRule(ms.getId()); if (mappedStatementCache.noRewritable(ms, rules)) { // skip if no rewrite is needed return; } PluginUtils.MPBoundSql mpBs = PluginUtils.mpBoundSql(boundSql); try { // initialize the context ContextHolder.init(rules); // process the SQL mpBs.sql(parserSingle(mpBs.sql(), null)); } finally { // cache whether this statement needs rewriting addMappedStatementCache(ms); // clear the context ContextHolder.clear(); } }
@Test // no rules exist, so nothing matches public void testBeforeQuery_withoutRule() { try (MockedStatic<PluginUtils> pluginUtilsMock = mockStatic(PluginUtils.class)) { // prepare parameters MappedStatement mappedStatement = mock(MappedStatement.class); BoundSql boundSql = mock(BoundSql.class); // invoke interceptor.beforeQuery(null, mappedStatement, null, null, null, boundSql); // assert pluginUtilsMock.verify(() -> PluginUtils.mpBoundSql(boundSql), never()); } }
public ArtifactResolveRequest startArtifactResolveProcess(HttpServletRequest httpServletRequest) throws SamlParseException { try { final var artifactResolveRequest = validateRequest(httpServletRequest); final var samlSession = updateArtifactResolveRequestWithSamlSession(artifactResolveRequest); validateArtifactResolve(artifactResolveRequest); dcMetadataService.resolveDcMetadata(artifactResolveRequest); signatureService.validateSamlRequest(artifactResolveRequest, artifactResolveRequest.getArtifactResolve().getSignature()); createAdAuthentication(samlSession, artifactResolveRequest); samlSessionService.updateSamlSession(artifactResolveRequest); return artifactResolveRequest; } catch (MessageDecodingException e) { throw new SamlParseException("ArtifactResolveRequest soap11 decode exception", e); } catch (ComponentInitializationException e) { throw new SamlParseException("ArtifactResolveRequest initialization exception", e); } catch (SamlSessionException e) { throw new SamlParseException("Failed to load saml session", e); } catch (AdException e) { throw new SamlParseException("Failed to create an authentication", e); } catch (DienstencatalogusException e) { throw new SamlParseException("Failed to retrieve metadata from DienstenCatalogus", e); } catch (SamlValidationException e) { throw new SamlParseException("ArtifactResolve not valid", e); } catch (ValidationException e) { throw new SamlParseException("Failed to validate", e); } catch (SharedServiceClientException e) { throw new SamlParseException("Failed to retrieve data from sharedServiceClient.getSSConfigLong", e); } }
@Test void parseArtifactResolveWithWrongConnectionEntityId() throws SamlSessionException { samlSession.setConnectionEntityId("wrongConnectionEntityId"); when(samlSessionServiceMock.loadSession(anyString())).thenReturn(samlSession); SamlParseException exception = assertThrows(SamlParseException.class, () -> artifactResolveService.startArtifactResolveProcess(prepareSoapRequest(artifactResolveValid)) ); assertEquals("ArtifactResolve not valid", exception.getMessage()); }
public static String validColumnName(String identifier) { if (identifier.isEmpty() || identifier.equals(EMPTY_COLUMN_NAME)) { return "\"\""; } return validIdentifier(identifier); }
@Test public void testValidColumnName() { assertEquals("foo", validColumnName("foo")); assertEquals("\"\"", validColumnName(CassandraCqlUtils.EMPTY_COLUMN_NAME)); assertEquals("\"\"", validColumnName("")); assertEquals("\"select\"", validColumnName("select")); }
@Udf public String ucase( @UdfParameter(description = "The string to upper-case") final String input) { if (input == null) { return null; } return input.toUpperCase(); }
@Test public void shouldConvertToUpperCase() { final String result = udf.ucase("FoO bAr"); assertThat(result, is("FOO BAR")); }
public static <K, E, V> Collector<E, ImmutableSetMultimap.Builder<K, V>, ImmutableSetMultimap<K, V>> unorderedFlattenIndex( Function<? super E, K> keyFunction, Function<? super E, Stream<V>> valueFunction) { verifyKeyAndValueFunctions(keyFunction, valueFunction); BiConsumer<ImmutableSetMultimap.Builder<K, V>, E> accumulator = (map, element) -> { K key = requireNonNull(keyFunction.apply(element), KEY_FUNCTION_CANT_RETURN_NULL_MESSAGE); Stream<V> valueStream = requireNonNull(valueFunction.apply(element), VALUE_FUNCTION_CANT_RETURN_NULL_MESSAGE); valueStream.forEach(value -> map.put(key, value)); }; BinaryOperator<ImmutableSetMultimap.Builder<K, V>> merger = (m1, m2) -> { for (Map.Entry<K, V> entry : m2.build().entries()) { m1.put(entry.getKey(), entry.getValue()); } return m1; }; return Collector.of( ImmutableSetMultimap::builder, accumulator, merger, ImmutableSetMultimap.Builder::build); }
@Test public void unorderedFlattenIndex_with_valueFunction_fails_if_value_function_returns_null() { assertThatThrownBy(() -> SINGLE_ELEMENT2_LIST.stream().collect(unorderedFlattenIndex(MyObj2::getId, s -> null))) .isInstanceOf(NullPointerException.class) .hasMessage("Value function can't return null"); }
public static LogCollector<ShenyuRequestLog> getInstance() { return INSTANCE; }
@Test public void testAbstractLogCollector() throws Exception { PulsarLogCollector.getInstance().start(); Field field1 = AbstractLogCollector.class.getDeclaredField("started"); field1.setAccessible(true); Assertions.assertEquals(field1.get(PulsarLogCollector.getInstance()).toString(), "true"); PulsarLogCollector.getInstance().collect(shenyuRequestLog); PulsarLogCollector.getInstance().close(); Field field2 = AbstractLogCollector.class.getDeclaredField("started"); field2.setAccessible(true); Assertions.assertEquals(field2.get(PulsarLogCollector.getInstance()).toString(), "false"); }
@Override public void handleWayTags(int edgeId, EdgeIntAccess edgeIntAccess, ReaderWay readerWay, IntsRef relationFlags) { List<Map<String, Object>> nodeTags = readerWay.getTag("node_tags", null); if (nodeTags == null) return; for (int i = 0; i < nodeTags.size(); i++) { Map<String, Object> tags = nodeTags.get(i); if ("crossing".equals(tags.get("railway")) || "level_crossing".equals(tags.get("railway"))) { String barrierVal = (String) tags.get("crossing:barrier"); crossingEnc.setEnum(false, edgeId, edgeIntAccess, (Helper.isEmpty(barrierVal) || "no".equals(barrierVal)) ? Crossing.RAILWAY : Crossing.RAILWAY_BARRIER); return; } String crossingSignals = (String) tags.get("crossing:signals"); if ("yes".equals(crossingSignals)) { crossingEnc.setEnum(false, edgeId, edgeIntAccess, Crossing.TRAFFIC_SIGNALS); return; } String crossingMarkings = (String) tags.get("crossing:markings"); if ("yes".equals(crossingMarkings)) { crossingEnc.setEnum(false, edgeId, edgeIntAccess, Crossing.MARKED); return; } String crossingValue = (String) tags.get("crossing"); // some crossing values like "no" do not require highway=crossing and sometimes no crossing value exists although highway=crossing if (Helper.isEmpty(crossingValue) && ("no".equals(crossingSignals) || "no".equals(crossingMarkings) || "crossing".equals(tags.get("highway")) || "crossing".equals(tags.get("footway")) || "crossing".equals(tags.get("cycleway")))) { crossingEnc.setEnum(false, edgeId, edgeIntAccess, Crossing.UNMARKED); // next node could have more specific Crossing value continue; } Crossing crossing = Crossing.find(crossingValue); if (crossing != Crossing.MISSING) crossingEnc.setEnum(false, edgeId, edgeIntAccess, crossing); } }
@Test public void testMarked() { EdgeIntAccess edgeIntAccess = new ArrayEdgeIntAccess(1); int edgeId = 0; parser.handleWayTags(edgeId, edgeIntAccess, createReader(new HashMap<>()), null); assertEquals(Crossing.MISSING, crossingEV.getEnum(false, edgeId, edgeIntAccess)); parser.handleWayTags(edgeId, edgeIntAccess = new ArrayEdgeIntAccess(1), createReader(new PMap().putObject("highway", "crossing").toMap()), null); assertEquals(Crossing.UNMARKED, crossingEV.getEnum(false, edgeId, edgeIntAccess)); parser.handleWayTags(edgeId, edgeIntAccess = new ArrayEdgeIntAccess(1), createReader(new PMap().putObject("crossing", "marked").toMap()), null); assertEquals(Crossing.MARKED, crossingEV.getEnum(false, edgeId, edgeIntAccess)); parser.handleWayTags(edgeId, edgeIntAccess = new ArrayEdgeIntAccess(1), createReader(new PMap().putObject("crossing:markings", "yes").toMap()), null); assertEquals(Crossing.MARKED, crossingEV.getEnum(false, edgeId, edgeIntAccess)); parser.handleWayTags(edgeId, edgeIntAccess = new ArrayEdgeIntAccess(1), createReader(new PMap().putObject("crossing:markings", "no").toMap()), null); assertEquals(Crossing.UNMARKED, crossingEV.getEnum(false, edgeId, edgeIntAccess)); parser.handleWayTags(edgeId, edgeIntAccess = new ArrayEdgeIntAccess(1), createReader(new PMap().putObject("crossing:signals", "no").putObject("crossing:markings", "yes").toMap()), null); assertEquals(Crossing.MARKED, crossingEV.getEnum(false, edgeId, edgeIntAccess)); }
@Override public Iterator<Object> iterateObjects() { return new CompositeObjectIterator(concreteStores, true); }
@Test public void iterateObjectsReturnsObjectsOfAllTypes() throws Exception { String aStringValue = "a string"; BigDecimal bigDecimalValue = new BigDecimal("1"); insertObjectWithFactHandle(aStringValue); insertObjectWithFactHandle(bigDecimalValue); Collection<Object> result = collect(underTest.iterateObjects()); assertThat(result).hasSize(2); }
@Override public boolean match(Message msg, StreamRule rule) { Double msgVal = getDouble(msg.getField(rule.getField())); if (msgVal == null) { return false; } Double ruleVal = getDouble(rule.getValue()); if (ruleVal == null) { return false; } return rule.getInverted() ^ (msgVal < ruleVal); }
@Test public void testMissedDoubleMatch() { StreamRule rule = getSampleRule(); rule.setValue("25"); Message msg = getSampleMessage(); msg.addField("something", "27.45"); StreamRuleMatcher matcher = getMatcher(rule); assertFalse(matcher.match(msg, rule)); }
public static ResourceBundleUtil getInstance() { return INSTANCE; }
@Test void getInstance() { Assertions.assertNotNull(ResourceBundleUtil.getInstance()); }
public void schedule(final ScheduledHealthCheck check, final boolean healthy) { unschedule(check.getName()); final Duration interval; if (healthy) { interval = check.getSchedule().getCheckInterval(); } else { interval = check.getSchedule().getDowntimeInterval(); } schedule(check, interval, interval); }
@Test void shouldRescheduleCheckForHealthyDependency() { final String name = "test"; final Schedule schedule = new Schedule(); final ScheduledFuture future = mock(ScheduledFuture.class); when(future.cancel(true)).thenReturn(true); final ScheduledHealthCheck check = mock(ScheduledHealthCheck.class); when(check.getName()).thenReturn(name); when(check.getSchedule()).thenReturn(schedule); when(executor.scheduleWithFixedDelay( eq(check), or(eq(schedule.getCheckInterval().toMilliseconds()), eq(schedule.getDowntimeInterval().toMilliseconds())), or(eq(schedule.getCheckInterval().toMilliseconds()), eq(schedule.getDowntimeInterval().toMilliseconds())), eq(TimeUnit.MILLISECONDS)) ) .thenReturn(future); scheduler.schedule(check, false); scheduler.schedule(check, true); verify(executor, times(2)).scheduleWithFixedDelay( eq(check), or(eq(schedule.getCheckInterval().toMilliseconds()), eq(schedule.getDowntimeInterval().toMilliseconds())), or(eq(schedule.getCheckInterval().toMilliseconds()), eq(schedule.getDowntimeInterval().toMilliseconds())), eq(TimeUnit.MILLISECONDS)); verify(future).cancel(true); }
public MessageListener messageListener(MessageListener messageListener, boolean addConsumerSpan) { if (messageListener instanceof TracingMessageListener) return messageListener; return new TracingMessageListener(messageListener, this, addConsumerSpan); }
@Test void messageListener_traces() { jmsTracing.messageListener(mock(MessageListener.class), false) .onMessage(message); assertThat(testSpanHandler.takeLocalSpan().name()).isEqualTo("on-message"); }
void shutdown(@Observes ShutdownEvent event) { if (jobRunrBuildTimeConfiguration.backgroundJobServer().enabled()) { backgroundJobServerInstance.get().stop(); } if (jobRunrBuildTimeConfiguration.dashboard().enabled()) { dashboardWebServerInstance.get().stop(); } storageProviderInstance.get().close(); }
@Test void jobRunrStarterDoesNotStopBackgroundJobServerIfNotConfigured() { when(backgroundJobServerConfiguration.enabled()).thenReturn(false); jobRunrStarter.shutdown(new ShutdownEvent()); verify(backgroundJobServerInstance, never()).get(); }
public int poll(final FragmentHandler fragmentHandler, final int fragmentLimit) { if (isClosed) { return 0; } final long position = subscriberPosition.get(); return TermReader.read( activeTermBuffer(position), (int)position & termLengthMask, fragmentHandler, fragmentLimit, header, errorHandler, position, subscriberPosition); }
@Test void shouldReportCorrectPositionOnReception() { final long initialPosition = computePosition(INITIAL_TERM_ID, 0, POSITION_BITS_TO_SHIFT, INITIAL_TERM_ID); position.setOrdered(initialPosition); final Image image = createImage(); insertDataFrame(INITIAL_TERM_ID, offsetForFrame(0)); final int messages = image.poll(mockFragmentHandler, Integer.MAX_VALUE); assertThat(messages, is(1)); verify(mockFragmentHandler).onFragment( any(UnsafeBuffer.class), eq(HEADER_LENGTH), eq(DATA.length), any(Header.class)); final InOrder inOrder = Mockito.inOrder(position); inOrder.verify(position).setOrdered(initialPosition); inOrder.verify(position).setOrdered(initialPosition + ALIGNED_FRAME_LENGTH); }
@ConstantFunction(name = "bitShiftRightLogical", argTypes = {SMALLINT, BIGINT}, returnType = SMALLINT) public static ConstantOperator bitShiftRightLogicalSmallInt(ConstantOperator first, ConstantOperator second) { short s = first.getSmallint(); int i = s >= 0 ? s : (((int) s) + 65536); return ConstantOperator.createSmallInt((short) (i >>> second.getBigint())); }
@Test public void bitShiftRightLogicalSmallInt() { assertEquals(1, ScalarOperatorFunctions.bitShiftRightLogicalSmallInt(O_SI_10, O_BI_3).getSmallint()); }
@Override public TGetDictQueryParamResponse getDictQueryParam(TGetDictQueryParamRequest request) throws TException { Database db = GlobalStateMgr.getCurrentState().getDb(request.getDb_name()); if (db == null) { throw new SemanticException("Database %s is not found", request.getDb_name()); } Table table = db.getTable(request.getTable_name()); if (table == null) { throw new SemanticException("dict table %s is not found", request.getTable_name()); } if (!(table instanceof OlapTable)) { throw new SemanticException("dict table type is not OlapTable, type=" + table.getClass()); } OlapTable dictTable = (OlapTable) table; TupleDescriptor tupleDescriptor = new TupleDescriptor(TupleId.createGenerator().getNextId()); IdGenerator<SlotId> slotIdIdGenerator = SlotId.createGenerator(); for (Column column : dictTable.getBaseSchema()) { SlotDescriptor slotDescriptor = new SlotDescriptor(slotIdIdGenerator.getNextId(), tupleDescriptor); slotDescriptor.setColumn(column); slotDescriptor.setIsMaterialized(true); tupleDescriptor.addSlot(slotDescriptor); } TGetDictQueryParamResponse response = new TGetDictQueryParamResponse(); response.setSchema(OlapTableSink.createSchema(db.getId(), dictTable, tupleDescriptor)); try { List<Long> allPartitions = dictTable.getAllPartitionIds(); TOlapTablePartitionParam partitionParam = OlapTableSink.createPartition( db.getId(), dictTable, tupleDescriptor, dictTable.supportedAutomaticPartition(), dictTable.getAutomaticBucketSize(), allPartitions); response.setPartition(partitionParam); response.setLocation(OlapTableSink.createLocation(dictTable, partitionParam, dictTable.enableReplicatedStorage())); response.setNodes_info(GlobalStateMgr.getCurrentState().createNodesInfo(WarehouseManager.DEFAULT_WAREHOUSE_ID, GlobalStateMgr.getCurrentState().getNodeMgr().getClusterInfo())); } catch (UserException e) { SemanticException semanticException = new SemanticException("build DictQueryParams error in dict_query_expr."); semanticException.initCause(e); throw semanticException; } return response; }
@Test public void testgetDictQueryParam() throws TException { FrontendServiceImpl impl = new FrontendServiceImpl(exeEnv); TGetDictQueryParamRequest request = new TGetDictQueryParamRequest(); request.setDb_name("test"); request.setTable_name("site_access_auto"); TGetDictQueryParamResponse result = impl.getDictQueryParam(request); System.out.println(result); Assert.assertNotEquals(0, result.getLocation().getTabletsSize()); }
static int run(File buildResult, Path root) throws IOException { // parse included dependencies from build output final Map<String, Set<Dependency>> modulesWithBundledDependencies = combineAndFilterFlinkDependencies( ShadeParser.parseShadeOutput(buildResult.toPath()), DependencyParser.parseDependencyCopyOutput(buildResult.toPath())); final Set<String> deployedModules = DeployParser.parseDeployOutput(buildResult); LOG.info( "Extracted " + deployedModules.size() + " modules that were deployed and " + modulesWithBundledDependencies.keySet().size() + " modules which bundle dependencies with a total of " + modulesWithBundledDependencies.values().stream().mapToInt(Set::size).sum() + " dependencies"); // find modules producing a shaded-jar List<Path> noticeFiles = findNoticeFiles(root); LOG.info("Found {} NOTICE files to check", noticeFiles.size()); final Map<String, Optional<NoticeContents>> moduleToNotice = noticeFiles.stream() .collect( Collectors.toMap( NoticeFileChecker::getModuleFromNoticeFile, noticeFile -> { try { return NoticeParser.parseNoticeFile(noticeFile); } catch (IOException e) { // some machine issue throw new RuntimeException(e); } })); return run(modulesWithBundledDependencies, deployedModules, moduleToNotice); }
@Test void testRunSkipsNonDeployedModules() throws IOException { final String moduleName = "test"; final Dependency bundledDependency = Dependency.create("a", "b", "c", null); final Map<String, Set<Dependency>> bundleDependencies = new HashMap<>(); bundleDependencies.put(moduleName, Collections.singleton(bundledDependency)); final Set<String> deployedModules = Collections.emptySet(); // this would usually be a problem, but since the module is not deployed it's OK! final Optional<NoticeContents> emptyNotice = Optional.of(new NoticeContents(moduleName, Collections.emptyList())); assertThat( NoticeFileChecker.run( bundleDependencies, deployedModules, Collections.singletonMap(moduleName, emptyNotice))) .isEqualTo(0); }