focal_method: string, lengths 13 to 60.9k
test_case: string, lengths 25 to 109k
public static int durationStringLongToMs(String input) { String[] parts = input.split(":"); if (parts.length != 3) { return 0; } return Integer.parseInt(parts[0]) * 3600 * 1000 + Integer.parseInt(parts[1]) * 60 * 1000 + Integer.parseInt(parts[2]) * 1000; }
@Test public void testDurationStringLongToMs() { String input = "01:20:30"; long expected = 4830000; assertEquals(expected, Converter.durationStringLongToMs(input)); }
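The test above exercises only the happy path; a minimal companion sketch (hypothetical test name, same Converter class) for the malformed-input branch, which returns 0 whenever the input does not split into exactly three ":"-separated parts:
@Test public void testDurationStringLongToMsMalformedInput() { assertEquals(0, Converter.durationStringLongToMs("01:20")); assertEquals(0, Converter.durationStringLongToMs("01:20:30:40")); }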
public AwsAsgUtil getAwsAsgUtil() { return awsAsgUtil; }
@Test public void testOverridesWithAsgEnabledThenDisabled() { // Regular registration first InstanceInfo myInstance = createLocalUpInstanceWithAsg(LOCAL_REGION_INSTANCE_1_HOSTNAME); registerInstanceLocally(myInstance); verifyLocalInstanceStatus(myInstance.getId(), InstanceStatus.UP); // Now we disable the ASG and we should expect OUT_OF_SERVICE status. ((AwsInstanceRegistry) registry).getAwsAsgUtil().setStatus(myInstance.getASGName(), false); myInstance = createLocalUpInstanceWithAsg(LOCAL_REGION_INSTANCE_1_HOSTNAME); registerInstanceLocally(myInstance); verifyLocalInstanceStatus(myInstance.getId(), InstanceStatus.OUT_OF_SERVICE); // Now we re-enable the ASG and we should expect UP status. ((AwsInstanceRegistry) registry).getAwsAsgUtil().setStatus(myInstance.getASGName(), true); myInstance = createLocalUpInstanceWithAsg(LOCAL_REGION_INSTANCE_1_HOSTNAME); registerInstanceLocally(myInstance); verifyLocalInstanceStatus(myInstance.getId(), InstanceStatus.UP); }
@Override public double getValue(double quantile) { if (quantile < 0.0 || quantile > 1.0 || Double.isNaN(quantile)) { throw new IllegalArgumentException(quantile + " is not in [0..1]"); } if (values.length == 0) { return 0.0; } int posx = Arrays.binarySearch(quantiles, quantile); if (posx < 0) posx = ((-posx) - 1) - 1; if (posx < 1) { return values[0]; } if (posx >= values.length) { return values[values.length - 1]; } return values[posx]; }
@Test public void smallQuantilesAreTheFirstValue() { assertThat(snapshot.getValue(0.0)) .isEqualTo(1.0, offset(0.1)); }
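A hedged sketch of the guard clause, assuming the same snapshot fixture and AssertJ's assertThatThrownBy: quantiles outside [0..1] (or NaN) are rejected with IllegalArgumentException.
@Test public void outOfRangeQuantilesAreRejected() { assertThatThrownBy(() -> snapshot.getValue(1.5)).isInstanceOf(IllegalArgumentException.class).hasMessage("1.5 is not in [0..1]"); }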
public Map<String, Object> getKsqlStreamConfigProps(final String applicationId) { final Map<String, Object> map = new HashMap<>(getKsqlStreamConfigProps()); map.put( MetricCollectors.RESOURCE_LABEL_PREFIX + StreamsConfig.APPLICATION_ID_CONFIG, applicationId ); // Streams client metrics aren't used in Confluent deployment possiblyConfigureConfluentTelemetry(map); return Collections.unmodifiableMap(map); }
@Test public void shouldSetMonitoringInterceptorConfigPropertiesByClientType() { // Given: final Map<String, String> props = ImmutableMap.of( "ksql.streams.consumer.confluent.monitoring.interceptor.topic", "foo", "producer.confluent.monitoring.interceptor.topic", "bar" ); final KsqlConfig ksqlConfig = new KsqlConfig(props); // When: final Map<String, Object> result = ksqlConfig.getKsqlStreamConfigProps(); // Then: assertThat(result.get("consumer.confluent.monitoring.interceptor.topic"), is("foo")); assertThat(result.get("producer.confluent.monitoring.interceptor.topic"), is("bar")); }
protected FEEL newFeelEvaluator(AtomicReference<FEELEvent> errorHolder) { // cleanup existing error errorHolder.set(null); FEEL feel = FEELBuilder.builder().withProfiles(singletonList(new ExtendedDMNProfile())).build(); feel.addListener(event -> { FEELEvent feelEvent = errorHolder.get(); if (!(feelEvent instanceof SyntaxErrorEvent) && ERROR.equals(event.getSeverity())) { errorHolder.set(event); } }); return feel; }
@Test public void listener_singleSyntaxError() { FEELEvent syntaxErrorEvent = new SyntaxErrorEvent(Severity.ERROR, "test", null, 0, 0, null); AtomicReference<FEELEvent> error = new AtomicReference<>(); FEEL feel = expressionEvaluator.newFeelEvaluator(error); applyEvents(List.of(syntaxErrorEvent), feel); assertThat(error.get()).isEqualTo(syntaxErrorEvent); }
@Override public <T> T convert(DataTable dataTable, Type type) { return convert(dataTable, type, false); }
@Test void convert_to_table__transposed() { DataTable table = parse("", " | | 1 | 2 | 3 |", " | A | ♘ | | ♝ |", " | B | | | |", " | C | | ♝ | |"); assertEquals(table.transpose(), converter.convert(table, DataTable.class, true)); }
public static boolean isValidOrigin(String sourceHost, ZeppelinConfiguration zConf) throws UnknownHostException, URISyntaxException { String sourceUriHost = ""; if (sourceHost != null && !sourceHost.isEmpty()) { sourceUriHost = new URI(sourceHost).getHost(); sourceUriHost = (sourceUriHost == null) ? "" : sourceUriHost.toLowerCase(); } String currentHost = InetAddress.getLocalHost().getHostName().toLowerCase(); return zConf.getAllowedOrigins().contains("*") || currentHost.equals(sourceUriHost) || "localhost".equals(sourceUriHost) || zConf.getAllowedOrigins().contains(sourceHost); }
@Test void isLocalhost() throws URISyntaxException, UnknownHostException { assertTrue(CorsUtils.isValidOrigin("http://localhost", ZeppelinConfiguration.load())); }
public WikipediaEditsSource() { this(DEFAULT_HOST, DEFAULT_PORT, DEFAULT_CHANNEL); }
@TestTemplate @RetryOnFailure(times = 1) void testWikipediaEditsSource() throws Exception { if (canConnect(1, TimeUnit.SECONDS)) { final Time testTimeout = Time.seconds(60); final WikipediaEditsSource wikipediaEditsSource = new WikipediaEditsSource(); ExecutorService executorService = null; try { executorService = Executors.newSingleThreadExecutor(); BlockingQueue<Object> collectedEvents = new ArrayBlockingQueue<>(1); AtomicReference<Exception> asyncError = new AtomicReference<>(); // Execute the source in a different thread and collect events into the queue. // We do this in a separate thread in order to not block the main test thread // indefinitely in case that something bad happens (like not receiving any // events) executorService.execute( () -> { try { wikipediaEditsSource.run( new CollectingSourceContext<>(collectedEvents)); } catch (Exception e) { boolean interrupted = e instanceof InterruptedException; if (!interrupted) { LOG.warn("Failure in WikipediaEditsSource", e); } asyncError.compareAndSet(null, e); } }); long deadline = deadlineNanos(testTimeout); Object event = null; Exception error = null; // Check event or error while (event == null && error == null && System.nanoTime() < deadline) { event = collectedEvents.poll(1, TimeUnit.SECONDS); error = asyncError.get(); } if (error != null) { // We don't use assertNull, because we want to include the error message fail("Failure in WikipediaEditsSource: " + error.getMessage()); } assertThat(event) .as("Did not receive a WikipediaEditEvent within the desired timeout") .isNotNull(); assertThat(event) .as("Received unexpected event " + event) .isInstanceOf(WikipediaEditEvent.class); } finally { wikipediaEditsSource.cancel(); if (executorService != null) { executorService.shutdownNow(); executorService.awaitTermination(1, TimeUnit.SECONDS); } } } else { LOG.info("Skipping test, because not able to connect to IRC server."); } }
@Nullable public synchronized Beacon track(@NonNull Beacon beacon) { Beacon trackedBeacon = null; if (beacon.isMultiFrameBeacon() || beacon.getServiceUuid() != -1) { trackedBeacon = trackGattBeacon(beacon); } else { trackedBeacon = beacon; } return trackedBeacon; }
@Test public void multiFrameBeaconProgrammaticParserAssociationDifferentServiceUUIDFieldsGetUpdated() { Beacon beacon = getMultiFrameBeacon(); Beacon beaconUpdate = getMultiFrameBeaconUpdateDifferentServiceUUID(); ExtraDataBeaconTracker tracker = new ExtraDataBeaconTracker(false); tracker.track(beacon); tracker.track(beaconUpdate); Beacon trackedBeacon = tracker.track(beacon); assertEquals("rssi should be updated", beaconUpdate.getRssi(), trackedBeacon.getRssi()); assertEquals("data fields should be updated", beaconUpdate.getDataFields(), trackedBeacon.getExtraDataFields()); }
public DecoderResult decode(AztecDetectorResult detectorResult) throws FormatException { ddata = detectorResult; BitMatrix matrix = detectorResult.getBits(); boolean[] rawbits = extractBits(matrix); CorrectedBitsResult correctedBits = correctBits(rawbits); byte[] rawBytes = convertBoolArrayToByteArray(correctedBits.correctBits); String result = getEncodedData(correctedBits.correctBits); DecoderResult decoderResult = new DecoderResult(rawBytes, result, null, String.format("%d%%", correctedBits.ecLevel)); decoderResult.setNumBits(correctedBits.correctBits.length); decoderResult.setErrorsCorrected(correctedBits.errorsCorrected); return decoderResult; }
@Test(expected = FormatException.class) public void testDecodeTooManyErrors() throws FormatException { BitMatrix matrix = BitMatrix.parse("" + "X X . X . . . X X . . . X . . X X X . X . X X X X X . \n" + "X X . . X X . . . . . X X . . . X X . . . X . X . . X \n" + "X . . . X X . . X X X . X X . X X X X . X X . . X . . \n" + ". . . . X . X X . . X X . X X . X . X X X X . X . . X \n" + "X X X . . X X X X X . . . . . X X . . . X . X . X . X \n" + "X X . . . . . . . . X . . . X . X X X . X . . X . . . \n" + "X X . . X . . . . . X X . . . . . X . . . . X . . X X \n" + ". . . X . X . X . . . . . X X X X X X . . . . . . X X \n" + "X . . . X . X X X X X X . . X X X . X . X X X X X X . \n" + "X . . X X X . X X X X X X X X X X X X X . . . X . X X \n" + ". . . . X X . . . X . . . . . . . X X . . . X X . X . \n" + ". . . X X X . . X X . X X X X X . X . . X . . . . . . \n" + "X . . . . X . X . X . X . . . X . X . X X . X X . X X \n" + "X . X . . X . X . X . X . X . X . X . . . . . X . X X \n" + "X . X X X . . X . X . X . . . X . X . X X X . . . X X \n" + "X X X X X X X X . X . X X X X X . X . X . X . X X X . \n" + ". . . . . . . X . X . . . . . . . X X X X . . . X X X \n" + "X X . . X . . X . X X X X X X X X X X X X X . . X . X \n" + "X X X . X X X X . . X X X X . . X . . . . X . . X X X \n" + ". . . . X . X X X . . . . X X X X . . X X X X . . . . \n" + ". . X . . X . X . . . X . X X . X X . X . . . X . X . \n" + "X X . . X . . X X X X X X X . . X . X X X X X X X . . \n" + "X . X X . . X X . . . . . X . . . . . . X X . X X X . \n" + "X . . X X . . X X . X . X . . . . X . X . . X . . X . \n" + "X . X . X . . X . X X X X X X X X . X X X X . . X X . \n" + "X X X X . . . X . . X X X . X X . . X . . . . X X X . \n" + "X X . X . X . . . X . X . . . . X X . X . . X X . . . \n", "X ", ". "); AztecDetectorResult r = new AztecDetectorResult(matrix, NO_POINTS, true, 16, 4); new Decoder().decode(r); }
@Override public EncodedMessage transform(ActiveMQMessage message) throws Exception { if (message == null) { return null; } long messageFormat = 0; Header header = null; Properties properties = null; Map<Symbol, Object> daMap = null; Map<Symbol, Object> maMap = null; Map<String,Object> apMap = null; Map<Object, Object> footerMap = null; Section body = convertBody(message); if (message.isPersistent()) { if (header == null) { header = new Header(); } header.setDurable(true); } byte priority = message.getPriority(); if (priority != Message.DEFAULT_PRIORITY) { if (header == null) { header = new Header(); } header.setPriority(UnsignedByte.valueOf(priority)); } String type = message.getType(); if (type != null) { if (properties == null) { properties = new Properties(); } properties.setSubject(type); } MessageId messageId = message.getMessageId(); if (messageId != null) { if (properties == null) { properties = new Properties(); } properties.setMessageId(getOriginalMessageId(message)); } ActiveMQDestination destination = message.getDestination(); if (destination != null) { if (properties == null) { properties = new Properties(); } properties.setTo(destination.getQualifiedName()); if (maMap == null) { maMap = new HashMap<>(); } maMap.put(JMS_DEST_TYPE_MSG_ANNOTATION, destinationType(destination)); } ActiveMQDestination replyTo = message.getReplyTo(); if (replyTo != null) { if (properties == null) { properties = new Properties(); } properties.setReplyTo(replyTo.getQualifiedName()); if (maMap == null) { maMap = new HashMap<>(); } maMap.put(JMS_REPLY_TO_TYPE_MSG_ANNOTATION, destinationType(replyTo)); } String correlationId = message.getCorrelationId(); if (correlationId != null) { if (properties == null) { properties = new Properties(); } try { properties.setCorrelationId(AMQPMessageIdHelper.INSTANCE.toIdObject(correlationId)); } catch (AmqpProtocolException e) { properties.setCorrelationId(correlationId); } } long expiration = message.getExpiration(); if (expiration != 0) { long ttl = expiration - System.currentTimeMillis(); if (ttl < 0) { ttl = 1; } if (header == null) { header = new Header(); } header.setTtl(new UnsignedInteger((int) ttl)); if (properties == null) { properties = new Properties(); } properties.setAbsoluteExpiryTime(new Date(expiration)); } long timeStamp = message.getTimestamp(); if (timeStamp != 0) { if (properties == null) { properties = new Properties(); } properties.setCreationTime(new Date(timeStamp)); } // JMSX Message Properties int deliveryCount = message.getRedeliveryCounter(); if (deliveryCount > 0) { if (header == null) { header = new Header(); } header.setDeliveryCount(UnsignedInteger.valueOf(deliveryCount)); } String userId = message.getUserID(); if (userId != null) { if (properties == null) { properties = new Properties(); } properties.setUserId(new Binary(userId.getBytes(StandardCharsets.UTF_8))); } String groupId = message.getGroupID(); if (groupId != null) { if (properties == null) { properties = new Properties(); } properties.setGroupId(groupId); } int groupSequence = message.getGroupSequence(); if (groupSequence > 0) { if (properties == null) { properties = new Properties(); } properties.setGroupSequence(UnsignedInteger.valueOf(groupSequence)); } final Map<String, Object> entries; try { entries = message.getProperties(); } catch (IOException e) { throw JMSExceptionSupport.create(e); } for (Map.Entry<String, Object> entry : entries.entrySet()) { String key = entry.getKey(); Object value = entry.getValue(); if (key.startsWith(JMS_AMQP_PREFIX)) { if (key.startsWith(NATIVE, JMS_AMQP_PREFIX_LENGTH)) { // skip transformer appended properties continue; } else if (key.startsWith(ORIGINAL_ENCODING, JMS_AMQP_PREFIX_LENGTH)) { // skip transformer appended properties continue; } else if (key.startsWith(MESSAGE_FORMAT, JMS_AMQP_PREFIX_LENGTH)) { messageFormat = (long) TypeConversionSupport.convert(entry.getValue(), Long.class); continue; } else if (key.startsWith(HEADER, JMS_AMQP_PREFIX_LENGTH)) { if (header == null) { header = new Header(); } continue; } else if (key.startsWith(PROPERTIES, JMS_AMQP_PREFIX_LENGTH)) { if (properties == null) { properties = new Properties(); } continue; } else if (key.startsWith(MESSAGE_ANNOTATION_PREFIX, JMS_AMQP_PREFIX_LENGTH)) { if (maMap == null) { maMap = new HashMap<>(); } String name = key.substring(JMS_AMQP_MESSAGE_ANNOTATION_PREFIX.length()); maMap.put(Symbol.valueOf(name), value); continue; } else if (key.startsWith(FIRST_ACQUIRER, JMS_AMQP_PREFIX_LENGTH)) { if (header == null) { header = new Header(); } header.setFirstAcquirer((boolean) TypeConversionSupport.convert(value, Boolean.class)); continue; } else if (key.startsWith(CONTENT_TYPE, JMS_AMQP_PREFIX_LENGTH)) { if (properties == null) { properties = new Properties(); } properties.setContentType(Symbol.getSymbol((String) TypeConversionSupport.convert(value, String.class))); continue; } else if (key.startsWith(CONTENT_ENCODING, JMS_AMQP_PREFIX_LENGTH)) { if (properties == null) { properties = new Properties(); } properties.setContentEncoding(Symbol.getSymbol((String) TypeConversionSupport.convert(value, String.class))); continue; } else if (key.startsWith(REPLYTO_GROUP_ID, JMS_AMQP_PREFIX_LENGTH)) { if (properties == null) { properties = new Properties(); } properties.setReplyToGroupId((String) TypeConversionSupport.convert(value, String.class)); continue; } else if (key.startsWith(DELIVERY_ANNOTATION_PREFIX, JMS_AMQP_PREFIX_LENGTH)) { if (daMap == null) { daMap = new HashMap<>(); } String name = key.substring(JMS_AMQP_DELIVERY_ANNOTATION_PREFIX.length()); daMap.put(Symbol.valueOf(name), value); continue; } else if (key.startsWith(FOOTER_PREFIX, JMS_AMQP_PREFIX_LENGTH)) { if (footerMap == null) { footerMap = new HashMap<>(); } String name = key.substring(JMS_AMQP_FOOTER_PREFIX.length()); footerMap.put(Symbol.valueOf(name), value); continue; } } else if (key.startsWith(AMQ_SCHEDULED_MESSAGE_PREFIX)) { // strip off the scheduled message properties continue; } // The property didn't map into any other slot so we store it in the // Application Properties section of the message. if (apMap == null) { apMap = new HashMap<>(); } apMap.put(key, value); int messageType = message.getDataStructureType(); if (messageType == CommandTypes.ACTIVEMQ_MESSAGE) { // Type of command to recognize advisory message Object data = message.getDataStructure(); if (data != null) { apMap.put("ActiveMqDataStructureType", data.getClass().getSimpleName()); } } } final AmqpWritableBuffer buffer = new AmqpWritableBuffer(); encoder.setByteBuffer(buffer); if (header != null) { encoder.writeObject(header); } if (daMap != null) { encoder.writeObject(new DeliveryAnnotations(daMap)); } if (maMap != null) { encoder.writeObject(new MessageAnnotations(maMap)); } if (properties != null) { encoder.writeObject(properties); } if (apMap != null) { encoder.writeObject(new ApplicationProperties(apMap)); } if (body != null) { encoder.writeObject(body); } if (footerMap != null) { encoder.writeObject(new Footer(footerMap)); } return new EncodedMessage(messageFormat, buffer.getArray(), 0, buffer.getArrayLength()); }
@Test public void testConvertEmptyObjectMessageToAmqpMessageWithAmqpValueBody() throws Exception { ActiveMQObjectMessage outbound = createObjectMessage(); outbound.setShortProperty(JMS_AMQP_ORIGINAL_ENCODING, AMQP_VALUE_BINARY); outbound.onSend(); outbound.storeContent(); JMSMappingOutboundTransformer transformer = new JMSMappingOutboundTransformer(); EncodedMessage encoded = transformer.transform(outbound); assertNotNull(encoded); Message amqp = encoded.decode(); assertNotNull(amqp.getBody()); assertTrue(amqp.getBody() instanceof AmqpValue); assertTrue(((AmqpValue)amqp.getBody()).getValue() instanceof Binary); assertEquals(0, ((Binary) ((AmqpValue) amqp.getBody()).getValue()).getLength()); }
@Override protected Set<StepField> getUsedFields( ExcelInputMeta meta ) { Set<StepField> usedFields = new HashSet<>(); if ( meta.isAcceptingFilenames() && StringUtils.isNotEmpty( meta.getAcceptingStepName() ) ) { StepField stepField = new StepField( meta.getAcceptingStepName(), meta.getAcceptingField() ); usedFields.add( stepField ); } return usedFields; }
@Test public void testGetUsedFields_isAcceptingFilenamesButNoStepName() throws Exception { lenient().when( meta.isAcceptingFilenames() ).thenReturn( true ); lenient().when( meta.getAcceptingField() ).thenReturn( "filename" ); lenient().when( meta.getAcceptingStepName() ).thenReturn( null ); Set<StepField> usedFields = analyzer.getUsedFields( meta ); assertNotNull( usedFields ); assertEquals( 0, usedFields.size() ); }
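The accepting branch is untested above; a sketch under the same mock fixture, with a hypothetical step name "previousStep":
@Test public void testGetUsedFields_isAcceptingFilenamesWithStepName() throws Exception { lenient().when( meta.isAcceptingFilenames() ).thenReturn( true ); lenient().when( meta.getAcceptingField() ).thenReturn( "filename" ); lenient().when( meta.getAcceptingStepName() ).thenReturn( "previousStep" ); Set<StepField> usedFields = analyzer.getUsedFields( meta ); assertNotNull( usedFields ); assertEquals( 1, usedFields.size() ); }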
public <E extends T> boolean processEvent(E event) { boolean consumed = false; if (!onEventConsumers.isEmpty()) { for (EventConsumer<T> onEventConsumer : onEventConsumers) { onEventConsumer.consumeEvent(event); } consumed = true; } if (!eventConsumerMap.isEmpty()) { final Set<EventConsumer<T>> consumers = this.eventConsumerMap.get(event.getClass().getName()); if (consumers != null && !consumers.isEmpty()) { for (EventConsumer<T> consumer : consumers) { consumer.consumeEvent(event); } consumed = true; } } return consumed; }
@Test public void testNoConsumers() { EventProcessor<Number> eventProcessor = new EventProcessor<>(); boolean consumed = eventProcessor.processEvent(1); assertThat(consumed).isFalse(); }
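For contrast, a sketch of the consumed path; it assumes the onEvent registration method that resilience4j's EventProcessor exposes:
@Test public void testOnEventConsumer() { EventProcessor<Number> eventProcessor = new EventProcessor<>(); eventProcessor.onEvent(event -> { }); boolean consumed = eventProcessor.processEvent(1); assertThat(consumed).isTrue(); }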
@Override public boolean put(K key, V value) { return get(putAsync(key, value)); }
@Test public void testEntrySet() { RListMultimap<SimpleKey, SimpleValue> map = redisson.getListMultimap("test1"); map.put(new SimpleKey("0"), new SimpleValue("1")); map.put(new SimpleKey("0"), new SimpleValue("1")); map.put(new SimpleKey("3"), new SimpleValue("4")); assertThat(map.entries().size()).isEqualTo(3); List<Map.Entry<SimpleKey, SimpleValue>> testMap = new ArrayList<Map.Entry<SimpleKey, SimpleValue>>(); testMap.add(new AbstractMap.SimpleEntry(new SimpleKey("0"), new SimpleValue("1"))); testMap.add(new AbstractMap.SimpleEntry(new SimpleKey("0"), new SimpleValue("1"))); testMap.add(new AbstractMap.SimpleEntry(new SimpleKey("3"), new SimpleValue("4"))); assertThat(map.entries()).containsOnlyElementsOf(testMap); }
public String decryptFilename(final BaseEncoding encoding, final String ciphertextName, final byte[] associatedData) throws AuthenticationFailedException { final CacheKey key = new CacheKey(encoding, ciphertextName, associatedData); if(decryptCache.contains(key)) { return decryptCache.get(key); } final String cleartextName = impl.decryptFilename(encoding, ciphertextName, associatedData); decryptCache.put(key, cleartextName); encryptCache.put(new CacheKey(encoding, cleartextName, associatedData), ciphertextName); return cleartextName; }
@Test public void testDecryptFilename() { final FileNameCryptor mock = mock(FileNameCryptor.class); final CryptorCache cryptor = new CryptorCache(mock); when(mock.decryptFilename(any(), any(), any())).thenReturn(RandomStringUtils.randomAscii(10)); final String decrypted1 = cryptor.decryptFilename(CryptorCache.BASE32, "first", "id1".getBytes()); verify(mock, times(1)).decryptFilename(any(), any(), any()); assertEquals(decrypted1, cryptor.decryptFilename(CryptorCache.BASE32, "first", "id1".getBytes())); verify(mock, times(1)).decryptFilename(any(), any(), any()); // ensure using reverse cache from encryption assertEquals("first", cryptor.encryptFilename(CryptorCache.BASE32, decrypted1, "id1".getBytes())); verify(mock, times(1)).decryptFilename(any(), any(), any()); verifyNoMoreInteractions(mock); // cache miss on encoding cryptor.decryptFilename(BaseEncoding.base64Url(), "first", "id1".getBytes()); verify(mock, times(2)).decryptFilename(any(), any(), any()); // cache miss on cleartext cryptor.decryptFilename(CryptorCache.BASE32, "second", "id1".getBytes()); verify(mock, times(3)).decryptFilename(any(), any(), any()); // cache miss on byte[] cryptor.decryptFilename(CryptorCache.BASE32, "first", "id2".getBytes()); verify(mock, times(4)).decryptFilename(any(), any(), any()); }
@Override protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException { final Set<Evidence> remove; if (dependency.getVersion() != null) { remove = dependency.getEvidence(EvidenceType.VERSION).stream() .filter(e -> !e.isFromHint() && !dependency.getVersion().equals(e.getValue())) .collect(Collectors.toSet()); } else { remove = new HashSet<>(); String fileVersion = null; String pomVersion = null; String manifestVersion = null; for (Evidence e : dependency.getEvidence(EvidenceType.VERSION)) { if (FILE.equals(e.getSource()) && VERSION.equals(e.getName())) { fileVersion = e.getValue(); } else if ((NEXUS.equals(e.getSource()) || CENTRAL.equals(e.getSource()) || POM.equals(e.getSource())) && VERSION.equals(e.getName())) { pomVersion = e.getValue(); } else if (MANIFEST.equals(e.getSource()) && IMPLEMENTATION_VERSION.equals(e.getName())) { manifestVersion = e.getValue(); } } //ensure we have at least two not null if (((fileVersion == null ? 0 : 1) + (pomVersion == null ? 0 : 1) + (manifestVersion == null ? 0 : 1)) > 1) { final DependencyVersion dvFile = new DependencyVersion(fileVersion); final DependencyVersion dvPom = new DependencyVersion(pomVersion); final DependencyVersion dvManifest = new DependencyVersion(manifestVersion); final boolean fileMatch = Objects.equals(dvFile, dvPom) || Objects.equals(dvFile, dvManifest); final boolean manifestMatch = Objects.equals(dvManifest, dvPom) || Objects.equals(dvManifest, dvFile); final boolean pomMatch = Objects.equals(dvPom, dvFile) || Objects.equals(dvPom, dvManifest); if (fileMatch || manifestMatch || pomMatch) { LOGGER.debug("filtering evidence from {}", dependency.getFileName()); for (Evidence e : dependency.getEvidence(EvidenceType.VERSION)) { if (!e.isFromHint() && !(pomMatch && VERSION.equals(e.getName()) && (NEXUS.equals(e.getSource()) || CENTRAL.equals(e.getSource()) || POM.equals(e.getSource()))) && !(fileMatch && VERSION.equals(e.getName()) && FILE.equals(e.getSource())) && !(manifestMatch && MANIFEST.equals(e.getSource()) && IMPLEMENTATION_VERSION.equals(e.getName()))) { remove.add(e); } } } } } remove.forEach((e) -> dependency.removeEvidence(EvidenceType.VERSION, e)); if (dependency.getVersion() == null) { final Set<Evidence> evidence = dependency.getEvidence(EvidenceType.VERSION); final DependencyVersion version; final Evidence e = evidence.stream().findFirst().orElse(null); if (e != null) { version = DependencyVersionUtil.parseVersion(e.getValue(), true); if (version != null && evidence.stream() .map(ev -> DependencyVersionUtil.parseVersion(ev.getValue(), true)) .allMatch(version::equals)) { dependency.setVersion(version.toString()); } } } }
@Test public void testAnalyzeDependency() throws Exception { Dependency dependency = new Dependency(); dependency.addEvidence(EvidenceType.VERSION, "util", "version", "33.3", Confidence.HIGHEST); dependency.addEvidence(EvidenceType.VERSION, "other", "version", "alpha", Confidence.HIGHEST); dependency.addEvidence(EvidenceType.VERSION, "other", "Implementation-Version", "1.2.3", Confidence.HIGHEST); VersionFilterAnalyzer instance = new VersionFilterAnalyzer(); instance.initialize(getSettings()); instance.analyzeDependency(dependency, null); assertEquals(3, dependency.getEvidence(EvidenceType.VERSION).size()); dependency.addEvidence(EvidenceType.VERSION, "pom", "version", "1.2.3", Confidence.HIGHEST); instance.analyzeDependency(dependency, null); assertEquals(4, dependency.getEvidence(EvidenceType.VERSION).size()); dependency.addEvidence(EvidenceType.VERSION, "file", "version", "1.2.3", Confidence.HIGHEST); instance.analyzeDependency(dependency, null); assertEquals(2, dependency.getEvidence(EvidenceType.VERSION).size()); dependency.addEvidence(EvidenceType.VERSION, "Manifest", "Implementation-Version", "1.2.3", Confidence.HIGHEST); instance.analyzeDependency(dependency, null); assertEquals(3, dependency.getEvidence(EvidenceType.VERSION).size()); dependency.addEvidence(EvidenceType.VERSION, "nexus", "version", "1.2.3", Confidence.HIGHEST); dependency.addEvidence(EvidenceType.VERSION, "other", "version", "alpha", Confidence.HIGHEST); instance.analyzeDependency(dependency, null); assertEquals(4, dependency.getEvidence(EvidenceType.VERSION).size()); dependency.addEvidence(EvidenceType.VERSION, "central", "version", "1.2.3", Confidence.HIGHEST); dependency.addEvidence(EvidenceType.VERSION, "other", "version", "alpha", Confidence.HIGHEST); instance.analyzeDependency(dependency, null); assertEquals(5, dependency.getEvidence(EvidenceType.VERSION).size()); }
@Override public void handleRequest(RestRequest request, RequestContext requestContext, final Callback<RestResponse> callback) { if (HttpMethod.POST != HttpMethod.valueOf(request.getMethod())) { _log.error("POST is expected, but " + request.getMethod() + " received"); callback.onError(RestException.forError(HttpStatus.S_405_METHOD_NOT_ALLOWED.getCode(), "Invalid method")); return; } // Disable server-side latency instrumentation for multiplexed requests requestContext.putLocalAttr(TimingContextUtil.TIMINGS_DISABLED_KEY_NAME, true); IndividualRequestMap individualRequests; try { individualRequests = extractIndividualRequests(request); if (_multiplexerSingletonFilter != null) { individualRequests = _multiplexerSingletonFilter.filterRequests(individualRequests); } } catch (RestException e) { _log.error("Invalid multiplexed request", e); callback.onError(e); return; } catch (Exception e) { _log.error("Invalid multiplexed request", e); callback.onError(RestException.forError(HttpStatus.S_400_BAD_REQUEST.getCode(), e)); return; } // prepare the map of individual responses to be collected final IndividualResponseMap individualResponses = new IndividualResponseMap(individualRequests.size()); final Map<String, HttpCookie> responseCookies = new HashMap<>(); // all tasks are Void and side effect based, that will be useful when we add streaming Task<?> requestProcessingTask = createParallelRequestsTask(request, requestContext, individualRequests, individualResponses, responseCookies); Task<Void> responseAggregationTask = Task.action("send aggregated response", () -> { RestResponse aggregatedResponse = aggregateResponses(individualResponses, responseCookies); callback.onSuccess(aggregatedResponse); } ); _engine.run(requestProcessingTask.andThen(responseAggregationTask), MUX_PLAN_CLASS); }
@Test(dataProvider = "multiplexerConfigurations", enabled=false) public void testHandleWrongContentType(MultiplexerRunMode multiplexerRunMode) throws Exception { MultiplexedRequestHandlerImpl multiplexer = createMultiplexer(null, multiplexerRunMode); RestRequest request = muxRequestBuilder() .setMethod(HttpMethod.POST.name()) .setHeader(RestConstants.HEADER_CONTENT_TYPE, "text/plain") .build(); FutureCallback<RestResponse> callback = new FutureCallback<>(); multiplexer.handleRequest(request, new RequestContext(), callback); assertEquals(getErrorStatus(callback), HttpStatus.S_415_UNSUPPORTED_MEDIA_TYPE); }
@Override public void execute(final List<String> args, final PrintWriter terminal) { CliCmdUtil.ensureArgCountBounds(args, 1, 1, HELP); final String filePath = args.get(0); final String content = loadScript(filePath); requestExecutor.makeKsqlRequest(content); }
@Test public void shouldExecuteScript() { // When: cmd.execute(ImmutableList.of(scriptFile.toString()), terminal); // Then: verify(requestExecutor).makeKsqlRequest(FILE_CONTENT); }
public Method get(Object object) { return get(object, null); }
@Test void testCache() throws Exception { URL url = MethodCacheTest.class.getClassLoader().getResource("dummy").toURI().resolve(".").toURL(); class MyLoader extends URLClassLoader { MyLoader() { super(new URL[] { url }, MethodCacheTest.class.getClassLoader()); } public Class<?> loadClass(String name) throws ClassNotFoundException { if (name.equals(Dummy.class.getName())) synchronized (getClassLoadingLock(name)) { return findClass(name); } else return super.loadClass(name); } } try (MyLoader myLoader = new MyLoader()) { Class<?> applicationClass = Dummy.class; Class<?> customClass = myLoader.loadClass(Dummy.class.getName()); assertNotSame(applicationClass, customClass); assertSame(applicationClass.getName(), customClass.getName()); MethodCache methods = new MethodCache("clone"); AtomicBoolean updatedCache = new AtomicBoolean(); Object applicationDummy = applicationClass.getConstructor().newInstance(); Object customDummy = customClass.getConstructor().newInstance(); Method applicationMethod = methods.get(applicationDummy, __ -> updatedCache.set(true)); assertTrue(updatedCache.getAndSet(false), "cache was updated"); Method cachedApplicationMethod = methods.get(applicationDummy, __ -> updatedCache.set(true)); assertFalse(updatedCache.getAndSet(false), "cache was updated"); Method customMethod = methods.get(customDummy, __ -> updatedCache.set(true)); assertTrue(updatedCache.getAndSet(false), "cache was updated"); Method cachedCustomMethod = methods.get(customDummy, __ -> updatedCache.set(true)); assertFalse(updatedCache.getAndSet(false), "cache was updated"); assertSame(applicationMethod, cachedApplicationMethod); assertNotSame(applicationMethod, customMethod); assertSame(customMethod, cachedCustomMethod); cachedApplicationMethod.invoke(applicationDummy); cachedCustomMethod.invoke(customDummy); assertThrows(IllegalArgumentException.class, () -> applicationMethod.invoke(customDummy)); assertThrows(IllegalArgumentException.class, () -> customMethod.invoke(applicationDummy)); Object noDummy = new NoDummy(); Method noMethod = methods.get(noDummy, __ -> updatedCache.set(true)); assertTrue(updatedCache.getAndSet(false), "cache was updated"); assertNull(noMethod); Method cachedNoMethod = methods.get(noDummy, __ -> updatedCache.set(true)); assertFalse(updatedCache.getAndSet(false), "cache was updated"); assertNull(cachedNoMethod); } }
public boolean filter(char[] content, int offset, int length) { if (content == null) { return false; } boolean filtered = false; for (int i = offset; i < offset + length; i++) { if (isFiltered(content[i])) { filtered = true; content[i] = REPLACEMENT_CHAR; } } if (filtered) { LOG.warn("Identified and replaced non-XML chars"); } return filtered; }
@Test public void testFilter1ArgNonFiltered() { when(nonXmlCharFiltererMock.filter(anyString())).thenCallRealMethod(); when(nonXmlCharFiltererMock.filter(any(char[].class), anyInt(), anyInt())).thenReturn(false); String string = "abc"; String result = nonXmlCharFiltererMock.filter(string); verify(nonXmlCharFiltererMock).filter(new char[] { 'a', 'b', 'c' }, 0, 3); assertSame(string, result, "Should have returned the same string if nothing was filtered"); }
@Override public void register() { client.register(); }
@Test public void register() { scRegister.register(); Mockito.verify(scClient, Mockito.times(1)).register(); }
public static <T> Predicate<T> distinctByKey(Function<? super T, Object> keyExtractor) { Map<Object, Boolean> seen = new ConcurrentHashMap<>(); return object -> seen.putIfAbsent(keyExtractor.apply(object), Boolean.TRUE) == null; }
@Test void shouldReturnTrueForDistinctKeys() { Function<Object, Object> keyExtractor = Object::getClass; Predicate<Object> distinctByKey = LambdaUtils.distinctByKey(keyExtractor); boolean result = distinctByKey.test(new Object()); assertTrue(result); }
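A companion sketch for the duplicate path (hypothetical test name): putIfAbsent returns the prior mapping on a repeated key, so the predicate yields false:
@Test void shouldReturnFalseForDuplicateKeys() { Predicate<Object> distinctByKey = LambdaUtils.distinctByKey(Object::getClass); assertTrue(distinctByKey.test(new Object())); assertFalse(distinctByKey.test(new Object())); }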
public boolean isAdmin(Admin admin) { return !isSecurityEnabled() || noAdminsConfigured() || adminsConfig.isAdmin(admin, rolesConfig.memberRoles(admin)); }
@Test public void shouldKnowIfRoleIsAdmin() throws Exception { SecurityConfig security = security(passwordFileAuthConfig(), admins(role("role1"))); assertThat(security.isAdmin(new AdminUser(new CaseInsensitiveString("chris"))), is(true)); assertThat(security.isAdmin(new AdminUser(new CaseInsensitiveString("jez"))), is(true)); assertThat(security.isAdmin(new AdminUser(new CaseInsensitiveString("evilHacker"))), is(false)); }
public static String getQuotedFqtn(String fqtn) { String[] fqtnTokens = fqtn.split("\\."); // adding single quotes around fqtn for cases when db and/or tableName has special character(s), // like '-' return String.format("`%s`.`%s`", fqtnTokens[0], fqtnTokens[1]); }
@Test void testGetQuotedFqtn() { Assertions.assertEquals("`db`.`table-name`", SparkJobUtil.getQuotedFqtn("db.table-name")); Assertions.assertEquals( "`db-dashed`.`table-name`", SparkJobUtil.getQuotedFqtn("db-dashed.table-name")); }
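getQuotedFqtn assumes a two-part db.table name; a dot-free input yields a single token and the second index access fails. A hedged sketch of that failure mode:
@Test void testGetQuotedFqtnRejectsNameWithoutDot() { Assertions.assertThrows(ArrayIndexOutOfBoundsException.class, () -> SparkJobUtil.getQuotedFqtn("no-dot-here")); }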
T getFunction(final List<SqlArgument> arguments) { // first try to get the candidates without any implicit casting Optional<T> candidate = findMatchingCandidate(arguments, false); if (candidate.isPresent()) { return candidate.get(); } else if (!supportsImplicitCasts) { throw createNoMatchingFunctionException(arguments); } // if none were found (candidate isn't present) try again with implicit casting candidate = findMatchingCandidate(arguments, true); if (candidate.isPresent()) { return candidate.get(); } throw createNoMatchingFunctionException(arguments); }
@Test public void shouldFindOneArgConflict() { // Given: givenFunctions( function(EXPECTED, -1, STRING), function(OTHER, -1, INT) ); // When: final KsqlScalarFunction fun = udfIndex.getFunction(ImmutableList.of(SqlArgument.of(SqlTypes.STRING))); // Then: assertThat(fun.name(), equalTo(EXPECTED)); }
public static void main(final String[] args) { var view = new View(); view.createView(); }
@Test void shouldExecuteApplicationWithoutException() { assertDoesNotThrow(() -> App.main(new String[]{})); }
public static int toInt(final String str, final int defaultValue) { if (str == null) { return defaultValue; } try { return Integer.parseInt(str); } catch (NumberFormatException nfe) { return defaultValue; } }
@Test public void testToIntReturnsParsedValue() { Assertions.assertEquals(10, NumberUtils.toInt("10", 9)); }
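The fallback branch deserves its own case; a minimal sketch covering both null and unparsable inputs:
@Test public void testToIntReturnsDefaultValue() { Assertions.assertEquals(9, NumberUtils.toInt(null, 9)); Assertions.assertEquals(9, NumberUtils.toInt("not-a-number", 9)); }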
public int doWork() { final long nowNs = nanoClock.nanoTime(); cachedNanoClock.update(nowNs); dutyCycleTracker.measureAndUpdate(nowNs); final int workCount = commandQueue.drain(CommandProxy.RUN_TASK, Configuration.COMMAND_DRAIN_LIMIT); final long shortSendsBefore = shortSends.get(); final int bytesSent = doSend(nowNs); int bytesReceived = 0; if (0 == bytesSent || ++dutyCycleCounter >= dutyCycleRatio || (controlPollDeadlineNs - nowNs < 0) || shortSendsBefore < shortSends.get()) { bytesReceived = controlTransportPoller.pollTransports(); dutyCycleCounter = 0; controlPollDeadlineNs = nowNs + statusMessageReadTimeoutNs; } if (reResolutionCheckIntervalNs > 0 && (reResolutionDeadlineNs - nowNs) < 0) { reResolutionDeadlineNs = nowNs + reResolutionCheckIntervalNs; controlTransportPoller.checkForReResolutions(nowNs, conductorProxy); } return workCount + bytesSent + bytesReceived; }
@Test void shouldBeAbleToSendOnChannelTwice() { final StatusMessageFlyweight msg = mock(StatusMessageFlyweight.class); when(msg.consumptionTermId()).thenReturn(INITIAL_TERM_ID); when(msg.consumptionTermOffset()).thenReturn(0); when(msg.receiverWindowLength()).thenReturn(2 * ALIGNED_FRAME_LENGTH); publication.onStatusMessage(msg, rcvAddress, mockDriverConductorProxy); final UnsafeBuffer buffer = new UnsafeBuffer(ByteBuffer.allocateDirect(PAYLOAD.length)); buffer.putBytes(0, PAYLOAD); final int offset = appendUnfragmentedMessage( rawLog, 0, INITIAL_TERM_ID, 0, headerWriter, buffer, 0, PAYLOAD.length); sender.doWork(); appendUnfragmentedMessage(rawLog, 0, INITIAL_TERM_ID, offset, headerWriter, buffer, 0, PAYLOAD.length); sender.doWork(); assertThat(receivedFrames.size(), is(2)); dataHeader.wrap(new UnsafeBuffer(receivedFrames.remove())); assertThat(dataHeader.frameLength(), is(FRAME_LENGTH)); assertThat(dataHeader.termId(), is(INITIAL_TERM_ID)); assertThat(dataHeader.streamId(), is(STREAM_ID)); assertThat(dataHeader.sessionId(), is(SESSION_ID)); assertThat(dataHeader.termOffset(), is(offsetOfMessage(1))); assertThat(dataHeader.headerType(), is(HeaderFlyweight.HDR_TYPE_DATA)); assertThat(dataHeader.flags(), is(DataHeaderFlyweight.BEGIN_AND_END_FLAGS)); assertThat(dataHeader.version(), is((short)HeaderFlyweight.CURRENT_VERSION)); dataHeader.wrap(new UnsafeBuffer(receivedFrames.remove())); assertThat(dataHeader.frameLength(), is(FRAME_LENGTH)); assertThat(dataHeader.termId(), is(INITIAL_TERM_ID)); assertThat(dataHeader.streamId(), is(STREAM_ID)); assertThat(dataHeader.sessionId(), is(SESSION_ID)); assertThat(dataHeader.termOffset(), is(offsetOfMessage(2))); assertThat(dataHeader.headerType(), is(HeaderFlyweight.HDR_TYPE_DATA)); assertThat(dataHeader.flags(), is(DataHeaderFlyweight.BEGIN_AND_END_FLAGS)); assertThat(dataHeader.version(), is((short)HeaderFlyweight.CURRENT_VERSION)); }
@Override public final int position() { return pos; }
@Test(expected = IllegalArgumentException.class) public void testPositionNewPos_HighNewPos() { in.position(INIT_DATA.length + 10); }
@SuppressWarnings({"unchecked", "rawtypes"}) public static int compareTo(final Comparable thisValue, final Comparable otherValue, final OrderDirection orderDirection, final NullsOrderType nullsOrderType, final boolean caseSensitive) { if (null == thisValue && null == otherValue) { return 0; } if (null == thisValue) { return NullsOrderType.FIRST == nullsOrderType ? -1 : 1; } if (null == otherValue) { return NullsOrderType.FIRST == nullsOrderType ? 1 : -1; } if (!caseSensitive && thisValue instanceof String && otherValue instanceof String) { return compareToCaseInsensitiveString((String) thisValue, (String) otherValue, orderDirection); } return OrderDirection.ASC == orderDirection ? thisValue.compareTo(otherValue) : -thisValue.compareTo(otherValue); }
@Test void assertCompareToWhenFirstValueIsNullForOrderByAscAndNullsLast() { assertThat(CompareUtils.compareTo(null, 1, OrderDirection.ASC, NullsOrderType.LAST, caseSensitive), is(1)); }
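Two nulls compare as equal regardless of direction or nulls ordering; a sketch reusing the fixture's caseSensitive flag:
@Test void assertCompareToWhenBothValuesAreNull() { assertThat(CompareUtils.compareTo(null, null, OrderDirection.ASC, NullsOrderType.LAST, caseSensitive), is(0)); }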
@Override public void executeWithLock(Runnable task) { Optional<LockConfiguration> lockConfigOptional = lockConfigurationExtractor.getLockConfiguration(task); if (lockConfigOptional.isEmpty()) { logger.debug("No lock configuration for {}. Executing without lock.", task); task.run(); } else { lockingTaskExecutor.executeWithLock(task, lockConfigOptional.get()); } }
@Test void executeIfLockAvailable() { when(lockConfigurationExtractor.getLockConfiguration(task)).thenReturn(Optional.of(LOCK_CONFIGURATION)); when(lockProvider.lock(LOCK_CONFIGURATION)).thenReturn(Optional.of(lock)); defaultLockManager.executeWithLock(task); verify(task).run(); InOrder inOrder = inOrder(task, lock); inOrder.verify(task).run(); inOrder.verify(lock).unlock(); }
@Override void execute() throws HiveMetaException { // Need to confirm unless it's a dry run or specified -yes if (!schemaTool.isDryRun() && !this.yes) { boolean confirmed = promptToConfirm(); if (!confirmed) { System.out.println("Operation cancelled, exiting."); return; } } Connection conn = schemaTool.getConnectionToMetastore(true); try { try (Statement stmt = conn.createStatement()) { final String def = Warehouse.DEFAULT_DATABASE_NAME; // List databases List<String> databases = new ArrayList<>(); try (ResultSet rs = stmt.executeQuery("SHOW DATABASES")) { while (rs.next()) { databases.add(rs.getString(1)); } } // Drop databases for (String database : databases) { // Don't try to drop 'default' database as it's not allowed if (!def.equalsIgnoreCase(database)) { if (schemaTool.isDryRun()) { System.out.println("would drop database " + database); } else { logIfVerbose("dropping database " + database); stmt.execute(String.format("DROP DATABASE `%s` CASCADE", database)); } } } // List tables in 'default' database List<String> tables = new ArrayList<>(); try (ResultSet rs = stmt.executeQuery(String.format("SHOW TABLES IN `%s`", def))) { while (rs.next()) { tables.add(rs.getString(1)); } } // Drop tables in 'default' database for (String table : tables) { if (schemaTool.isDryRun()) { System.out.println("would drop table " + table); } else { logIfVerbose("dropping table " + table); stmt.execute(String.format("DROP TABLE `%s`.`%s`", def, table)); } } } } catch (SQLException se) { throw new HiveMetaException("Failed to drop databases.", se); } }
@Test public void testExecuteWithYes() throws Exception { setUpTwoDatabases(); uut.yes = true; uut.execute(); Mockito.verify(stmtMock, times(3)).execute(anyString()); }
public ApplicationResourceUsageReport getResourceUsageReport() { writeLock.lock(); try { AggregateAppResourceUsage runningResourceUsage = getRunningAggregateAppResourceUsage(); Resource usedResourceClone = Resources.clone( attemptResourceUsage.getAllUsed()); Resource reservedResourceClone = Resources.clone(attemptResourceUsage.getAllReserved()); Resource cluster = rmContext.getScheduler().getClusterResource(); ResourceCalculator calc = rmContext.getScheduler().getResourceCalculator(); Map<String, Long> preemptedResourceSecondsMaps = new HashMap<>(); preemptedResourceSecondsMaps .put(ResourceInformation.MEMORY_MB.getName(), 0L); preemptedResourceSecondsMaps .put(ResourceInformation.VCORES.getName(), 0L); float queueUsagePerc = 0.0f; float clusterUsagePerc = 0.0f; if (!calc.isAllInvalidDivisor(cluster)) { float queueCapacityPerc = queue.getQueueInfo(false, false) .getCapacity(); queueUsagePerc = calc.divide(cluster, usedResourceClone, Resources.multiply(cluster, queueCapacityPerc)) * 100; if (Float.isNaN(queueUsagePerc) || Float.isInfinite(queueUsagePerc)) { queueUsagePerc = 0.0f; } clusterUsagePerc = calc.divide(cluster, usedResourceClone, cluster) * 100; } return ApplicationResourceUsageReport .newInstance(liveContainers.size(), reservedContainers.size(), usedResourceClone, reservedResourceClone, Resources.add(usedResourceClone, reservedResourceClone), runningResourceUsage.getResourceUsageSecondsMap(), queueUsagePerc, clusterUsagePerc, preemptedResourceSecondsMaps); } finally { writeLock.unlock(); } }
@Test public void testAppPercentagesOnswitch() throws Exception { FifoScheduler scheduler = mock(FifoScheduler.class); when(scheduler.getClusterResource()).thenReturn(Resource.newInstance(0, 0)); when(scheduler.getResourceCalculator()) .thenReturn(new DefaultResourceCalculator()); ApplicationAttemptId appAttId = createAppAttemptId(0, 0); RMContext rmContext = mock(RMContext.class); when(rmContext.getEpoch()).thenReturn(3L); when(rmContext.getScheduler()).thenReturn(scheduler); when(rmContext.getYarnConfiguration()).thenReturn(conf); final String user = "user1"; Queue queue = createQueue("test", null); SchedulerApplicationAttempt app = new SchedulerApplicationAttempt(appAttId, user, queue, queue.getAbstractUsersManager(), rmContext); // Resource request Resource requestedResource = Resource.newInstance(1536, 2); app.attemptResourceUsage.incUsed(requestedResource); assertEquals(0.0f, app.getResourceUsageReport().getQueueUsagePercentage(), 0.0f); assertEquals(0.0f, app.getResourceUsageReport().getClusterUsagePercentage(), 0.0f); }
@Nullable static String route(ContainerRequest request) { ExtendedUriInfo uriInfo = request.getUriInfo(); List<UriTemplate> templates = uriInfo.getMatchedTemplates(); int templateCount = templates.size(); if (templateCount == 0) return ""; StringBuilder builder = null; // don't allocate unless you need it! String basePath = uriInfo.getBaseUri().getPath(); String result = null; if (!"/".equals(basePath)) { // skip empty base paths result = basePath; } for (int i = templateCount - 1; i >= 0; i--) { String template = templates.get(i).getTemplate(); if ("/".equals(template)) continue; // skip allocation if (builder != null) { builder.append(template); } else if (result != null) { builder = new StringBuilder(result).append(template); result = null; } else { result = template; } } return result != null ? result : builder != null ? builder.toString() : ""; }
@Test void route_invalid() { setBaseUri("/"); when(uriInfo.getMatchedTemplates()).thenReturn(Arrays.asList( new PathTemplate("/"), new PathTemplate("/") )); assertThat(SpanCustomizingApplicationEventListener.route(request)) .isEmpty(); }
public JetConfig getJetConfig() { return jetConfig; }
@Test public void when_instanceIsCreatedWithOverriddenDefaultConfiguration_then_defaultConfigurationIsNotChanged() { Config config = new Config(); DataPersistenceConfig dataPersistenceConfig = new DataPersistenceConfig(); dataPersistenceConfig.setEnabled(true); config.addMapConfig(getMapConfig("default", dataPersistenceConfig)); config.getJetConfig().setEnabled(true); HazelcastInstance instance = createHazelcastInstance(config); MapConfig otherMapConfig = ((MapProxyImpl) instance.getMap("otherMap")).getMapConfig(); assertTrue(otherMapConfig.getDataPersistenceConfig().isEnabled()); assertEquals(DEFAULT_BACKUP_COUNT, otherMapConfig.getBackupCount()); }
@Override public void setMonochrome(boolean monochrome) { formats = monochrome ? monochrome() : ansi(); }
@Test void should_print_encoded_characters() { Feature feature = TestFeatureParser.parse("path/test.feature", "" + "Feature: Test feature\n" + " Scenario: Test Characters\n" + " Given first step\n" + " | URLEncoded | %71s%22i%22%3A%7B%22D |\n"); ByteArrayOutputStream out = new ByteArrayOutputStream(); Runtime.builder() .withFeatureSupplier(new StubFeatureSupplier(feature)) .withAdditionalPlugins(new PrettyFormatter(out)) .withRuntimeOptions(new RuntimeOptionsBuilder().setMonochrome().build()) .withBackendSupplier(new StubBackendSupplier( new StubStepDefinition("first step", "path/step_definitions.java:7", DataTable.class))) .build() .run(); assertThat(out, bytes(equalToCompressingWhiteSpace("" + "\n" + "Scenario: Test Characters # path/test.feature:2\n" + " Given first step # path/step_definitions.java:7\n" + " | URLEncoded | %71s%22i%22%3A%7B%22D |\n"))); }
private void verifyTaskGenerationAndOwnership(ConnectorTaskId id, int initialTaskGen) { log.debug("Reading to end of config topic to ensure it is still safe to bring up source task {} with exactly-once support", id); if (!refreshConfigSnapshot(Long.MAX_VALUE)) { throw new ConnectException("Failed to read to end of config topic"); } FutureCallback<Void> verifyCallback = new FutureCallback<>(); addRequest( () -> verifyTaskGenerationAndOwnership(id, initialTaskGen, verifyCallback), forwardErrorAndTickThreadStages(verifyCallback) ); try { verifyCallback.get(); } catch (InterruptedException e) { throw new ConnectException("Interrupted while performing preflight check for task " + id, e); } catch (ExecutionException e) { Throwable cause = e.getCause(); throw ConnectUtils.maybeWrap(cause, "Failed to perform preflight check for task " + id); } }
@Test public void testVerifyTaskGeneration() { Map<String, Integer> taskConfigGenerations = new HashMap<>(); herder.configState = new ClusterConfigState( 1, null, Collections.singletonMap(CONN1, 3), Collections.singletonMap(CONN1, CONN1_CONFIG), Collections.singletonMap(CONN1, TargetState.STARTED), TASK_CONFIGS_MAP, Collections.emptyMap(), taskConfigGenerations, Collections.singletonMap(CONN1, new AppliedConnectorConfig(CONN1_CONFIG)), Collections.emptySet(), Collections.emptySet()); Callback<Void> verifyCallback = mock(Callback.class); herder.assignment = new ExtendedAssignment( (short) 2, (short) 0, "leader", "leaderUrl", 0, Collections.emptySet(), Collections.singleton(TASK1), Collections.emptySet(), Collections.emptySet(), 0); assertThrows(ConnectException.class, () -> herder.verifyTaskGenerationAndOwnership(TASK1, 0, verifyCallback)); assertThrows(ConnectException.class, () -> herder.verifyTaskGenerationAndOwnership(TASK1, 1, verifyCallback)); assertThrows(ConnectException.class, () -> herder.verifyTaskGenerationAndOwnership(TASK1, 2, verifyCallback)); taskConfigGenerations.put(CONN1, 0); herder.verifyTaskGenerationAndOwnership(TASK1, 0, verifyCallback); assertThrows(ConnectException.class, () -> herder.verifyTaskGenerationAndOwnership(TASK1, 1, verifyCallback)); assertThrows(ConnectException.class, () -> herder.verifyTaskGenerationAndOwnership(TASK1, 2, verifyCallback)); taskConfigGenerations.put(CONN1, 1); assertThrows(ConnectException.class, () -> herder.verifyTaskGenerationAndOwnership(TASK1, 0, verifyCallback)); herder.verifyTaskGenerationAndOwnership(TASK1, 1, verifyCallback); assertThrows(ConnectException.class, () -> herder.verifyTaskGenerationAndOwnership(TASK1, 2, verifyCallback)); taskConfigGenerations.put(CONN1, 2); assertThrows(ConnectException.class, () -> herder.verifyTaskGenerationAndOwnership(TASK1, 0, verifyCallback)); assertThrows(ConnectException.class, () -> herder.verifyTaskGenerationAndOwnership(TASK1, 1, verifyCallback)); herder.verifyTaskGenerationAndOwnership(TASK1, 2, verifyCallback); taskConfigGenerations.put(CONN1, 3); assertThrows(ConnectException.class, () -> herder.verifyTaskGenerationAndOwnership(TASK1, 0, verifyCallback)); assertThrows(ConnectException.class, () -> herder.verifyTaskGenerationAndOwnership(TASK1, 1, verifyCallback)); assertThrows(ConnectException.class, () -> herder.verifyTaskGenerationAndOwnership(TASK1, 2, verifyCallback)); ConnectorTaskId unassignedTask = new ConnectorTaskId(CONN2, 0); taskConfigGenerations.put(unassignedTask.connector(), 1); assertThrows(ConnectException.class, () -> herder.verifyTaskGenerationAndOwnership(unassignedTask, 0, verifyCallback)); assertThrows(ConnectException.class, () -> herder.verifyTaskGenerationAndOwnership(unassignedTask, 1, verifyCallback)); assertThrows(ConnectException.class, () -> herder.verifyTaskGenerationAndOwnership(unassignedTask, 2, verifyCallback)); verify(verifyCallback, times(3)).onCompletion(isNull(), isNull()); }
public static List<String> getFilterNames( Document webXml ) { return getNames( "filter", webXml ); }
@Test public void testGetFilterNames() throws Exception { // Setup fixture. final Document webXml = WebXmlUtils.asDocument( new File( Objects.requireNonNull(WebXmlUtilsTest.class.getResource("/org/jivesoftware/util/test-web.xml")).toURI() ) ); // Execute system under test. final List<String> results = WebXmlUtils.getFilterNames( webXml ); // Verify result. assertNotNull( results ); final Iterator<String> iterator = results.iterator(); // Names should be reported in order. assertEquals( "AuthCheck", iterator.next() ); assertEquals( "PluginFilter", iterator.next() ); assertEquals( "Set Character Encoding", iterator.next() ); assertEquals( "LocaleFilter", iterator.next() ); assertEquals( "sitemesh", iterator.next() ); assertFalse( iterator.hasNext() ); }
@Override public byte[] serialize(final String topicName, final T record) { return serializer.get().serialize(topicName, record); }
@Test public void shouldUseAThreadLocalSerializer() throws InterruptedException { final List<Serializer<GenericRow>> serializers = new LinkedList<>(); final ThreadLocalSerializer<GenericRow> serializer = new ThreadLocalSerializer<>( () -> { final Serializer<GenericRow> local = mock(Serializer.class); serializers.add(local); expect(local.serialize(anyString(), anyObject(GenericRow.class))) .andReturn(new byte[32]) .times(1); replay(local); return serializers.get(serializers.size() - 1); } ); for (int i = 0; i < 3; i++) { final Thread t = new Thread( () -> serializer.serialize("foo", new GenericRow()) ); t.start(); t.join(); assertThat(serializers.size(), equalTo(i + 1)); serializers.forEach(EasyMock::verify); } }
public void addListener(ExtensionLoaderListener<T> listener) { synchronized (this) { if (!listeners.contains(listener)) { this.listeners.add(listener); for (ExtensionClass<T> value : all.values()) { try { listener.onLoad(value); } catch (Exception e) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Error when notify listener of extensible " + interfaceClass + " with alias: " + value.getAlias() + ".", e); } } } } } }
@Test public void testAddListener(){ ExtensionLoader<Filter> extensionLoader = ExtensionLoaderFactory.getExtensionLoader(Filter.class); extensionLoader.loadExtension(DynamicFilter.class); ConcurrentMap<String, ExtensionClass<Filter>> all = extensionLoader.all; String alias = "dynamic0"; Assert.assertTrue(all.containsKey(alias)); List<String> filters = new ArrayList<>(); extensionLoader = ExtensionLoaderFactory.getExtensionLoader(Filter.class); extensionLoader.addListener( new ExtensionLoaderListener<Filter>() { @Override public void onLoad(ExtensionClass<Filter> extensionClass) { filters.add(extensionClass.getAlias()); } }); Assert.assertTrue(filters.contains(alias)); }
@Override protected Mono<Boolean> doMatcher(final ServerWebExchange exchange, final WebFilterChain chain) { String path = exchange.getRequest().getURI().getRawPath(); return Mono.just(paths.contains(path)); }
@Test public void testDoNotMatcher() { ServerWebExchange webExchange = MockServerWebExchange.from(MockServerHttpRequest .post("http://localhost:8080/")); Mono<Boolean> filter = fallbackFilter.doMatcher(webExchange, webFilterChain); StepVerifier.create(filter).expectNext(Boolean.FALSE).verifyComplete(); }
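The matching branch depends on the fixture's configured path list, which is not shown; a sketch assuming the filter was built with "/fallback" among its paths:
@Test public void testDoMatcher() { ServerWebExchange webExchange = MockServerWebExchange.from(MockServerHttpRequest.post("http://localhost:8080/fallback")); Mono<Boolean> filter = fallbackFilter.doMatcher(webExchange, webFilterChain); StepVerifier.create(filter).expectNext(Boolean.TRUE).verifyComplete(); }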
@Override protected void analyzeDependency(final Dependency dependency, final Engine engine) throws AnalysisException { // batch request component-reports for all dependencies synchronized (FETCH_MUTIX) { if (reports == null) { try { requestDelay(); reports = requestReports(engine.getDependencies()); } catch (TransportException ex) { final String message = ex.getMessage(); final boolean warnOnly = getSettings().getBoolean(Settings.KEYS.ANALYZER_OSSINDEX_WARN_ONLY_ON_REMOTE_ERRORS, false); this.setEnabled(false); if (StringUtils.endsWith(message, "401")) { LOG.error("Invalid credentials for the OSS Index, disabling the analyzer"); throw new AnalysisException("Invalid credentials provided for OSS Index", ex); } else if (StringUtils.endsWith(message, "403")) { LOG.error("OSS Index access forbidden, disabling the analyzer"); throw new AnalysisException("OSS Index access forbidden", ex); } else if (StringUtils.endsWith(message, "429")) { if (warnOnly) { LOG.warn("OSS Index rate limit exceeded, disabling the analyzer", ex); } else { throw new AnalysisException("OSS Index rate limit exceeded, disabling the analyzer", ex); } } else if (warnOnly) { LOG.warn("Error requesting component reports, disabling the analyzer", ex); } else { LOG.debug("Error requesting component reports, disabling the analyzer", ex); throw new AnalysisException("Failed to request component-reports", ex); } } catch (SocketTimeoutException e) { final boolean warnOnly = getSettings().getBoolean(Settings.KEYS.ANALYZER_OSSINDEX_WARN_ONLY_ON_REMOTE_ERRORS, false); this.setEnabled(false); if (warnOnly) { LOG.warn("OSS Index socket timeout, disabling the analyzer", e); } else { LOG.debug("OSS Index socket timeout", e); throw new AnalysisException("Failed to establish socket to OSS Index", e); } } catch (Exception e) { LOG.debug("Error requesting component reports", e); throw new AnalysisException("Failed to request component-reports", e); } } // skip enrichment if we failed to fetch reports if (reports != null) { enrich(dependency); } } }
@Test
public void should_analyzeDependency_fail_when_socket_error_from_sonatype() throws Exception {
    // Given
    OssIndexAnalyzer analyzer = new OssIndexAnalyzerThrowingSocketTimeout();
    getSettings().setBoolean(Settings.KEYS.ANALYZER_OSSINDEX_WARN_ONLY_ON_REMOTE_ERRORS, false);
    analyzer.initialize(getSettings());
    Identifier identifier = new PurlIdentifier("maven", "test", "test", "1.0", Confidence.HIGHEST);
    Dependency dependency = new Dependency();
    dependency.addSoftwareIdentifier(identifier);
    Settings settings = getSettings();
    Engine engine = new Engine(settings);
    engine.setDependencies(Collections.singletonList(dependency));
    // When
    AnalysisException output = new AnalysisException();
    try {
        analyzer.analyzeDependency(dependency, engine);
    } catch (AnalysisException e) {
        output = e;
    }
    // Then
    assertEquals("Failed to establish socket to OSS Index", output.getMessage());
    analyzer.close();
}
@Override
public CompletableFuture<Map<String, BrokerLookupData>> filterAsync(Map<String, BrokerLookupData> brokers,
                                                                    ServiceUnitId serviceUnit,
                                                                    LoadManagerContext context) {
    int loadBalancerBrokerMaxTopics = context.brokerConfiguration().getLoadBalancerBrokerMaxTopics();
    brokers.keySet().removeIf(broker -> {
        Optional<BrokerLoadData> brokerLoadDataOpt = context.brokerLoadDataStore().get(broker);
        long topics = brokerLoadDataOpt.map(BrokerLoadData::getTopics).orElse(0L);
        // TODO: The broker load data might be delayed, so the max topic check might not be accurate.
        return topics >= loadBalancerBrokerMaxTopics;
    });
    return CompletableFuture.completedFuture(brokers);
}
@Test
public void test() throws IllegalAccessException, BrokerFilterException, ExecutionException, InterruptedException {
    LoadManagerContext context = getContext();
    LoadDataStore<BrokerLoadData> store = context.brokerLoadDataStore();
    BrokerLoadData maxTopicLoadData = new BrokerLoadData();
    FieldUtils.writeDeclaredField(maxTopicLoadData, "topics",
            context.brokerConfiguration().getLoadBalancerBrokerMaxTopics(), true);
    BrokerLoadData exceedMaxTopicLoadData = new BrokerLoadData();
    FieldUtils.writeDeclaredField(exceedMaxTopicLoadData, "topics",
            context.brokerConfiguration().getLoadBalancerBrokerMaxTopics() * 2, true);
    store.pushAsync("broker1", maxTopicLoadData);
    store.pushAsync("broker2", new BrokerLoadData());
    store.pushAsync("broker3", exceedMaxTopicLoadData);
    BrokerMaxTopicCountFilter filter = new BrokerMaxTopicCountFilter();
    Map<String, BrokerLookupData> originalBrokers = Map.of(
            "broker1", getLookupData(),
            "broker2", getLookupData(),
            "broker3", getLookupData(),
            "broker4", getLookupData()
    );
    Map<String, BrokerLookupData> result = filter.filterAsync(new HashMap<>(originalBrokers), null, context).get();
    assertEquals(result, Map.of(
            "broker2", getLookupData(),
            "broker4", getLookupData()
    ));
}
@Override
public boolean dropTable(TableIdentifier identifier, boolean purge) {
    if (!tableExists(identifier)) {
        return false;
    }
    EcsURI tableObjectURI = tableURI(identifier);
    if (purge) {
        // if we re-use the same instance, current() will throw an exception.
        TableOperations ops = newTableOps(identifier);
        TableMetadata current = ops.current();
        if (current == null) {
            return false;
        }
        CatalogUtil.dropTableData(ops.io(), current);
    }
    client.deleteObject(tableObjectURI.bucket(), tableObjectURI.name());
    return true;
}
@Test
public void testRegisterExistingTable() {
    TableIdentifier identifier = TableIdentifier.of("a", "t1");
    ecsCatalog.createTable(identifier, SCHEMA);
    Table registeringTable = ecsCatalog.loadTable(identifier);
    TableOperations ops = ((HasTableOperations) registeringTable).operations();
    String metadataLocation = ((EcsTableOperations) ops).currentMetadataLocation();
    assertThatThrownBy(() -> ecsCatalog.registerTable(identifier, metadataLocation))
        .isInstanceOf(AlreadyExistsException.class)
        .hasMessage("Table already exists: a.t1");
    assertThat(ecsCatalog.dropTable(identifier, true)).isTrue();
}
static Set<String> parseStaleDataNodeList(String liveNodeJsonString, final int blockThreshold, final Logger log)
        throws IOException {
    final Set<String> dataNodesToReport = new HashSet<>();
    JsonFactory fac = JacksonUtil.createBasicJsonFactory();
    JsonParser parser = fac.createParser(IOUtils
        .toInputStream(liveNodeJsonString, StandardCharsets.UTF_8.name()));
    int objectDepth = 0;
    String currentNodeAddr = null;
    for (JsonToken tok = parser.nextToken(); tok != null; tok = parser.nextToken()) {
        if (tok == JsonToken.START_OBJECT) {
            objectDepth++;
        } else if (tok == JsonToken.END_OBJECT) {
            objectDepth--;
        } else if (tok == JsonToken.FIELD_NAME) {
            if (objectDepth == 1) {
                // This is where the Datanode identifiers are stored
                currentNodeAddr = parser.getCurrentName();
            } else if (objectDepth == 2) {
                if (parser.getCurrentName().equals("numBlocks")) {
                    JsonToken valueToken = parser.nextToken();
                    if (valueToken != JsonToken.VALUE_NUMBER_INT || currentNodeAddr == null) {
                        throw new IOException(String.format("Malformed LiveNodes JSON; "
                            + "got token = %s; currentNodeAddr = %s: %s",
                            valueToken, currentNodeAddr, liveNodeJsonString));
                    }
                    int numBlocks = parser.getIntValue();
                    if (numBlocks < blockThreshold) {
                        log.debug(String.format(
                            "Queueing Datanode <%s> for block report; numBlocks = %d",
                            currentNodeAddr, numBlocks));
                        dataNodesToReport.add(currentNodeAddr);
                    } else {
                        log.debug(String.format(
                            "Not queueing Datanode <%s> for block report; numBlocks = %d",
                            currentNodeAddr, numBlocks));
                    }
                }
            }
        }
    }
    return dataNodesToReport;
}
@Test
public void testParseStaleDatanodeListSingleDatanode() throws Exception {
    // Confirm all types of values can be properly parsed
    String json = "{"
        + "\"1.2.3.4:5\": {"
        + "  \"numBlocks\": 5,"
        + "  \"fooString\":\"stringValue\","
        + "  \"fooInteger\": 1,"
        + "  \"fooFloat\": 1.0,"
        + "  \"fooArray\": []"
        + "}"
        + "}";
    Set<String> out = DynoInfraUtils.parseStaleDataNodeList(json, 10, LOG);
    assertEquals(1, out.size());
    assertTrue(out.contains("1.2.3.4:5"));
}
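A hedged sketch of the threshold behavior with two datanodes: per the focal method's numBlocks < blockThreshold check, only the node below the threshold is queued for a block report.
String json = "{"
    + "\"1.2.3.4:5\": {\"numBlocks\": 5},"
    + "\"6.7.8.9:5\": {\"numBlocks\": 500}"
    + "}";
Set<String> out = DynoInfraUtils.parseStaleDataNodeList(json, 10, LOG);
assertEquals(1, out.size());
assertTrue(out.contains("1.2.3.4:5"));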
public static Status unblock(
    final UnsafeBuffer logMetaDataBuffer,
    final UnsafeBuffer termBuffer,
    final int blockedOffset,
    final int tailOffset,
    final int termId) {
    Status status = NO_ACTION;
    int frameLength = frameLengthVolatile(termBuffer, blockedOffset);
    if (frameLength < 0) {
        resetHeader(logMetaDataBuffer, termBuffer, blockedOffset, termId, -frameLength);
        status = UNBLOCKED;
    } else if (0 == frameLength) {
        int currentOffset = blockedOffset + FRAME_ALIGNMENT;
        while (currentOffset < tailOffset) {
            frameLength = frameLengthVolatile(termBuffer, currentOffset);
            if (frameLength != 0) {
                if (scanBackToConfirmZeroed(termBuffer, currentOffset, blockedOffset)) {
                    final int length = currentOffset - blockedOffset;
                    resetHeader(logMetaDataBuffer, termBuffer, blockedOffset, termId, length);
                    status = UNBLOCKED;
                }
                break;
            }
            currentOffset += FRAME_ALIGNMENT;
        }
        if (currentOffset == termBuffer.capacity()) {
            if (0 == frameLengthVolatile(termBuffer, blockedOffset)) {
                final int length = currentOffset - blockedOffset;
                resetHeader(logMetaDataBuffer, termBuffer, blockedOffset, termId, length);
                status = UNBLOCKED_TO_END;
            }
        }
    }
    return status;
}
@Test
void shouldScanForwardForNextCompleteMessage() {
    final int messageLength = HEADER_LENGTH * 4;
    final int termOffset = 0;
    final int tailOffset = messageLength * 2;
    when(mockTermBuffer.getIntVolatile(messageLength)).thenReturn(messageLength);
    assertEquals(
        UNBLOCKED,
        TermUnblocker.unblock(mockLogMetaDataBuffer, mockTermBuffer, termOffset, tailOffset, TERM_ID));
    final InOrder inOrder = inOrder(mockTermBuffer);
    inOrder.verify(mockTermBuffer).putShort(typeOffset(termOffset), (short)HDR_TYPE_PAD, LITTLE_ENDIAN);
    inOrder.verify(mockTermBuffer).putInt(termOffsetOffset(termOffset), termOffset, LITTLE_ENDIAN);
    inOrder.verify(mockTermBuffer).putIntOrdered(termOffset, messageLength);
}
public static String getMigrationsDir(
    final String configFilePath,
    final MigrationConfig config
) {
    final String migrationsDir = config.getString(MigrationConfig.KSQL_MIGRATIONS_DIR_OVERRIDE);
    if (migrationsDir != null && !migrationsDir.isEmpty()) {
        return migrationsDir;
    } else {
        return getMigrationsDirFromConfigFile(configFilePath);
    }
}
@Test
public void shouldOverrideMigrationsDirFromConfig() {
    // Given:
    when(config.getString(MigrationConfig.KSQL_MIGRATIONS_DIR_OVERRIDE)).thenReturn(CUSTOM_DIR);
    // When / Then:
    assertThat(MigrationsDirectoryUtil.getMigrationsDir(migrationsConfigPath, config), is(CUSTOM_DIR));
}
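A hedged sketch of the fallback branch, reusing the test's mocks: an empty override string is treated the same as an absent one, so resolution falls through to the focal method's own getMigrationsDirFromConfigFile helper.
when(config.getString(MigrationConfig.KSQL_MIGRATIONS_DIR_OVERRIDE)).thenReturn("");
// resolution now derives the directory from the config file's location
MigrationsDirectoryUtil.getMigrationsDir(migrationsConfigPath, config);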
@Override
public String getAELSafeURIString() {
    throw new UnsupportedOperationException(
        String.format("This connection file object does not support this operation: '%s'",
            this.getOriginalURIString()));
}
@Test( expected = UnsupportedOperationException.class )
public void testGetAELSafeURIString() {
    fileObject.getAELSafeURIString();
}
public static Path copyFile(Resource src, Path target, CopyOption... options) throws IORuntimeException {
    Assert.notNull(src, "Source is null !");
    if (src instanceof FileResource) {
        return copyFile(((FileResource) src).getFile().toPath(), target, options);
    }
    try (InputStream stream = src.getStream()) {
        return copyFile(stream, target, options);
    } catch (IOException e) {
        // wrap as the declared IORuntimeException instead of a bare RuntimeException
        throw new IORuntimeException(e);
    }
}
@Test
@Disabled
public void copyFileTest() {
    PathUtil.copyFile(
        Paths.get("d:/test/1595232240113.jpg"),
        Paths.get("d:/test/1595232240113_copy.jpg"),
        StandardCopyOption.COPY_ATTRIBUTES,
        StandardCopyOption.REPLACE_EXISTING
    );
}
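A hedged sketch of the non-file branch: a Resource that is not a FileResource is copied via its stream; the UrlResource constructor, URL, and target path here are illustrative assumptions.
Resource res = new UrlResource(new URL("https://example.com/logo.png"));
PathUtil.copyFile(res, Paths.get("d:/test/logo.png"), StandardCopyOption.REPLACE_EXISTING);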
public static ExtensibleLoadManagerImpl get(LoadManager loadManager) {
    if (!(loadManager instanceof ExtensibleLoadManagerWrapper loadManagerWrapper)) {
        throw new IllegalArgumentException("The load manager should be 'ExtensibleLoadManagerWrapper'.");
    }
    return loadManagerWrapper.get();
}
@Test(enabled = false)
public static void testOptimizeUnloadDisable(TopicDomain topicDomain,
                                             String defaultTestNamespace,
                                             PulsarAdmin admin,
                                             String brokerServiceUrl,
                                             PulsarService pulsar1,
                                             PulsarService pulsar2) throws Exception {
    var id = String.format("test-optimize-unload-disable-%s-%s", topicDomain, UUID.randomUUID());
    var topic = String.format("%s://%s/%s", topicDomain, defaultTestNamespace, id);
    var topicName = TopicName.get(topic);
    pulsar1.getConfig().setLoadBalancerMultiPhaseBundleUnload(false);
    pulsar2.getConfig().setLoadBalancerMultiPhaseBundleUnload(false);
    @Cleanup
    var pulsarClient = pulsarClient(brokerServiceUrl, 0);
    @Cleanup
    var producer = pulsarClient.newProducer(Schema.STRING).topic(topic).create();
    @Cleanup
    var consumer = pulsarClient.newConsumer(Schema.STRING).subscriptionName(id).topic(topic).subscribe();
    Awaitility.await().until(() -> producer.isConnected() && consumer.isConnected());
    var lookup = spyLookupService(pulsarClient);
    final CountDownLatch cdl = new CountDownLatch(3);
    NamespaceBundle bundle = getBundleAsync(pulsar1, TopicName.get(topic)).get();
    var srcBrokerServiceUrl = admin.lookups().lookupTopic(topic);
    var dstBroker = srcBrokerServiceUrl.equals(pulsar1.getBrokerServiceUrl()) ? pulsar2 : pulsar1;
    CompletableFuture<Void> unloadNamespaceBundle = CompletableFuture.runAsync(() -> {
        try {
            cdl.await();
            admin.namespaces().unloadNamespaceBundle(defaultTestNamespace, bundle.getBundleRange(),
                    dstBroker.getBrokerId());
        } catch (InterruptedException | PulsarAdminException e) {
            fail();
        }
    });
    MutableInt sendCount = new MutableInt();
    Awaitility.await().atMost(20, TimeUnit.SECONDS).ignoreExceptions().until(() -> {
        var message = String.format("message-%d", sendCount.getValue());
        AtomicBoolean messageSent = new AtomicBoolean(false);
        while (true) {
            var recvFuture = consumer.receiveAsync().orTimeout(1000, TimeUnit.MILLISECONDS);
            if (!messageSent.get()) {
                producer.sendAsync(message).thenAccept(messageId -> {
                    if (messageId != null) {
                        messageSent.set(true);
                    }
                }).get(1000, TimeUnit.MILLISECONDS);
            }
            if (topicDomain == TopicDomain.non_persistent) {
                // No need to wait for message receipt, we're only trying to stress the consumer lookup pathway.
                break;
            }
            var msg = recvFuture.get();
            if (Objects.equals(msg.getValue(), message)) {
                break;
            }
        }
        cdl.countDown();
        return sendCount.incrementAndGet() == 10;
    });
    assertTrue(producer.isConnected());
    assertTrue(consumer.isConnected());
    assertTrue(unloadNamespaceBundle.isDone());
    verify(lookup, times(2)).getBroker(topicName);
}
@Override
public Object getDefaultValue() {
    return defaultValue;
}
@Test
public void testGetDefaultValue() throws Exception {
    final ListField list = new ListField("list", "The List", ImmutableList.of("Foo", "Bar", "Baz"),
            "Hello, this is a list", ConfigurationField.Optional.NOT_OPTIONAL);
    final Object defaultValue = list.getDefaultValue();
    assertThat(defaultValue instanceof List).isTrue();
    final List<?> defaultValue1 = (List) defaultValue;
    assertThat(defaultValue1.size()).isEqualTo(3);
    assertThat((String) defaultValue1.get(0)).isEqualTo("Foo");
    assertThat((String) defaultValue1.get(1)).isEqualTo("Bar");
    assertThat((String) defaultValue1.get(2)).isEqualTo("Baz");
}
public List<String> build() {
    if (columnDefs.isEmpty()) {
        throw new IllegalStateException("No column has been defined");
    }
    switch (dialect.getId()) {
        case PostgreSql.ID:
            return createPostgresQuery();
        case Oracle.ID:
            return createOracleQuery();
        default:
            return createMsSqlAndH2Queries();
    }
}
@Test
public void update_not_nullable_column_on_mssql() {
    assertThat(createNotNullableBuilder(new MsSql()).build())
        .containsOnly("ALTER TABLE issues ALTER COLUMN name NVARCHAR (10) NOT NULL");
}
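A hedged sketch of the guard clause: building with no column definitions fails fast; builderWithoutColumns is a hypothetical helper standing in for a builder that never had a column added.
assertThatThrownBy(() -> builderWithoutColumns(new MsSql()).build())
    .isInstanceOf(IllegalStateException.class)
    .hasMessage("No column has been defined");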
@Override
public final void init(@Nonnull Outbox outbox, @Nonnull Context context) throws Exception {
    this.outbox = outbox;
    this.logger = context.logger();
    init(context);
}
@Test(expected = UnknownHostException.class)
public void when_customInitThrows_then_initRethrows() throws Exception {
    new MockP().setInitError(UnknownHostException::new)
        .init(mock(Outbox.class), new TestProcessorContext());
}
public String getReplicaInfos() {
    StringBuilder sb = new StringBuilder();
    try (CloseableLock ignored = CloseableLock.lock(this.rwLock.readLock())) {
        for (Replica replica : replicas) {
            sb.append(String.format("%d:%d/%d/%d/%d:%s:%s,", replica.getBackendId(), replica.getVersion(),
                    replica.getLastFailedVersion(), replica.getLastSuccessVersion(), replica.getMinReadableVersion(),
                    replica.isBad() ? "BAD" : replica.getState(), getReplicaBackendState(replica.getBackendId())));
        }
    }
    return sb.toString();
}
@Test
public void testGetReplicaInfos() {
    LocalTablet tablet = new LocalTablet();
    Replica replica1 = new Replica(1L, 10001L, 8, -1, 10, 10, ReplicaState.NORMAL, 9, 8);
    Replica replica2 = new Replica(1L, 10002L, 9, -1, 10, 10, ReplicaState.NORMAL, -1, 9);
    tablet.addReplica(replica1, false);
    tablet.addReplica(replica2, false);
    String infos = tablet.getReplicaInfos();
    System.out.println(infos);
}
protected abstract FullHttpRequest newHandshakeRequest();
@Test
public void testDuplicateWebsocketHandshakeHeaders() {
    URI uri = URI.create("ws://localhost:9999/foo");
    HttpHeaders inputHeaders = new DefaultHttpHeaders();
    String bogusSubProtocol = "bogusSubProtocol";
    String bogusHeaderValue = "bogusHeaderValue";
    // add values for the headers that are reserved for use in the websockets handshake
    for (CharSequence header : getHandshakeRequiredHeaderNames()) {
        if (!HttpHeaderNames.HOST.equals(header)) {
            inputHeaders.add(header, bogusHeaderValue);
        }
    }
    inputHeaders.add(getProtocolHeaderName(), bogusSubProtocol);
    String realSubProtocol = "realSubProtocol";
    WebSocketClientHandshaker handshaker = newHandshaker(uri, realSubProtocol, inputHeaders, false, true);
    FullHttpRequest request = handshaker.newHandshakeRequest();
    HttpHeaders outputHeaders = request.headers();
    // the header values passed in originally have been replaced with values generated by the Handshaker
    for (CharSequence header : getHandshakeRequiredHeaderNames()) {
        assertEquals(1, outputHeaders.getAll(header).size());
        assertNotEquals(bogusHeaderValue, outputHeaders.get(header));
    }
    // the subprotocol header value is that of the subprotocol string passed into the Handshaker
    assertEquals(1, outputHeaders.getAll(getProtocolHeaderName()).size());
    assertEquals(realSubProtocol, outputHeaders.get(getProtocolHeaderName()));
    request.release();
}
@Bean("Languages") public Languages provide(Optional<List<Language>> languages) { if (languages.isPresent()) { return new Languages(languages.get().toArray(new Language[0])); } else { return new Languages(); } }
@Test
public void should_provide_instance_when_languages() {
    Language A = mock(Language.class);
    when(A.getKey()).thenReturn("a");
    Language B = mock(Language.class);
    when(B.getKey()).thenReturn("b");
    LanguagesProvider provider = new LanguagesProvider();
    List<Language> languageList = Arrays.asList(A, B);
    Languages languages = provider.provide(Optional.of(languageList));
    assertThat(languages).isNotNull();
    assertThat(languages.all())
        .hasSize(2)
        .contains(A, B);
}
public static AWSCloudCredential buildGlueCloudCredential(HiveConf hiveConf) {
    for (CloudConfigurationProvider factory : cloudConfigurationFactoryChain) {
        if (factory instanceof AWSCloudConfigurationProvider) {
            AWSCloudConfigurationProvider provider = ((AWSCloudConfigurationProvider) factory);
            return provider.buildGlueCloudCredential(hiveConf);
        }
    }
    // Should never reach here.
    return null;
}
@Test
public void testGlueCredential() {
    HiveConf conf = new HiveConf();
    conf.set(CloudConfigurationConstants.AWS_GLUE_USE_AWS_SDK_DEFAULT_BEHAVIOR, "true");
    AWSCloudCredential cred = CloudConfigurationFactory.buildGlueCloudCredential(conf);
    Assert.assertNotNull(cred);
    Assert.assertEquals("AWSCloudCredential{useAWSSDKDefaultBehavior=true, useInstanceProfile=false, "
            + "accessKey='', secretKey='', sessionToken='', iamRoleArn='', stsRegion='', "
            + "stsEndpoint='', externalId='', region='us-east-1', endpoint=''}", cred.toCredString());
}
public Collection<ComputeNodeInstance> loadAllComputeNodeInstances() {
    Collection<ComputeNodeInstance> result = new LinkedList<>();
    for (InstanceType each : InstanceType.values()) {
        result.addAll(loadComputeNodeInstances(each));
    }
    return result;
}
@Test
void assertLoadAllComputeNodeInstances() {
    when(repository.getChildrenKeys("/nodes/compute_nodes/online/jdbc")).thenReturn(Collections.singletonList("foo_instance_3307"));
    when(repository.getChildrenKeys("/nodes/compute_nodes/online/proxy")).thenReturn(Collections.singletonList("foo_instance_3308"));
    YamlComputeNodeData yamlComputeNodeData0 = new YamlComputeNodeData();
    yamlComputeNodeData0.setAttribute("127.0.0.1");
    yamlComputeNodeData0.setVersion("foo_version");
    when(repository.query("/nodes/compute_nodes/online/jdbc/foo_instance_3307")).thenReturn(YamlEngine.marshal(yamlComputeNodeData0));
    YamlComputeNodeData yamlComputeNodeData1 = new YamlComputeNodeData();
    yamlComputeNodeData1.setAttribute("127.0.0.1@3308");
    yamlComputeNodeData1.setVersion("foo_version");
    when(repository.query("/nodes/compute_nodes/online/proxy/foo_instance_3308")).thenReturn(YamlEngine.marshal(yamlComputeNodeData1));
    List<ComputeNodeInstance> actual = new ArrayList<>(new ComputeNodePersistService(repository).loadAllComputeNodeInstances());
    assertThat(actual.size(), is(2));
    assertThat(actual.get(0).getMetaData().getId(), is("foo_instance_3307"));
    assertThat(actual.get(0).getMetaData().getIp(), is("127.0.0.1"));
    assertThat(actual.get(1).getMetaData().getId(), is("foo_instance_3308"));
    assertThat(actual.get(1).getMetaData().getIp(), is("127.0.0.1"));
    assertThat(actual.get(1).getMetaData().getType(), is(InstanceType.PROXY));
    assertThat(((ProxyInstanceMetaData) actual.get(1).getMetaData()).getPort(), is(3308));
}
@VisibleForTesting
public SmsChannelDO validateSmsChannel(Long channelId) {
    SmsChannelDO channelDO = smsChannelService.getSmsChannel(channelId);
    if (channelDO == null) {
        throw exception(SMS_CHANNEL_NOT_EXISTS);
    }
    if (CommonStatusEnum.isDisable(channelDO.getStatus())) {
        throw exception(SMS_CHANNEL_DISABLE);
    }
    return channelDO;
}
@Test
public void testValidateSmsChannel_disable() {
    // Prepare parameters
    Long channelId = randomLongId();
    // Mock the method
    SmsChannelDO channelDO = randomPojo(SmsChannelDO.class, o -> {
        o.setId(channelId);
        o.setStatus(CommonStatusEnum.DISABLE.getStatus()); // keep the status disabled to trigger the failure
    });
    when(smsChannelService.getSmsChannel(eq(channelId))).thenReturn(channelDO);
    // Call and verify the exception
    assertServiceException(() -> smsTemplateService.validateSmsChannel(channelId), SMS_CHANNEL_DISABLE);
}
private Function<KsqlConfig, Kudf> getUdfFactory(
    final Method method,
    final UdfDescription udfDescriptionAnnotation,
    final String functionName,
    final FunctionInvoker invoker,
    final String sensorName
) {
    return ksqlConfig -> {
        final Object actualUdf = FunctionLoaderUtils.instantiateFunctionInstance(
            method.getDeclaringClass(), udfDescriptionAnnotation.name());
        if (actualUdf instanceof Configurable) {
            ExtensionSecurityManager.INSTANCE.pushInUdf();
            try {
                ((Configurable) actualUdf)
                    .configure(ksqlConfig.getKsqlFunctionsConfigProps(functionName));
            } finally {
                ExtensionSecurityManager.INSTANCE.popOutUdf();
            }
        }
        final PluggableUdf theUdf = new PluggableUdf(invoker, actualUdf);
        return metrics.<Kudf>map(m -> new UdfMetricProducer(
            m.getSensor(sensorName),
            theUdf,
            Time.SYSTEM
        )).orElse(theUdf);
    };
}
@Test
public void shouldLoadDecimalUdfs() {
    // Given:
    final SqlDecimal schema = SqlTypes.decimal(2, 1);
    // When:
    final KsqlScalarFunction fun = FUNC_REG.getUdfFactory(FunctionName.of("floor"))
        .getFunction(ImmutableList.of(SqlArgument.of(schema)));
    // Then:
    assertThat(fun.name().text(), equalToIgnoringCase("floor"));
}
protected static PrivateKey toPrivateKey(File keyFile, String keyPassword)
        throws NoSuchAlgorithmException, NoSuchPaddingException, InvalidKeySpecException,
               InvalidAlgorithmParameterException, KeyException, IOException {
    return toPrivateKey(keyFile, keyPassword, true);
}
@Test
public void testPkcs1UnencryptedRsa() throws Exception {
    PrivateKey key = SslContext.toPrivateKey(
            new File(getClass().getResource("rsa_pkcs1_unencrypted.key").getFile()), null);
    assertNotNull(key);
}
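A hedged sketch of the password-protected branch; the encrypted key resource name and password are illustrative assumptions, not files shown in this document.
PrivateKey encrypted = SslContext.toPrivateKey(
        new File(getClass().getResource("rsa_pkcs1_aes_encrypted.key").getFile()),
        "example");  // hypothetical key file and password
assertNotNull(encrypted);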
public void combine(AnalyzerSetting analyzerSetting) {
    this.analyzer.putAll(analyzerSetting.getAnalyzer());
    this.tokenizer.putAll(analyzerSetting.tokenizer);
    this.filter.putAll(analyzerSetting.filter);
    this.charFilter.putAll(analyzerSetting.charFilter);
}
@Test
public void combine() {
    StorageModuleElasticsearchConfig elasticsearchConfig = new StorageModuleElasticsearchConfig();
    AnalyzerSetting oapAnalyzerSetting = gson.fromJson(elasticsearchConfig.getOapAnalyzer(), AnalyzerSetting.class);
    Assertions.assertEquals(oapAnalyzerSetting, getDefaultOapAnalyzer());
    AnalyzerSetting oapLogAnalyzerSetting = gson.fromJson(elasticsearchConfig.getOapLogAnalyzer(), AnalyzerSetting.class);
    Assertions.assertEquals(oapLogAnalyzerSetting, getDefaultOapLogAnalyzer());
    AnalyzerSetting testAnalyzerSetting = gson.fromJson(ANALYZER_JSON, AnalyzerSetting.class);
    Assertions.assertEquals(testAnalyzerSetting, getTestOapAnalyzerSetting());
    oapAnalyzerSetting.combine(oapLogAnalyzerSetting);
    oapAnalyzerSetting.combine(testAnalyzerSetting);
    Assertions.assertEquals(oapAnalyzerSetting, getMergedAnalyzerSetting());
}
@PostMapping("edit") public String updateProduct(@ModelAttribute(name = "product", binding = false) Product product, UpdateProductPayload payload, Model model, HttpServletResponse response) { try { this.productsRestClient.updateProduct(product.id(), payload.title(), payload.details()); return "redirect:/catalogue/products/%d".formatted(product.id()); } catch (BadRequestException exception) { response.setStatus(HttpStatus.BAD_REQUEST.value()); model.addAttribute("payload", payload); model.addAttribute("errors", exception.getErrors()); return "catalogue/products/edit"; } }
@Test
void updateProduct_RequestIsInvalid_ReturnsProductEditPage() {
    // given
    var product = new Product(1, "Товар №1", "Описание товара №1");
    var payload = new UpdateProductPayload(" ", null);
    var model = new ConcurrentModel();
    var response = new MockHttpServletResponse();
    doThrow(new BadRequestException(List.of("Ошибка 1", "Ошибка 2")))
        .when(this.productsRestClient).updateProduct(1, " ", null);
    // when
    var result = this.controller.updateProduct(product, payload, model, response);
    // then
    assertEquals("catalogue/products/edit", result);
    assertEquals(payload, model.getAttribute("payload"));
    assertEquals(List.of("Ошибка 1", "Ошибка 2"), model.getAttribute("errors"));
    assertEquals(HttpStatus.BAD_REQUEST.value(), response.getStatus());
    verify(this.productsRestClient).updateProduct(1, " ", null);
    verifyNoMoreInteractions(this.productsRestClient);
}
public static QueryBuilder namedQuery(final String namedQuery) {
    return new QueryBuilder() {
        protected Query makeQueryObject(EntityManager entityManager) {
            return entityManager.createNamedQuery(namedQuery);
        }

        @Override
        public String toString() {
            return "Named: " + namedQuery + getParameterDescription();
        }
    };
}
@Test
public void testNamedQueryBuilder() {
    QueryBuilder q = QueryBuilder.namedQuery("step1");
    assertNotNull(q);
    assertEquals("Named: step1", q.toString());
}
@Override
public void bind(
        Map<String, Object> configurationProperties,
        boolean ignoreUnknownFields,
        boolean ignoreInvalidFields,
        Object configurationBean) {
    Iterable<PropertySource<?>> propertySources = asList(new MapPropertySource("internal", configurationProperties));
    // Converts ConfigurationPropertySources
    Iterable<ConfigurationPropertySource> configurationPropertySources = from(propertySources);
    // Wrap Bindable from DubboConfig instance
    Bindable bindable = Bindable.ofInstance(configurationBean);
    Binder binder = new Binder(configurationPropertySources, new PropertySourcesPlaceholdersResolver(propertySources));
    // Get BindHandler
    BindHandler bindHandler = getBindHandler(ignoreUnknownFields, ignoreInvalidFields);
    // Bind
    binder.bind("", bindable, bindHandler);
}
@Test
void testBinder() {
    ApplicationConfig applicationConfig = new ApplicationConfig();
    Map<String, Object> properties = getSubProperties(environment.getPropertySources(), "dubbo.application");
    dubboConfigBinder.bind(properties, true, true, applicationConfig);
    Assert.assertEquals("hello", applicationConfig.getName());
    Assert.assertEquals("world", applicationConfig.getOwner());
    RegistryConfig registryConfig = new RegistryConfig();
    properties = getSubProperties(environment.getPropertySources(), "dubbo.registry");
    dubboConfigBinder.bind(properties, true, true, registryConfig);
    Assert.assertEquals("10.20.153.17", registryConfig.getAddress());
    ProtocolConfig protocolConfig = new ProtocolConfig();
    properties = getSubProperties(environment.getPropertySources(), "dubbo.protocol");
    dubboConfigBinder.bind(properties, true, true, protocolConfig);
    Assert.assertEquals(Integer.valueOf(20881), protocolConfig.getPort());
}
public final Span joinSpan(TraceContext context) {
    if (context == null) throw new NullPointerException("context == null");
    if (!supportsJoin) return newChild(context);
    // set shared flag if not already done
    int flags = InternalPropagation.instance.flags(context);
    if (!context.shared()) {
        flags |= FLAG_SHARED;
        return toSpan(context, InternalPropagation.instance.withFlags(context, flags));
    } else {
        flags &= ~FLAG_SHARED;
        return toSpan(InternalPropagation.instance.withFlags(context, flags), context);
    }
}
@Test
void joinSpan_notYetSampledIsNotShared_child() {
    TraceContext context = TraceContext.newBuilder().traceId(1).parentId(2).spanId(3).shared(true).build();
    Span span = tracer.joinSpan(context);
    assertThat(span.context().shared()).isFalse();
}
protected static PKCS8EncodedKeySpec generateKeySpec(char[] password, byte[] key)
        throws IOException, PKCSException, OperatorCreationException {
    if (password == null || password.length == 0) {
        return new PKCS8EncodedKeySpec(key);
    }
    final PKCS8EncryptedPrivateKeyInfo privateKeyInfo = new PKCS8EncryptedPrivateKeyInfo(key);
    final InputDecryptorProvider decProv =
        new JceOpenSSLPKCS8DecryptorProviderBuilder().setProvider("BC").build(password);
    PrivateKeyInfo pkInfo = privateKeyInfo.decryptPrivateKeyInfo(decProv);
    PrivateKey privKey = new JcaPEMKeyConverter().setProvider("BC").getPrivateKey(pkInfo);
    return new PKCS8EncodedKeySpec(privKey.getEncoded());
}
@Test
public void testGenerateKeySpecFromPBE1EncryptedPrivateKey() throws Exception {
    final URL url = Resources.getResource("org/graylog2/shared/security/tls/key-enc-pbe1.p8");
    final byte[] privateKey = PemReader.readPrivateKey(Paths.get(url.toURI()));
    final PKCS8EncodedKeySpec keySpec = PemKeyStore.generateKeySpec("password".toCharArray(), privateKey);
    assertThat(keySpec.getFormat()).isEqualTo("PKCS#8");
    assertThat(keySpec.getEncoded()).isNotEmpty();
}
public Optional<Integer> findProjectionIndex(final String projectionName) {
    int result = 1;
    for (Projection each : projections) {
        if (projectionName.equalsIgnoreCase(SQLUtils.getExactlyValue(each.getExpression()))) {
            return Optional.of(result);
        }
        result++;
    }
    return Optional.empty();
}
@Test
void assertFindProjectionIndex() {
    Projection projection = getColumnProjection();
    ProjectionsContext projectionsContext = new ProjectionsContext(0, 0, true, Collections.singleton(projection));
    Optional<Integer> actual = projectionsContext.findProjectionIndex(projection.getExpression());
    assertTrue(actual.isPresent());
    assertThat(actual.get(), is(1));
}
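A hedged sketch of the miss case, reusing the test's getColumnProjection helper: the index counter starts at 1 and an unknown projection name falls through to Optional.empty().
ProjectionsContext context = new ProjectionsContext(0, 0, true, Collections.singleton(getColumnProjection()));
assertFalse(context.findProjectionIndex("no_such_projection").isPresent());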
static Result coerceUserList(
    final Collection<Expression> expressions,
    final ExpressionTypeManager typeManager
) {
    return coerceUserList(expressions, typeManager, Collections.emptyMap());
}
@Test
public void shouldCoerceStringNumericWithENotationToDecimals() {
    // Given:
    final ImmutableList<Expression> expressions = ImmutableList.of(
        new IntegerLiteral(10),
        new StringLiteral("1e3")
    );
    // When:
    final Result result = CoercionUtil.coerceUserList(expressions, typeManager);
    // Then:
    assertThat(result.commonType(), is(Optional.of(SqlTypes.decimal(10, 0))));
    assertThat(result.expressions(), is(ImmutableList.of(
        new DecimalLiteral(new BigDecimal("10")),
        new DecimalLiteral(new BigDecimal("1000"))
    )));
}
@Override
public byte[] serialize() {
    byte[] payloadData = null;
    if (this.payload != null) {
        this.payload.setParent(this);
        payloadData = this.payload.serialize();
    }
    this.payloadLength = 0;
    if (payloadData != null) {
        this.payloadLength = (short) payloadData.length;
    }
    final byte[] data = new byte[FIXED_HEADER_LENGTH + payloadLength];
    final ByteBuffer bb = ByteBuffer.wrap(data);
    bb.putInt((this.version & 0xf) << 28 | (this.trafficClass & 0xff) << 20 | this.flowLabel & 0xfffff);
    bb.putShort(this.payloadLength);
    bb.put(this.nextHeader);
    bb.put(this.hopLimit);
    bb.put(this.sourceAddress, 0, Ip6Address.BYTE_LENGTH);
    bb.put(this.destinationAddress, 0, Ip6Address.BYTE_LENGTH);
    if (payloadData != null) {
        bb.put(payloadData);
    }
    return data;
}
@Test
public void testSerialize() {
    IPv6 ipv6 = new IPv6();
    ipv6.setPayload(udp);
    ipv6.setVersion((byte) 6);
    ipv6.setTrafficClass((byte) 0x93);
    ipv6.setFlowLabel(0x13579);
    ipv6.setNextHeader(PROTOCOL_UDP);
    ipv6.setHopLimit((byte) 32);
    ipv6.setSourceAddress(SOURCE_ADDRESS);
    ipv6.setDestinationAddress(DESTINATION_ADDRESS);
    assertArrayEquals(ipv6.serialize(), bytePacket);
}
@Override
public String builder(final String paramName, final ServerWebExchange exchange) {
    return HostAddressUtils.acquireHost(exchange);
}
@Test
public void testBuilderWithAnyParamName() {
    assertEquals(testhost, hostParameterData.builder(UUIDUtils.getInstance().generateShortUuid(), exchange));
}
@Override
public String getResourceInputNodeType() {
    return DictionaryConst.NODE_TYPE_FILE_FIELD;
}
@Test
public void testGetResourceInputNodeType() throws Exception {
    assertEquals( DictionaryConst.NODE_TYPE_FILE_FIELD, analyzer.getResourceInputNodeType() );
}
@Override
public MailAccountDO getMailAccount(Long id) {
    return mailAccountMapper.selectById(id);
}
@Test
public void testGetMailAccount() {
    // Mock data
    MailAccountDO dbMailAccount = randomPojo(MailAccountDO.class);
    mailAccountMapper.insert(dbMailAccount); // @Sql: insert an existing row first
    // Prepare parameters
    Long id = dbMailAccount.getId();
    // Call
    MailAccountDO mailAccount = mailAccountService.getMailAccount(id);
    // Assert
    assertPojoEquals(dbMailAccount, mailAccount);
}
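A hedged sketch of the miss case: since the focal method simply delegates to selectById, an id that was never inserted should resolve to null.
assertNull(mailAccountService.getMailAccount(randomLongId()));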
public static <K, N, V, S extends State> InternalKvState<K, N, ?> createStateAndWrapWithLatencyTrackingIfEnabled(
        InternalKvState<K, N, ?> kvState,
        StateDescriptor<S, V> stateDescriptor,
        LatencyTrackingStateConfig latencyTrackingStateConfig) throws Exception {
    if (latencyTrackingStateConfig.isEnabled()) {
        return new LatencyTrackingStateFactory<>(kvState, stateDescriptor, latencyTrackingStateConfig)
            .createState();
    }
    return kvState;
}
@TestTemplate
@SuppressWarnings("unchecked")
<K, N> void testTrackAggregatingState() throws Exception {
    InternalAggregatingState<K, N, Long, Long, Long> aggregatingState = mock(InternalAggregatingState.class);
    AggregatingStateDescriptor<Long, Long, Long> aggregatingStateDescriptor =
        new AggregatingStateDescriptor<>(
            "aggregate",
            new AggregateFunction<Long, Long, Long>() {
                private static final long serialVersionUID = 1L;

                @Override
                public Long createAccumulator() {
                    return 0L;
                }

                @Override
                public Long add(Long value, Long accumulator) {
                    return value + accumulator;
                }

                @Override
                public Long getResult(Long accumulator) {
                    return accumulator;
                }

                @Override
                public Long merge(Long a, Long b) {
                    return a + b;
                }
            },
            Long.class);
    InternalKvState<K, N, ?> latencyTrackingState =
        LatencyTrackingStateFactory.createStateAndWrapWithLatencyTrackingIfEnabled(
            aggregatingState, aggregatingStateDescriptor, getLatencyTrackingStateConfig());
    if (enableLatencyTracking) {
        assertThat(latencyTrackingState).isInstanceOf(LatencyTrackingAggregatingState.class);
    } else {
        assertThat(latencyTrackingState).isEqualTo(aggregatingState);
    }
}
@Override
public void run() {
    doHealthCheck();
}
@Test
void testRunHealthyInstanceWithHeartBeat() {
    injectInstance(true, System.currentTimeMillis());
    when(globalConfig.isExpireInstance()).thenReturn(true);
    beatCheckTask.run();
    assertFalse(client.getAllInstancePublishInfo().isEmpty());
    assertTrue(client.getInstancePublishInfo(Service.newService(NAMESPACE, GROUP_NAME, SERVICE_NAME)).isHealthy());
}
@Override
public Node upload(final Path file, final Local local, final BandwidthThrottle throttle, final StreamListener listener,
                   final TransferStatus status, final ConnectionCallback callback) throws BackgroundException {
    final ThreadPool pool = ThreadPoolFactory.get("multipart", concurrency);
    try {
        final InputStream in;
        if(new SDSTripleCryptEncryptorFeature(session, nodeid).isEncrypted(containerService.getContainer(file))) {
            in = new SDSTripleCryptEncryptorFeature(session, nodeid).encrypt(file, local.getInputStream(), status);
        }
        else {
            in = local.getInputStream();
        }
        final CreateFileUploadRequest createFileUploadRequest = new CreateFileUploadRequest()
            .directS3Upload(true)
            .timestampModification(status.getModified() != null ? new DateTime(status.getModified()) : null)
            .timestampCreation(status.getCreated() != null ? new DateTime(status.getCreated()) : null)
            .size(TransferStatus.UNKNOWN_LENGTH == status.getLength() ? null : status.getLength())
            .parentId(Long.parseLong(nodeid.getVersionId(file.getParent())))
            .name(file.getName());
        final CreateFileUploadResponse createFileUploadResponse = new NodesApi(session.getClient())
            .createFileUploadChannel(createFileUploadRequest, StringUtils.EMPTY);
        if(log.isDebugEnabled()) {
            log.debug(String.format("upload started for %s with response %s", file, createFileUploadResponse));
        }
        final Map<Integer, TransferStatus> etags = new HashMap<>();
        final List<PresignedUrl> presignedUrls = this.retrievePresignedUrls(createFileUploadResponse, status);
        final List<Future<TransferStatus>> parts = new ArrayList<>();
        try {
            final String random = new UUIDRandomStringService().random();
            // Full size of file
            final long size = status.getLength() + status.getOffset();
            long offset = 0;
            long remaining = status.getLength();
            for(int partNumber = 1; remaining >= 0; partNumber++) {
                final long length = Math.min(Math.max((size / (MAXIMUM_UPLOAD_PARTS - 1)), partsize), remaining);
                final PresignedUrl presignedUrl = presignedUrls.get(partNumber - 1);
                if(new SDSTripleCryptEncryptorFeature(session, nodeid).isEncrypted(containerService.getContainer(file))) {
                    final Local temporary = temp.create(String.format("%s-%d", random, partNumber));
                    if(log.isDebugEnabled()) {
                        log.debug(String.format("Encrypted contents for part %d to %s", partNumber, temporary));
                    }
                    final FileBuffer buffer = new FileBuffer(temporary);
                    new StreamCopier(status, StreamProgress.noop).withAutoclose(false).withLimit(length)
                        .transfer(in, new BufferOutputStream(buffer));
                    parts.add(this.submit(pool, file, temporary, buffer, throttle, listener, status,
                        presignedUrl.getUrl(), presignedUrl.getPartNumber(), 0L, length, callback));
                }
                else {
                    parts.add(this.submit(pool, file, local, Buffer.noop, throttle, listener, status,
                        presignedUrl.getUrl(), presignedUrl.getPartNumber(), offset, length, callback));
                }
                remaining -= length;
                offset += length;
                if(0L == remaining) {
                    break;
                }
            }
        }
        finally {
            in.close();
        }
        Interruptibles.awaitAll(parts)
            .forEach(part -> etags.put(part.getPart(), part));
        final CompleteS3FileUploadRequest completeS3FileUploadRequest = new CompleteS3FileUploadRequest()
            .keepShareLinks(new HostPreferences(session.getHost()).getBoolean("sds.upload.sharelinks.keep"))
            .resolutionStrategy(CompleteS3FileUploadRequest.ResolutionStrategyEnum.OVERWRITE);
        if(status.getFilekey() != null) {
            final ObjectReader reader = session.getClient().getJSON().getContext(null).readerFor(FileKey.class);
            final FileKey fileKey = reader.readValue(status.getFilekey().array());
            final EncryptedFileKey encryptFileKey = Crypto.encryptFileKey(
                TripleCryptConverter.toCryptoPlainFileKey(fileKey),
                TripleCryptConverter.toCryptoUserPublicKey(session.keyPair().getPublicKeyContainer())
            );
            completeS3FileUploadRequest.setFileKey(TripleCryptConverter.toSwaggerFileKey(encryptFileKey));
        }
        etags.forEach((key, value) -> completeS3FileUploadRequest.addPartsItem(
            new S3FileUploadPart().partEtag(value.getChecksum().hash).partNumber(key)));
        if(log.isDebugEnabled()) {
            log.debug(String.format("Complete file upload with %s for %s", completeS3FileUploadRequest, file));
        }
        new NodesApi(session.getClient()).completeS3FileUpload(completeS3FileUploadRequest,
            createFileUploadResponse.getUploadId(), StringUtils.EMPTY);
        // Polling
        return new SDSUploadService(session, nodeid).await(file, status, createFileUploadResponse.getUploadId()).getNode();
    }
    catch(CryptoSystemException | InvalidFileKeyException | InvalidKeyPairException | UnknownVersionException e) {
        throw new TripleCryptExceptionMappingService().map("Upload {0} failed", e, file);
    }
    catch(ApiException e) {
        throw new SDSExceptionMappingService(nodeid).map("Upload {0} failed", e, file);
    }
    catch(IOException e) {
        throw new DefaultIOExceptionMappingService().map("Upload {0} failed", e, file);
    }
    finally {
        temp.shutdown();
        // Cancel future tasks
        pool.shutdown(false);
    }
}
@Test
public void testUploadMissingTargetDirectory() throws Exception {
    final SDSNodeIdProvider nodeid = new SDSNodeIdProvider(session);
    final SDSDirectS3UploadFeature feature = new SDSDirectS3UploadFeature(session, nodeid,
        new SDSDelegatingWriteFeature(session, nodeid, new SDSDirectS3WriteFeature(session, nodeid)));
    final Path room = new SDSDirectoryFeature(session, nodeid).mkdir(
        new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory, Path.Type.volume)),
        new TransferStatus());
    final Path directory = new Path(room, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    final Path test = new Path(directory, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    final Local local = new Local(System.getProperty("java.io.tmpdir"), UUID.randomUUID().toString());
    new DefaultLocalTouchFeature().touch(local);
    final TransferStatus status = new TransferStatus();
    assertThrows(NotfoundException.class, () -> feature.upload(test, local,
        new BandwidthThrottle(BandwidthThrottle.UNLIMITED), new DisabledStreamListener(), status,
        new DisabledLoginCallback()));
    local.delete();
}
public static String[] parseUri(String uri) {
    return doParseUri(uri, false);
}
@Test
public void testParseUri() {
    String[] out1 = CamelURIParser.parseUri("smtp://localhost?username=davsclaus&password=secret");
    assertEquals("smtp", out1[0]);
    assertEquals("localhost", out1[1]);
    assertEquals("username=davsclaus&password=secret", out1[2]);
}
public GaugeProducer(MetricsEndpoint endpoint) {
    super(endpoint);
    Gauge<?> gauge = endpoint.getRegistry().getGauges().get(endpoint.getMetricsName());
    if (gauge instanceof CamelMetricsGauge) {
        CamelMetricsGauge camelMetricsGauge = (CamelMetricsGauge) gauge;
        if (endpoint.getSubject() != null) {
            camelMetricsGauge.setValue(endpoint.getSubject());
        }
    } else {
        if (endpoint.getSubject() != null) {
            endpoint.getRegistry().register(endpoint.getMetricsName(), new CamelMetricsGauge(endpoint.getSubject()));
        } else {
            LOG.info("No subject found for Gauge \"{}\". Ignoring...", endpoint.getMetricsName());
        }
    }
}
@Test
public void testGaugeProducer() {
    assertThat(producer.getEndpoint().equals(endpoint), is(true));
}
@Override
public void configure(ResourceGroup group, SelectionContext<VariableMap> criteria) {
    Map.Entry<ResourceGroupIdTemplate, ResourceGroupSpec> entry = getMatchingSpec(group, criteria);
    if (groups.putIfAbsent(group.getId(), group) == null) {
        // If a new spec replaces the spec returned from getMatchingSpec the group will be reconfigured on the next run of load().
        configuredGroups.computeIfAbsent(entry.getKey(), v -> new LinkedList<>()).add(group.getId());
    }
    synchronized (getRootGroup(group.getId())) {
        configureGroup(group, entry.getValue());
    }
}
@Test
public void testConfiguration() {
    H2DaoProvider daoProvider = setup("test_configuration");
    H2ResourceGroupsDao dao = daoProvider.get();
    dao.createResourceGroupsGlobalPropertiesTable();
    dao.createResourceGroupsTable();
    dao.createSelectorsTable();
    dao.insertResourceGroupsGlobalProperties("cpu_quota_period", "1h");
    dao.insertResourceGroup(1, "global", "1MB", 1000, 100, 100, "weighted", null, true, "1h", "1d", "1h", "1MB", "1h", 0, null, ENVIRONMENT);
    dao.insertResourceGroup(2, "sub", "2MB", 4, 3, 3, null, 5, null, null, null, null, null, null, 0, 1L, ENVIRONMENT);
    dao.insertSelector(2, 1, null, null, null, null, null, null);
    DbManagerSpecProvider dbManagerSpecProvider = new DbManagerSpecProvider(daoProvider.get(), ENVIRONMENT, new ReloadingResourceGroupConfig());
    ReloadingResourceGroupConfigurationManager manager = new ReloadingResourceGroupConfigurationManager(
        (poolId, listener) -> {}, new ReloadingResourceGroupConfig(), dbManagerSpecProvider);
    AtomicBoolean exported = new AtomicBoolean();
    InternalResourceGroup global = new InternalResourceGroup.RootInternalResourceGroup("global",
        (group, export) -> exported.set(export), directExecutor(), ignored -> Optional.empty(), rg -> false,
        new InMemoryNodeManager());
    manager.configure(global, new SelectionContext<>(global.getId(), new VariableMap(ImmutableMap.of("USER", "user"))));
    assertEqualsResourceGroup(global, "1MB", 1000, 100, 100, WEIGHTED, DEFAULT_WEIGHT, true, new Duration(1, HOURS),
        new Duration(1, DAYS), new ResourceGroupQueryLimits(Optional.of(new Duration(1, HOURS)),
            Optional.of(new DataSize(1, MEGABYTE)), Optional.of(new Duration(1, HOURS))));
    exported.set(false);
    InternalResourceGroup sub = global.getOrCreateSubGroup("sub", true);
    manager.configure(sub, new SelectionContext<>(sub.getId(), new VariableMap(ImmutableMap.of("USER", "user"))));
    assertEqualsResourceGroup(sub, "2MB", 4, 3, 3, FAIR, 5, false, new Duration(Long.MAX_VALUE, MILLISECONDS),
        new Duration(Long.MAX_VALUE, MILLISECONDS), NO_LIMITS);
}
public HsDataView registerNewConsumer(
        int subpartitionId,
        HsConsumerId consumerId,
        HsSubpartitionConsumerInternalOperations operation) throws IOException {
    synchronized (lock) {
        checkState(!isReleased, "HsFileDataManager is already released.");
        lazyInitialize();
        HsSubpartitionFileReader subpartitionReader = fileReaderFactory.createFileReader(
            subpartitionId,
            consumerId,
            dataFileChannel,
            operation,
            dataIndex,
            hybridShuffleConfiguration.getMaxBuffersReadAhead(),
            this::releaseSubpartitionReader,
            headerBuf);
        allReaders.add(subpartitionReader);
        mayTriggerReading();
        return subpartitionReader;
    }
}
@Test
void testRunReleaseUnusedBuffers() throws Exception {
    TestingHsSubpartitionFileReader reader = new TestingHsSubpartitionFileReader();
    CompletableFuture<Void> prepareForSchedulingFinished = new CompletableFuture<>();
    reader.setPrepareForSchedulingRunnable(() -> prepareForSchedulingFinished.complete(null));
    reader.setReadBuffersConsumer(
        (requestedBuffers, readBuffers) -> {
            assertThat(prepareForSchedulingFinished).isCompleted();
            assertThat(requestedBuffers).hasSize(BUFFER_POOL_SIZE);
            assertThat(bufferPool.getAvailableBuffers()).isEqualTo(0);
            // read one buffer, return another buffer to data manager.
            readBuffers.add(requestedBuffers.poll());
        });
    factory.allReaders.add(reader);
    fileDataManager.registerNewConsumer(0, DEFAULT, subpartitionViewOperation);
    ioExecutor.trigger();
    // not used buffer should be recycled.
    assertThat(bufferPool.getAvailableBuffers()).isEqualTo(1);
}
public static Builder forPage(int page) {
    return new Builder(page);
}
@Test
void andSize_fails_with_IAE_if_size_is_0() {
    Pagination.Builder builder = forPage(1);
    assertThatThrownBy(() -> builder.andSize(0))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage("page size must be >= 1");
}
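For contrast, a hedged sketch of the happy path; that andSize returns the built Pagination is an assumption drawn from the builder style used here.
Pagination pagination = forPage(2).andSize(25);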
@Override
public KTable<K, V> reduce(final Reducer<V> adder,
                           final Reducer<V> subtractor,
                           final Materialized<K, V, KeyValueStore<Bytes, byte[]>> materialized) {
    return reduce(adder, subtractor, NamedInternal.empty(), materialized);
}
@Test
public void shouldThrowNullPointerOnReduceWhenAdderIsNull() {
    assertThrows(NullPointerException.class, () -> groupedTable.reduce(
        null,
        MockReducer.STRING_REMOVER,
        Materialized.as("store")));
}
public static <T> T checkNotNull(final T obj) {
    return checkNotNull(obj, VALIDATE_IS_NOT_NULL_EX_MESSAGE);
}
@Test
public void testCheckNotNullSuccess() {
    Preconditions.checkNotNull(NON_NULL_STRING);
    // null supplier
    Preconditions.checkNotNull(NON_NULL_STRING, null);
    // ill-formatted string supplier
    Preconditions.checkNotNull(NON_NULL_STRING, () -> String.format("%d", NON_INT_STRING));
    // null pattern to string formatter
    Preconditions.checkNotNull(NON_NULL_STRING, NULL_FORMATTER, null, 1);
    // null arguments to string formatter
    Preconditions.checkNotNull(NON_NULL_STRING, EXPECTED_ERROR_MSG_ARGS, null, null);
    // illegal format exception
    Preconditions.checkNotNull(NON_NULL_STRING, "message %d %d", NON_INT_STRING, 1);
    // insufficient arguments
    Preconditions.checkNotNull(NON_NULL_STRING, EXPECTED_ERROR_MSG_ARGS, NON_INT_STRING);
    // null format in string supplier
    Preconditions.checkNotNull(NON_NULL_STRING, () -> String.format(NULL_FORMATTER, NON_INT_STRING));
}
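A hedged sketch of the failure path: a null argument trips the check regardless of the message machinery.
assertThrows(NullPointerException.class, () -> Preconditions.checkNotNull(null));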
@Override
public <T> Serde<T> createSerde(
    final Schema schema,
    final KsqlConfig ksqlConfig,
    final Supplier<SchemaRegistryClient> srFactory,
    final Class<T> targetType,
    final boolean isKey
) {
    validateSchema(schema);
    final Optional<Schema> physicalSchema;
    if (useSchemaRegistryFormat) {
        physicalSchema = properties.getSchemaId().isPresent()
            ? Optional.of(SerdeUtils.getAndTranslateSchemaById(srFactory,
                properties.getSchemaId().get(), new JsonSchemaTranslator()))
            : Optional.empty();
    } else {
        physicalSchema = Optional.empty();
    }
    final Converter converter = useSchemaRegistryFormat
        ? getSchemaRegistryConverter(srFactory.get(), ksqlConfig, properties.getSchemaId(), isKey)
        : getConverter();
    // The translators are used in the serializer & deserializer only for JSON_SR formats
    final ConnectDataTranslator dataTranslator = physicalSchema.isPresent()
        ? new ConnectSRSchemaDataTranslator(physicalSchema.get())
        : new ConnectDataTranslator(schema);
    final Supplier<Serializer<T>> serializer = () -> createSerializer(
        targetType,
        dataTranslator,
        converter
    );
    final Deserializer<T> deserializer = createDeserializer(
        ksqlConfig,
        schema,
        targetType,
        dataTranslator,
        converter
    );
    // Sanity check:
    serializer.get();
    return Serdes.serdeFrom(
        new ThreadLocalSerializer<>(serializer),
        deserializer
    );
}
@Test
public void shouldThrowOnMapWithNoneStringKeys() {
    // Given:
    final ConnectSchema schemaOfInvalidMap = (ConnectSchema) SchemaBuilder
        .map(Schema.OPTIONAL_BOOLEAN_SCHEMA, Schema.OPTIONAL_STRING_SCHEMA)
        .build();
    // When:
    final Exception e = assertThrows(
        KsqlException.class,
        () -> jsonFactory.createSerde(schemaOfInvalidMap, config, srFactory, String.class, false)
    );
    // Then:
    assertThat(e.getMessage(), containsString(
        "JSON only supports MAP types with STRING keys"));
}
@Override
public String name() {
    return internal.name();
}
@Test
public void shouldDelegateName() {
    when(inner.name()).thenReturn(STORE_NAME);
    assertThat(store.name(), is(STORE_NAME));
}
@Override
public Collection<DatabasePacket> execute() throws SQLException {
    switch (packet.getType()) {
        case PREPARED_STATEMENT:
            connectionSession.getServerPreparedStatementRegistry().removePreparedStatement(packet.getName());
            break;
        case PORTAL:
            portalContext.close(packet.getName());
            break;
        default:
            throw new UnsupportedSQLOperationException(packet.getType().name());
    }
    return Collections.singleton(new PostgreSQLCloseCompletePacket());
}
@Test
void assertExecuteClosePreparedStatement() throws SQLException {
    when(connectionSession.getServerPreparedStatementRegistry()).thenReturn(new ServerPreparedStatementRegistry());
    when(packet.getType()).thenReturn(PostgreSQLComClosePacket.Type.PREPARED_STATEMENT);
    when(packet.getName()).thenReturn("S_1");
    PostgreSQLComCloseExecutor closeExecutor = new PostgreSQLComCloseExecutor(portalContext, packet, connectionSession);
    Collection<DatabasePacket> actual = closeExecutor.execute();
    assertThat(actual.size(), is(1));
    assertThat(actual.iterator().next(), is(instanceOf(PostgreSQLCloseCompletePacket.class)));
}
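A hedged sketch of the PORTAL branch, reusing the surrounding test's mocks: closing a portal is delegated to the portal context rather than the prepared-statement registry.
when(packet.getType()).thenReturn(PostgreSQLComClosePacket.Type.PORTAL);
when(packet.getName()).thenReturn("C_1");
new PostgreSQLComCloseExecutor(portalContext, packet, connectionSession).execute();
verify(portalContext).close("C_1");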
public List<IntermediateRecord> trimInSegmentResults(GroupKeyGenerator groupKeyGenerator,
    GroupByResultHolder[] groupByResultHolders, int size) {
    // Should not reach here when numGroups <= heap size because there is no need to create a heap
    assert groupKeyGenerator.getNumKeys() > size;
    Iterator<GroupKeyGenerator.GroupKey> groupKeyIterator = groupKeyGenerator.getGroupKeys();
    Comparator<IntermediateRecord> comparator = _intermediateRecordComparator.reversed();
    // Initialize a heap with the first 'size' groups
    IntermediateRecord[] heap = new IntermediateRecord[size];
    for (int i = 0; i < size; i++) {
        heap[i] = getIntermediateRecord(groupKeyIterator.next(), groupByResultHolders);
    }
    makeHeap(heap, size, comparator);
    // Keep updating the heap with the remaining groups
    while (groupKeyIterator.hasNext()) {
        IntermediateRecord intermediateRecord = getIntermediateRecord(groupKeyIterator.next(), groupByResultHolders);
        if (comparator.compare(intermediateRecord, heap[0]) > 0) {
            heap[0] = intermediateRecord;
            downHeap(heap, size, 0, comparator);
        }
    }
    return Arrays.asList(heap);
}
@Test
public void testInSegmentTrim() {
    TableResizer tableResizer = new TableResizer(DATA_SCHEMA,
        QueryContextConverterUtils.getQueryContext(QUERY_PREFIX + "d3 DESC"));
    List<IntermediateRecord> results =
        tableResizer.trimInSegmentResults(_groupKeyGenerator, _groupByResultHolders, TRIM_TO_SIZE);
    assertEquals(results.size(), TRIM_TO_SIZE);
    // _records[4], _records[3], _records[2]
    assertEquals(results.get(0)._record, _records.get(2));
    if (results.get(1)._record.equals(_records.get(3))) {
        assertEquals(results.get(2)._record, _records.get(4));
    } else {
        assertEquals(results.get(1)._record, _records.get(4));
        assertEquals(results.get(2)._record, _records.get(3));
    }
    tableResizer = new TableResizer(DATA_SCHEMA, QueryContextConverterUtils.getQueryContext(
        QUERY_PREFIX + "SUM(m1) DESC, max(m2) DESC, DISTINCTCOUNT(m3) DESC"));
    results = tableResizer.trimInSegmentResults(_groupKeyGenerator, _groupByResultHolders, TRIM_TO_SIZE);
    assertEquals(results.size(), TRIM_TO_SIZE);
    // _records[2], _records[3], _records[1]
    assertEquals(results.get(0)._record, _records.get(1));
    if (results.get(1)._record.equals(_records.get(3))) {
        assertEquals(results.get(2)._record, _records.get(2));
    } else {
        assertEquals(results.get(1)._record, _records.get(2));
        assertEquals(results.get(2)._record, _records.get(3));
    }
    tableResizer = new TableResizer(DATA_SCHEMA,
        QueryContextConverterUtils.getQueryContext(QUERY_PREFIX + "DISTINCTCOUNT(m3) DESC, AVG(m4) ASC"));
    results = tableResizer.trimInSegmentResults(_groupKeyGenerator, _groupByResultHolders, TRIM_TO_SIZE);
    assertEquals(results.size(), TRIM_TO_SIZE);
    // _records[4], _records[3], _records[1]
    assertEquals(results.get(0)._record, _records.get(1));
    if (results.get(1)._record.equals(_records.get(3))) {
        assertEquals(results.get(2)._record, _records.get(4));
    } else {
        assertEquals(results.get(1)._record, _records.get(4));
        assertEquals(results.get(2)._record, _records.get(3));
    }
}
@Override
public <T> List<ExtensionWrapper<T>> find(Class<T> type) {
    log.debug("Finding extensions of extension point '{}'", type.getName());
    Map<String, Set<String>> entries = getEntries();
    List<ExtensionWrapper<T>> result = new ArrayList<>();
    // add extensions found in classpath and plugins
    for (String pluginId : entries.keySet()) {
        // classpath's extensions <=> pluginId = null
        List<ExtensionWrapper<T>> pluginExtensions = find(type, pluginId);
        result.addAll(pluginExtensions);
    }
    if (result.isEmpty()) {
        log.debug("No extensions found for extension point '{}'", type.getName());
    } else {
        log.debug("Found {} extensions for extension point '{}'", result.size(), type.getName());
    }
    // sort by "ordinal" property
    Collections.sort(result);
    return result;
}
@Test
public void testFindFromPlugin() {
    ExtensionFinder instance = new AbstractExtensionFinder(pluginManager) {
        @Override
        public Map<String, Set<String>> readPluginsStorages() {
            Map<String, Set<String>> entries = new LinkedHashMap<>();
            Set<String> bucket = new HashSet<>();
            bucket.add("org.pf4j.test.TestExtension");
            entries.put("plugin1", bucket);
            bucket = new HashSet<>();
            bucket.add("org.pf4j.test.TestExtension");
            entries.put("plugin2", bucket);
            return entries;
        }

        @Override
        public Map<String, Set<String>> readClasspathStorages() {
            return Collections.emptyMap();
        }
    };
    List<ExtensionWrapper<TestExtensionPoint>> list = instance.find(TestExtensionPoint.class);
    assertEquals(1, list.size());
    list = instance.find(TestExtensionPoint.class, "plugin1");
    assertEquals(1, list.size());
    list = instance.find(TestExtensionPoint.class, "plugin2");
    // "0" because the status of "plugin2" is STOPPED => no extensions
    assertEquals(0, list.size());
}
public static Map<String, String> getMaskedConnectConfig(final Map<String, String> config) {
    return config.entrySet().stream().collect(Collectors.toMap(Entry::getKey, e -> {
        if (ALLOWED_KEYS.contains(e.getKey())) {
            return e.getValue();
        }
        return MASKED_STRING;
    }));
}
@Test
public void shouldMaskConfigMap() {
    // Given
    final ImmutableMap<String, String> config = ImmutableMap.of(
        "connector.class", "someclass",
        "model", "somemode",
        "key", "somekey"
    );
    // When
    final Map<String, String> maskedConfig = QueryMask.getMaskedConnectConfig(config);
    // Then
    final ImmutableMap<String, String> expectedConfig = ImmutableMap.of(
        "connector.class", "someclass",
        "model", "'[string]'",
        "key", "'[string]'"
    );
    assertThat(maskedConfig, is(expectedConfig));
}
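A hedged sketch isolating the allow-list: a config containing only a key on ALLOWED_KEYS (the test above shows "connector.class" is one) passes through unmasked.
Map<String, String> masked = QueryMask.getMaskedConnectConfig(
        ImmutableMap.of("connector.class", "someclass"));
assertThat(masked, is(ImmutableMap.of("connector.class", "someclass")));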
public static <T> Collection<T> nullToEmpty(Collection<T> collection) {
    return collection == null ? Collections.emptyList() : collection;
}
@Test
public void testNullToEmpty_whenNotNull() {
    List<Integer> result = asList(1, 2, 3, 4, 5);
    assertEquals(result, nullToEmpty(result));
}
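A hedged sketch of the null branch: a null collection is normalized to an empty list.
assertTrue(nullToEmpty((Collection<Integer>) null).isEmpty());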
@Override
public double read() {
    return gaugeSource.read();
}
@Test
public void whenNotVisitedWithCachedMetricSourceReadsDefault() {
    DoubleGaugeImplTest.SomeObject someObject = new DoubleGaugeImplTest.SomeObject();
    someObject.doubleField = 42.42D;
    metricsRegistry.registerDynamicMetricsProvider(someObject);
    DoubleGauge doubleGauge = metricsRegistry.newDoubleGauge("foo.doubleField");
    // needed to collect dynamic metrics and update the gauge created from them
    metricsRegistry.collect(mock(MetricsCollector.class));
    assertEquals(42.42D, doubleGauge.read(), 10E-6);
    // clears the cached metric source
    metricsRegistry.deregisterDynamicMetricsProvider(someObject);
    metricsRegistry.collect(mock(MetricsCollector.class));
    assertEquals(DoubleGaugeImpl.DEFAULT_VALUE, doubleGauge.read(), 10E-6);
}
public Result resolve(List<PluginDescriptor> plugins) {
    // create graphs
    dependenciesGraph = new DirectedGraph<>();
    dependentsGraph = new DirectedGraph<>();
    // populate graphs
    Map<String, PluginDescriptor> pluginByIds = new HashMap<>();
    for (PluginDescriptor plugin : plugins) {
        addPlugin(plugin);
        pluginByIds.put(plugin.getPluginId(), plugin);
    }
    log.debug("Graph: {}", dependenciesGraph);
    // get a sorted list of dependencies
    List<String> sortedPlugins = dependenciesGraph.reverseTopologicalSort();
    log.debug("Plugins order: {}", sortedPlugins);
    // create the result object
    Result result = new Result(sortedPlugins);
    resolved = true;
    if (sortedPlugins != null) { // no cyclic dependency
        // detect not found dependencies
        for (String pluginId : sortedPlugins) {
            if (!pluginByIds.containsKey(pluginId)) {
                result.addNotFoundDependency(pluginId);
            }
        }
    }
    // check dependencies versions
    for (PluginDescriptor plugin : plugins) {
        String pluginId = plugin.getPluginId();
        String existingVersion = plugin.getVersion();
        List<String> dependents = getDependents(pluginId);
        while (!dependents.isEmpty()) {
            String dependentId = dependents.remove(0);
            PluginDescriptor dependent = pluginByIds.get(dependentId);
            String requiredVersion = getDependencyVersionSupport(dependent, pluginId);
            boolean ok = checkDependencyVersion(requiredVersion, existingVersion);
            if (!ok) {
                result.addWrongDependencyVersion(new WrongDependencyVersion(pluginId, dependentId,
                    existingVersion, requiredVersion));
            }
        }
    }
    return result;
}
@Test
void goodDependencyVersion() {
    PluginDescriptor pd1 = new DefaultPluginDescriptor()
        .setPluginId("p1")
        .setDependencies("p2@2.0.0");

    PluginDescriptor pd2 = new DefaultPluginDescriptor()
        .setPluginId("p2")
        .setPluginVersion("2.0.0");

    List<PluginDescriptor> plugins = new ArrayList<>();
    plugins.add(pd1);
    plugins.add(pd2);

    DependencyResolver.Result result = resolver.resolve(plugins);

    assertTrue(result.getWrongVersionDependencies().isEmpty());
}
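For contrast, a minimal sketch of the failing branch, assuming the same resolver fixture and that the version check rejects a requirement of p2@3.0.0 against an available version of 2.0.0:

@Test
void wrongDependencyVersion() {
    PluginDescriptor pd1 = new DefaultPluginDescriptor()
        .setPluginId("p1")
        .setDependencies("p2@3.0.0"); // requires 3.0.0, but only 2.0.0 is available

    PluginDescriptor pd2 = new DefaultPluginDescriptor()
        .setPluginId("p2")
        .setPluginVersion("2.0.0");

    List<PluginDescriptor> plugins = new ArrayList<>();
    plugins.add(pd1);
    plugins.add(pd2);

    DependencyResolver.Result result = resolver.resolve(plugins);

    // the mismatch is reported in the result rather than thrown
    assertFalse(result.getWrongVersionDependencies().isEmpty());
}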
public static IntrinsicMapTaskExecutor withSharedCounterSet(
        List<Operation> operations, CounterSet counters, ExecutionStateTracker executionStateTracker) {
    return new IntrinsicMapTaskExecutor(operations, counters, executionStateTracker);
}
@Test
public void testExceptionInAbortSuppressed() throws Exception {
    Operation o1 = Mockito.mock(Operation.class);
    Operation o2 = Mockito.mock(Operation.class);
    Operation o3 = Mockito.mock(Operation.class);
    Operation o4 = Mockito.mock(Operation.class);
    Mockito.doThrow(new Exception("in finish")).when(o2).finish();
    Mockito.doThrow(new Exception("suppressed in abort")).when(o3).abort();

    ExecutionStateTracker stateTracker = ExecutionStateTracker.newForTest();
    try (IntrinsicMapTaskExecutor executor =
            IntrinsicMapTaskExecutor.withSharedCounterSet(
                Arrays.<Operation>asList(o1, o2, o3, o4), counterSet, stateTracker)) {
        executor.execute();
        fail("Should have thrown");
    } catch (Exception e) {
        InOrder inOrder = Mockito.inOrder(o1, o2, o3, o4);
        inOrder.verify(o4).start();
        inOrder.verify(o3).start();
        inOrder.verify(o2).start();
        inOrder.verify(o1).start();
        inOrder.verify(o1).finish();
        inOrder.verify(o2).finish(); // this fails

        // Order of abort doesn't matter
        Mockito.verify(o1).abort();
        Mockito.verify(o2).abort();
        Mockito.verify(o3).abort(); // will throw an exception, but we shouldn't fail
        Mockito.verify(o4).abort();
        Mockito.verifyNoMoreInteractions(o1, o2, o3, o4);

        // Make sure the failure while aborting shows up as a suppressed error
        assertThat(e.getMessage(), equalTo("in finish"));
        assertThat(e.getSuppressed(), arrayWithSize(1));
        assertThat(e.getSuppressed()[0].getMessage(), equalTo("suppressed in abort"));
    }
}
public boolean matchNotification(StageConfigIdentifier stageIdentifier, StageEvent event, MaterialRevisions materialRevisions) {
    if (!shouldSendEmailToMe()) {
        return false;
    }
    for (NotificationFilter filter : notificationFilters) {
        if (filter.matchStage(stageIdentifier, event)) {
            if (filter.isAppliedOnAllCheckins() || matchModification(materialRevisions)) {
                return true;
            }
        }
    }
    return false;
}
@Test
void shouldReturnFalseWhenEmailIsEmpty() {
    assertThat(new User("UserName", new String[]{"README"}, null, true).matchNotification(null, StageEvent.All, null)).isFalse();
    assertThat(new User("UserName", new String[]{"README"}, "", true).matchNotification(null, StageEvent.All, null)).isFalse();
}
@Override
@SuppressWarnings("unchecked")
public <T> T get(final PluginConfigSpec<T> configSpec) {
    if (rawSettings.containsKey(configSpec.name())) {
        Object o = rawSettings.get(configSpec.name());
        if (configSpec.type().isAssignableFrom(o.getClass())) {
            return (T) o;
        } else if (configSpec.type() == Double.class && o.getClass() == Long.class) {
            return configSpec.type().cast(((Long) o).doubleValue());
        } else if (configSpec.type() == Boolean.class && o instanceof String) {
            return configSpec.type().cast(Boolean.parseBoolean((String) o));
        } else if (configSpec.type() == Codec.class && o instanceof String && pluginFactory != null) {
            Codec codec = pluginFactory.buildDefaultCodec((String) o);
            return configSpec.type().cast(codec);
        } else if (configSpec.type() == Codec.class && o instanceof RubyObject && RubyCodecDelegator.isRubyCodecSubclass((RubyObject) o)) {
            Codec codec = pluginFactory.buildRubyCodecWrapper((RubyObject) o);
            return configSpec.type().cast(codec);
        } else if (configSpec.type() == URI.class && o instanceof String) {
            try {
                URI uri = new URI((String) o);
                return configSpec.type().cast(uri);
            } catch (URISyntaxException ex) {
                throw new IllegalStateException(
                        String.format("Invalid URI specified for '%s'", configSpec.name()));
            }
        } else if (configSpec.type() == Password.class && o instanceof String) {
            Password p = new Password((String) o);
            return configSpec.type().cast(p);
        } else {
            throw new IllegalStateException(
                    String.format("Setting value for '%s' of type '%s' incompatible with defined type of '%s'",
                            configSpec.name(), o.getClass(), configSpec.type()));
        }
    } else if (configSpec.type() == Codec.class && configSpec.getRawDefaultValue() != null && pluginFactory != null) {
        Codec codec = pluginFactory.buildDefaultCodec(configSpec.getRawDefaultValue());
        return configSpec.type().cast(codec);
    } else if (configSpec.type() == URI.class && configSpec.getRawDefaultValue() != null) {
        try {
            URI uri = new URI(configSpec.getRawDefaultValue());
            return configSpec.type().cast(uri);
        } catch (URISyntaxException ex) {
            throw new IllegalStateException(
                    String.format("Invalid default URI specified for '%s'", configSpec.name()));
        }
    } else if (configSpec.type() == Password.class && configSpec.getRawDefaultValue() != null) {
        Password p = new Password(configSpec.getRawDefaultValue());
        return configSpec.type().cast(p);
    } else {
        return configSpec.defaultValue();
    }
}
@Test
public void testDefaultCodec() {
    PluginConfigSpec<Codec> codecConfig = PluginConfigSpec.codecSetting("codec", "java-line");
    Configuration config = new ConfigurationImpl(Collections.emptyMap(), new TestPluginFactory());
    Codec codec = config.get(codecConfig);
    Assert.assertTrue(codec instanceof Line);
}
@Override
public Path copy(final Path source, final Path copy, final TransferStatus status, final ConnectionCallback callback, final StreamListener listener) throws BackgroundException {
    try {
        final String target = new DefaultUrlProvider(session.getHost()).toUrl(copy).find(DescriptiveUrl.Type.provider).getUrl();
        if(session.getFeature(Lock.class) != null && status.getLockId() != null) {
            // Indicate that the client has knowledge of that state token
            session.getClient().copy(new DAVPathEncoder().encode(source), target, status.isExists(),
                Collections.singletonMap(HttpHeaders.IF, String.format("(<%s>)", status.getLockId())));
        }
        else {
            session.getClient().copy(new DAVPathEncoder().encode(source), target, status.isExists());
        }
        listener.sent(status.getLength());
        return copy.withAttributes(source.attributes());
    }
    catch(SardineException e) {
        throw new DAVExceptionMappingService().map("Cannot copy {0}", e, source);
    }
    catch(IOException e) {
        throw new HttpExceptionMappingService().map(e, source);
    }
}
@Test
public void testCopyToExistingFile() throws Exception {
    final Path folder = new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory));
    new DAVDirectoryFeature(session).mkdir(folder, new TransferStatus());
    final Path test = new Path(folder, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    new DAVTouchFeature(session).touch(test, new TransferStatus());
    final Path copy = new Path(folder, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    new DAVTouchFeature(session).touch(copy, new TransferStatus());
    assertThrows(ConflictException.class, () -> new DAVCopyFeature(session).copy(test, copy, new TransferStatus().exists(false), new DisabledConnectionCallback(), new DisabledStreamListener()));
    new DAVCopyFeature(session).copy(test, copy, new TransferStatus().exists(true), new DisabledConnectionCallback(), new DisabledStreamListener());
    final Find find = new DefaultFindFeature(session);
    assertTrue(find.find(test));
    assertTrue(find.find(copy));
    new DAVDeleteFeature(session).delete(Arrays.asList(test, copy), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
@Override
public JobDetails postProcess(JobDetails jobDetails) {
    if (isNotNullOrEmpty(substringBetween(jobDetails.getClassName(), "$$", "$$"))) {
        return new JobDetails(
            substringBefore(jobDetails.getClassName(), "$$"),
            jobDetails.getStaticFieldName(),
            jobDetails.getMethodName(),
            jobDetails.getJobParameters()
        );
    }
    return jobDetails;
}
@Test
void postProcessWithCGLibReturnsUpdatedJobDetails() {
    // GIVEN
    final JobDetails jobDetails = defaultJobDetails().withClassName(TestService.class.getName() + "$$EnhancerByCGLIB$$6aee664d").build();

    // WHEN
    final JobDetails result = cgLibPostProcessor.postProcess(jobDetails);

    // THEN
    assertThat(result)
        .isNotSameAs(jobDetails)
        .hasClass(TestService.class);
}
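A companion sketch for the pass-through branch, assuming the same fixture: a class name without a "$$...$$" proxy marker should come back unchanged, as the very same instance.

@Test
void postProcessWithoutCGLibReturnsSameJobDetails() {
    // GIVEN a plain class name with no "$$...$$" CGLib marker
    final JobDetails jobDetails = defaultJobDetails().withClassName(TestService.class.getName()).build();

    // WHEN
    final JobDetails result = cgLibPostProcessor.postProcess(jobDetails);

    // THEN the post processor short-circuits and returns the same instance
    assertThat(result).isSameAs(jobDetails);
}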
public static List<CharSequence> unescapeCsvFields(CharSequence value) {
    List<CharSequence> unescaped = new ArrayList<CharSequence>(2);
    StringBuilder current = InternalThreadLocalMap.get().stringBuilder();
    boolean quoted = false;
    int last = value.length() - 1;
    for (int i = 0; i <= last; i++) {
        char c = value.charAt(i);
        if (quoted) {
            switch (c) {
                case DOUBLE_QUOTE:
                    if (i == last) {
                        // Add the last field and return
                        unescaped.add(current.toString());
                        return unescaped;
                    }
                    char next = value.charAt(++i);
                    if (next == DOUBLE_QUOTE) {
                        // 2 double-quotes should be unescaped to one
                        current.append(DOUBLE_QUOTE);
                        break;
                    }
                    if (next == COMMA) {
                        // This is the end of a field. Let's start to parse the next field.
                        quoted = false;
                        unescaped.add(current.toString());
                        current.setLength(0);
                        break;
                    }
                    // double-quote followed by other character is invalid
                    throw newInvalidEscapedCsvFieldException(value, i - 1);
                default:
                    current.append(c);
            }
        } else {
            switch (c) {
                case COMMA:
                    // Start to parse the next field
                    unescaped.add(current.toString());
                    current.setLength(0);
                    break;
                case DOUBLE_QUOTE:
                    if (current.length() == 0) {
                        quoted = true;
                        break;
                    }
                    // double-quote appears without being enclosed with double-quotes
                    // fall through
                case LINE_FEED:
                    // fall through
                case CARRIAGE_RETURN:
                    // special characters appears without being enclosed with double-quotes
                    throw newInvalidEscapedCsvFieldException(value, i);
                default:
                    current.append(c);
            }
        }
    }
    if (quoted) {
        throw newInvalidEscapedCsvFieldException(value, last);
    }
    unescaped.add(current.toString());
    return unescaped;
}
@Test
public void testUnescapeCsvFields() {
    assertEquals(Collections.singletonList(""), unescapeCsvFields(""));
    assertEquals(Arrays.asList("", ""), unescapeCsvFields(","));
    assertEquals(Arrays.asList("a", ""), unescapeCsvFields("a,"));
    assertEquals(Arrays.asList("", "a"), unescapeCsvFields(",a"));
    assertEquals(Collections.singletonList("\""), unescapeCsvFields("\"\"\"\""));
    assertEquals(Arrays.asList("\"", "\""), unescapeCsvFields("\"\"\"\",\"\"\"\""));
    assertEquals(Collections.singletonList("netty"), unescapeCsvFields("netty"));
    assertEquals(Arrays.asList("hello", "netty"), unescapeCsvFields("hello,netty"));
    assertEquals(Collections.singletonList("hello,netty"), unescapeCsvFields("\"hello,netty\""));
    assertEquals(Arrays.asList("hello", "netty"), unescapeCsvFields("\"hello\",\"netty\""));
    assertEquals(Arrays.asList("a\"b", "c\"d"), unescapeCsvFields("\"a\"\"b\",\"c\"\"d\""));
    assertEquals(Arrays.asList("a\rb", "c\nd"), unescapeCsvFields("\"a\rb\",\"c\nd\""));
}
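The method also rejects malformed input via newInvalidEscapedCsvFieldException. A minimal sketch of that path, assuming assertThrows is available and that the exception type is IllegalArgumentException:

@Test
public void testUnescapeCsvFieldsWithInvalidEscape() {
    // a double-quote appearing mid-field without enclosing quotes is rejected
    assertThrows(IllegalArgumentException.class, () -> unescapeCsvFields("a\"b"));
    // an unterminated quoted field is rejected as well
    assertThrows(IllegalArgumentException.class, () -> unescapeCsvFields("\"netty"));
}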
public static Predicate parse(String expression) {
    final Stack<Predicate> predicateStack = new Stack<>();
    final Stack<Character> operatorStack = new Stack<>();

    final String trimmedExpression = TRIMMER_PATTERN.matcher(expression).replaceAll("");
    final StringTokenizer tokenizer = new StringTokenizer(trimmedExpression, OPERATORS, true);
    boolean isTokenMode = true;

    while (true) {
        final Character operator;
        final String token;

        if (isTokenMode) {
            if (tokenizer.hasMoreTokens()) {
                token = tokenizer.nextToken();
            } else {
                break;
            }
            if (OPERATORS.contains(token)) {
                operator = token.charAt(0);
            } else {
                operator = null;
            }
        } else {
            operator = operatorStack.pop();
            token = null;
        }
        isTokenMode = true;

        if (operator == null) {
            try {
                predicateStack.push(Class.forName(token).asSubclass(Predicate.class).getDeclaredConstructor().newInstance());
            } catch (ClassCastException e) {
                throw new RuntimeException(token + " must implement " + Predicate.class.getName(), e);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        } else {
            if (operatorStack.empty() || operator == '(') {
                operatorStack.push(operator);
            } else if (operator == ')') {
                while (operatorStack.peek() != '(') {
                    evaluate(predicateStack, operatorStack);
                }
                operatorStack.pop();
            } else {
                if (OPERATOR_PRECEDENCE.get(operator) < OPERATOR_PRECEDENCE.get(operatorStack.peek())) {
                    evaluate(predicateStack, operatorStack);
                    isTokenMode = false;
                }
                operatorStack.push(operator);
            }
        }
    }

    while (!operatorStack.empty()) {
        evaluate(predicateStack, operatorStack);
    }

    if (predicateStack.size() > 1) {
        throw new RuntimeException("Invalid logical expression");
    }

    return predicateStack.pop();
}
@Test
public void testNotAndParenOr() {
    final Predicate parsed = PredicateExpressionParser.parse("!com.linkedin.data.it.AlwaysTruePredicate & !(com.linkedin.data.it.AlwaysTruePredicate | com.linkedin.data.it.AlwaysFalsePredicate)");
    Assert.assertEquals(parsed.getClass(), AndPredicate.class);

    final List<Predicate> andChildren = ((AndPredicate) parsed).getChildPredicates();
    Assert.assertEquals(andChildren.get(0).getClass(), NotPredicate.class);
    Assert.assertEquals(andChildren.get(1).getClass(), NotPredicate.class);

    final Predicate notChild1 = ((NotPredicate) andChildren.get(0)).getChildPredicate();
    Assert.assertEquals(notChild1.getClass(), AlwaysTruePredicate.class);

    final Predicate notChild2 = ((NotPredicate) andChildren.get(1)).getChildPredicate();
    Assert.assertEquals(notChild2.getClass(), OrPredicate.class);

    final List<Predicate> orChildren = ((OrPredicate) notChild2).getChildPredicates();
    Assert.assertEquals(orChildren.get(0).getClass(), AlwaysTruePredicate.class);
    Assert.assertEquals(orChildren.get(1).getClass(), AlwaysFalsePredicate.class);
}
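A smaller sketch, reusing the predicate classes from the test above, showing the base case: a single binary expression parses into the matching composite predicate.

@Test
public void testSimpleOr() {
    final Predicate parsed = PredicateExpressionParser.parse(
        "com.linkedin.data.it.AlwaysTruePredicate | com.linkedin.data.it.AlwaysFalsePredicate");
    Assert.assertEquals(parsed.getClass(), OrPredicate.class);

    // operands are instantiated reflectively and kept in source order
    final List<Predicate> orChildren = ((OrPredicate) parsed).getChildPredicates();
    Assert.assertEquals(orChildren.get(0).getClass(), AlwaysTruePredicate.class);
    Assert.assertEquals(orChildren.get(1).getClass(), AlwaysFalsePredicate.class);
}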
public boolean eval(StructLike data) {
    return new EvalVisitor().eval(data);
}
@Test
public void testNot() {
    Evaluator evaluator = new Evaluator(STRUCT, not(equal("x", 7)));
    assertThat(evaluator.eval(TestHelpers.Row.of(7))).as("not(7 == 7) => false").isFalse();
    assertThat(evaluator.eval(TestHelpers.Row.of(8))).as("not(8 == 7) => true").isTrue();

    Evaluator structEvaluator = new Evaluator(STRUCT, not(equal("s1.s2.s3.s4.i", 7)));
    assertThat(
            structEvaluator.eval(
                TestHelpers.Row.of(
                    7,
                    null,
                    null,
                    TestHelpers.Row.of(
                        TestHelpers.Row.of(TestHelpers.Row.of(TestHelpers.Row.of(7)))))))
        .as("not(7 == 7) => false")
        .isFalse();
    assertThat(
            structEvaluator.eval(
                TestHelpers.Row.of(
                    8,
                    null,
                    null,
                    TestHelpers.Row.of(
                        TestHelpers.Row.of(TestHelpers.Row.of(TestHelpers.Row.of(8)))))))
        .as("not(8 == 7) => true")
        .isTrue();
}