Dataset columns:
  code        string  (lengths 3 – 1.05M)
  repo_name   string  (lengths 4 – 116)
  path        string  (lengths 4 – 991)
  language    string  (9 classes)
  license     string  (15 classes)
  size        int32   (3 – 1.05M)
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache.distributed.dht;

import java.nio.ByteBuffer;
import java.util.List;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.internal.GridDirectTransient;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.GridCacheMessage;
import org.apache.ignite.internal.processors.cache.GridCacheSharedContext;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.plugin.extensions.communication.MessageReader;
import org.apache.ignite.plugin.extensions.communication.MessageWriter;
import org.apache.ignite.spi.discovery.tcp.internal.TcpDiscoveryNode;
import org.jetbrains.annotations.NotNull;

/**
 * Affinity assignment response.
 */
public class GridDhtAffinityAssignmentResponse extends GridCacheMessage {
    /** */
    private static final long serialVersionUID = 0L;

    /** Topology version. */
    private AffinityTopologyVersion topVer;

    /** Affinity assignment. */
    @GridDirectTransient
    @GridToStringInclude
    private List<List<ClusterNode>> affAssignment;

    /** Affinity assignment bytes. */
    private byte[] affAssignmentBytes;

    /**
     * Empty constructor.
     */
    public GridDhtAffinityAssignmentResponse() {
        // No-op.
    }

    /**
     * @param cacheId Cache ID.
     * @param topVer Topology version.
     * @param affAssignment Affinity assignment.
     */
    public GridDhtAffinityAssignmentResponse(int cacheId, @NotNull AffinityTopologyVersion topVer,
        List<List<ClusterNode>> affAssignment) {
        this.cacheId = cacheId;
        this.topVer = topVer;
        this.affAssignment = affAssignment;
    }

    /** {@inheritDoc} */
    @Override public boolean partitionExchangeMessage() {
        return true;
    }

    /**
     * @return Topology version.
     */
    @Override public AffinityTopologyVersion topologyVersion() {
        return topVer;
    }

    /**
     * @return Affinity assignment.
     */
    public List<List<ClusterNode>> affinityAssignment() {
        return affAssignment;
    }

    /** {@inheritDoc} */
    @Override public byte directType() {
        return 29;
    }

    /** {@inheritDoc} */
    @Override public byte fieldsCount() {
        return 5;
    }

    /**
     * @param ctx Context.
     */
    @Override public void prepareMarshal(GridCacheSharedContext ctx) throws IgniteCheckedException {
        super.prepareMarshal(ctx);

        if (affAssignment != null)
            affAssignmentBytes = ctx.marshaller().marshal(affAssignment);
    }

    /** {@inheritDoc} */
    @SuppressWarnings("ForLoopReplaceableByForEach")
    @Override public void finishUnmarshal(GridCacheSharedContext ctx, ClassLoader ldr) throws IgniteCheckedException {
        super.finishUnmarshal(ctx, ldr);

        if (affAssignmentBytes != null) {
            affAssignment = ctx.marshaller().unmarshal(affAssignmentBytes, ldr);

            // TODO IGNITE-10: setting 'local' for nodes not needed when IGNITE-10 is implemented.
            int assignments = affAssignment.size();

            for (int n = 0; n < assignments; n++) {
                List<ClusterNode> nodes = affAssignment.get(n);

                int size = nodes.size();

                for (int i = 0; i < size; i++) {
                    ClusterNode node = nodes.get(i);

                    if (node instanceof TcpDiscoveryNode)
                        ((TcpDiscoveryNode)node).local(node.id().equals(ctx.localNodeId()));
                }
            }
        }
    }

    /** {@inheritDoc} */
    @Override public boolean writeTo(ByteBuffer buf, MessageWriter writer) {
        writer.setBuffer(buf);

        if (!super.writeTo(buf, writer))
            return false;

        if (!writer.isHeaderWritten()) {
            if (!writer.writeHeader(directType(), fieldsCount()))
                return false;

            writer.onHeaderWritten();
        }

        switch (writer.state()) {
            case 3:
                if (!writer.writeByteArray("affAssignmentBytes", affAssignmentBytes))
                    return false;

                writer.incrementState();

            case 4:
                if (!writer.writeMessage("topVer", topVer))
                    return false;

                writer.incrementState();
        }

        return true;
    }

    /** {@inheritDoc} */
    @Override public boolean readFrom(ByteBuffer buf, MessageReader reader) {
        reader.setBuffer(buf);

        if (!reader.beforeMessageRead())
            return false;

        if (!super.readFrom(buf, reader))
            return false;

        switch (reader.state()) {
            case 3:
                affAssignmentBytes = reader.readByteArray("affAssignmentBytes");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 4:
                topVer = reader.readMessage("topVer");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();
        }

        return reader.afterMessageRead(GridDhtAffinityAssignmentResponse.class);
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(GridDhtAffinityAssignmentResponse.class, this);
    }
}
dlnufox/ignite
modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/GridDhtAffinityAssignmentResponse.java
Java
apache-2.0
6,291
/*
 * Copyright 2002-2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.security.config.websocket;

import static org.springframework.security.config.Elements.*;

import java.util.Comparator;
import java.util.List;
import java.util.Map;

import org.springframework.beans.BeansException;
import org.springframework.beans.PropertyValue;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.config.BeanReference;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.beans.factory.config.RuntimeBeanReference;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.beans.factory.support.BeanDefinitionRegistryPostProcessor;
import org.springframework.beans.factory.support.ManagedList;
import org.springframework.beans.factory.support.ManagedMap;
import org.springframework.beans.factory.support.RootBeanDefinition;
import org.springframework.beans.factory.xml.BeanDefinitionParser;
import org.springframework.beans.factory.xml.ParserContext;
import org.springframework.beans.factory.xml.XmlReaderContext;
import org.springframework.messaging.simp.SimpMessageType;
import org.springframework.messaging.simp.annotation.support.SimpAnnotationMethodMessageHandler;
import org.springframework.security.access.vote.ConsensusBased;
import org.springframework.security.config.Elements;
import org.springframework.security.messaging.access.expression.ExpressionBasedMessageSecurityMetadataSourceFactory;
import org.springframework.security.messaging.access.expression.MessageExpressionVoter;
import org.springframework.security.messaging.access.intercept.ChannelSecurityInterceptor;
import org.springframework.security.messaging.context.AuthenticationPrincipalArgumentResolver;
import org.springframework.security.messaging.context.SecurityContextChannelInterceptor;
import org.springframework.security.messaging.util.matcher.SimpDestinationMessageMatcher;
import org.springframework.security.messaging.util.matcher.SimpMessageTypeMatcher;
import org.springframework.security.messaging.web.csrf.CsrfChannelInterceptor;
import org.springframework.security.messaging.web.socket.server.CsrfTokenHandshakeInterceptor;
import org.springframework.util.AntPathMatcher;
import org.springframework.util.PathMatcher;
import org.springframework.util.StringUtils;
import org.springframework.util.xml.DomUtils;
import org.w3c.dom.Element;

/**
 * Parses Spring Security's websocket namespace support. A simple example is:
 *
 * <code>
 * &lt;websocket-message-broker&gt;
 *     &lt;intercept-message pattern='/permitAll' access='permitAll' /&gt;
 *     &lt;intercept-message pattern='/denyAll' access='denyAll' /&gt;
 * &lt;/websocket-message-broker&gt;
 * </code>
 *
 * <p>
 * The above configuration will ensure that any SimpAnnotationMethodMessageHandler has the
 * AuthenticationPrincipalArgumentResolver registered as a custom argument resolver. It
 * also ensures that the SecurityContextChannelInterceptor is automatically registered for
 * the clientInboundChannel. Last, it ensures that a ChannelSecurityInterceptor is
 * registered with the clientInboundChannel.
 * </p>
 *
 * <p>
 * If finer control is necessary, the id attribute can be used as shown below:
 * </p>
 *
 * <code>
 * &lt;websocket-message-broker id="channelSecurityInterceptor"&gt;
 *     &lt;intercept-message pattern='/permitAll' access='permitAll' /&gt;
 *     &lt;intercept-message pattern='/denyAll' access='denyAll' /&gt;
 * &lt;/websocket-message-broker&gt;
 * </code>
 *
 * <p>
 * Now the configuration will only create a bean named ChannelSecurityInterceptor and
 * assign it to the id of channelSecurityInterceptor. Users can explicitly wire Spring
 * Security using the standard Spring Messaging XML namespace support.
 * </p>
 *
 * @author Rob Winch
 * @since 4.0
 */
public final class WebSocketMessageBrokerSecurityBeanDefinitionParser implements BeanDefinitionParser {

    private static final String ID_ATTR = "id";

    private static final String DISABLED_ATTR = "same-origin-disabled";

    private static final String PATTERN_ATTR = "pattern";

    private static final String ACCESS_ATTR = "access";

    private static final String TYPE_ATTR = "type";

    private static final String PATH_MATCHER_BEAN_NAME = "springSecurityMessagePathMatcher";

    /**
     * @param element
     * @param parserContext
     * @return
     */
    public BeanDefinition parse(Element element, ParserContext parserContext) {
        BeanDefinitionRegistry registry = parserContext.getRegistry();
        XmlReaderContext context = parserContext.getReaderContext();

        ManagedMap<BeanDefinition, String> matcherToExpression = new ManagedMap<>();

        String id = element.getAttribute(ID_ATTR);
        Element expressionHandlerElt = DomUtils.getChildElementByTagName(element, EXPRESSION_HANDLER);
        String expressionHandlerRef = expressionHandlerElt == null ? null : expressionHandlerElt.getAttribute("ref");
        boolean expressionHandlerDefined = StringUtils.hasText(expressionHandlerRef);

        boolean sameOriginDisabled = Boolean.parseBoolean(element.getAttribute(DISABLED_ATTR));

        List<Element> interceptMessages = DomUtils.getChildElementsByTagName(element, Elements.INTERCEPT_MESSAGE);
        for (Element interceptMessage : interceptMessages) {
            String matcherPattern = interceptMessage.getAttribute(PATTERN_ATTR);
            String accessExpression = interceptMessage.getAttribute(ACCESS_ATTR);
            String messageType = interceptMessage.getAttribute(TYPE_ATTR);

            BeanDefinition matcher = createMatcher(matcherPattern, messageType, parserContext, interceptMessage);
            matcherToExpression.put(matcher, accessExpression);
        }

        BeanDefinitionBuilder mds = BeanDefinitionBuilder
                .rootBeanDefinition(ExpressionBasedMessageSecurityMetadataSourceFactory.class);
        mds.setFactoryMethod("createExpressionMessageMetadataSource");
        mds.addConstructorArgValue(matcherToExpression);
        if (expressionHandlerDefined) {
            mds.addConstructorArgReference(expressionHandlerRef);
        }

        String mdsId = context.registerWithGeneratedName(mds.getBeanDefinition());

        ManagedList<BeanDefinition> voters = new ManagedList<>();
        BeanDefinitionBuilder messageExpressionVoterBldr = BeanDefinitionBuilder
                .rootBeanDefinition(MessageExpressionVoter.class);
        if (expressionHandlerDefined) {
            messageExpressionVoterBldr.addPropertyReference("expressionHandler", expressionHandlerRef);
        }
        voters.add(messageExpressionVoterBldr.getBeanDefinition());
        BeanDefinitionBuilder adm = BeanDefinitionBuilder.rootBeanDefinition(ConsensusBased.class);
        adm.addConstructorArgValue(voters);

        BeanDefinitionBuilder inboundChannelSecurityInterceptor = BeanDefinitionBuilder
                .rootBeanDefinition(ChannelSecurityInterceptor.class);
        inboundChannelSecurityInterceptor.addConstructorArgValue(registry.getBeanDefinition(mdsId));
        inboundChannelSecurityInterceptor.addPropertyValue("accessDecisionManager", adm.getBeanDefinition());
        String inSecurityInterceptorName = context
                .registerWithGeneratedName(inboundChannelSecurityInterceptor.getBeanDefinition());

        if (StringUtils.hasText(id)) {
            registry.registerAlias(inSecurityInterceptorName, id);

            if (!registry.containsBeanDefinition(PATH_MATCHER_BEAN_NAME)) {
                registry.registerBeanDefinition(PATH_MATCHER_BEAN_NAME, new RootBeanDefinition(AntPathMatcher.class));
            }
        }
        else {
            BeanDefinitionBuilder mspp = BeanDefinitionBuilder.rootBeanDefinition(MessageSecurityPostProcessor.class);
            mspp.addConstructorArgValue(inSecurityInterceptorName);
            mspp.addConstructorArgValue(sameOriginDisabled);
            context.registerWithGeneratedName(mspp.getBeanDefinition());
        }

        return null;
    }

    private BeanDefinition createMatcher(String matcherPattern, String messageType, ParserContext parserContext,
            Element interceptMessage) {
        boolean hasPattern = StringUtils.hasText(matcherPattern);
        boolean hasMessageType = StringUtils.hasText(messageType);

        if (!hasPattern) {
            BeanDefinitionBuilder matcher = BeanDefinitionBuilder.rootBeanDefinition(SimpMessageTypeMatcher.class);
            matcher.addConstructorArgValue(messageType);
            return matcher.getBeanDefinition();
        }

        String factoryName = null;
        if (hasPattern && hasMessageType) {
            SimpMessageType type = SimpMessageType.valueOf(messageType);
            if (SimpMessageType.MESSAGE == type) {
                factoryName = "createMessageMatcher";
            }
            else if (SimpMessageType.SUBSCRIBE == type) {
                factoryName = "createSubscribeMatcher";
            }
            else {
                parserContext.getReaderContext()
                        .error("Cannot use intercept-websocket@message-type=" + messageType
                                + " with a pattern because the type does not have a destination.", interceptMessage);
            }
        }

        BeanDefinitionBuilder matcher = BeanDefinitionBuilder.rootBeanDefinition(SimpDestinationMessageMatcher.class);
        matcher.setFactoryMethod(factoryName);
        matcher.addConstructorArgValue(matcherPattern);
        matcher.addConstructorArgValue(new RuntimeBeanReference("springSecurityMessagePathMatcher"));
        return matcher.getBeanDefinition();
    }

    static class MessageSecurityPostProcessor implements BeanDefinitionRegistryPostProcessor {
        /**
         * This is not available prior to Spring 4.2
         */
        private static final String WEB_SOCKET_AMMH_CLASS_NAME = "org.springframework.web.socket.messaging.WebSocketAnnotationMethodMessageHandler";

        private static final String CLIENT_INBOUND_CHANNEL_BEAN_ID = "clientInboundChannel";

        private static final String INTERCEPTORS_PROP = "interceptors";

        private static final String CUSTOM_ARG_RESOLVERS_PROP = "customArgumentResolvers";

        private final String inboundSecurityInterceptorId;

        private final boolean sameOriginDisabled;

        public MessageSecurityPostProcessor(String inboundSecurityInterceptorId, boolean sameOriginDisabled) {
            this.inboundSecurityInterceptorId = inboundSecurityInterceptorId;
            this.sameOriginDisabled = sameOriginDisabled;
        }

        public void postProcessBeanDefinitionRegistry(BeanDefinitionRegistry registry) throws BeansException {
            String[] beanNames = registry.getBeanDefinitionNames();
            for (String beanName : beanNames) {
                BeanDefinition bd = registry.getBeanDefinition(beanName);
                String beanClassName = bd.getBeanClassName();
                if (SimpAnnotationMethodMessageHandler.class.getName().equals(beanClassName)
                        || WEB_SOCKET_AMMH_CLASS_NAME.equals(beanClassName)) {
                    PropertyValue current = bd.getPropertyValues().getPropertyValue(CUSTOM_ARG_RESOLVERS_PROP);
                    ManagedList<Object> argResolvers = new ManagedList<>();
                    if (current != null) {
                        argResolvers.addAll((ManagedList<?>) current.getValue());
                    }
                    argResolvers.add(new RootBeanDefinition(AuthenticationPrincipalArgumentResolver.class));
                    bd.getPropertyValues().add(CUSTOM_ARG_RESOLVERS_PROP, argResolvers);

                    if (!registry.containsBeanDefinition(PATH_MATCHER_BEAN_NAME)) {
                        PropertyValue pathMatcherProp = bd.getPropertyValues().getPropertyValue("pathMatcher");
                        Object pathMatcher = pathMatcherProp == null ? null : pathMatcherProp.getValue();
                        if (pathMatcher instanceof BeanReference) {
                            registry.registerAlias(((BeanReference) pathMatcher).getBeanName(),
                                    PATH_MATCHER_BEAN_NAME);
                        }
                    }
                }
                else if ("org.springframework.web.socket.server.support.WebSocketHttpRequestHandler"
                        .equals(beanClassName)) {
                    addCsrfTokenHandshakeInterceptor(bd);
                }
                else if ("org.springframework.web.socket.sockjs.transport.TransportHandlingSockJsService"
                        .equals(beanClassName)) {
                    addCsrfTokenHandshakeInterceptor(bd);
                }
                else if ("org.springframework.web.socket.sockjs.transport.handler.DefaultSockJsService"
                        .equals(beanClassName)) {
                    addCsrfTokenHandshakeInterceptor(bd);
                }
            }

            if (!registry.containsBeanDefinition(CLIENT_INBOUND_CHANNEL_BEAN_ID)) {
                return;
            }
            ManagedList<Object> interceptors = new ManagedList();
            interceptors.add(new RootBeanDefinition(SecurityContextChannelInterceptor.class));
            if (!sameOriginDisabled) {
                interceptors.add(new RootBeanDefinition(CsrfChannelInterceptor.class));
            }
            interceptors.add(registry.getBeanDefinition(inboundSecurityInterceptorId));

            BeanDefinition inboundChannel = registry.getBeanDefinition(CLIENT_INBOUND_CHANNEL_BEAN_ID);
            PropertyValue currentInterceptorsPv = inboundChannel.getPropertyValues()
                    .getPropertyValue(INTERCEPTORS_PROP);
            if (currentInterceptorsPv != null) {
                ManagedList<?> currentInterceptors = (ManagedList<?>) currentInterceptorsPv.getValue();
                interceptors.addAll(currentInterceptors);
            }

            inboundChannel.getPropertyValues().add(INTERCEPTORS_PROP, interceptors);

            if (!registry.containsBeanDefinition(PATH_MATCHER_BEAN_NAME)) {
                registry.registerBeanDefinition(PATH_MATCHER_BEAN_NAME, new RootBeanDefinition(AntPathMatcher.class));
            }
        }

        private void addCsrfTokenHandshakeInterceptor(BeanDefinition bd) {
            if (sameOriginDisabled) {
                return;
            }
            String interceptorPropertyName = "handshakeInterceptors";
            ManagedList<? super Object> interceptors = new ManagedList<>();
            interceptors.add(new RootBeanDefinition(CsrfTokenHandshakeInterceptor.class));
            interceptors.addAll((ManagedList<Object>) bd.getPropertyValues().get(interceptorPropertyName));
            bd.getPropertyValues().add(interceptorPropertyName, interceptors);
        }

        public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException {
        }
    }

    static class DelegatingPathMatcher implements PathMatcher {

        private PathMatcher delegate = new AntPathMatcher();

        public boolean isPattern(String path) {
            return delegate.isPattern(path);
        }

        public boolean match(String pattern, String path) {
            return delegate.match(pattern, path);
        }

        public boolean matchStart(String pattern, String path) {
            return delegate.matchStart(pattern, path);
        }

        public String extractPathWithinPattern(String pattern, String path) {
            return delegate.extractPathWithinPattern(pattern, path);
        }

        public Map<String, String> extractUriTemplateVariables(String pattern, String path) {
            return delegate.extractUriTemplateVariables(pattern, path);
        }

        public Comparator<String> getPatternComparator(String path) {
            return delegate.getPatternComparator(path);
        }

        public String combine(String pattern1, String pattern2) {
            return delegate.combine(pattern1, pattern2);
        }

        void setPathMatcher(PathMatcher pathMatcher) {
            this.delegate = pathMatcher;
        }
    }
}
kazuki43zoo/spring-security
config/src/main/java/org/springframework/security/config/websocket/WebSocketMessageBrokerSecurityBeanDefinitionParser.java
Java
apache-2.0
15,241
package net.mgsx.game.core.binding;

import com.badlogic.gdx.scenes.scene2d.Actor;
import com.badlogic.gdx.scenes.scene2d.Group;
import com.badlogic.gdx.scenes.scene2d.Stage;
import com.badlogic.gdx.utils.Array;
import com.badlogic.gdx.utils.ObjectMap;
import com.badlogic.gdx.utils.ObjectMap.Entry;

public class BindingManager {
    final private static ObjectMap<String, Binding> bindings = new ObjectMap<String, Binding>();

    static final Array<Learner> learners = new Array<Learner>(new Learner[]{new KeyboardLearner()});

    public static void setBindings(Binding b) {
        bindings.put(b.target, b);
    }

    public static Binding getBinding(String target) {
        return bindings.get(target);
    }

    public static void clear() {
        for (Entry<String, Binding> entry : BindingManager.bindings) {
            for (Learner learner : BindingManager.learners) {
                learner.unbind(entry.value);
            }
        }
        BindingManager.bindings.clear();
    }

    public static void applyBindings(Binding b, Stage stage) {
        bindings.put(b.target, b);
        bindActor(stage.getRoot());
        for (Learner learner : BindingManager.learners) {
            learner.bind(b);
        }
    }

    public static void applyBindings(String key) {
        Binding b = bindings.get(key);
        if (b != null) {
            for (Learner learner : BindingManager.learners) {
                learner.bind(b);
            }
        }
    }

    private static void bindActor(Actor actor) {
        if (actor instanceof Learnable) {
            final Learnable learnable = (Learnable) actor;
            Binding bind = BindingManager.getBinding(learnable.bindKey());
            if (bind != null) {
                bind.accessor = learnable.accessorToBind();
            }
        }
        if (actor instanceof Group) {
            for (Actor child : ((Group) actor).getChildren()) {
                bindActor(child);
            }
        }
    }

    public static ObjectMap<String, Binding> bindings() {
        return bindings;
    }
}
mgsx-dev/gdx-kit
core/src/net/mgsx/game/core/binding/BindingManager.java
Java
apache-2.0
1,782
/*
 * Copyright (C) 2008 ZXing authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.zxing.client.android.share;

import android.provider.ContactsContract;
import com.google.zxing.BarcodeFormat;
import com.google.zxing.client.android.Contents;
import com.google.zxing.client.android.Intents;
import com.google.zxing.client.android.R;
import android.app.Activity;
import android.content.ContentResolver;
import android.content.Intent;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.provider.BaseColumns;
import android.provider.Browser;
import android.text.ClipboardManager;
import android.util.Log;
import android.view.KeyEvent;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;

/**
 * Barcode Scanner can share data like contacts and bookmarks by displaying a QR Code on screen,
 * such that another user can scan the barcode with their phone.
 *
 * @author dswitkin@google.com (Daniel Switkin)
 */
public final class ShareActivity extends Activity {

  private static final String TAG = ShareActivity.class.getSimpleName();

  private static final int PICK_BOOKMARK = 0;
  private static final int PICK_CONTACT = 1;
  private static final int PICK_APP = 2;

  private Button clipboardButton;

  private final Button.OnClickListener contactListener = new Button.OnClickListener() {
    @Override
    public void onClick(View v) {
      Intent intent = new Intent(Intent.ACTION_PICK, ContactsContract.Contacts.CONTENT_URI);
      intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
      startActivityForResult(intent, PICK_CONTACT);
    }
  };

  private final Button.OnClickListener bookmarkListener = new Button.OnClickListener() {
    @Override
    public void onClick(View v) {
      Intent intent = new Intent(Intent.ACTION_PICK);
      intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
      intent.setClassName(ShareActivity.this, BookmarkPickerActivity.class.getName());
      startActivityForResult(intent, PICK_BOOKMARK);
    }
  };

  private final Button.OnClickListener appListener = new Button.OnClickListener() {
    @Override
    public void onClick(View v) {
      Intent intent = new Intent(Intent.ACTION_PICK);
      intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
      intent.setClassName(ShareActivity.this, AppPickerActivity.class.getName());
      startActivityForResult(intent, PICK_APP);
    }
  };

  private final Button.OnClickListener clipboardListener = new Button.OnClickListener() {
    @Override
    public void onClick(View v) {
      ClipboardManager clipboard = (ClipboardManager) getSystemService(CLIPBOARD_SERVICE);
      // Should always be true, because we grey out the clipboard button in onResume() if it's empty
      if (clipboard.hasText()) {
        launchSearch(clipboard.getText().toString());
      }
    }
  };

  private final View.OnKeyListener textListener = new View.OnKeyListener() {
    @Override
    public boolean onKey(View view, int keyCode, KeyEvent event) {
      if (keyCode == KeyEvent.KEYCODE_ENTER && event.getAction() == KeyEvent.ACTION_DOWN) {
        String text = ((TextView) view).getText().toString();
        if (text != null && !text.isEmpty()) {
          launchSearch(text);
        }
        return true;
      }
      return false;
    }
  };

  private void launchSearch(String text) {
    Intent intent = new Intent(Intents.Encode.ACTION);
    intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
    intent.putExtra(Intents.Encode.TYPE, Contents.Type.TEXT);
    intent.putExtra(Intents.Encode.DATA, text);
    intent.putExtra(Intents.Encode.FORMAT, BarcodeFormat.QR_CODE.toString());
    startActivity(intent);
  }

  @Override
  public void onCreate(Bundle icicle) {
    super.onCreate(icicle);
    setContentView(R.layout.share);

    findViewById(R.id.share_contact_button).setOnClickListener(contactListener);
    findViewById(R.id.share_bookmark_button).setOnClickListener(bookmarkListener);
    findViewById(R.id.share_app_button).setOnClickListener(appListener);
    clipboardButton = (Button) findViewById(R.id.share_clipboard_button);
    clipboardButton.setOnClickListener(clipboardListener);
    findViewById(R.id.share_text_view).setOnKeyListener(textListener);
  }

  @Override
  protected void onResume() {
    super.onResume();
    ClipboardManager clipboard = (ClipboardManager) getSystemService(CLIPBOARD_SERVICE);
    clipboardButton.setEnabled(clipboard.hasText());
  }

  @Override
  public void onActivityResult(int requestCode, int resultCode, Intent intent) {
    if (resultCode == RESULT_OK) {
      switch (requestCode) {
        case PICK_BOOKMARK:
        case PICK_APP:
          showTextAsBarcode(intent.getStringExtra(Browser.BookmarkColumns.URL));
          break;
        case PICK_CONTACT:
          // Data field is content://contacts/people/984
          showContactAsBarcode(intent.getData());
          break;
      }
    }
  }

  private void showTextAsBarcode(String text) {
    Log.i(TAG, "Showing text as barcode: " + text);
    if (text == null) {
      return; // Show error?
    }
    Intent intent = new Intent(Intents.Encode.ACTION);
    intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
    intent.putExtra(Intents.Encode.TYPE, Contents.Type.TEXT);
    intent.putExtra(Intents.Encode.DATA, text);
    intent.putExtra(Intents.Encode.FORMAT, BarcodeFormat.QR_CODE.toString());
    startActivity(intent);
  }

  /**
   * Takes a contact Uri and does the necessary database lookups to retrieve that person's info,
   * then sends an Encode intent to render it as a QR Code.
   *
   * @param contactUri A Uri of the form content://contacts/people/17
   */
  private void showContactAsBarcode(Uri contactUri) {
    Log.i(TAG, "Showing contact URI as barcode: " + contactUri);
    if (contactUri == null) {
      return; // Show error?
    }
    ContentResolver resolver = getContentResolver();
    Cursor cursor;
    try {
      // We're seeing about six reports a week of this exception although I don't understand why.
      cursor = resolver.query(contactUri, null, null, null, null);
    } catch (IllegalArgumentException ignored) {
      return;
    }
    if (cursor == null) {
      return;
    }

    String id;
    String name;
    boolean hasPhone;
    try {
      if (!cursor.moveToFirst()) {
        return;
      }
      id = cursor.getString(cursor.getColumnIndex(BaseColumns._ID));
      name = cursor.getString(cursor.getColumnIndex(ContactsContract.Contacts.DISPLAY_NAME));
      hasPhone = cursor.getInt(cursor.getColumnIndex(ContactsContract.Contacts.HAS_PHONE_NUMBER)) > 0;
    } finally {
      cursor.close();
    }

    // Don't require a name to be present, this contact might be just a phone number.
    Bundle bundle = new Bundle();
    if (name != null && !name.isEmpty()) {
      bundle.putString(ContactsContract.Intents.Insert.NAME, massageContactData(name));
    }

    if (hasPhone) {
      Cursor phonesCursor = resolver.query(ContactsContract.CommonDataKinds.Phone.CONTENT_URI,
          null,
          ContactsContract.CommonDataKinds.Phone.CONTACT_ID + '=' + id,
          null,
          null);
      if (phonesCursor != null) {
        try {
          int foundPhone = 0;
          int phonesNumberColumn = phonesCursor.getColumnIndex(ContactsContract.CommonDataKinds.Phone.NUMBER);
          while (phonesCursor.moveToNext() && foundPhone < Contents.PHONE_KEYS.length) {
            String number = phonesCursor.getString(phonesNumberColumn);
            if (number != null && !number.isEmpty()) {
              bundle.putString(Contents.PHONE_KEYS[foundPhone], massageContactData(number));
            }
            foundPhone++;
          }
        } finally {
          phonesCursor.close();
        }
      }
    }

    Cursor methodsCursor = resolver.query(ContactsContract.CommonDataKinds.StructuredPostal.CONTENT_URI,
        null,
        ContactsContract.CommonDataKinds.StructuredPostal.CONTACT_ID + '=' + id,
        null,
        null);
    if (methodsCursor != null) {
      try {
        if (methodsCursor.moveToNext()) {
          String data = methodsCursor.getString(
              methodsCursor.getColumnIndex(ContactsContract.CommonDataKinds.StructuredPostal.FORMATTED_ADDRESS));
          if (data != null && !data.isEmpty()) {
            bundle.putString(ContactsContract.Intents.Insert.POSTAL, massageContactData(data));
          }
        }
      } finally {
        methodsCursor.close();
      }
    }

    Cursor emailCursor = resolver.query(ContactsContract.CommonDataKinds.Email.CONTENT_URI,
        null,
        ContactsContract.CommonDataKinds.Email.CONTACT_ID + '=' + id,
        null,
        null);
    if (emailCursor != null) {
      try {
        int foundEmail = 0;
        int emailColumn = emailCursor.getColumnIndex(ContactsContract.CommonDataKinds.Email.DATA);
        while (emailCursor.moveToNext() && foundEmail < Contents.EMAIL_KEYS.length) {
          String email = emailCursor.getString(emailColumn);
          if (email != null && !email.isEmpty()) {
            bundle.putString(Contents.EMAIL_KEYS[foundEmail], massageContactData(email));
          }
          foundEmail++;
        }
      } finally {
        emailCursor.close();
      }
    }

    Intent intent = new Intent(Intents.Encode.ACTION);
    intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
    intent.putExtra(Intents.Encode.TYPE, Contents.Type.CONTACT);
    intent.putExtra(Intents.Encode.DATA, bundle);
    intent.putExtra(Intents.Encode.FORMAT, BarcodeFormat.QR_CODE.toString());
    Log.i(TAG, "Sending bundle for encoding: " + bundle);
    startActivity(intent);
  }

  private static String massageContactData(String data) {
    // For now -- make sure we don't put newlines in shared contact data. It messes up
    // any known encoding of contact data. Replace with space.
    if (data.indexOf('\n') >= 0) {
      data = data.replace("\n", " ");
    }
    if (data.indexOf('\r') >= 0) {
      data = data.replace("\r", " ");
    }
    return data;
  }
}
peterdocter/zxing
android/src/com/google/zxing/client/android/share/ShareActivity.java
Java
apache-2.0
11,036
package org.ektorp.impl;

import java.util.ArrayList;
import java.util.List;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.databind.ObjectMapper;

import org.ektorp.DbAccessException;
import org.ektorp.http.HttpResponse;
import org.ektorp.http.StdResponseHandler;

/**
 *
 * @author henrik lundgren
 *
 */
public class DocIdResponseHandler extends StdResponseHandler<List<String>> {

    private final JsonFactory jsonFactory;

    public DocIdResponseHandler(ObjectMapper om) {
        jsonFactory = om.getJsonFactory();
    }

    @Override
    public List<String> success(HttpResponse hr) throws Exception {
        JsonParser jp = jsonFactory.createJsonParser(hr.getContent());
        if (jp.nextToken() != JsonToken.START_OBJECT) {
            throw new DbAccessException("Expected data to start with an Object");
        }

        boolean inRow = false;
        List<String> result = null;
        while (jp.nextToken() != null) {
            switch (jp.getCurrentToken()) {
            case START_ARRAY:
                inRow = true;
                break;
            case END_ARRAY:
                inRow = false;
                break;
            case FIELD_NAME:
                String n = jp.getCurrentName();
                if (inRow) {
                    if ("id".equals(n)) {
                        jp.nextToken();
                        result.add(jp.getText());
                    }
                } else if ("total_rows".equals(n)) {
                    jp.nextToken();
                    result = new ArrayList<String>(jp.getIntValue());
                }
                break;
            }
        }
        return result;
    }
}
JMaltat/fragaria-ektorp
org.ektorp/src/main/java/org/ektorp/impl/DocIdResponseHandler.java
Java
apache-2.0
1,471
/**
 * Copyright (c) 2013-2020 Contributors to the Eclipse Foundation
 *
 * <p> See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.adapter;

import org.locationtech.geowave.core.geotime.store.dimension.SpatialField;
import org.locationtech.geowave.core.geotime.util.GeometryUtils;
import org.locationtech.geowave.core.store.adapter.FieldDescriptorBuilder;
import org.opengis.referencing.crs.CoordinateReferenceSystem;

/**
 * A field descriptor builder that includes helper functions for spatial indexing hints and
 * `CoordinateReferenceSystem`.
 *
 * @param <T> the adapter field type
 */
public class SpatialFieldDescriptorBuilder<T> extends
    FieldDescriptorBuilder<T, SpatialFieldDescriptor<T>, SpatialFieldDescriptorBuilder<T>> {

  protected CoordinateReferenceSystem crs = GeometryUtils.getDefaultCRS();

  public SpatialFieldDescriptorBuilder(final Class<T> bindingClass) {
    super(bindingClass);
  }

  /**
   * Hint that the field contains both latitude and longitude information and should be used in
   * spatial indexing.
   *
   * @return the spatial field descriptor builder
   */
  public SpatialFieldDescriptorBuilder<T> spatialIndexHint() {
    return this.indexHint(SpatialField.LONGITUDE_DIMENSION_HINT).indexHint(
        SpatialField.LATITUDE_DIMENSION_HINT);
  }

  /**
   * Hint that the field contains latitude information and should be used in spatial indexing.
   *
   * @return the spatial field descriptor builder
   */
  public SpatialFieldDescriptorBuilder<T> latitudeIndexHint() {
    return this.indexHint(SpatialField.LATITUDE_DIMENSION_HINT);
  }

  /**
   * Hint that the field contains longitude information and should be used in spatial indexing.
   *
   * @return the spatial field descriptor builder
   */
  public SpatialFieldDescriptorBuilder<T> longitudeIndexHint() {
    return this.indexHint(SpatialField.LONGITUDE_DIMENSION_HINT);
  }

  /**
   * Specify the coordinate reference system of the spatial field.
   *
   * @return the spatial field descriptor builder
   */
  public SpatialFieldDescriptorBuilder<T> crs(final CoordinateReferenceSystem crs) {
    this.crs = crs;
    return this;
  }

  @Override
  public SpatialFieldDescriptor<T> build() {
    return new SpatialFieldDescriptor<>(bindingClass, fieldName, indexHints, crs);
  }
}
locationtech/geowave
core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/SpatialFieldDescriptorBuilder.java
Java
apache-2.0
2,649
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 *
 * Copyright 2012-2019 the original author or authors.
 */
package org.assertj.core.api;

import java.util.List;
import java.util.Map;

/**
 * Concrete assertions for {@link Map}s without any final methods to allow proxying.
 */
public class ProxyableMapAssert<KEY, VALUE>
    extends AbstractMapAssert<ProxyableMapAssert<KEY, VALUE>, Map<KEY, VALUE>, KEY, VALUE> {

  public ProxyableMapAssert(Map<KEY, VALUE> actual) {
    super(actual, ProxyableMapAssert.class);
  }

  @Override
  protected <ELEMENT> AbstractListAssert<?, List<? extends ELEMENT>, ELEMENT, ObjectAssert<ELEMENT>> newListAssertInstance(List<? extends ELEMENT> newActual) {
    return new ProxyableListAssert<>(newActual);
  }
}
xasx/assertj-core
src/main/java/org/assertj/core/api/ProxyableMapAssert.java
Java
apache-2.0
1,243
// This is a generated file. Not intended for manual editing.
package org.mule.tooling.lang.dw.parser.psi;

import java.util.List;
import org.jetbrains.annotations.*;
import com.intellij.psi.PsiElement;

public interface WeaveOutputDirective extends WeaveDirective {

  @Nullable
  WeaveDataType getDataType();

  @Nullable
  WeaveOptions getOptions();

}
machaval/mule-intellij-plugins
data-weave-plugin/src/main/gen/org/mule/tooling/lang/dw/parser/psi/WeaveOutputDirective.java
Java
apache-2.0
356
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("Joke.Data")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("Joke.Data")]
[assembly: AssemblyCopyright("Copyright © 2015")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]

// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]

// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("e791180b-b34a-482d-9589-0314f4fa77e6")]

// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
huluwa/superjokes
src/Joke.Data/Properties/AssemblyInfo.cs
C#
apache-2.0
1,394
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright [2010] [Anso Labs, LLC]
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Take uploaded bucket contents and register them as disk images (AMIs).
Requires decryption using keys in the manifest.
"""

# TODO(jesse): Got these from Euca2ools, will need to revisit them

import binascii
import glob
import json
import os
import shutil
import tarfile
import tempfile
from xml.etree import ElementTree

from nova import exception
from nova import flags
from nova import utils
from nova.objectstore import bucket

FLAGS = flags.FLAGS
flags.DEFINE_string('images_path', utils.abspath('../images'),
                    'path to decrypted images')


class Image(object):
    def __init__(self, image_id):
        self.image_id = image_id
        self.path = os.path.abspath(os.path.join(FLAGS.images_path, image_id))
        if not self.path.startswith(os.path.abspath(FLAGS.images_path)) or \
           not os.path.isdir(self.path):
            raise exception.NotFound

    def delete(self):
        for fn in ['info.json', 'image']:
            try:
                os.unlink(os.path.join(self.path, fn))
            except:
                pass
        try:
            os.rmdir(self.path)
        except:
            pass

    def is_authorized(self, context):
        try:
            return self.metadata['isPublic'] or context.user.is_admin() or \
                   self.metadata['imageOwnerId'] == context.project.id
        except:
            return False

    def set_public(self, state):
        md = self.metadata
        md['isPublic'] = state
        with open(os.path.join(self.path, 'info.json'), 'w') as f:
            json.dump(md, f)

    @staticmethod
    def all():
        images = []
        for fn in glob.glob("%s/*/info.json" % FLAGS.images_path):
            try:
                image_id = fn.split('/')[-2]
                images.append(Image(image_id))
            except:
                pass
        return images

    @property
    def owner_id(self):
        return self.metadata['imageOwnerId']

    @property
    def metadata(self):
        with open(os.path.join(self.path, 'info.json')) as f:
            return json.load(f)

    @staticmethod
    def create(image_id, image_location, context):
        image_path = os.path.join(FLAGS.images_path, image_id)
        os.makedirs(image_path)

        bucket_name = image_location.split("/")[0]
        manifest_path = image_location[len(bucket_name)+1:]
        bucket_object = bucket.Bucket(bucket_name)

        manifest = ElementTree.fromstring(bucket_object[manifest_path].read())
        image_type = 'machine'

        try:
            kernel_id = manifest.find("machine_configuration/kernel_id").text
            if kernel_id == 'true':
                image_type = 'kernel'
        except:
            pass

        try:
            ramdisk_id = manifest.find("machine_configuration/ramdisk_id").text
            if ramdisk_id == 'true':
                image_type = 'ramdisk'
        except:
            pass

        info = {
            'imageId': image_id,
            'imageLocation': image_location,
            'imageOwnerId': context.project.id,
            'isPublic': False,  # FIXME: grab public from manifest
            'architecture': 'x86_64',  # FIXME: grab architecture from manifest
            'type': image_type
        }

        def write_state(state):
            info['imageState'] = state
            with open(os.path.join(image_path, 'info.json'), "w") as f:
                json.dump(info, f)

        write_state('pending')

        encrypted_filename = os.path.join(image_path, 'image.encrypted')
        with open(encrypted_filename, 'w') as f:
            for filename in manifest.find("image").getiterator("filename"):
                shutil.copyfileobj(bucket_object[filename.text].file, f)

        write_state('decrypting')

        # FIXME: grab kernelId and ramdiskId from bundle manifest
        encrypted_key = binascii.a2b_hex(manifest.find("image/ec2_encrypted_key").text)
        encrypted_iv = binascii.a2b_hex(manifest.find("image/ec2_encrypted_iv").text)
        cloud_private_key = os.path.join(FLAGS.ca_path, "private/cakey.pem")

        decrypted_filename = os.path.join(image_path, 'image.tar.gz')
        Image.decrypt_image(encrypted_filename, encrypted_key, encrypted_iv,
                            cloud_private_key, decrypted_filename)

        write_state('untarring')

        image_file = Image.untarzip_image(image_path, decrypted_filename)
        shutil.move(os.path.join(image_path, image_file),
                    os.path.join(image_path, 'image'))

        write_state('available')

        os.unlink(decrypted_filename)
        os.unlink(encrypted_filename)

    @staticmethod
    def decrypt_image(encrypted_filename, encrypted_key, encrypted_iv,
                      cloud_private_key, decrypted_filename):
        key, err = utils.execute('openssl rsautl -decrypt -inkey %s' % cloud_private_key,
                                 encrypted_key)
        if err:
            raise exception.Error("Failed to decrypt private key: %s" % err)
        iv, err = utils.execute('openssl rsautl -decrypt -inkey %s' % cloud_private_key,
                                encrypted_iv)
        if err:
            raise exception.Error("Failed to decrypt initialization vector: %s" % err)
        out, err = utils.execute(
            'openssl enc -d -aes-128-cbc -in %s -K %s -iv %s -out %s'
            % (encrypted_filename, key, iv, decrypted_filename))
        if err:
            raise exception.Error("Failed to decrypt image file %s : %s"
                                  % (encrypted_filename, err))

    @staticmethod
    def untarzip_image(path, filename):
        tar_file = tarfile.open(filename, "r|gz")
        tar_file.extractall(path)
        image_file = tar_file.getnames()[0]
        tar_file.close()
        return image_file
movmov/cc
nova/objectstore/image.py
Python
apache-2.0
6,265
using Cirrious.CrossCore.Plugins;

namespace AzureAccessControl.Sample.Droid.Bootstrap
{
    public class VisibilityPluginBootstrap
        : MvxPluginBootstrapAction<Cirrious.MvvmCross.Plugins.Visibility.PluginLoader>
    {
    }
}
softlion/Cheesebaron.MvxPlugins
Samples/AzureAccessControl.Sample.Droid/Bootstrap/VisibilityPluginBootstrap.cs
C#
apache-2.0
232
/*
 * Copyright 2016-2020 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package examples.springbatch.paging;

import static examples.springbatch.mapper.PersonDynamicSqlSupport.*;
import static org.mybatis.dynamic.sql.SqlBuilder.*;

import javax.sql.DataSource;

import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.dynamic.sql.select.render.SelectStatementProvider;
import org.mybatis.dynamic.sql.update.render.UpdateStatementProvider;
import org.mybatis.dynamic.sql.util.springbatch.SpringBatchUtility;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.annotation.MapperScan;
import org.mybatis.spring.batch.MyBatisBatchItemWriter;
import org.mybatis.spring.batch.MyBatisPagingItemReader;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.convert.converter.Converter;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType;
import org.springframework.transaction.PlatformTransactionManager;

import examples.springbatch.common.PersonRecord;
import examples.springbatch.mapper.PersonMapper;

@EnableBatchProcessing
@Configuration
@ComponentScan("examples.springbatch.common")
@MapperScan("examples.springbatch.mapper")
public class PagingReaderBatchConfiguration {

    @Autowired
    private JobBuilderFactory jobBuilderFactory;

    @Autowired
    private StepBuilderFactory stepBuilderFactory;

    @Bean
    public DataSource dataSource() {
        return new EmbeddedDatabaseBuilder()
                .setType(EmbeddedDatabaseType.HSQL)
                .addScript("classpath:/org/springframework/batch/core/schema-drop-hsqldb.sql")
                .addScript("classpath:/org/springframework/batch/core/schema-hsqldb.sql")
                .addScript("classpath:/examples/springbatch/schema.sql")
                .addScript("classpath:/examples/springbatch/data.sql")
                .build();
    }

    @Bean
    public SqlSessionFactory sqlSessionFactory(DataSource dataSource) throws Exception {
        SqlSessionFactoryBean sessionFactory = new SqlSessionFactoryBean();
        sessionFactory.setDataSource(dataSource);
        return sessionFactory.getObject();
    }

    @Bean
    public PlatformTransactionManager transactionManager(DataSource dataSource) {
        return new DataSourceTransactionManager(dataSource);
    }

    @Bean
    public MyBatisPagingItemReader<PersonRecord> reader(SqlSessionFactory sqlSessionFactory) {
        SelectStatementProvider selectStatement = SpringBatchUtility.selectForPaging(person.allColumns())
                .from(person)
                .where(forPagingTest, isEqualTo(true))
                .orderBy(id)
                .build()
                .render();

        MyBatisPagingItemReader<PersonRecord> reader = new MyBatisPagingItemReader<>();
        reader.setQueryId(PersonMapper.class.getName() + ".selectMany");
        reader.setSqlSessionFactory(sqlSessionFactory);
        reader.setParameterValues(SpringBatchUtility.toParameterValues(selectStatement));
        reader.setPageSize(7);
        return reader;
    }

    @Bean
    public MyBatisBatchItemWriter<PersonRecord> writer(SqlSessionFactory sqlSessionFactory,
            Converter<PersonRecord, UpdateStatementProvider> convertor) {
        MyBatisBatchItemWriter<PersonRecord> writer = new MyBatisBatchItemWriter<>();
        writer.setSqlSessionFactory(sqlSessionFactory);
        writer.setItemToParameterConverter(convertor);
        writer.setStatementId(PersonMapper.class.getName() + ".update");
        return writer;
    }

    @Bean
    public Step step1(ItemReader<PersonRecord> reader, ItemProcessor<PersonRecord, PersonRecord> processor,
            ItemWriter<PersonRecord> writer) {
        return stepBuilderFactory.get("step1")
                .<PersonRecord, PersonRecord>chunk(7)
                .reader(reader)
                .processor(processor)
                .writer(writer)
                .build();
    }

    @Bean
    public Job upperCaseLastName(Step step1) {
        return jobBuilderFactory.get("upperCaseLastName")
                .incrementer(new RunIdIncrementer())
                .flow(step1)
                .end()
                .build();
    }
}
jeffgbutler/mybatis-qbe
src/test/java/examples/springbatch/paging/PagingReaderBatchConfiguration.java
Java
apache-2.0
5,674
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
 * Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.datapipeline.model.transform;

import java.io.ByteArrayInputStream;
import java.util.Collections;
import java.util.Map;
import java.util.List;
import java.util.regex.Pattern;

import com.amazonaws.AmazonClientException;
import com.amazonaws.Request;
import com.amazonaws.DefaultRequest;
import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.datapipeline.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.util.BinaryUtils;
import com.amazonaws.util.StringUtils;
import com.amazonaws.util.IdempotentUtils;
import com.amazonaws.util.StringInputStream;
import com.amazonaws.protocol.json.*;

/**
 * SetStatusRequest Marshaller
 */
public class SetStatusRequestMarshaller implements
        Marshaller<Request<SetStatusRequest>, SetStatusRequest> {

    private final SdkJsonProtocolFactory protocolFactory;

    public SetStatusRequestMarshaller(SdkJsonProtocolFactory protocolFactory) {
        this.protocolFactory = protocolFactory;
    }

    public Request<SetStatusRequest> marshall(SetStatusRequest setStatusRequest) {

        if (setStatusRequest == null) {
            throw new AmazonClientException(
                    "Invalid argument passed to marshall(...)");
        }

        Request<SetStatusRequest> request = new DefaultRequest<SetStatusRequest>(
                setStatusRequest, "DataPipeline");
        request.addHeader("X-Amz-Target", "DataPipeline.SetStatus");

        request.setHttpMethod(HttpMethodName.POST);

        request.setResourcePath("");

        try {
            final StructuredJsonGenerator jsonGenerator = protocolFactory
                    .createGenerator();

            jsonGenerator.writeStartObject();

            if (setStatusRequest.getPipelineId() != null) {
                jsonGenerator.writeFieldName("pipelineId").writeValue(
                        setStatusRequest.getPipelineId());
            }

            com.amazonaws.internal.SdkInternalList<String> objectIdsList = (com.amazonaws.internal.SdkInternalList<String>) setStatusRequest
                    .getObjectIds();
            if (!objectIdsList.isEmpty() || !objectIdsList.isAutoConstruct()) {
                jsonGenerator.writeFieldName("objectIds");
                jsonGenerator.writeStartArray();
                for (String objectIdsListValue : objectIdsList) {
                    if (objectIdsListValue != null) {
                        jsonGenerator.writeValue(objectIdsListValue);
                    }
                }
                jsonGenerator.writeEndArray();
            }

            if (setStatusRequest.getStatus() != null) {
                jsonGenerator.writeFieldName("status").writeValue(
                        setStatusRequest.getStatus());
            }

            jsonGenerator.writeEndObject();

            byte[] content = jsonGenerator.getBytes();
            request.setContent(new ByteArrayInputStream(content));
            request.addHeader("Content-Length",
                    Integer.toString(content.length));
            request.addHeader("Content-Type", jsonGenerator.getContentType());
        } catch (Throwable t) {
            throw new AmazonClientException(
                    "Unable to marshall request to JSON: " + t.getMessage(), t);
        }

        return request;
    }

}
nterry/aws-sdk-java
aws-java-sdk-datapipeline/src/main/java/com/amazonaws/services/datapipeline/model/transform/SetStatusRequestMarshaller.java
Java
apache-2.0
3,921
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.processor;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.stream.Collectors;

import org.apache.camel.AsyncCallback;
import org.apache.camel.AsyncProcessor;
import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.Navigate;
import org.apache.camel.Processor;
import org.apache.camel.Traceable;
import org.apache.camel.spi.IdAware;
import org.apache.camel.support.AsyncProcessorConverterHelper;
import org.apache.camel.support.AsyncProcessorSupport;
import org.apache.camel.support.ExchangeHelper;
import org.apache.camel.support.service.ServiceHelper;

import static org.apache.camel.processor.PipelineHelper.continueProcessing;

/**
 * Creates a Pipeline pattern where the output of the previous step is sent as
 * input to the next step, reusing the same message exchanges
 */
public class Pipeline extends AsyncProcessorSupport implements Navigate<Processor>, Traceable, IdAware {

    private final CamelContext camelContext;
    private List<AsyncProcessor> processors;
    private String id;

    public Pipeline(CamelContext camelContext, Collection<Processor> processors) {
        this.camelContext = camelContext;
        this.processors = processors.stream().map(AsyncProcessorConverterHelper::convert).collect(Collectors.toList());
    }

    public static Processor newInstance(CamelContext camelContext, List<Processor> processors) {
        if (processors.isEmpty()) {
            return null;
        } else if (processors.size() == 1) {
            return processors.get(0);
        }
        return new Pipeline(camelContext, processors);
    }

    public static Processor newInstance(final CamelContext camelContext, final Processor... processors) {
        if (processors == null || processors.length == 0) {
            return null;
        } else if (processors.length == 1) {
            return processors[0];
        }

        final List<Processor> toBeProcessed = new ArrayList<>(processors.length);
        for (Processor processor : processors) {
            if (processor != null) {
                toBeProcessed.add(processor);
            }
        }

        return new Pipeline(camelContext, toBeProcessed);
    }

    @Override
    public boolean process(Exchange exchange, AsyncCallback callback) {
        if (exchange.isTransacted()) {
            camelContext.getReactiveExecutor().scheduleSync(
                    () -> Pipeline.this.doProcess(exchange, callback, processors.iterator(), true),
                    "Step[" + exchange.getExchangeId() + "," + Pipeline.this + "]");
        } else {
            camelContext.getReactiveExecutor().scheduleMain(
                    () -> Pipeline.this.doProcess(exchange, callback, processors.iterator(), true),
                    "Step[" + exchange.getExchangeId() + "," + Pipeline.this + "]");
        }
        return false;
    }

    protected void doProcess(Exchange exchange, AsyncCallback callback, Iterator<AsyncProcessor> processors, boolean first) {
        if (continueRouting(processors, exchange)
                && (first || continueProcessing(exchange, "so breaking out of pipeline", log))) {

            // prepare for next run
            if (exchange.hasOut()) {
                exchange.setIn(exchange.getOut());
                exchange.setOut(null);
            }

            // get the next processor
            AsyncProcessor processor = processors.next();

            processor.process(exchange, doneSync ->
                    camelContext.getReactiveExecutor().schedule(
                            () -> doProcess(exchange, callback, processors, false),
                            "Step[" + exchange.getExchangeId() + "," + Pipeline.this + "]"));
        } else {
            ExchangeHelper.copyResults(exchange, exchange);

            // logging nextExchange as it contains the exchange that might have altered the payload and since
            // we are logging the completion if will be confusing if we log the original instead
            // we could also consider logging the original and the nextExchange then we have *before* and *after* snapshots
            log.trace("Processing complete for exchangeId: {} >>> {}", exchange.getExchangeId(), exchange);

            camelContext.getReactiveExecutor().callback(callback);
        }
    }

    protected boolean continueRouting(Iterator<AsyncProcessor> it, Exchange exchange) {
        Object stop = exchange.getProperty(Exchange.ROUTE_STOP);
        if (stop != null) {
            boolean doStop = exchange.getContext().getTypeConverter().convertTo(Boolean.class, stop);
            if (doStop) {
                log.debug("ExchangeId: {} is marked to stop routing: {}", exchange.getExchangeId(), exchange);
                return false;
            }
        }

        // continue if there are more processors to route
        boolean answer = it.hasNext();
        log.trace("ExchangeId: {} should continue routing: {}", exchange.getExchangeId(), answer);
        return answer;
    }

    @Override
    protected void doStart() throws Exception {
        ServiceHelper.startService(processors);
    }

    @Override
    protected void doStop() throws Exception {
        ServiceHelper.stopService(processors);
    }

    @Override
    public String toString() {
        return "Pipeline[" + getProcessors() + "]";
    }

    public List<Processor> getProcessors() {
        return (List) processors;
    }

    @Override
    public String getTraceLabel() {
        return "pipeline";
    }

    @Override
    public String getId() {
        return id;
    }

    @Override
    public void setId(String id) {
        this.id = id;
    }

    public List<Processor> next() {
        if (!hasNext()) {
            return null;
        }
        return new ArrayList<>(processors);
    }

    public boolean hasNext() {
        return processors != null && !processors.isEmpty();
    }
}
Fabryprog/camel
core/camel-base/src/main/java/org/apache/camel/processor/Pipeline.java
Java
apache-2.0
6,742
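A minimal usage sketch for the Pipeline above (illustrative only; the two lambda processors and the DefaultCamelContext setup are hypothetical, not taken from the Camel sources). It shows how newInstance collapses the trivial cases: an empty list yields null, a single processor is returned unwrapped, and only two or more processors produce an actual Pipeline.

import java.util.Arrays;
import java.util.Collections;

import org.apache.camel.CamelContext;
import org.apache.camel.Processor;
import org.apache.camel.impl.DefaultCamelContext;
import org.apache.camel.processor.Pipeline;

public class PipelineSketch {
    public static void main(String[] args) {
        CamelContext context = new DefaultCamelContext();

        // Each step sees the message left behind by the previous step.
        Processor step1 = exchange -> exchange.getMessage().setBody("hello");
        Processor step2 = exchange -> exchange.getMessage().setBody(
                exchange.getMessage().getBody(String.class) + " world");

        Processor none  = Pipeline.newInstance(context, Collections.emptyList());     // null: nothing to run
        Processor one   = Pipeline.newInstance(context, step1);                       // step1 itself, unwrapped
        Processor chain = Pipeline.newInstance(context, Arrays.asList(step1, step2)); // an actual Pipeline

        System.out.println(none + " / " + one + " / " + chain);
    }
}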
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.rocketmq.flink.source.reader.deserializer; import java.util.HashMap; import java.util.Map; /** Message contains byte array. */ public class BytesMessage { private byte[] data; private Map<String, String> properties = new HashMap<>(); public byte[] getData() { return data; } public void setData(byte[] data) { this.data = data; } public Map<String, String> getProperties() { return properties; } public void setProperties(Map<String, String> props) { this.properties = props; } public Object getProperty(String key) { return properties.get(key); } public void setProperty(String key, String value) { properties.put(key, value); } }
StyleTang/incubator-rocketmq-externals
rocketmq-flink/src/main/java/org/apache/rocketmq/flink/source/reader/deserializer/BytesMessage.java
Java
apache-2.0
1,571
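A small usage sketch for BytesMessage (the driver code below is hypothetical). Note that getProperty is declared to return Object even though the backing map holds Strings, so callers cast:

import java.nio.charset.StandardCharsets;

import org.apache.rocketmq.flink.source.reader.deserializer.BytesMessage;

public class BytesMessageSketch {
    public static void main(String[] args) {
        BytesMessage msg = new BytesMessage();
        msg.setData("hello".getBytes(StandardCharsets.UTF_8));
        msg.setProperty("topic", "demo"); // stored in the backing HashMap

        String topic = (String) msg.getProperty("topic"); // declared return type is Object
        System.out.println(topic + ": " + new String(msg.getData(), StandardCharsets.UTF_8));
    }
}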
//===--- LookupVisibleDecls - Swift Name Lookup Routines ------------------===// // // This source file is part of the Swift.org open source project // // Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors // Licensed under Apache License v2.0 with Runtime Library Exception // // See https://swift.org/LICENSE.txt for license information // See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors // //===----------------------------------------------------------------------===// // // This file implements the lookupVisibleDecls interface for visiting named // declarations. // //===----------------------------------------------------------------------===// #include "NameLookupImpl.h" #include "swift/AST/ASTContext.h" #include "swift/AST/GenericSignatureBuilder.h" #include "swift/AST/Initializer.h" #include "swift/AST/LazyResolver.h" #include "swift/AST/NameLookup.h" #include "swift/AST/ProtocolConformance.h" #include "swift/AST/SubstitutionMap.h" #include "swift/Basic/SourceManager.h" #include "swift/Basic/STLExtras.h" #include "swift/Sema/IDETypeChecking.h" #include "llvm/ADT/SetVector.h" #include <set> using namespace swift; void VisibleDeclConsumer::anchor() {} void VectorDeclConsumer::anchor() {} void NamedDeclConsumer::anchor() {} namespace { struct LookupState { private: /// If \c false, an unqualified lookup of all visible decls in a /// DeclContext. /// /// If \c true, lookup of all visible members of a given object (possibly of /// metatype type). unsigned IsQualified : 1; /// Is this a qualified lookup on a metatype? unsigned IsOnMetatype : 1; /// Did we recurse into a superclass? unsigned IsOnSuperclass : 1; unsigned InheritsSuperclassInitializers : 1; /// Should instance members be included even if lookup is performed on a type? unsigned IncludeInstanceMembers : 1; LookupState() : IsQualified(0), IsOnMetatype(0), IsOnSuperclass(0), InheritsSuperclassInitializers(0), IncludeInstanceMembers(0) {} public: LookupState(const LookupState &) = default; static LookupState makeQualified() { LookupState Result; Result.IsQualified = 1; return Result; } static LookupState makeUnqualified() { LookupState Result; Result.IsQualified = 0; return Result; } bool isQualified() const { return IsQualified; } bool isOnMetatype() const { return IsOnMetatype; } bool isOnSuperclass() const { return IsOnSuperclass; } bool isInheritsSuperclassInitializers() const { return InheritsSuperclassInitializers; } bool isIncludingInstanceMembers() const { return IncludeInstanceMembers; } LookupState withOnMetatype() const { auto Result = *this; Result.IsOnMetatype = 1; return Result; } LookupState withOnSuperclass() const { auto Result = *this; Result.IsOnSuperclass = 1; return Result; } LookupState withInheritsSuperclassInitializers() const { auto Result = *this; Result.InheritsSuperclassInitializers = 1; return Result; } LookupState withoutInheritsSuperclassInitializers() const { auto Result = *this; Result.InheritsSuperclassInitializers = 0; return Result; } LookupState withIncludedInstanceMembers() const { auto Result = *this; Result.IncludeInstanceMembers = 1; return Result; } }; } // unnamed namespace static bool areTypeDeclsVisibleInLookupMode(LookupState LS) { // Nested type declarations can be accessed only with unqualified lookup or // on metatypes. 
return !LS.isQualified() || LS.isOnMetatype(); } static bool isDeclVisibleInLookupMode(ValueDecl *Member, LookupState LS, const DeclContext *FromContext, LazyResolver *TypeResolver) { if (TypeResolver) { TypeResolver->resolveDeclSignature(Member); TypeResolver->resolveAccessControl(Member); } // Check access when relevant. if (!Member->getDeclContext()->isLocalContext() && !isa<GenericTypeParamDecl>(Member) && !isa<ParamDecl>(Member) && FromContext->getASTContext().LangOpts.EnableAccessControl) { if (Member->isInvalid() && !Member->hasAccess()) return false; if (!Member->isAccessibleFrom(FromContext)) return false; } if (auto *FD = dyn_cast<FuncDecl>(Member)) { // Cannot call static functions on non-metatypes. if (!LS.isOnMetatype() && FD->isStatic()) return false; // Otherwise, either call a function or curry it. return true; } if (auto *VD = dyn_cast<VarDecl>(Member)) { // Cannot use static properties on non-metatypes. if (!(LS.isQualified() && LS.isOnMetatype()) && VD->isStatic()) return false; // Cannot use instance properties on metatypes. if (LS.isOnMetatype() && !VD->isStatic() && !LS.isIncludingInstanceMembers()) return false; return true; } if (isa<EnumElementDecl>(Member)) { // Cannot reference enum elements on non-metatypes. if (!(LS.isQualified() && LS.isOnMetatype())) return false; } if (auto CD = dyn_cast<ConstructorDecl>(Member)) { // Constructors with stub implementations cannot be called in Swift. if (CD->hasStubImplementation()) return false; if (LS.isQualified() && LS.isOnSuperclass()) { // Cannot call initializers from a superclass, except for inherited // convenience initializers. return LS.isInheritsSuperclassInitializers() && CD->isInheritable(); } } if (isa<TypeDecl>(Member)) return areTypeDeclsVisibleInLookupMode(LS); return true; } /// Lookup members in extensions of \p LookupType, using \p BaseType as the /// underlying type when checking any constraints on the extensions. static void doGlobalExtensionLookup(Type BaseType, Type LookupType, SmallVectorImpl<ValueDecl *> &FoundDecls, const DeclContext *CurrDC, LookupState LS, DeclVisibilityKind Reason, LazyResolver *TypeResolver) { auto nominal = LookupType->getAnyNominal(); // Look in each extension of this type. for (auto extension : nominal->getExtensions()) { if (!isExtensionApplied(*const_cast<DeclContext*>(CurrDC), BaseType, extension)) continue; bool validatedExtension = false; if (TypeResolver && extension->getAsProtocolExtensionContext()) { if (!TypeResolver->isProtocolExtensionUsable( const_cast<DeclContext *>(CurrDC), BaseType, extension)) { continue; } validatedExtension = true; } for (auto Member : extension->getMembers()) { if (auto VD = dyn_cast<ValueDecl>(Member)) if (isDeclVisibleInLookupMode(VD, LS, CurrDC, TypeResolver)) { // Resolve the extension, if we haven't done so already. if (!validatedExtension && TypeResolver) { TypeResolver->resolveExtension(extension); validatedExtension = true; } FoundDecls.push_back(VD); } } } // Handle shadowing. removeShadowedDecls(FoundDecls, CurrDC->getParentModule(), TypeResolver); } /// \brief Enumerate immediate members of the type \c LookupType and its /// extensions, as seen from the context \c CurrDC. /// /// Don't do lookup into superclasses or implemented protocols. Uses /// \p BaseType as the underlying type when checking any constraints on the /// extensions. 
static void lookupTypeMembers(Type BaseType, Type LookupType, VisibleDeclConsumer &Consumer, const DeclContext *CurrDC, LookupState LS, DeclVisibilityKind Reason, LazyResolver *TypeResolver) { NominalTypeDecl *D = LookupType->getAnyNominal(); assert(D && "should have a nominal type"); bool LookupFromChildDeclContext = false; const DeclContext *TempDC = CurrDC; while (!TempDC->isModuleContext()) { if (TempDC == D) { LookupFromChildDeclContext = true; break; } TempDC = TempDC->getParent(); } SmallVector<ValueDecl*, 2> FoundDecls; if (LookupFromChildDeclContext) { // Current decl context is contained inside 'D', so generic parameters // are visible. if (D->getGenericParams()) for (auto Param : *D->getGenericParams()) if (isDeclVisibleInLookupMode(Param, LS, CurrDC, TypeResolver)) FoundDecls.push_back(Param); } for (Decl *Member : D->getMembers()) { if (auto *VD = dyn_cast<ValueDecl>(Member)) if (isDeclVisibleInLookupMode(VD, LS, CurrDC, TypeResolver)) FoundDecls.push_back(VD); } doGlobalExtensionLookup(BaseType, LookupType, FoundDecls, CurrDC, LS, Reason, TypeResolver); // Report the declarations we found to the consumer. for (auto *VD : FoundDecls) Consumer.foundDecl(VD, Reason); } /// Enumerate AnyObject declarations as seen from context \c CurrDC. static void doDynamicLookup(VisibleDeclConsumer &Consumer, const DeclContext *CurrDC, LookupState LS, LazyResolver *TypeResolver) { class DynamicLookupConsumer : public VisibleDeclConsumer { VisibleDeclConsumer &ChainedConsumer; LookupState LS; const DeclContext *CurrDC; LazyResolver *TypeResolver; llvm::DenseSet<std::pair<DeclBaseName, CanType>> FunctionsReported; llvm::DenseSet<CanType> SubscriptsReported; llvm::DenseSet<std::pair<Identifier, CanType>> PropertiesReported; public: explicit DynamicLookupConsumer(VisibleDeclConsumer &ChainedConsumer, LookupState LS, const DeclContext *CurrDC, LazyResolver *TypeResolver) : ChainedConsumer(ChainedConsumer), LS(LS), CurrDC(CurrDC), TypeResolver(TypeResolver) {} void foundDecl(ValueDecl *D, DeclVisibilityKind Reason) override { // If the declaration has an override, name lookup will also have found // the overridden method. Skip this declaration, because we prefer the // overridden method. if (D->getOverriddenDecl()) return; // Ensure that the declaration has a type. if (!D->hasInterfaceType()) { if (!TypeResolver) return; TypeResolver->resolveDeclSignature(D); if (!D->hasInterfaceType()) return; } switch (D->getKind()) { #define DECL(ID, SUPER) \ case DeclKind::ID: #define VALUE_DECL(ID, SUPER) #include "swift/AST/DeclNodes.def" llvm_unreachable("not a ValueDecl!"); // Types cannot be found by dynamic lookup. case DeclKind::GenericTypeParam: case DeclKind::AssociatedType: case DeclKind::TypeAlias: case DeclKind::Enum: case DeclKind::Class: case DeclKind::Struct: case DeclKind::Protocol: return; // Initializers cannot be found by dynamic lookup. case DeclKind::Constructor: case DeclKind::Destructor: return; // These cases are probably impossible here but can also just // be safely ignored. case DeclKind::EnumElement: case DeclKind::Param: case DeclKind::Module: return; // For other kinds of values, check if we already reported a decl // with the same signature. case DeclKind::Func: { auto FD = cast<FuncDecl>(D); assert(FD->getImplicitSelfDecl() && "should not find free functions"); (void)FD; if (FD->isInvalid()) break; // Get the type without the first uncurry level with 'self'. 
CanType T = D->getInterfaceType() ->castTo<AnyFunctionType>() ->getResult() ->getCanonicalType(); auto Signature = std::make_pair(D->getBaseName(), T); if (!FunctionsReported.insert(Signature).second) return; break; } case DeclKind::Subscript: { auto Signature = D->getInterfaceType()->getCanonicalType(); if (!SubscriptsReported.insert(Signature).second) return; break; } case DeclKind::Var: { auto *VD = cast<VarDecl>(D); auto Signature = std::make_pair(VD->getName(), VD->getInterfaceType()->getCanonicalType()); if (!PropertiesReported.insert(Signature).second) return; break; } } if (isDeclVisibleInLookupMode(D, LS, CurrDC, TypeResolver)) ChainedConsumer.foundDecl(D, DeclVisibilityKind::DynamicLookup); } }; DynamicLookupConsumer ConsumerWrapper(Consumer, LS, CurrDC, TypeResolver); CurrDC->getParentSourceFile()->forAllVisibleModules( [&](ModuleDecl::ImportedModule Import) { Import.second->lookupClassMembers(Import.first, ConsumerWrapper); }); } namespace { typedef llvm::SmallPtrSet<TypeDecl *, 8> VisitedSet; } // end anonymous namespace static DeclVisibilityKind getReasonForSuper(DeclVisibilityKind Reason) { switch (Reason) { case DeclVisibilityKind::MemberOfCurrentNominal: case DeclVisibilityKind::MemberOfProtocolImplementedByCurrentNominal: case DeclVisibilityKind::MemberOfSuper: return DeclVisibilityKind::MemberOfSuper; case DeclVisibilityKind::MemberOfOutsideNominal: return DeclVisibilityKind::MemberOfOutsideNominal; default: llvm_unreachable("should not see this kind"); } } static void lookupDeclsFromProtocolsBeingConformedTo( Type BaseTy, VisibleDeclConsumer &Consumer, LookupState LS, const DeclContext *FromContext, DeclVisibilityKind Reason, LazyResolver *TypeResolver, VisitedSet &Visited) { NominalTypeDecl *CurrNominal = BaseTy->getAnyNominal(); if (!CurrNominal) return; for (auto Conformance : CurrNominal->getAllConformances()) { auto Proto = Conformance->getProtocol(); if (!Proto->isAccessibleFrom(FromContext)) continue; DeclVisibilityKind ReasonForThisProtocol; if (Reason == DeclVisibilityKind::MemberOfCurrentNominal) ReasonForThisProtocol = DeclVisibilityKind::MemberOfProtocolImplementedByCurrentNominal; else ReasonForThisProtocol = getReasonForSuper(Reason); auto NormalConformance = Conformance->getRootNormalConformance(); for (auto Member : Proto->getMembers()) { if (auto *ATD = dyn_cast<AssociatedTypeDecl>(Member)) { // Skip type decls if they aren't visible, or any type that has a // witness. This cuts down on duplicates. if (areTypeDeclsVisibleInLookupMode(LS) && !NormalConformance->hasTypeWitness(ATD)) { Consumer.foundDecl(ATD, ReasonForThisProtocol); } continue; } if (auto *VD = dyn_cast<ValueDecl>(Member)) { if (TypeResolver) TypeResolver->resolveDeclSignature(VD); // Skip value requirements that have corresponding witnesses. This cuts // down on duplicates. if (!NormalConformance->hasWitness(VD) || !NormalConformance->getWitness(VD, nullptr) || NormalConformance->getWitness(VD, nullptr).getDecl()->getFullName() != VD->getFullName()) { Consumer.foundDecl(VD, ReasonForThisProtocol); } } } // Add members from any extensions. 
SmallVector<ValueDecl *, 2> FoundDecls; doGlobalExtensionLookup(BaseTy, Proto->getDeclaredType(), FoundDecls, FromContext, LS, ReasonForThisProtocol, TypeResolver); for (auto *VD : FoundDecls) Consumer.foundDecl(VD, ReasonForThisProtocol); } } static void lookupVisibleMemberDeclsImpl(Type BaseTy, VisibleDeclConsumer &Consumer, const DeclContext *CurrDC, LookupState LS, DeclVisibilityKind Reason, LazyResolver *TypeResolver, GenericSignatureBuilder *GSB, VisitedSet &Visited); static void lookupVisibleProtocolMemberDecls( Type BaseTy, ProtocolType *PT, VisibleDeclConsumer &Consumer, const DeclContext *CurrDC, LookupState LS, DeclVisibilityKind Reason, LazyResolver *TypeResolver, GenericSignatureBuilder *GSB, VisitedSet &Visited) { if (!Visited.insert(PT->getDecl()).second) return; for (auto Proto : PT->getDecl()->getInheritedProtocols()) lookupVisibleProtocolMemberDecls(BaseTy, Proto->getDeclaredType(), Consumer, CurrDC, LS, getReasonForSuper(Reason), TypeResolver, GSB, Visited); lookupTypeMembers(BaseTy, PT, Consumer, CurrDC, LS, Reason, TypeResolver); } static void lookupVisibleMemberDeclsImpl( Type BaseTy, VisibleDeclConsumer &Consumer, const DeclContext *CurrDC, LookupState LS, DeclVisibilityKind Reason, LazyResolver *TypeResolver, GenericSignatureBuilder *GSB, VisitedSet &Visited) { // Just look through l-valueness. It doesn't affect name lookup. assert(BaseTy && "lookup into null type"); assert(!BaseTy->hasLValueType()); // Handle metatype references, as in "some_type.some_member". These are // special and can't have extensions. if (auto MTT = BaseTy->getAs<AnyMetatypeType>()) { // The metatype represents an arbitrary named type: dig through to the // declared type to see what we're dealing with. Type Ty = MTT->getInstanceType(); LookupState subLS = LookupState::makeQualified().withOnMetatype(); if (LS.isIncludingInstanceMembers()) { subLS = subLS.withIncludedInstanceMembers(); } // Just perform normal dot lookup on the type see if we find extensions or // anything else. For example, type SomeTy.SomeMember can look up static // functions, and can even look up non-static functions as well (thus // getting the address of the member). lookupVisibleMemberDeclsImpl(Ty, Consumer, CurrDC, subLS, Reason, TypeResolver, GSB, Visited); return; } // Lookup module references, as on some_module.some_member. These are // special and can't have extensions. if (ModuleType *MT = BaseTy->getAs<ModuleType>()) { AccessFilteringDeclConsumer FilteringConsumer(CurrDC, Consumer, TypeResolver); MT->getModule()->lookupVisibleDecls(ModuleDecl::AccessPathTy(), FilteringConsumer, NLKind::QualifiedLookup); return; } // If the base is AnyObject, we are doing dynamic lookup. if (BaseTy->isAnyObject()) { doDynamicLookup(Consumer, CurrDC, LS, TypeResolver); return; } // If the base is a protocol, enumerate its members. if (ProtocolType *PT = BaseTy->getAs<ProtocolType>()) { lookupVisibleProtocolMemberDecls(BaseTy, PT, Consumer, CurrDC, LS, Reason, TypeResolver, GSB, Visited); return; } // If the base is a protocol composition, enumerate members of the protocols. if (auto PC = BaseTy->getAs<ProtocolCompositionType>()) { for (auto Member : PC->getMembers()) lookupVisibleMemberDeclsImpl(Member, Consumer, CurrDC, LS, Reason, TypeResolver, GSB, Visited); return; } // Enumerate members of archetype's requirements. 
if (ArchetypeType *Archetype = BaseTy->getAs<ArchetypeType>()) { for (auto Proto : Archetype->getConformsTo()) lookupVisibleProtocolMemberDecls( BaseTy, Proto->getDeclaredType(), Consumer, CurrDC, LS, getReasonForSuper(Reason), TypeResolver, GSB, Visited); if (auto superclass = Archetype->getSuperclass()) lookupVisibleMemberDeclsImpl(superclass, Consumer, CurrDC, LS, getReasonForSuper(Reason), TypeResolver, GSB, Visited); return; } // If we're looking into a type parameter and we have a generic signature // builder, use the GSB to resolve where we should look. if (BaseTy->isTypeParameter() && GSB) { auto EquivClass = GSB->resolveEquivalenceClass(BaseTy, ArchetypeResolutionKind::CompleteWellFormed); if (!EquivClass) return; if (EquivClass->concreteType) { BaseTy = EquivClass->concreteType; } else { // Conformances for (const auto &Conforms : EquivClass->conformsTo) { lookupVisibleProtocolMemberDecls( BaseTy, Conforms.first->getDeclaredType(), Consumer, CurrDC, LS, getReasonForSuper(Reason), TypeResolver, GSB, Visited); } // Superclass. if (EquivClass->superclass) { lookupVisibleMemberDeclsImpl(EquivClass->superclass, Consumer, CurrDC, LS, getReasonForSuper(Reason), TypeResolver, GSB, Visited); } return; } } llvm::SmallPtrSet<ClassDecl *, 8> Ancestors; do { NominalTypeDecl *CurNominal = BaseTy->getAnyNominal(); if (!CurNominal) break; // Look for members of the nominal type itself. lookupTypeMembers(BaseTy, BaseTy, Consumer, CurrDC, LS, Reason, TypeResolver); lookupDeclsFromProtocolsBeingConformedTo(BaseTy, Consumer, LS, CurrDC, Reason, TypeResolver, Visited); // If we have a class type, look into its superclass. auto *CurClass = dyn_cast<ClassDecl>(CurNominal); if (CurClass && CurClass->hasSuperclass()) { // FIXME: This path is no substitute for an actual circularity check. // The real fix is to check that the superclass doesn't introduce a // circular reference before it's written into the AST. if (Ancestors.count(CurClass)) { break; } BaseTy = CurClass->getSuperclass(); Reason = getReasonForSuper(Reason); bool InheritsSuperclassInitializers = CurClass->inheritsSuperclassInitializers(TypeResolver); if (LS.isOnSuperclass() && !InheritsSuperclassInitializers) LS = LS.withoutInheritsSuperclassInitializers(); else if (!LS.isOnSuperclass()) { LS = LS.withOnSuperclass(); if (InheritsSuperclassInitializers) LS = LS.withInheritsSuperclassInitializers(); } } else { break; } Ancestors.insert(CurClass); } while (1); } namespace { struct FoundDeclTy { ValueDecl *D; DeclVisibilityKind Reason; FoundDeclTy(ValueDecl *D, DeclVisibilityKind Reason) : D(D), Reason(Reason) {} friend bool operator==(const FoundDeclTy &LHS, const FoundDeclTy &RHS) { // If this ever changes - e.g. to include Reason - be sure to also update // DenseMapInfo<FoundDeclTy>::getHashValue(). return LHS.D == RHS.D; } }; } // end anonymous namespace namespace llvm { template <> struct DenseMapInfo<FoundDeclTy> { static inline FoundDeclTy getEmptyKey() { return FoundDeclTy{nullptr, DeclVisibilityKind::LocalVariable}; } static inline FoundDeclTy getTombstoneKey() { return FoundDeclTy{reinterpret_cast<ValueDecl *>(0x1), DeclVisibilityKind::LocalVariable}; } static unsigned getHashValue(const FoundDeclTy &Val) { // Note: FoundDeclTy::operator== only considers D, so don't hash Reason here.
return llvm::hash_value(Val.D); } static bool isEqual(const FoundDeclTy &LHS, const FoundDeclTy &RHS) { return LHS == RHS; } }; } // namespace llvm namespace { /// Similar to swift::conflicting, but lenient about protocol extensions which /// don't affect code completion's concept of overloading. static bool relaxedConflicting(const OverloadSignature &sig1, const OverloadSignature &sig2) { // If the base names are different, they can't conflict. if (sig1.Name.getBaseName() != sig2.Name.getBaseName()) return false; // If one is a compound name and the other is not, they do not conflict // if one is a property and the other is a non-nullary function. if (sig1.Name.isCompoundName() != sig2.Name.isCompoundName()) { return !((sig1.IsProperty && sig2.Name.getArgumentNames().size() > 0) || (sig2.IsProperty && sig1.Name.getArgumentNames().size() > 0)); } // Allow null property types to match non-null ones, which only happens when // one property is from a generic extension and the other is not. if (sig1.InterfaceType != sig2.InterfaceType) { if (!sig1.IsProperty || !sig2.IsProperty) return false; if (sig1.InterfaceType && sig2.InterfaceType) return false; } return sig1.Name == sig2.Name && sig1.UnaryOperator == sig2.UnaryOperator && sig1.IsInstanceMember == sig2.IsInstanceMember; } /// Hack to guess at whether substituting into the type of a declaration will /// be okay. /// FIXME: This is awful. We should either have Type::subst() work for /// GenericFunctionType, or we should kill it outright. static bool shouldSubstIntoDeclType(Type type) { auto genericFnType = type->getAs<GenericFunctionType>(); if (!genericFnType) return true; return false; } class OverrideFilteringConsumer : public VisibleDeclConsumer { public: std::set<ValueDecl *> AllFoundDecls; std::map<DeclBaseName, std::set<ValueDecl *>> FoundDecls; llvm::SetVector<FoundDeclTy> DeclsToReport; Type BaseTy; const DeclContext *DC; LazyResolver *TypeResolver; bool IsTypeLookup = false; OverrideFilteringConsumer(Type BaseTy, const DeclContext *DC, LazyResolver *resolver) : BaseTy(BaseTy), DC(DC), TypeResolver(resolver) { assert(!BaseTy->hasLValueType()); if (auto *MetaTy = BaseTy->getAs<AnyMetatypeType>()) { BaseTy = MetaTy->getInstanceType(); IsTypeLookup = true; } assert(DC && BaseTy); } void foundDecl(ValueDecl *VD, DeclVisibilityKind Reason) override { if (!AllFoundDecls.insert(VD).second) return; // If this kind of declaration doesn't participate in overriding, there's // no filtering to do here. if (!isa<AbstractFunctionDecl>(VD) && !isa<AbstractStorageDecl>(VD)) { DeclsToReport.insert(FoundDeclTy(VD, Reason)); return; } if (TypeResolver) { TypeResolver->resolveDeclSignature(VD); TypeResolver->resolveAccessControl(VD); } if (VD->isInvalid()) { FoundDecls[VD->getBaseName()].insert(VD); DeclsToReport.insert(FoundDeclTy(VD, Reason)); return; } auto &PossiblyConflicting = FoundDecls[VD->getBaseName()]; // Check all overridden decls. { auto *CurrentVD = VD->getOverriddenDecl(); while (CurrentVD) { if (!AllFoundDecls.insert(CurrentVD).second) break; if (PossiblyConflicting.count(CurrentVD)) { PossiblyConflicting.erase(CurrentVD); PossiblyConflicting.insert(VD); bool Erased = DeclsToReport.remove( FoundDeclTy(CurrentVD, DeclVisibilityKind::LocalVariable)); assert(Erased); (void)Erased; DeclsToReport.insert(FoundDeclTy(VD, Reason)); return; } CurrentVD = CurrentVD->getOverriddenDecl(); } } // Does it make sense to substitute types? // Don't pass UnboundGenericType here. If you see this assertion // being hit, fix the caller, don't remove it. 
assert(IsTypeLookup || !BaseTy->hasUnboundGenericType()); // If the base type is AnyObject, we might be doing a dynamic // lookup, so the base type won't match the type of the member's // context type. // // If the base type is not a nominal type, we can't substitute // the member type. // // If the member is a free function and not a member of a type, // don't substitute either. bool shouldSubst = (!BaseTy->isAnyObject() && !BaseTy->hasTypeVariable() && BaseTy->getNominalOrBoundGenericNominal() && VD->getDeclContext()->isTypeContext()); ModuleDecl *M = DC->getParentModule(); // Hack; we shouldn't be filtering at this level anyway. if (!VD->hasInterfaceType()) { FoundDecls[VD->getBaseName()].insert(VD); DeclsToReport.insert(FoundDeclTy(VD, Reason)); return; } auto FoundSignature = VD->getOverloadSignature(); if (FoundSignature.InterfaceType && shouldSubst && shouldSubstIntoDeclType(FoundSignature.InterfaceType)) { auto subs = BaseTy->getMemberSubstitutionMap(M, VD); if (auto CT = FoundSignature.InterfaceType.subst(subs)) FoundSignature.InterfaceType = CT->getCanonicalType(); } for (auto I = PossiblyConflicting.begin(), E = PossiblyConflicting.end(); I != E; ++I) { auto *OtherVD = *I; if (OtherVD->isInvalid() || !OtherVD->hasInterfaceType()) { // For some invalid decls it might be impossible to compute the // signature, for example, if the types could not be resolved. continue; } auto OtherSignature = OtherVD->getOverloadSignature(); if (OtherSignature.InterfaceType && shouldSubst && shouldSubstIntoDeclType(OtherSignature.InterfaceType)) { auto subs = BaseTy->getMemberSubstitutionMap(M, OtherVD); if (auto CT = OtherSignature.InterfaceType.subst(subs)) OtherSignature.InterfaceType = CT->getCanonicalType(); } if (relaxedConflicting(FoundSignature, OtherSignature)) { if (VD->getFormalAccess() > OtherVD->getFormalAccess()) { PossiblyConflicting.erase(I); PossiblyConflicting.insert(VD); bool Erased = DeclsToReport.remove( FoundDeclTy(OtherVD, DeclVisibilityKind::LocalVariable)); assert(Erased); (void)Erased; DeclsToReport.insert(FoundDeclTy(VD, Reason)); } return; } } PossiblyConflicting.insert(VD); DeclsToReport.insert(FoundDeclTy(VD, Reason)); } }; } // unnamed namespace /// \brief Enumerate all members in \c BaseTy (including members of extensions, /// superclasses and implemented protocols), as seen from the context \c CurrDC. /// /// This operation corresponds to a standard "dot" lookup operation like "a.b" /// where 'self' is the type of 'a'. This operation is only valid after name /// binding. static void lookupVisibleMemberDecls( Type BaseTy, VisibleDeclConsumer &Consumer, const DeclContext *CurrDC, LookupState LS, DeclVisibilityKind Reason, LazyResolver *TypeResolver, GenericSignatureBuilder *GSB) { OverrideFilteringConsumer ConsumerWrapper(BaseTy, CurrDC, TypeResolver); VisitedSet Visited; lookupVisibleMemberDeclsImpl(BaseTy, ConsumerWrapper, CurrDC, LS, Reason, TypeResolver, GSB, Visited); // Report the declarations we found to the real consumer. 
for (const auto &DeclAndReason : ConsumerWrapper.DeclsToReport) Consumer.foundDecl(DeclAndReason.D, DeclAndReason.Reason); } void swift::lookupVisibleDecls(VisibleDeclConsumer &Consumer, const DeclContext *DC, LazyResolver *TypeResolver, bool IncludeTopLevel, SourceLoc Loc) { const ModuleDecl &M = *DC->getParentModule(); const SourceManager &SM = DC->getASTContext().SourceMgr; auto Reason = DeclVisibilityKind::MemberOfCurrentNominal; // If we are inside of a method, check to see if there are any ivars in scope, // and if so, whether this is a reference to one of them. while (!DC->isModuleScopeContext()) { const ValueDecl *BaseDecl = nullptr; Type ExtendedType; auto LS = LookupState::makeUnqualified(); // Skip initializer contexts, we will not find any declarations there. if (isa<Initializer>(DC)) { DC = DC->getParent(); LS = LS.withOnMetatype(); } GenericParamList *GenericParams = DC->getGenericParamsOfContext(); if (auto *AFD = dyn_cast<AbstractFunctionDecl>(DC)) { // Look for local variables; normally, the parser resolves these // for us, but it can't do the right thing inside local types. // FIXME: when we can parse and typecheck the function body partially for // code completion, AFD->getBody() check can be removed. if (Loc.isValid() && AFD->getBody()) { namelookup::FindLocalVal(SM, Loc, Consumer).visit(AFD->getBody()); } for (auto *P : AFD->getParameterLists()) namelookup::FindLocalVal(SM, Loc, Consumer).checkParameterList(P); // Constructors and destructors don't have 'self' in parameter patterns. if (isa<ConstructorDecl>(AFD) || isa<DestructorDecl>(AFD)) if (auto *selfParam = AFD->getImplicitSelfDecl()) Consumer.foundDecl(const_cast<ParamDecl*>(selfParam), DeclVisibilityKind::FunctionParameter); if (AFD->getDeclContext()->isTypeContext()) { ExtendedType = AFD->getDeclContext()->getSelfTypeInContext(); BaseDecl = AFD->getImplicitSelfDecl(); DC = DC->getParent(); if (auto *FD = dyn_cast<FuncDecl>(AFD)) if (FD->isStatic()) ExtendedType = MetatypeType::get(ExtendedType); } } else if (auto CE = dyn_cast<ClosureExpr>(DC)) { if (Loc.isValid()) { namelookup::FindLocalVal(SM, Loc, Consumer).visit(CE->getBody()); if (auto P = CE->getParameters()) { namelookup::FindLocalVal(SM, Loc, Consumer).checkParameterList(P); } } } else if (auto ED = dyn_cast<ExtensionDecl>(DC)) { ExtendedType = ED->getExtendedType(); if (ExtendedType) BaseDecl = ExtendedType->getNominalOrBoundGenericNominal(); } else if (auto ND = dyn_cast<NominalTypeDecl>(DC)) { ExtendedType = ND->getDeclaredTypeInContext(); BaseDecl = ND; } if (BaseDecl && ExtendedType) { ::lookupVisibleMemberDecls(ExtendedType, Consumer, DC, LS, Reason, TypeResolver, nullptr); } // Check any generic parameters for something with the given name. namelookup::FindLocalVal(SM, Loc, Consumer) .checkGenericParams(GenericParams); DC = DC->getParent(); Reason = DeclVisibilityKind::MemberOfOutsideNominal; } SmallVector<ModuleDecl::ImportedModule, 8> extraImports; if (auto SF = dyn_cast<SourceFile>(DC)) { if (Loc.isValid()) { // Look for local variables in top-level code; normally, the parser // resolves these for us, but it can't do the right thing for // local types. 
namelookup::FindLocalVal(SM, Loc, Consumer).checkSourceFile(*SF); } if (IncludeTopLevel) { auto &cached = SF->getCachedVisibleDecls(); if (!cached.empty()) { for (auto result : cached) Consumer.foundDecl(result, DeclVisibilityKind::VisibleAtTopLevel); return; } SF->getImportedModules(extraImports, ModuleDecl::ImportFilter::Private); } } if (IncludeTopLevel) { using namespace namelookup; SmallVector<ValueDecl *, 0> moduleResults; auto &mutableM = const_cast<ModuleDecl&>(M); lookupVisibleDeclsInModule(&mutableM, {}, moduleResults, NLKind::UnqualifiedLookup, ResolutionKind::Overloadable, TypeResolver, DC, extraImports); for (auto result : moduleResults) Consumer.foundDecl(result, DeclVisibilityKind::VisibleAtTopLevel); if (auto SF = dyn_cast<SourceFile>(DC)) SF->cacheVisibleDecls(std::move(moduleResults)); } } void swift::lookupVisibleMemberDecls(VisibleDeclConsumer &Consumer, Type BaseTy, const DeclContext *CurrDC, LazyResolver *TypeResolver, bool includeInstanceMembers, GenericSignatureBuilder *GSB) { assert(CurrDC); LookupState ls = LookupState::makeQualified(); if (includeInstanceMembers) { ls = ls.withIncludedInstanceMembers(); } ::lookupVisibleMemberDecls(BaseTy, Consumer, CurrDC, ls, DeclVisibilityKind::MemberOfCurrentNominal, TypeResolver, GSB); }
OscarSwanros/swift
lib/AST/LookupVisibleDecls.cpp
C++
apache-2.0
36,523
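The LookupState struct above is an immutable bit-flag state whose with*() methods return modified copies rather than mutating the receiver. A Java analogue of that pattern, purely illustrative (the class below is invented and is not part of the Swift sources):

public final class LookupStateSketch {
    private final boolean qualified;
    private final boolean onMetatype;

    private LookupStateSketch(boolean qualified, boolean onMetatype) {
        this.qualified = qualified;
        this.onMetatype = onMetatype;
    }

    // Factory methods mirror makeQualified()/makeUnqualified().
    public static LookupStateSketch makeQualified()   { return new LookupStateSketch(true, false); }
    public static LookupStateSketch makeUnqualified() { return new LookupStateSketch(false, false); }

    // Each with*() returns a copy; the receiver is never mutated.
    public LookupStateSketch withOnMetatype() { return new LookupStateSketch(qualified, true); }

    public boolean isQualified()  { return qualified; }
    public boolean isOnMetatype() { return onMetatype; }

    public static void main(String[] args) {
        LookupStateSketch ls = makeQualified().withOnMetatype();
        System.out.println(ls.isQualified() + " " + ls.isOnMetatype()); // true true
    }
}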
# # Author:: Panagiotis Papadomitsos (<pj@ezgr.net>) # # Cookbook Name:: php # Recipe:: module_common # # Copyright 2009-2011, Opscode, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # pkg = value_for_platform_family( [ 'rhel', 'fedora' ] => %w{ php-common php-cli php-mbstring php-gd php-intl php-pspell php-mcrypt php-soap php-sqlite php-xml php-xmlrpc }, 'debian' => %w{ php5-curl php5-json php5-cli php5-gd php5-intl php5-pspell php5-mcrypt php5-mhash php5-sqlite php5-xsl php5-xmlrpc } ) pkg.each do |ppkg| package ppkg do action :install end end
VladimirTS/chef-php
recipes/module_common.rb
Ruby
apache-2.0
1,074
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.servicecomb.demo.jaxrs.server.beanParam; import javax.servlet.http.Part; import javax.ws.rs.FormParam; import javax.ws.rs.QueryParam; public class TestBeanParameterWithUpload { @QueryParam("query") private String queryStr; @FormParam("up1") private Part up1; private Part up2; public String getQueryStr() { return queryStr; } public void setQueryStr(String queryStr) { this.queryStr = queryStr; } public Part getUp1() { return up1; } public void setUp1(Part up1) { this.up1 = up1; } public Part getUp2() { return up2; } @FormParam("up2") public void setUp2(Part up2) { this.up2 = up2; } @Override public String toString() { final StringBuilder sb = new StringBuilder("TestBeanParameterWithUpload{"); sb.append("queryStr='").append(queryStr).append('\''); sb.append('}'); return sb.toString(); } }
ServiceComb/java-chassis
demo/demo-jaxrs/jaxrs-server/src/main/java/org/apache/servicecomb/demo/jaxrs/server/beanParam/TestBeanParameterWithUpload.java
Java
apache-2.0
1,712
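A sketch of how such a bean is typically consumed on the resource side. The UploadResource class below is hypothetical; standard JAX-RS uses @BeanParam for aggregated parameters, and the container binds the query string to queryStr and the multipart fields "up1"/"up2" to the annotated field and setter:

import javax.ws.rs.BeanParam;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.core.MediaType;

@Path("/beanParamUpload")
public class UploadResource {
    // The container populates queryStr from ?query=... and the two Part
    // members from the multipart form fields "up1" and "up2".
    @POST
    @Consumes(MediaType.MULTIPART_FORM_DATA)
    public String upload(@BeanParam TestBeanParameterWithUpload bean) {
        return bean.toString();
    }
}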
'use strict'; angular.module('publisherApp') .factory('Register', function ($resource) { return $resource('api/register', {}, { }); });
GIP-RECIA/esup-publisher-ui
src/main/webapp/scripts/components/auth/services/register.service.js
JavaScript
apache-2.0
163
/* Copyright 2009 Ramnivas Laddad Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ //Listing 2.10 Using the Spring container application context package ajia.main; import org.springframework.context.ApplicationContext; import org.springframework.context.support.ClassPathXmlApplicationContext; import ajia.messaging.MessageCommunicator; public class Main { public static void main(String[] args) { ApplicationContext context = new ClassPathXmlApplicationContext( "applicationContext.xml"); MessageCommunicator messageCommunicator = (MessageCommunicator) context .getBean("messageCommunicator"); messageCommunicator.deliver("Wanna learn AspectJ?"); messageCommunicator.deliver("Harry", "having fun?"); } }
sobkowiak/aspectj-in-action-code
ch02/workspace/Section2.6AspectJSpringIntegration/src/main/java/ajia/main/Main.java
Java
apache-2.0
1,245
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.yoko.orb.OCI; // // IDL:orb.yoko.apache.org/OCI/ConnectorSeq:1.0 // final public class ConnectorSeqHolder implements org.omg.CORBA.portable.Streamable { public Connector[] value; public ConnectorSeqHolder() { } public ConnectorSeqHolder(Connector[] initial) { value = initial; } public void _read(org.omg.CORBA.portable.InputStream in) { value = ConnectorSeqHelper.read(in); } public void _write(org.omg.CORBA.portable.OutputStream out) { ConnectorSeqHelper.write(out, value); } public org.omg.CORBA.TypeCode _type() { return ConnectorSeqHelper.type(); } }
apache/geronimo-yoko
yoko-core/src/main/java/org/apache/yoko/orb/OCI/ConnectorSeqHolder.java
Java
apache-2.0
1,507
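Holder classes like this exist because Java has no out/inout parameters: CORBA stubs hand the holder to the callee, which assigns the result to the public value field. A caller-side sketch (the listConnectors operation is invented for illustration):

import org.apache.yoko.orb.OCI.Connector;
import org.apache.yoko.orb.OCI.ConnectorSeqHolder;

public class HolderSketch {
    // Stand-in for an IDL operation with an 'out ConnectorSeq' parameter.
    static void listConnectors(ConnectorSeqHolder out) {
        out.value = new Connector[0]; // a real ORB would populate this
    }

    public static void main(String[] args) {
        ConnectorSeqHolder holder = new ConnectorSeqHolder();
        listConnectors(holder);
        System.out.println("connectors: " + holder.value.length);
    }
}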
/* * Copyright 2015 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.app.impl; import org.apache.felix.scr.annotations.Activate; import org.apache.felix.scr.annotations.Component; import org.apache.felix.scr.annotations.Deactivate; import org.apache.felix.scr.annotations.Reference; import org.apache.felix.scr.annotations.ReferenceCardinality; import org.apache.felix.scr.annotations.Service; import org.apache.karaf.features.Feature; import org.apache.karaf.features.FeaturesService; import org.onosproject.app.ApplicationAdminService; import org.onosproject.app.ApplicationEvent; import org.onosproject.app.ApplicationListener; import org.onosproject.app.ApplicationService; import org.onosproject.app.ApplicationState; import org.onosproject.app.ApplicationStore; import org.onosproject.app.ApplicationStoreDelegate; import org.onosproject.event.AbstractListenerManager; import org.onosproject.core.Application; import org.onosproject.core.ApplicationId; import org.onosproject.core.Permission; import org.slf4j.Logger; import java.io.InputStream; import java.util.Set; import static com.google.common.base.Preconditions.checkNotNull; import static org.onosproject.app.ApplicationEvent.Type.*; import static org.onosproject.security.AppGuard.checkPermission; import static org.slf4j.LoggerFactory.getLogger; /** * Implementation of the application management service. 
*/ @Component(immediate = true) @Service public class ApplicationManager extends AbstractListenerManager<ApplicationEvent, ApplicationListener> implements ApplicationService, ApplicationAdminService { private final Logger log = getLogger(getClass()); private static final String APP_ID_NULL = "Application ID cannot be null"; private final ApplicationStoreDelegate delegate = new InternalStoreDelegate(); @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) protected ApplicationStore store; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) protected FeaturesService featuresService; private boolean initializing; @Activate public void activate() { eventDispatcher.addSink(ApplicationEvent.class, listenerRegistry); initializing = true; store.setDelegate(delegate); initializing = false; log.info("Started"); } @Deactivate public void deactivate() { eventDispatcher.removeSink(ApplicationEvent.class); store.unsetDelegate(delegate); log.info("Stopped"); } @Override public Set<Application> getApplications() { checkPermission(Permission.APP_READ); return store.getApplications(); } @Override public ApplicationId getId(String name) { checkPermission(Permission.APP_READ); checkNotNull(name, "Name cannot be null"); return store.getId(name); } @Override public Application getApplication(ApplicationId appId) { checkPermission(Permission.APP_READ); checkNotNull(appId, APP_ID_NULL); return store.getApplication(appId); } @Override public ApplicationState getState(ApplicationId appId) { checkPermission(Permission.APP_READ); checkNotNull(appId, APP_ID_NULL); return store.getState(appId); } @Override public Set<Permission> getPermissions(ApplicationId appId) { checkPermission(Permission.APP_READ); checkNotNull(appId, APP_ID_NULL); return store.getPermissions(appId); } @Override public Application install(InputStream appDescStream) { checkNotNull(appDescStream, "Application archive stream cannot be null"); return store.create(appDescStream); } @Override public void uninstall(ApplicationId appId) { checkNotNull(appId, APP_ID_NULL); try { store.remove(appId); } catch (Exception e) { log.warn("Unable to purge application directory for {}", appId.name()); } } @Override public void activate(ApplicationId appId) { checkNotNull(appId, APP_ID_NULL); store.activate(appId); } @Override public void deactivate(ApplicationId appId) { checkNotNull(appId, APP_ID_NULL); store.deactivate(appId); } @Override public void setPermissions(ApplicationId appId, Set<Permission> permissions) { checkNotNull(appId, APP_ID_NULL); checkNotNull(permissions, "Permissions cannot be null"); store.setPermissions(appId, permissions); } private class InternalStoreDelegate implements ApplicationStoreDelegate { @Override public void notify(ApplicationEvent event) { ApplicationEvent.Type type = event.type(); Application app = event.subject(); try { if (type == APP_ACTIVATED) { if (installAppFeatures(app)) { log.info("Application {} has been activated", app.id().name()); } } else if (type == APP_DEACTIVATED) { if (uninstallAppFeatures(app)) { log.info("Application {} has been deactivated", app.id().name()); } } else if (type == APP_INSTALLED) { if (installAppArtifacts(app)) { log.info("Application {} has been installed", app.id().name()); } } else if (type == APP_UNINSTALLED) { if (uninstallAppFeatures(app) || uninstallAppArtifacts(app)) { log.info("Application {} has been uninstalled", app.id().name()); } } post(event); } catch (Exception e) { log.warn("Unable to perform operation on application " + app.id().name(), e); } } } // The 
following methods are fully synchronized to guard against remote vs. // locally induced feature service interactions. private synchronized boolean installAppArtifacts(Application app) throws Exception { if (app.featuresRepo().isPresent() && featuresService.getRepository(app.featuresRepo().get()) == null) { featuresService.addRepository(app.featuresRepo().get()); return true; } return false; } private synchronized boolean uninstallAppArtifacts(Application app) throws Exception { if (app.featuresRepo().isPresent() && featuresService.getRepository(app.featuresRepo().get()) != null) { featuresService.removeRepository(app.featuresRepo().get()); return true; } return false; } private synchronized boolean installAppFeatures(Application app) throws Exception { boolean changed = false; for (String name : app.features()) { Feature feature = featuresService.getFeature(name); if (feature != null && !featuresService.isInstalled(feature)) { featuresService.installFeature(name); changed = true; } else if (feature == null && !initializing) { // Suppress feature-not-found reporting during startup since these // can arise naturally from the staggered cluster install. log.warn("Feature {} not found", name); } } return changed; } private synchronized boolean uninstallAppFeatures(Application app) throws Exception { boolean changed = false; for (String name : app.features()) { Feature feature = featuresService.getFeature(name); if (feature != null && featuresService.isInstalled(feature)) { featuresService.uninstallFeature(name); changed = true; } else if (feature == null) { log.warn("Feature {} not found", name); } } return changed; } }
rvhub/onos
core/net/src/main/java/org/onosproject/app/impl/ApplicationManager.java
Java
apache-2.0
8,584
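A sketch of driving the manager through its admin interface (the helper method and the origin of the archive stream are hypothetical): install() delegates to the store, whose APP_INSTALLED/APP_ACTIVATED events flow back through the InternalStoreDelegate above to install Karaf artifacts and features.

import java.io.InputStream;

import org.onosproject.app.ApplicationAdminService;
import org.onosproject.core.Application;

public class AppLifecycleSketch {
    // 'admin' would normally be an injected ApplicationAdminService reference
    // and 'archive' an application (.oar) archive stream.
    static void installAndActivate(ApplicationAdminService admin, InputStream archive) {
        Application app = admin.install(archive); // store fires APP_INSTALLED -> installAppArtifacts()
        admin.activate(app.id());                 // store fires APP_ACTIVATED -> installAppFeatures()
    }
}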
/* * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is * regenerated. */ 'use strict'; const models = require('./index'); /** * Responsys dataset. * * @extends models['Dataset'] */ class ResponsysObjectDataset extends models['Dataset'] { /** * Create a ResponsysObjectDataset. * @member {object} [tableName] The table name. Type: string (or Expression * with resultType string). */ constructor() { super(); } /** * Defines the metadata of ResponsysObjectDataset * * @returns {object} metadata of ResponsysObjectDataset * */ mapper() { return { required: false, serializedName: 'ResponsysObject', type: { name: 'Composite', polymorphicDiscriminator: { serializedName: 'type', clientName: 'type' }, uberParent: 'Dataset', className: 'ResponsysObjectDataset', modelProperties: { description: { required: false, serializedName: 'description', type: { name: 'String' } }, structure: { required: false, serializedName: 'structure', type: { name: 'Object' } }, linkedServiceName: { required: true, serializedName: 'linkedServiceName', defaultValue: {}, type: { name: 'Composite', className: 'LinkedServiceReference' } }, parameters: { required: false, serializedName: 'parameters', type: { name: 'Dictionary', value: { required: false, serializedName: 'ParameterSpecificationElementType', type: { name: 'Composite', className: 'ParameterSpecification' } } } }, annotations: { required: false, serializedName: 'annotations', type: { name: 'Sequence', element: { required: false, serializedName: 'ObjectElementType', type: { name: 'Object' } } } }, folder: { required: false, serializedName: 'folder', type: { name: 'Composite', className: 'DatasetFolder' } }, type: { required: true, serializedName: 'type', isPolymorphicDiscriminator: true, type: { name: 'String' } }, tableName: { required: false, serializedName: 'typeProperties.tableName', type: { name: 'Object' } } } } }; } } module.exports = ResponsysObjectDataset;
xingwu1/azure-sdk-for-node
lib/services/datafactoryManagement/lib/models/responsysObjectDataset.js
JavaScript
apache-2.0
3,288
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.xml.security.stax.impl.processor.output; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.security.NoSuchProviderException; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import javax.xml.stream.XMLStreamConstants; import javax.xml.stream.XMLStreamException; import org.apache.xml.security.exceptions.XMLSecurityException; import org.apache.xml.security.stax.config.JCEAlgorithmMapper; import org.apache.xml.security.stax.config.ResourceResolverMapper; import org.apache.xml.security.stax.ext.AbstractOutputProcessor; import org.apache.xml.security.stax.ext.OutputProcessorChain; import org.apache.xml.security.stax.ext.ResourceResolver; import org.apache.xml.security.stax.ext.SecurePart; import org.apache.xml.security.stax.ext.Transformer; import org.apache.xml.security.stax.ext.XMLSecurityConstants; import org.apache.xml.security.stax.ext.XMLSecurityUtils; import org.apache.xml.security.stax.ext.stax.XMLSecEvent; import org.apache.xml.security.stax.ext.stax.XMLSecStartElement; import org.apache.xml.security.stax.impl.SignaturePartDef; import org.apache.xml.security.stax.impl.transformer.TransformIdentity; import org.apache.xml.security.stax.impl.transformer.canonicalizer.Canonicalizer20010315_Excl; import org.apache.xml.security.stax.impl.util.DigestOutputStream; import org.apache.xml.security.utils.UnsyncBufferedOutputStream; import org.apache.xml.security.utils.XMLUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** */ public abstract class AbstractSignatureOutputProcessor extends AbstractOutputProcessor { private static final transient Logger LOG = LoggerFactory.getLogger(AbstractSignatureOutputProcessor.class); private final List<SignaturePartDef> signaturePartDefList = new ArrayList<>(); private InternalSignatureOutputProcessor activeInternalSignatureOutputProcessor; public AbstractSignatureOutputProcessor() throws XMLSecurityException { super(); } public List<SignaturePartDef> getSignaturePartDefList() { return signaturePartDefList; } @Override public abstract void processEvent(XMLSecEvent xmlSecEvent, OutputProcessorChain outputProcessorChain) throws XMLStreamException, XMLSecurityException; @Override public void doFinal(OutputProcessorChain outputProcessorChain) throws XMLStreamException, XMLSecurityException { doFinalInternal(outputProcessorChain); super.doFinal(outputProcessorChain); } protected void doFinalInternal(OutputProcessorChain outputProcessorChain) throws XMLSecurityException, XMLStreamException { Map<Object, SecurePart> dynamicSecureParts = 
outputProcessorChain.getSecurityContext().getAsMap(XMLSecurityConstants.SIGNATURE_PARTS); if (dynamicSecureParts != null) { Iterator<Map.Entry<Object, SecurePart>> securePartsMapIterator = dynamicSecureParts.entrySet().iterator(); while (securePartsMapIterator.hasNext()) { Map.Entry<Object, SecurePart> securePartEntry = securePartsMapIterator.next(); final SecurePart securePart = securePartEntry.getValue(); if (securePart.getExternalReference() != null) { digestExternalReference(outputProcessorChain, securePart); } } } verifySignatureParts(outputProcessorChain); } protected void digestExternalReference( OutputProcessorChain outputProcessorChain, SecurePart securePart) throws XMLSecurityException, XMLStreamException { final String externalReference = securePart.getExternalReference(); ResourceResolver resourceResolver = ResourceResolverMapper.getResourceResolver( externalReference, outputProcessorChain.getDocumentContext().getBaseURI()); String digestAlgo = securePart.getDigestMethod(); if (digestAlgo == null) { digestAlgo = getSecurityProperties().getSignatureDigestAlgorithm(); } DigestOutputStream digestOutputStream = createMessageDigestOutputStream(digestAlgo); InputStream inputStream = resourceResolver.getInputStreamFromExternalReference(); SignaturePartDef signaturePartDef = new SignaturePartDef(); signaturePartDef.setSecurePart(securePart); signaturePartDef.setSigRefId(externalReference); signaturePartDef.setExternalResource(true); signaturePartDef.setTransforms(securePart.getTransforms()); signaturePartDef.setDigestAlgo(digestAlgo); try { if (securePart.getTransforms() != null) { signaturePartDef.setExcludeVisibleC14Nprefixes(true); Transformer transformer = buildTransformerChain(digestOutputStream, signaturePartDef, null); transformer.transform(inputStream); transformer.doFinal(); } else { XMLSecurityUtils.copy(inputStream, digestOutputStream); } digestOutputStream.close(); } catch (IOException e) { throw new XMLSecurityException(e); } String calculatedDigest = XMLUtils.encodeToString(digestOutputStream.getDigestValue()); LOG.debug("Calculated Digest: {}", calculatedDigest); signaturePartDef.setDigestValue(calculatedDigest); getSignaturePartDefList().add(signaturePartDef); } protected void verifySignatureParts(OutputProcessorChain outputProcessorChain) throws XMLSecurityException { List<SignaturePartDef> signaturePartDefs = getSignaturePartDefList(); Map<Object, SecurePart> dynamicSecureParts = outputProcessorChain.getSecurityContext().getAsMap(XMLSecurityConstants.SIGNATURE_PARTS); if (dynamicSecureParts != null) { Iterator<Map.Entry<Object, SecurePart>> securePartsMapIterator = dynamicSecureParts.entrySet().iterator(); loop: while (securePartsMapIterator.hasNext()) { Map.Entry<Object, SecurePart> securePartEntry = securePartsMapIterator.next(); final SecurePart securePart = securePartEntry.getValue(); if (securePart.isRequired()) { for (int i = 0; i < signaturePartDefs.size(); i++) { SignaturePartDef signaturePartDef = signaturePartDefs.get(i); if (signaturePartDef.getSecurePart() == securePart) { continue loop; } } throw new XMLSecurityException("stax.signature.securePartNotFound", new Object[] {securePart.getName()}); } } } } protected InternalSignatureOutputProcessor getActiveInternalSignatureOutputProcessor() { return activeInternalSignatureOutputProcessor; } protected void setActiveInternalSignatureOutputProcessor( InternalSignatureOutputProcessor activeInternalSignatureOutputProcessor) { this.activeInternalSignatureOutputProcessor = activeInternalSignatureOutputProcessor; } 
protected DigestOutputStream createMessageDigestOutputStream(String digestAlgorithm) throws XMLSecurityException { String jceName = JCEAlgorithmMapper.translateURItoJCEID(digestAlgorithm); String jceProvider = JCEAlgorithmMapper.getJCEProviderFromURI(digestAlgorithm); if (jceName == null) { throw new XMLSecurityException("algorithms.NoSuchMap", new Object[] {digestAlgorithm}); } MessageDigest messageDigest; try { if (jceProvider != null) { messageDigest = MessageDigest.getInstance(jceName, jceProvider); } else { messageDigest = MessageDigest.getInstance(jceName); } } catch (NoSuchAlgorithmException e) { throw new XMLSecurityException(e); } catch (NoSuchProviderException e) { throw new XMLSecurityException(e); } return new DigestOutputStream(messageDigest); } protected Transformer buildTransformerChain(OutputStream outputStream, SignaturePartDef signaturePartDef, XMLSecStartElement xmlSecStartElement) throws XMLSecurityException { String[] transforms = signaturePartDef.getTransforms(); if (transforms == null || transforms.length == 0) { Transformer transformer = new TransformIdentity(); transformer.setOutputStream(outputStream); return transformer; } Transformer parentTransformer = null; for (int i = transforms.length - 1; i >= 0; i--) { String transform = transforms[i]; Map<String, Object> transformerProperties = null; if (getSecurityProperties().isAddExcC14NInclusivePrefixes() && XMLSecurityConstants.NS_C14N_EXCL_OMIT_COMMENTS.equals(transform)) { Set<String> prefixSet = XMLSecurityUtils.getExcC14NInclusiveNamespacePrefixes( xmlSecStartElement, signaturePartDef.isExcludeVisibleC14Nprefixes() ); StringBuilder prefixes = new StringBuilder(); for (Iterator<String> iterator = prefixSet.iterator(); iterator.hasNext(); ) { String prefix = iterator.next(); if (prefixes.length() != 0) { prefixes.append(' '); } prefixes.append(prefix); } signaturePartDef.setInclusiveNamespacesPrefixes(prefixes.toString()); List<String> inclusiveNamespacePrefixes = new ArrayList<>(prefixSet); transformerProperties = new HashMap<>(); transformerProperties.put( Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespacePrefixes); } if (parentTransformer != null) { parentTransformer = XMLSecurityUtils.getTransformer( parentTransformer, null, transformerProperties, transform, XMLSecurityConstants.DIRECTION.OUT); } else { parentTransformer = XMLSecurityUtils.getTransformer( null, outputStream, transformerProperties, transform, XMLSecurityConstants.DIRECTION.OUT); } } return parentTransformer; } public class InternalSignatureOutputProcessor extends AbstractOutputProcessor { private SignaturePartDef signaturePartDef; private XMLSecStartElement xmlSecStartElement; private int elementCounter; private OutputStream bufferedDigestOutputStream; private DigestOutputStream digestOutputStream; private Transformer transformer; public InternalSignatureOutputProcessor(SignaturePartDef signaturePartDef, XMLSecStartElement xmlSecStartElement) throws XMLSecurityException { super(); this.addBeforeProcessor(InternalSignatureOutputProcessor.class.getName()); this.signaturePartDef = signaturePartDef; this.xmlSecStartElement = xmlSecStartElement; } @Override public void init(OutputProcessorChain outputProcessorChain) throws XMLSecurityException { this.digestOutputStream = createMessageDigestOutputStream(signaturePartDef.getDigestAlgo()); this.bufferedDigestOutputStream = new UnsyncBufferedOutputStream(digestOutputStream); this.transformer = buildTransformerChain(this.bufferedDigestOutputStream, signaturePartDef, 
xmlSecStartElement); super.init(outputProcessorChain); } @Override public void processEvent(XMLSecEvent xmlSecEvent, OutputProcessorChain outputProcessorChain) throws XMLStreamException, XMLSecurityException { transformer.transform(xmlSecEvent); if (XMLStreamConstants.START_ELEMENT == xmlSecEvent.getEventType()) { elementCounter++; } else if (XMLStreamConstants.END_ELEMENT == xmlSecEvent.getEventType()) { elementCounter--; if (elementCounter == 0 && xmlSecEvent.asEndElement().getName().equals(this.xmlSecStartElement.getName())) { transformer.doFinal(); try { bufferedDigestOutputStream.close(); } catch (IOException e) { throw new XMLSecurityException(e); } String calculatedDigest = XMLUtils.encodeToString(this.digestOutputStream.getDigestValue()); LOG.debug("Calculated Digest: {}", calculatedDigest); signaturePartDef.setDigestValue(calculatedDigest); outputProcessorChain.removeProcessor(this); //from now on signature is possible again setActiveInternalSignatureOutputProcessor(null); } } outputProcessorChain.processEvent(xmlSecEvent); } } }
apache/santuario-java
src/main/java/org/apache/xml/security/stax/impl/processor/output/AbstractSignatureOutputProcessor.java
Java
apache-2.0
14,441
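A minimal, self-contained Java sketch (not part of the repository above) of the digest-over-a-stream idea behind createMessageDigestOutputStream: bytes are streamed through a digesting output stream and only the accumulated hash is kept. It uses the JDK's java.security.DigestOutputStream rather than the project's own DigestOutputStream class; the class name DigestSketch and the SHA-256 algorithm choice are illustrative assumptions.

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.security.DigestOutputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Base64;

public class DigestSketch {
    public static void main(String[] args) throws NoSuchAlgorithmException, IOException {
        MessageDigest md = MessageDigest.getInstance("SHA-256"); // illustrative algorithm choice
        // The sink discards the bytes; only the digest accumulates, mirroring how the
        // processor streams a signed part through a digesting stream before encoding it.
        try (InputStream in = new ByteArrayInputStream("<doc/>".getBytes());
             DigestOutputStream dos = new DigestOutputStream(OutputStream.nullOutputStream(), md)) {
            in.transferTo(dos);
        }
        System.out.println("Calculated Digest: " + Base64.getEncoder().encodeToString(md.digest()));
    }
}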
/** * @license * Copyright 2015 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 */ CLASS({ package: 'foam.ui.md', name: 'SectionView', extends: 'foam.flow.Element', requires: [ 'foam.ui.Icon', 'foam.ui.md.ExpandableView' ], constants: { ELEMENT_NAME: 'section' }, properties: [ { model_: 'BooleanProperty', name: 'expandable', defaultValue: true, postSet: function(old, nu) { if ( ! this.$ || old === nu ) return; // Need full re-render to correctly wire (or not wire) this.on('click'). this.updateHTML(); } }, { model_: 'BooleanProperty', name: 'expanded', defaultValue: true }, { model_: 'StringProperty', name: 'title', defaultValue: 'Heading' }, { model_: 'StringProperty', name: 'titleClass', defaultValue: 'md-subhead' }, { model_: 'ViewFactoryProperty', name: 'icon', defaultValue: null }, { model_: 'ViewFactoryProperty', name: 'delegate' }, { name: 'delegateView', postSet: function(old, nu) { if ( old && old.expanded$ ) Events.unfollow(this.expanded$, old.expanded$); if ( nu && nu.expanded$ ) Events.follow(this.expanded$, nu.expanded$); } }, { model_: 'StringProperty', name: 'expandedIconId', lazyFactory: function() { return this.id + '-expanded-icon'; } }, { model_: 'ViewFactoryProperty', name: 'expandedIcon', defaultValue: function() { return this.Icon.create({ id: this.expandedIconId, url: 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAQAAABKfvVzAAAARUlEQVR4AWMY1GAUNAAhScr/A2EDKcr/ACFcC2HlvxnCGMIhWohVDgQwLYSVh8K4hLU0AJWHQNkILXX47NDCIjIIwSgAAGEBHc5iOzTwAAAAAElFTkSuQmCC', ligature: 'expand_less', extraClassName: 'expanded-icon' }, this.Y); } }, ], methods: [ { name: 'initHTML', code: function() { this.SUPER.apply(this, arguments); if ( this.expandable ) { this.delegateView.expandedIcon = this.X.$(this.expandedIconId); } } } ], listeners: [ { name: 'onToggleExpanded', code: function() { this.delegateView && this.delegateView.toggleExpanded && this.delegateView.toggleExpanded(); } } ], templates: [ function toInnerHTML() {/* <% this.delegateView = this.delegate(); this.addDataChild(this.delegateView); %> <heading id="{{this.id}}-heading" class="{{this.titleClass}}"> <% if ( this.icon ) { %>%%icon()<% } %> <span>{{this.title}}</span> <% if ( this.expandable ) { this.on('click', this.onToggleExpanded, this.id + '-heading'); %> <div class="flex-flush-right"> %%expandedIcon() </div> <% } %> </heading> %%delegateView */}, function CSS() {/* section heading { display: flex; align-items: center; cursor: pointer; margin: 8px 0; } section heading > * { flex-grow: 0; } section heading div.flex-flush-right { flex-grow: 1; display: flex; justify-content: flex-end; } section heading icon { margin-right: 12px; } section heading icon.expanded-icon { margin-right: initial; } */} ] });
mdittmer/foam
js/foam/ui/md/SectionView.js
JavaScript
apache-2.0
3,734
package threadmanagement.join; import java.util.Date; import java.util.concurrent.TimeUnit; /** * Purpose: simulates loading a data source (a 4-second sleep) * Author: ldl * Date: 2016-07-29 17:51 */ public class DataSourcesLoader implements Runnable { @Override public void run() { System.out.printf("Started loading data sources: %s\n", new Date()); try { TimeUnit.SECONDS.sleep(4); } catch (InterruptedException e) { e.printStackTrace(); } System.out.printf("Finished loading data sources: %s\n", new Date()); } }
wsldl123292/testeveryting
concurrency/src/main/java/threadmanagement/join/DataSourcesLoader.java
Java
apache-2.0
532
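A hypothetical driver (not in the repository above) showing the Thread.join() pattern this threadmanagement.join example is built for: the main thread starts the loader and blocks until it finishes. The class name JoinDemo is an illustrative assumption.

package threadmanagement.join;

public class JoinDemo {
    public static void main(String[] args) throws InterruptedException {
        Thread loader = new Thread(new DataSourcesLoader());
        loader.start();
        loader.join(); // block until DataSourcesLoader.run() returns (about 4 seconds)
        System.out.println("Loader finished; main thread continues");
    }
}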
/* * QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals. * Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using System; using System.Collections.Generic; using System.Linq; using QuantConnect.Configuration; using QuantConnect.Data; using QuantConnect.Logging; using QuantConnect.Securities; using QuantConnect.Util; namespace QuantConnect.ToolBox.Polygon { public class PolygonDownloaderProgram { /// <summary> /// Primary entry point to the program. This program only supports SecurityType.Equity /// </summary> public static void PolygonDownloader(IList<string> tickers, string securityTypeString, string market, string resolutionString, DateTime fromDate, DateTime toDate) { if (tickers.IsNullOrEmpty() || securityTypeString.IsNullOrEmpty() || market.IsNullOrEmpty() || resolutionString.IsNullOrEmpty()) { Console.WriteLine("PolygonDownloader ERROR: '--tickers=' or '--security-type=' or '--market=' or '--resolution=' parameter is missing"); Console.WriteLine("--tickers=eg SPY,AAPL"); Console.WriteLine("--security-type=Equity"); Console.WriteLine("--market=usa"); Console.WriteLine("--resolution=Minute/Hour/Daily"); Environment.Exit(1); } try { // Load settings from command line var resolution = (Resolution)Enum.Parse(typeof(Resolution), resolutionString); var securityType = (SecurityType)Enum.Parse(typeof(SecurityType), securityTypeString); // Polygon.io does not support Crypto historical quotes var tickTypes = securityType == SecurityType.Crypto ? new List<TickType> { TickType.Trade } : SubscriptionManager.DefaultDataTypes()[securityType]; // Load settings from config.json var dataDirectory = Config.Get("data-directory", "../../../Data"); var startDate = fromDate.ConvertToUtc(TimeZones.NewYork); var endDate = toDate.ConvertToUtc(TimeZones.NewYork); var marketHoursDatabase = MarketHoursDatabase.FromDataFolder(); // Create an instance of the downloader using (var downloader = new PolygonDataDownloader()) { foreach (var ticker in tickers) { var symbol = Symbol.Create(ticker, securityType, market); var exchangeTimeZone = marketHoursDatabase.GetExchangeHours(market, symbol, securityType).TimeZone; var dataTimeZone = marketHoursDatabase.GetDataTimeZone(market, symbol, securityType); foreach (var tickType in tickTypes) { // Download the data var data = downloader.Get(symbol, resolution, startDate, endDate, tickType) .Select(x => { x.Time = x.Time.ConvertTo(exchangeTimeZone, dataTimeZone); return x; } ); // Save the data var writer = new LeanDataWriter(resolution, symbol, dataDirectory, tickType); writer.Write(data); } } } } catch (Exception err) { Log.Error(err); } } } }
StefanoRaggi/Lean
ToolBox/Polygon/PolygonDownloaderProgram.cs
C#
apache-2.0
4,321
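An illustrative Java analogue (not part of the Lean repository above) of the downloader's argument handling: parse the resolution name into an enum and fail fast with a usage hint, mirroring the Enum.Parse call in PolygonDownloader. The trimmed Resolution enum and the class name ResolutionArgSketch are stand-in assumptions.

public class ResolutionArgSketch {
    enum Resolution { Minute, Hour, Daily }

    public static void main(String[] args) {
        String resolutionString = args.length > 0 ? args[0] : "";
        try {
            // Like Enum.Parse in the C# program, valueOf is case-sensitive and
            // throws on unknown names.
            Resolution resolution = Resolution.valueOf(resolutionString);
            System.out.println("Using resolution: " + resolution);
        } catch (IllegalArgumentException e) {
            System.err.println("--resolution=Minute/Hour/Daily");
            System.exit(1);
        }
    }
}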
package com.zhaidaosi.game.jgframework.common.queue; /** * FIFO queue that supports fast element lookup */ public class BaseQueue<E> { private BaseQueueElement<E> start = null; private BaseQueueElement<E> end = null; private Object lock = new Object(); private long putCount = 0; private long takeCount = 0; private long size = 0; /** * Get the head of the queue * * @return */ public BaseQueueElement<E> getStart() { return start; } /** * Insert an element at the tail of the queue * * @param value * @return */ public BaseQueueElement<E> put(E value) { if (value == null) { return null; } BaseQueueElement<E> element = new BaseQueueElement<E>(value); synchronized (lock) { if (start == null) { start = element; end = element; } else { element.setBefore(end); end.setNext(element); end = element; } putCount++; size++; element.setNo(putCount); } return element; } /** * Pop an element from the head of the queue * * @return */ public BaseQueueElement<E> take() { if (start == null) { return null; } BaseQueueElement<E> element; synchronized (lock) { element = start; if (start == end) { start = null; end = null; putCount = 0; takeCount = 0; } else { start = start.getNext(); start.setBefore(null); takeCount++; } size--; element.reset(); } return element; } /** * Remove a single element * * @param element */ public boolean remove(BaseQueueElement<E> element) { BaseQueueElement<E> after = null; synchronized (lock) { if (element == start && element == end) { start = null; end = null; putCount = 0; takeCount = 0; } else if (element == start) { start = start.getNext(); start.setBefore(null); takeCount++; } else if (element == end) { end = element.getBefore(); end.setNext(null); } else { // check whether the element is actually in the queue if (element.getBefore().getNext() != element || element.getNext().getBefore() != element) { return false; } after = element.getNext(); after.setBefore(element.getBefore()); element.getBefore().setNext(after); } size--; element.reset(); if (after != null) { do { after.setNo(after.getNo() - 1); after = after.getNext(); } while (after != null); } } return true; } /** * Find the position of an element in the queue * * @param element * @return */ public long findIndex(BaseQueueElement<E> element) { if (element == null) { return -1; } synchronized (lock) { return element.getNo() - takeCount; } } /** * Return the queue length * * @return */ public long size() { synchronized (lock) { return size; } } /** * Remove all elements from the queue */ public void clear() { synchronized (lock) { while (start != null) { BaseQueueElement<E> element = start; start = start.getNext(); element.reset(); element = null; } start = null; end = null; putCount = 0; takeCount = 0; size = 0; } } }
tempbottle/JgFramework
src/main/java/com/zhaidaosi/game/jgframework/common/queue/BaseQueue.java
Java
apache-2.0
4,050
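A hypothetical usage sketch (not in the repository above) for BaseQueue: put() hands back the element's node, so a caller can later locate it with findIndex() or unlink it from the middle with remove(), without scanning the queue. BaseQueueElement's API is assumed only as far as BaseQueue itself uses it; the class name QueueSketch is illustrative.

import com.zhaidaosi.game.jgframework.common.queue.BaseQueue;
import com.zhaidaosi.game.jgframework.common.queue.BaseQueueElement;

public class QueueSketch {
    public static void main(String[] args) {
        BaseQueue<String> queue = new BaseQueue<>();
        queue.put("a");
        BaseQueueElement<String> b = queue.put("b"); // keep the node handle
        queue.put("c");
        System.out.println(queue.findIndex(b)); // 2: "b" is second from the head (1-based)
        queue.remove(b);                        // unlink "b" from the middle of the queue
        queue.take();                           // pop "a" from the head
        System.out.println(queue.size());       // 1: only "c" remains
    }
}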
/************************************************ * * * Node.js v0.8.8 API * * * ************************************************/ /************************************************ * * * GLOBAL * * * ************************************************/ declare var process: NodeProcess; declare var global: any; declare var console: { log(...data: any[]): void; info(...data: any[]): void; error(...data: any[]): void; warn(...data: any[]): void; dir(obj: any): void; timeEnd(label: string): void; trace(label: string): void; assert(expression: any, ...message: string[]): void; } declare var __filename: string; declare var __dirname: string; declare function setTimeout(callback: () => void , ms: number): any; declare function clearTimeout(timeoutId: any); declare function setInterval(callback: () => void , ms: number): any; declare function clearInterval(intervalId: any); declare var require: { (id: string): any; resolve(): string; cache: any; extensions: any; } declare var module: { exports: any; require(id: string): any; id: string; filename: string; loaded: bool; parent: any; children: any[]; } // Same as module.exports declare var exports: any; declare var SlowBuffer: { new (str: string, encoding?: string): NodeBuffer; new (size: number): NodeBuffer; new (array: any[]): NodeBuffer; prototype: NodeBuffer; isBuffer(obj: any): bool; byteLength(string: string, encoding?: string): number; concat(list: NodeBuffer[], totalLength?: number): NodeBuffer; }; declare var Buffer: { new (str: string, encoding?: string): NodeBuffer; new (size: number): NodeBuffer; new (array: any[]): NodeBuffer; prototype: NodeBuffer; isBuffer(obj: any): bool; byteLength(string: string, encoding?: string): number; concat(list: NodeBuffer[], totalLength?: number): NodeBuffer; } /************************************************ * * * INTERFACES * * * ************************************************/ interface EventEmitter { addListener(event: string, listener: Function); on(event: string, listener: Function); once(event: string, listener: Function): void; removeListener(event: string, listener: Function): void; removeAllListener(event: string): void; setMaxListeners(n: number): void; listeners(event: string): { Function; }[]; emit(event: string, arg1?: any, arg2?: any): void; } interface WritableStream extends EventEmitter { writable: bool; write(str: string, encoding?: string, fd?: string): bool; write(buffer: NodeBuffer): bool; end(): void; end(str: string, enconding: string): void; end(buffer: NodeBuffer): void; destroy(): void; destroySoon(): void; } interface ReadableStream extends EventEmitter { readable: bool; setEncoding(encoding: string): void; pause(): void; resume(): void; destroy(): void; pipe(destination: WritableStream, options?: { end?: bool; }): void; } interface NodeProcess extends EventEmitter { stdout: WritableStream; stderr: WritableStream; stdin: ReadableStream; argv: string[]; execPath: string; abort(): void; chdir(directory: string): void; cwd(): void; env: any; exit(code?: number): void; getgid(): number; setgid(id: number): void; getuid(): number; setuid(id: number): void; version: string; versions: { http_parser: string; node: string; v8: string; ares: string; uv: string; zlib: string; openssl: string; }; config: { target_defaults: { cflags: any[]; default_configuration: string; defines: string[]; include_dirs: string[]; libraries: string[]; }; variables: { clang: number; host_arch: string; node_install_npm: bool; node_install_waf: bool; node_prefix: string; node_shared_openssl: bool; node_shared_v8: 
bool; node_shared_zlib: bool; node_use_dtrace: bool; node_use_etw: bool; node_use_openssl: bool; target_arch: string; v8_no_strict_aliasing: number; v8_use_snapshot: bool; visibility: string; }; }; kill(pid: number, signal?: string): void; pid: number; title: string; arch: string; platform: string; memoryUsage(): { rss: number; heapTotal; number; heapUsed: number; }; nextTick(callback: Function): void; umask(mask?: number): number; uptime(): number; hrtime(): number[]; } // Buffer class interface NodeBuffer { [index: number]: number; write(string: string, offset?: number, length?: number, encoding?: string): number; toString(encoding: string, start: number, end: number): string; length: number; copy(targetBuffer: NodeBuffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): void; slice(start?: number, end?: number): NodeBuffer; readUInt8(offset: number, noAsset?: bool): number; readUInt16LE(offset: number, noAssert?: bool): number; readUInt16BE(offset: number, noAssert?: bool): number; readUInt32LE(offset: number, noAssert?: bool): number; readUInt32BE(offset: number, noAssert?: bool): number; readInt8(offset: number, noAssert?: bool): number; readInt16LE(offset: number, noAssert?: bool): number; readInt16BE(offset: number, noAssert?: bool): number; readInt32LE(offset: number, noAssert?: bool): number; readInt32BE(offset: number, noAssert?: bool): number; readFloatLE(offset: number, noAssert?: bool): number; readFloatBE(offset: number, noAssert?: bool): number; readDoubleLE(offset: number, noAssert?: bool): number; readDoubleBE(offset: number, noAssert?: bool): number; writeUInt8(value: number, offset: number, noAssert?: bool): void; writeUInt16LE(value: number, offset: number, noAssert?: bool): void; writeUInt16BE(value: number, offset: number, noAssert?: bool): void; writeUInt32LE(value: number, offset: number, noAssert?: bool): void; writeUInt32BE(value: number, offset: number, noAssert?: bool): void; writeInt8(value: number, offset: number, noAssert?: bool): void; writeInt16LE(value: number, offset: number, noAssert?: bool): void; writeInt16BE(value: number, offset: number, noAssert?: bool): void; writeInt32LE(value: number, offset: number, noAssert?: bool): void; writeInt32BE(value: number, offset: number, noAssert?: bool): void; writeFloatLE(value: number, offset: number, noAssert?: bool): void; writeFloatBE(value: number, offset: number, noAssert?: bool): void; writeDoubleLE(value: number, offset: number, noAssert?: bool): void; writeDoubleBE(value: number, offset: number, noAssert?: bool): void; fill(value: any, offset?: number, end?: number): void; INSPECT_MAX_BYTES: number; } /************************************************ * * * MODULES * * * ************************************************/ declare module "querystring" { export function stringify(obj: any, sep?: string, eq?: string): string; export function parse(str: string, sep?: string, eq?: string, options?: { maxKeys?: number; }): any; export function escape(): any; export function unescape(): any; } declare module "events" { export interface NodeEventEmitter { addListener(event: string, listener: Function); on(event: string, listener: Function): any; once(event: string, listener: Function): void; removeListener(event: string, listener: Function): void; removeAllListener(event: string): void; setMaxListeners(n: number): void; listeners(event: string): { Function; }[]; emit(event: string, arg1?: any, arg2?: any): void; } export var EventEmitter: NodeEventEmitter; } declare module "http" { import events = 
module("events"); import net = module("net"); import stream = module("stream"); export interface Server extends events.NodeEventEmitter { listen(port: number, hostname?: string, backlog?: number, callback?: Function): void; listen(path: string, callback?: Function): void; listen(handle: any, listeningListener?: Function): void; close(cb?: any): void; maxHeadersCount: number; } export interface ServerRequest extends events.NodeEventEmitter, stream.ReadableStream { method: string; url: string; headers: string; trailers: string; httpVersion: string; setEncoding(encoding?: string): void; pause(): void; resume(): void; connection: net.NodeSocket; } export interface ServerResponse extends events.NodeEventEmitter, stream.WritableStream { // Extended base methods write(str: string, encoding?: string, fd?: string): bool; write(buffer: NodeBuffer): bool; writeContinue(): void; writeHead(statusCode: number, reasonPhrase?: string, headers?: any): void; writeHead(statusCode: number, headers?: any): void; statusCode: number; setHeader(name: string, value: string): void; sendDate: bool; getHeader(name: string): string; removeHeader(name: string): void; write(chunk: any, encoding?: string): any; addTrailers(headers: any): void; end(data?: any, encoding?: string): void; } export interface ClientRequest extends events.NodeEventEmitter, stream.WritableStream { // Extended base methods write(str: string, encoding?: string, fd?: string): bool; write(buffer: NodeBuffer): bool; write(chunk: any, encoding?: string): void; end(data?: any, encoding?: string): void; abort(): void; setTimeout(timeout: number, callback?: Function): void; setNoDelay(noDelay?: Function): void; setSocketKeepAlive(enable?: bool, initialDelay?: number): void; } export interface ClientResponse extends events.NodeEventEmitter, stream.ReadableStream { statusCode: number; httpVersion: string; headers: any; trailers: any; setEncoding(encoding?: string): void; pause(): void; resume(): void; } export interface Agent { maxSockets: number; sockets: any; requests: any; } export var STATUS_CODES; export function createServer(requestListener?: (request: ServerRequest, response: ServerResponse) =>void ): Server; export function createClient(port?: number, host?: string): any; export function request(options: any, callback?: Function): ClientRequest; export function get(options: any, callback?: Function): ClientRequest; export var globalAgent: Agent; } declare module "cluster" { import child_process = module("child_process"); export interface ClusterSettings { exec: string; args: string[]; silent: bool; } export interface Worker { id: string; process: child_process; suicide: bool; send(message: any, sendHandle?: any): void; destroy(): void; disconnect(): void; } export var settings: ClusterSettings; export var isMaster: bool; export var isWorker: bool; export function setupMaster(settings?: ClusterSettings): void; export function fork(env?: any): Worker; export function disconnect(callback?: Function): void; export var workers: any; // Event emitter export function addListener(event: string, listener: Function): void; export function on(event: string, listener: Function): any; export function once(event: string, listener: Function): void; export function removeListener(event: string, listener: Function): void; export function removeAllListener(event: string): void; export function setMaxListeners(n: number): void; export function listeners(event: string): { Function; }[]; export function emit(event: string, arg1?: any, arg2?: any): void; } declare 
module "zlib" { import stream = module("stream"); export interface ZlibOptions { chunkSize?: number; windowBits?: number; level?: number; memLevel?: number; strategy?: number; dictionary?: any; } export interface Gzip extends stream.ReadWriteStream { } export interface Gunzip extends stream.ReadWriteStream { } export interface Deflate extends stream.ReadWriteStream { } export interface Inflate extends stream.ReadWriteStream { } export interface DeflateRaw extends stream.ReadWriteStream { } export interface InflateRaw extends stream.ReadWriteStream { } export interface Unzip extends stream.ReadWriteStream { } export function createGzip(options: ZlibOptions): Gzip; export function createGunzip(options: ZlibOptions): Gunzip; export function createDeflate(options: ZlibOptions): Deflate; export function createInflate(options: ZlibOptions): Inflate; export function createDeflateRaw(options: ZlibOptions): DeflateRaw; export function createInflateRaw(options: ZlibOptions): InflateRaw; export function createUnzip(options: ZlibOptions): Unzip; export function deflate(buf: NodeBuffer, callback: (error: Error, result) =>void ): void; export function deflateRaw(buf: NodeBuffer, callback: (error: Error, result) =>void ): void; export function gzip(buf: NodeBuffer, callback: (error: Error, result) =>void ): void; export function gunzip(buf: NodeBuffer, callback: (error: Error, result) =>void ): void; export function inflate(buf: NodeBuffer, callback: (error: Error, result) =>void ): void; export function inflateRaw(buf: NodeBuffer, callback: (error: Error, result) =>void ): void; export function unzip(buf: NodeBuffer, callback: (error: Error, result) =>void ): void; // Constants export var Z_NO_FLUSH: number; export var Z_PARTIAL_FLUSH: number; export var Z_SYNC_FLUSH: number; export var Z_FULL_FLUSH: number; export var Z_FINISH: number; export var Z_BLOCK: number; export var Z_TREES: number; export var Z_OK: number; export var Z_STREAM_END: number; export var Z_NEED_DICT: number; export var Z_ERRNO: number; export var Z_STREAM_ERROR: number; export var Z_DATA_ERROR: number; export var Z_MEM_ERROR: number; export var Z_BUF_ERROR: number; export var Z_VERSION_ERROR: number; export var Z_NO_COMPRESSION: number; export var Z_BEST_SPEED: number; export var Z_BEST_COMPRESSION: number; export var Z_DEFAULT_COMPRESSION: number; export var Z_FILTERED: number; export var Z_HUFFMAN_ONLY: number; export var Z_RLE: number; export var Z_FIXED: number; export var Z_DEFAULT_STRATEGY: number; export var Z_BINARY: number; export var Z_TEXT: number; export var Z_ASCII: number; export var Z_UNKNOWN: number; export var Z_DEFLATED: number; export var Z_NULL: number; } declare module "os" { export function tmpDir(): string; export function hostname(): string; export function type(): string; export function platform(): string; export function arch(): string; export function release(): string; export function uptime(): number; export function loadavg(): number[]; export function totalmem(): number; export function freemem(): number; export function cpus(): { model: string; speed: number; times: { user: number; nice: number; sys: number; idle: number; irq: number; }; }[]; export function networkInterfaces(): any; export var EOL: string; } declare module "https" { import tls = module("tls"); import events = module("events"); import http = module("http"); export interface ServerOptions { pfx?: any; key?: any; passphrase?: string; cert?: any; ca?: any; crl?: any; ciphers?: string; honorCipherOrder?: bool; requestCert?: bool; 
rejectUnauthorized?: bool; NPNProtocols?: any; SNICallback?: (servername: string) => any; } export interface RequestOptions { host?: string; hostname?: string; port?: number; path?: string; method?: string; headers?: any; auth?: string; agent?: any; pfx?: any; key?: any; passphrase?: string; cert?: any; ca?: any; ciphers?: string; rejectUnauthorized?: bool; } export interface NodeAgent { maxSockets: number; sockets: any; requests: any; } export var Agent: { new (options?: RequestOptions): NodeAgent; }; export interface Server extends tls.Server { } export function createServer(options: ServerOptions, requestListener?: Function): Server; export function request(options: RequestOptions, callback?: (res: events.NodeEventEmitter) =>void ): http.ClientRequest; export function get(options: RequestOptions, callback?: (res: events.NodeEventEmitter) =>void ): http.ClientRequest; export var globalAgent: NodeAgent; } declare module "punycode" { export function decode(string: string): string; export function encode(string: string): string; export function toUnicode(domain: string): string; export function toASCII(domain: string): string; export var ucs2: ucs2; interface ucs2 { decode(string: string): string; encode(codePoints: number[]): string; } export var version; } declare module "repl" { import stream = module("stream"); import events = module("events"); export interface ReplOptions { prompt?: string; input?: stream.ReadableStream; output?: stream.WritableStream; terminal?: bool; eval?: Function; useColors?: bool; useGlobal?: bool; ignoreUndefined?: bool; writer?: Function; } export function start(options: ReplOptions): events.NodeEventEmitter; } declare module "readline" { import events = module("events"); import stream = module("stream"); export interface ReadLine extends events.NodeEventEmitter { setPrompt(prompt: string, length: number): void; prompt(preserveCursor?: bool): void; question(query: string, callback: Function): void; pause(): void; resume(): void; close(): void; write(data: any, key?: any): void; } export interface ReadLineOptions { input: stream.ReadableStream; output: stream.WritableStream; completer?: Function; terminal?: bool; } export function createInterface(options: ReadLineOptions): ReadLine; } declare module "vm" { export interface Context { } export interface Script { runInThisContext(): void; runInNewContext(sandbox?: Context): void; } export function runInThisContext(code: string, filename?: string): void; export function runInNewContext(code: string, sandbox?: Context, filename?: string): void; export function runInContext(code: string, context: Context, filename?: string): void; export function createContext(initSandbox?: Context): Context; export function createScript(code: string, filename?: string): Script; } declare module "child_process" { import events = module("events"); import stream = module("stream"); export interface ChildProcess extends events.NodeEventEmitter { stdin: stream.WritableStream; stdout: stream.ReadableStream; stderr: stream.ReadableStream; pid: number; kill(signal?: string): void; send(message: any, sendHandle: any): void; disconnect(): void; } export function spawn(command: string, args?: string[], options?: { cwd?: string; stdio?: any; custom?: any; env?: any; detached?: bool; }): ChildProcess; export function exec(command: string, options: { cwd?: string; stdio?: any; customFds?: any; env?: any; encoding?: string; timeout?: number; maxBuffer?: number; killSignal?: string; }, callback: (error: Error, stdout: NodeBuffer, stderr: NodeBuffer) 
=>void ): ChildProcess; export function exec(command: string, callback: (error: Error, stdout: NodeBuffer, stderr: NodeBuffer) =>void ): ChildProcess; export function execFile(file: string, args: string[], options: { cwd?: string; stdio?: any; customFds?: any; env?: any; encoding?: string; timeout?: number; maxBuffer?: string; killSignal?: string; }, callback: (error: Error, stdout: NodeBuffer, stderr: NodeBuffer) =>void ): ChildProcess; export function fork(modulePath: string, args?: string[], options?: { cwd?: string; env?: any; encoding?: string; }): ChildProcess; } declare module "url" { export interface Url { href?: string; protocol?: string; auth?: string; hostname?: string; port?: string; host?: string; pathname?: string; search?: string; query?: string; slashes?: bool; hash?: string; } export function parse(urlStr: string, parseQueryString? , slashesDenoteHost? ): Url; export function format(url: Url): string; export function resolve(from: string, to: string): string; } declare module "dns" { export function lookup(domain: string, family: number, callback: (err: Error, address: string, family: number) =>void ): string; export function lookup(domain: string, callback: (err: Error, address: string, family: number) =>void ): string; export function resolve(domain: string, rrtype: string, callback: (err: Error, addresses: string[]) =>void ): string[]; export function resolve(domain: string, callback: (err: Error, addresses: string[]) =>void ): string[]; export function resolve4(domain: string, callback: (err: Error, addresses: string[]) =>void ): string[]; export function resolve6(domain: string, callback: (err: Error, addresses: string[]) =>void ): string[]; export function resolveMx(domain: string, callback: (err: Error, addresses: string[]) =>void ): string[]; export function resolveTxt(domain: string, callback: (err: Error, addresses: string[]) =>void ): string[]; export function resolveSrv(domain: string, callback: (err: Error, addresses: string[]) =>void ): string[]; export function resolveNs(domain: string, callback: (err: Error, addresses: string[]) =>void ): string[]; export function resolveCname(domain: string, callback: (err: Error, addresses: string[]) =>void ): string[]; export function reverse(ip: string, callback: (err: Error, domains: string[]) =>void ): string[]; } declare module "net" { import stream = module("stream"); export interface NodeSocket extends stream.ReadWriteStream { // Extended base methods write(str: string, encoding?: string, fd?: string): bool; write(buffer: NodeBuffer): bool; connect(port: number, host?: string, connectionListener?: Function): void; connect(path: string, connectionListener?: Function): void; bufferSize: number; setEncoding(encoding?: string): void; write(data: any, encoding?: string, callback?: Function): void; end(data?: any, encoding?: string): void; destroy(): void; pause(): void; resume(): void; setTimeout(timeout: number, callback?: Function); void; setNoDelay(noDelay?: bool): void; setKeepAlive(enable?: bool, initialDelay?: number): void; address(): { port: number; family: string; address: string; }; remoteAddress: string; remotePort: number; bytesRead: number; bytesWritten: number; } export var Socket: { new (options?: { fd?: string; type?: string; allowHalfOpen?: bool; }): NodeSocket; }; export interface Server extends NodeSocket { listen(port: number, host?: string, backlog?: number, listeningListener?: Function): void; listen(path: string, listeningListener?: Function): void; listen(handle: any, listeningListener?: 
Function): void; close(callback?: Function): void; address(): { port: number; family: string; address: string; }; maxConnections: number; connections: number; } export function createServer(connectionListener?: (socket: NodeSocket) =>void ): Server; export function createServer(options?: { allowHalfOpen?: bool; }, connectionListener?: (socket: NodeSocket) =>void ): Server; export function connect(options: { allowHalfOpen?: bool; }, connectionListener?: Function): void; export function connect(port: number, host?: string, connectionListener?: Function): void; export function connect(path: string, connectionListener?: Function): void; export function createConnection(options: { allowHalfOpen?: bool; }, connectionListener?: Function): void; export function createConnection(port: number, host?: string, connectionListener?: Function): void; export function createConnection(path: string, connectionListener?: Function): void; export function isIP(input: string): number; export function isIPv4(input: string): bool; export function isIPv6(input: string): bool; } declare module "dgram" { import events = module("events"); export function createSocket(type: string, callback?: Function): Socket; interface Socket extends events.NodeEventEmitter { send(buf: NodeBuffer, offset: number, length: number, port: number, address: string, callback?: Function): void; bind(port: number, address?: string): void; close(): void; address: { address: string; family: string; port: number; }; setBroadcast(flag: bool): void; setMulticastTTL(ttl: number): void; setMulticastLoopback(flag: bool): void; addMembership(multicastAddress: string, multicastInterface?: string): void; dropMembership(multicastAddress: string, multicastInterface?: string): void; } } declare module "fs" { import stream = module("stream"); interface Stats { isFile(): bool; isDirectory(): bool; isBlockDevice(): bool; isCharacterDevice(): bool; isSymbolicLink(): bool; isFIFO(): bool; isSocket(): bool; dev: number; ino: number; mode: number; nlink: number; uid: number; gid: number; rdev: number; size: number; blksize: number; blocks: number; atime: Date; mtime: Date; ctime: Date; } interface FSWatcher { close(): void; } export interface ReadStream extends stream.ReadableStream { } export interface WriteStream extends stream.WritableStream { } export function rename(oldPath: string, newPath: string, callback?: Function): void; export function renameSync(oldPath: string, newPath: string): void; export function truncate(fd: number, len: number, callback?: Function): void; export function truncateSync(fd: number, len: number): void; export function chown(path: string, uid: number, gid: number, callback?: Function): void; export function chownSync(path: string, uid: number, gid: number): void; export function fchown(fd: number, uid: number, gid: number, callback?: Function): void; export function fchownSync(fd: number, uid: number, gid: number): void; export function lchown(path: string, uid: number, gid: number, callback?: Function): void; export function lchownSync(path: string, uid: number, gid: number): void; export function chmod(path: string, mode: number, callback?: Function): void; export function chmod(path: string, mode: string, callback?: Function): void; export function chmodSync(path: string, mode: number): void; export function chmodSync(path: string, mode: string): void; export function fchmod(fd: number, mode: number, callback?: Function): void; export function fchmod(fd: number, mode: string, callback?: Function): void; export function 
fchmodSync(fd: number, mode: number): void; export function fchmodSync(fd: number, mode: string): void; export function lchmod(path: string, mode: string, callback?: Function): void; export function lchmod(path: string, mode: number, callback?: Function): void; export function lchmodSync(path: string, mode: number): void; export function lchmodSync(path: string, mode: string): void; export function stat(path: string, callback?: (err: Error, stats: Stats) =>any): Stats; export function lstat(path: string, callback?: (err: Error, stats: Stats) =>any): Stats; export function fstat(fd: number, callback?: (err: Error, stats: Stats) =>any): Stats; export function statSync(path: string): Stats; export function lstatSync(path: string): Stats; export function fstatSync(fd: number): Stats; export function link(srcpath: string, dstpath: string, callback?: Function): void; export function linkSync(srcpath: string, dstpath: string): void; export function symlink(srcpath: string, dstpath: string, type?: string, callback?: Function): void; export function symlinkSync(srcpath: string, dstpath: string, type?: string): void; export function readlink(path: string, callback?: (err: Error, linkString: string) =>any): void; export function realpath(path: string, callback?: (err: Error, resolvedPath: string) =>any): void; export function realpath(path: string, cache: string, callback: (err: Error, resolvedPath: string) =>any): void; export function realpathSync(path: string, cache?: string): string; export function unlink(path: string, callback?: Function): void; export function unlinkSync(path: string): void; export function rmdir(path: string, callback?: Function): void; export function rmdirSync(path: string): void; export function mkdir(path: string, mode?: number, callback?: Function): void; export function mkdir(path: string, mode?: string, callback?: Function): void; export function mkdirSync(path: string, mode?: number): void; export function mkdirSync(path: string, mode?: string): void; export function readdir(path: string, callback?: (err: Error, files: string[]) => void): void; export function readdirSync(path: string): string[]; export function close(fd: number, callback?: Function): void; export function closeSync(fd: number): void; export function open(path: string, flags: string, mode?: string, callback?: (err: Error, fd: number) =>any): void; export function openSync(path: string, flags: string, mode?: string): number; export function utimes(path: string, atime: number, mtime: number, callback?: Function): void; export function utimesSync(path: string, atime: number, mtime: number): void; export function futimes(fd: number, atime: number, mtime: number, callback?: Function): void; export function futimesSync(fd: number, atime: number, mtime: number): void; export function fsync(fd: number, callback?: Function): void; export function fsyncSync(fd: number): void; export function write(fd: number, buffer: NodeBuffer, offset: number, length: number, position: number, callback?: (err: Error, written: number, buffer: NodeBuffer) =>any): void; export function writeSync(fd: number, buffer: NodeBuffer, offset: number, length: number, position: number): number; export function read(fd: number, buffer: NodeBuffer, offset: number, length: number, position: number, callback?: (err: Error, bytesRead: number, buffer: NodeBuffer) => void): void; export function readSync(fd: number, buffer: NodeBuffer, offset: number, length: number, position: number): number; export function readFile(filename: string, encoding: 
string, callback: (err: Error, data: string) => void ): void; export function readFile(filename: string, callback: (err: Error, data: NodeBuffer) => void ): void; export function readFileSync(filename: string): NodeBuffer; export function readFileSync(filename: string, encoding: string): string; export function writeFile(filename: string, data: any, encoding?: string, callback?: Function): void; export function writeFileSync(filename: string, data: any, encoding?: string): void; export function appendFile(filename: string, data: any, encoding?: string, callback?: Function): void; export function appendFileSync(filename: string, data: any, encoding?: string): void; export function watchFile(filename: string, listener: { curr: Stats; prev: Stats; }): void; export function watchFile(filename: string, options: { persistent?: bool; interval?: number; }, listener: { curr: Stats; prev: Stats; }): void; export function unwatchFile(filename: string, listener?: Stats): void; export function watch(filename: string, options?: { persistent?: bool; }, listener?: (event: string, filename: string) =>any): FSWatcher; export function exists(path: string, callback?: (exists: bool) =>void ): void; export function existsSync(path: string): bool; export function createReadStream(path: string, options?: { flags?: string; encoding?: string; fd?: string; mode?: number; bufferSize?: number; }): ReadStream; export function createWriteStream(path: string, options?: { flags?: string; encoding?: string; string?: string; }): WriteStream; } declare module "path" { export function normalize(p: string): string; export function join(...paths: any[]): string; export function resolve(from: string, to: string): string; export function resolve(from: string, from2: string, to: string): string; export function resolve(from: string, from2: string, from3: string, to: string): string; export function resolve(from: string, from2: string, from3: string, from4: string, to: string): string; export function resolve(from: string, from2: string, from3: string, from4: string, from5: string, to: string): string; export function relative(from: string, to: string): string; export function dirname(p: string): string; export function basename(p: string, ext?: string): string; export function extname(p: string): string; export var sep: string; } declare module "string_decoder" { export interface NodeStringDecoder { write(buffer: NodeBuffer): string; detectIncompleteChar(buffer: NodeBuffer): number; } export var StringDecoder: { new (encoding: string): NodeStringDecoder; }; } declare module "tls" { import crypto = module("crypto"); import net = module("net"); import stream = module("stream"); var CLIENT_RENEG_LIMIT: number; var CLIENT_RENEG_WINDOW: number; export interface TlsOptions { pfx?: any; //string or buffer key?: any; //string or buffer passphrase?: string; cert?: any; ca?: any; //string or buffer crl?: any; //string or string array ciphers?: string; honorCipherOrder?: any; requestCert?: bool; rejectUnauthorized?: bool; NPNProtocols?: any; //array or Buffer; SNICallback?: (servername: string) => any; } export interface ConnectionOptions { host?: string; port?: number; socket?: net.NodeSocket; pfx?: any; //string | Buffer key?: any; //string | Buffer passphrase?: string; cert?: any; //string | Buffer ca?: any; //Array of string | Buffer rejectUnauthorized?: bool; NPNProtocols?: any; //Array of string | Buffer servername?: string; } export interface Server extends net.Server { // Extended base methods listen(port: number, host?: string, 
backlog?: number, listeningListener?: Function): void; listen(path: string, listeningListener?: Function): void; listen(handle: any, listeningListener?: Function): void; listen(port: number, host?: string, callback?: Function): void; close(): void; address(): { port: number; family: string; address: string; }; addContext(hostName: string, credentials: { key: string; cert: string; ca: string; }): void; maxConnections: number; connections: number; } export interface ClearTextStream extends stream.ReadWriteStream { authorized: bool; authorizationError: Error; getPeerCertificate(): any; getCipher: { name: string; version: string; }; address: { port: number; family: string; address: string; }; remoteAddress: string; remotePort: number; } export interface SecurePair { encrypted: any; cleartext: any; } export function createServer(options: TlsOptions, secureConnectionListener?: (cleartextStream: ClearTextStream) =>void ): Server; export function connect(options: TlsOptions, secureConnectionListener?: () =>void ): ClearTextStream; export function connect(port: number, host?: string, options?: ConnectionOptions, secureConnectListener?: () =>void ): ClearTextStream; export function connect(port: number, options?: ConnectionOptions, secureConnectListener?: () =>void ): ClearTextStream; export function createSecurePair(credentials?: crypto.Credentials, isServer?: bool, requestCert?: bool, rejectUnauthorized?: bool): SecurePair; } declare module "crypto" { export interface CredentialDetails { pfx: string; key: string; passphrase: string; cert: string; ca: any; //string | string array crl: any; //string | string array ciphers: string; } export interface Credentials { context?: any; } export function createCredentials(details: CredentialDetails): Credentials; export function createHash(algorithm: string): Hash; export function createHmac(algorithm: string, key: string): Hmac; interface Hash { update(data: any, input_encoding?: string): void; digest(encoding?: string): string; } interface Hmac { update(data: any): void; digest(encoding?: string): void; } export function createCipher(algorithm: string, password: any): Cipher; export function createCipheriv(algorithm: string, key: any, iv: any): Cipher; interface Cipher { update(data: any, input_encoding?: string, output_encoding?: string): string; final(output_encoding?: string): string; setAutoPadding(auto_padding: bool): void; createDecipher(algorithm: string, password: any): Decipher; createDecipheriv(algorithm: string, key: any, iv: any): Decipher; } interface Decipher { update(data: any, input_encoding?: string, output_encoding?: string): void; final(output_encoding?: string): string; setAutoPadding(auto_padding: bool): void; } export function createSign(algorithm: string): Signer; interface Signer { update(data: any): void; sign(private_key: string, output_format: string): string; } export function createVerify(algorith: string): Verify; interface Verify { update(data: any): void; verify(object: string, signature: string, signature_format?: string): bool; } export function createDiffieHellman(prime_length: number): DiffieHellman; export function createDiffieHellman(prime: number, encoding?: string): DiffieHellman; interface DiffieHellman { generateKeys(encoding?: string): string; computeSecret(other_public_key: string, input_encoding?: string, output_encoding?: string): string; getPrime(encoding?: string): string; getGenerator(encoding: string): string; getPublicKey(encoding?: string): string; getPrivateKey(encoding?: string): string; 
setPublicKey(public_key: string, encoding?: string): void; setPrivateKey(public_key: string, encoding?: string): void; } export function getDiffieHellman(group_name: string): DiffieHellman; export function pbkdf2(password: string, salt: string, iterations: number, keylen: number, callback: (err: Error, derivedKey: string) => any): void; export function randomBytes(size: number, callback?: (err: Error, buf: NodeBuffer) =>void ); } declare module "stream" { import events = module("events"); export interface WritableStream extends events.NodeEventEmitter { writable: bool; write(str: string, encoding?: string, fd?: string): bool; write(buffer: NodeBuffer): bool; end(): void; end(str: string, enconding: string): void; end(buffer: NodeBuffer): void; destroy(): void; destroySoon(): void; } export interface ReadableStream extends events.NodeEventEmitter { readable: bool; setEncoding(encoding: string): void; pause(): void; resume(): void; destroy(): void; pipe(destination: WritableStream, options?: { end?: bool; }): void; } export interface ReadWriteStream extends ReadableStream, WritableStream { } } declare module "util" { export function format(format: any, ...param: any[]): string; export function debug(string: string): void; export function error(...param: any[]): void; export function puts(...param: any[]): void; export function print(...param: any[]): void; export function log(string: string): void; export function inspect(object: any, showHidden?: bool, depth?: number, color?: bool): void; export function isArray(object: any): bool; export function isRegExp(object: any): bool; export function isDate(object: any): bool; export function isError(object: any): bool; export function inherits(constructor: any, superConstructor: any): void; } declare module "assert" { export function fail(actual: any, expected: any, message: string, operator: string): void; export function assert(value: any, message: string): void; export function ok(value: any, message?: string): void; export function equal(actual: any, expected: any, message?: string): void; export function notEqual(actual: any, expected: any, message?: string): void; export function deepEqual(actual: any, expected: any, message?: string): void; export function notDeepEqual(acutal: any, expected: any, message?: string): void; export function strictEqual(actual: any, expected: any, message?: string): void; export function notStrictEqual(actual: any, expected: any, message?: string): void; export function throws(block: any, error?: any, messsage?: string): void; export function doesNotThrow(block: any, error?: any, messsage?: string): void; export function ifError(value: any): void; } declare module "tty" { import net = module("net"); export function isatty(fd: string): bool; export interface ReadStream extends net.NodeSocket { isRaw: bool; setRawMode(mode: bool): void; } export interface WriteStream extends net.NodeSocket { columns: number; rows: number; } } declare module "domain" { import events = module("events"); export interface Domain extends events.NodeEventEmitter { } export function create(): Domain; export function run(fn: Function): void; export function add(emitter: events.NodeEventEmitter): void; export function remove(emitter: events.NodeEventEmitter): void; export function bind(cb: (er: Error, data: any) =>any): any; export function intercept(cb: (data: any) => any): any; export function dispose(): void; }
jonathanmarvens/typescript
samples/node/node.d.ts
TypeScript
apache-2.0
45,576
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.manifoldcf.crawler.connectors.sharedrive; import jcifs.ACE; import jcifs.smb.NtlmPasswordAuthenticator; import jcifs.smb.SmbException; import jcifs.smb.SmbFile; import jcifs.smb.SmbFileFilter; import jcifs.context.SingletonContext; import org.apache.manifoldcf.agents.interfaces.RepositoryDocument; import org.apache.manifoldcf.agents.interfaces.ServiceInterruption; import org.apache.manifoldcf.connectorcommon.extmimemap.ExtensionMimeMap; import org.apache.manifoldcf.connectorcommon.interfaces.IKeystoreManager; import org.apache.manifoldcf.connectorcommon.interfaces.KeystoreManagerFactory; import org.apache.manifoldcf.core.common.DateParser; import org.apache.manifoldcf.core.interfaces.ConfigParams; import org.apache.manifoldcf.core.interfaces.Configuration; import org.apache.manifoldcf.core.interfaces.ConfigurationNode; import org.apache.manifoldcf.core.interfaces.IHTTPOutput; import org.apache.manifoldcf.core.interfaces.IPostParameters; import org.apache.manifoldcf.core.interfaces.IThreadContext; import org.apache.manifoldcf.core.interfaces.LockManagerFactory; import org.apache.manifoldcf.core.interfaces.ManifoldCFException; import org.apache.manifoldcf.core.interfaces.Specification; import org.apache.manifoldcf.core.interfaces.SpecificationNode; import org.apache.manifoldcf.core.util.URLEncoder; import org.apache.manifoldcf.crawler.interfaces.IExistingVersions; import org.apache.manifoldcf.crawler.interfaces.IFingerprintActivity; import org.apache.manifoldcf.crawler.interfaces.IProcessActivity; import org.apache.manifoldcf.crawler.interfaces.ISeedingActivity; import org.apache.manifoldcf.crawler.system.Logging; import org.apache.manifoldcf.crawler.system.ManifoldCF; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InterruptedIOException; import java.net.MalformedURLException; import java.net.UnknownHostException; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Locale; /** This is the "repository connector" for a smb/cifs shared drive file system. It's a relative of the share crawler, and should have * comparable basic functionality. 
*/ public class SharedDriveConnector extends org.apache.manifoldcf.crawler.connectors.BaseRepositoryConnector { public static final String _rcsid = "@(#)$Id: SharedDriveConnector.java 996524 2010-09-13 13:38:01Z kwright $"; // Activities we log public final static String ACTIVITY_ACCESS = "access"; // These are the share connector nodes and attributes in the document specification public static final String NODE_STARTPOINT = "startpoint"; public static final String NODE_INCLUDE = "include"; public static final String NODE_EXCLUDE = "exclude"; public static final String NODE_PATHNAMEATTRIBUTE = "pathnameattribute"; public static final String NODE_PATHMAP = "pathmap"; public static final String NODE_FILEMAP = "filemap"; public static final String NODE_URIMAP = "urimap"; public static final String NODE_SHAREACCESS = "shareaccess"; public static final String NODE_SHARESECURITY = "sharesecurity"; public static final String NODE_PARENTFOLDERACCESS = "parentfolderaccess"; public static final String NODE_PARENTFOLDERSECURITY = "parentfoldersecurity"; public static final String NODE_MAXLENGTH = "maxlength"; public static final String NODE_ACCESS = "access"; public static final String NODE_SECURITY = "security"; public static final String ATTRIBUTE_PATH = "path"; public static final String ATTRIBUTE_TYPE = "type"; public static final String ATTRIBUTE_INDEXABLE = "indexable"; public static final String ATTRIBUTE_FILESPEC = "filespec"; public static final String ATTRIBUTE_VALUE = "value"; public static final String ATTRIBUTE_TOKEN = "token"; public static final String ATTRIBUTE_MATCH = "match"; public static final String ATTRIBUTE_REPLACE = "replace"; public static final String VALUE_DIRECTORY = "directory"; public static final String VALUE_FILE = "file"; // Properties this connector needs (that can only be configured once) public final static String PROPERTY_JCIFS_USE_NTLM_V1 = "org.apache.manifoldcf.crawler.connectors.jcifs.usentlmv1"; // Static initialization of various system properties. This hopefully takes place // before jcifs is loaded. static { if (System.getProperty("jcifs.resolveOrder") == null) { System.setProperty("jcifs.resolveOrder","LMHOSTS,DNS,WINS"); } if (System.getProperty("jcifs.smb.client.soTimeout") == null) { System.setProperty("jcifs.smb.client.soTimeout","150000"); } if (System.getProperty("jcifs.smb.client.responseTimeout") == null) { System.setProperty("jcifs.smb.client.responseTimeout","120000"); } if (System.getProperty("jcifs.smb.client.minVersion") == null) { System.setProperty("jcifs.smb.client.minVersion","SMB1"); } if (System.getProperty("jcifs.smb.client.maxVersion") == null) { System.setProperty("jcifs.smb.client.maxVersion","SMB210"); } if (System.getProperty("jcifs.traceResources") == null) { System.setProperty("jcifs.traceResources","true"); } if (System.getProperty("jcifs.smb.client.ipcSigningEnforced") == null) { System.setProperty("jcifs.smb.client.ipcSigningEnforced","true"); } // Don't change these!! System.setProperty("jcifs.smb.client.listCount","20"); System.setProperty("jcifs.smb.client.dfs.strictView","true"); } private String smbconnectionPath = null; private String server = null; private String domain = null; private String username = null; private String password = null; private boolean useSIDs = true; private String binName = null; private NtlmPasswordAuthenticator pa; /** Deny access token for default authority */ private final static String defaultAuthorityDenyToken = GLOBAL_DENY_TOKEN; /** Constructor. 
*/ public SharedDriveConnector() { } /** Set thread context. * Use the opportunity to set the system properties we'll need. */ @Override public void setThreadContext(IThreadContext threadContext) throws ManifoldCFException { super.setThreadContext(threadContext); // We need to know whether to operate in NTLMv2 mode, or in NTLM mode. We do this before jcifs called the first time. boolean useV1 = LockManagerFactory.getBooleanProperty(threadContext, PROPERTY_JCIFS_USE_NTLM_V1, false); if (!useV1) { System.setProperty("jcifs.smb.lmCompatibility","3"); System.setProperty("jcifs.smb.client.useExtendedSecurity","true"); } else { System.setProperty("jcifs.smb.lmCompatibility","0"); System.setProperty("jcifs.smb.client.useExtendedSecurity","false"); } } /** Establish a "session". In the case of the jcifs connector, this just builds the appropriate smbconnectionPath string, and does the necessary checks. */ protected void getSession() throws ManifoldCFException { if (smbconnectionPath == null) { // Get the server if (server == null || server.length() == 0) throw new ManifoldCFException("Missing parameter '"+SharedDriveParameters.server+"'"); // make the smb connection to the server String authenticationString; if (domain == null || domain.length() == 0) domain = null; if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("Connecting to: " + "smb://" + ((domain==null)?"":domain)+";"+username+":<password>@" + server + "/"); try { // use NtlmPasswordAuthentication so that we can reuse credential for DFS support pa = new NtlmPasswordAuthenticator(domain,username,password); SmbFile smbconnection = new SmbFile("smb://" + server + "/",SingletonContext.getInstance().withCredentials(pa)); smbconnectionPath = getFileCanonicalPath(smbconnection); } catch (MalformedURLException e) { Logging.connectors.error("Unable to access SMB/CIFS share: "+"smb://" + ((domain==null)?"":domain)+";"+username+":<password>@"+ server + "/\n" + e); throw new ManifoldCFException("Unable to access SMB/CIFS share: "+server, e, ManifoldCFException.REPOSITORY_CONNECTION_ERROR); } } } /** Return the list of activities that this connector supports (i.e. writes into the log). *@return the list. */ @Override public String[] getActivitiesList() { return new String[]{ACTIVITY_ACCESS}; } /** Close the connection. Call this before discarding the repository connector. */ @Override public void disconnect() throws ManifoldCFException { server = null; domain = null; username = null; password = null; pa = null; smbconnectionPath = null; binName = null; super.disconnect(); } /** Connect. *@param configParameters is the set of configuration parameters, which * in this case describe the root directory. */ @Override public void connect(ConfigParams configParameters) { super.connect(configParameters); // Get the server server = configParameters.getParameter(SharedDriveParameters.server); domain = configParameters.getParameter(SharedDriveParameters.domain); username = configParameters.getParameter(SharedDriveParameters.username); if (username == null) username = ""; password = configParameters.getObfuscatedParameter(SharedDriveParameters.password); if (password == null) password = ""; String useSIDsString = configParameters.getParameter(SharedDriveParameters.useSIDs); if (useSIDsString == null) useSIDsString = "true"; useSIDs = "true".equals(useSIDsString); String configBinName = configParameters.getParameter(SharedDriveParameters.binName); binName = (configBinName == null || configBinName.length() == 0) ? 
server : configBinName; if (binName.length() > 255) // trim the bin name to fit in the database binName = binName.substring(0, 255); // Rejigger the username/domain to be sure we PASS in a domain and we do not include the domain attached to the user! // (This became essential at jcifs 1.3.0) int index = username.indexOf("@"); if (index != -1) { // Strip off the domain from the user String userDomain = username.substring(index+1); if (domain == null || domain.length() == 0) domain = userDomain; username = username.substring(0,index); } index = username.indexOf("\\"); if (index != -1) { String userDomain = username.substring(0,index); if (domain == null || domain.length() == 0) domain = userDomain; username = username.substring(index+1); } } /** Get the bin name string for a document identifier. The bin name describes the queue to which the * document will be assigned for throttling purposes. Throttling controls the rate at which items in a * given queue are fetched; it does not say anything about the overall fetch rate, which may operate on * multiple queues or bins. * For example, if you implement a web crawler, a good choice of bin name would be the server name, since * that is likely to correspond to a real resource that will need real throttle protection. *@param documentIdentifier is the document identifier. *@return the bin name. */ @Override public String[] getBinNames(String documentIdentifier) { return new String[]{binName}; } /** * Convert a document identifier to a URI. The URI is the URI that will be * the unique key from the search index, and will be presented to the user * as part of the search results. * * @param documentIdentifier * is the document identifier. * @return the document uri. */ protected static String convertToURI(String documentIdentifier, MatchMap fileMap, MatchMap uriMap) throws ManifoldCFException { // // Note well: This MUST be a legal URI!! // e.g. // smb://10.33.65.1/Test Folder/PPT Docs/Dearman_University of Texas 20030220.ppt // file:////10.33.65.1/Test Folder/PPT Docs/Dearman_University of Texas 20030220.ppt String serverPath = documentIdentifier.substring("smb://".length()); // The first mapping converts one server path to another. // If not present, we leave the original path alone. serverPath = fileMap.translate(serverPath); // The second mapping, if present, creates a URI, using certain rules. If not present, the old standard IRI conversion is done. if (uriMap.getMatchCount() != 0) { // URI translation. // First step is to perform utf-8 translation and %-encoding. byte[] byteArray = serverPath.getBytes(StandardCharsets.UTF_8); StringBuilder output = new StringBuilder(); int i = 0; while (i < byteArray.length) { int x = ((int)byteArray[i++]) & 0xff; if (x >= 0x80 || (x >= 0 && x <= ' ') || x == ':' || x == '?' || x == '^' || x == '{' || x == '}' || x == '%' || x == '#' || x == '`' || x == ';' || x == '@' || x == '&' || x == '=' || x == '+' || x == '$' || x == ',') { output.append('%'); String hexValue = Integer.toHexString((int)x).toUpperCase(Locale.ROOT); if (hexValue.length() == 1) output.append('0'); output.append(hexValue); } else output.append((char)x); } // Second step is to perform the mapping. This strips off the server name and glues on the protocol and web server name, most likely. return uriMap.translate(output.toString()); } else { // Convert to a URI that begins with file://///. 
This used to be done according to the following IE7 specification: // http://blogs.msdn.com/ie/archive/2006/12/06/file-uris-in-windows.aspx // However, two factors required change. First, IE8 decided to no longer adhere to the same specification as IE7. // Second, the ingestion API does not (and will never) accept anything other than a well-formed URI. Thus, file // specifications are ingested in a canonical form (which happens to be pretty much what this connector used prior to // 3.9.0), and the various clients are responsible for converting that form into something the browser will accept. StringBuilder output = new StringBuilder(); int i = 0; while (i < serverPath.length()) { int pos = serverPath.indexOf("/",i); if (pos == -1) pos = serverPath.length(); String piece = serverPath.substring(i,pos); // Note well. This does *not* %-encode some characters such as '#', which are legal in URIs but have special meanings! // The deprecated one-argument URLEncoder.encode() uses the platform default charset; it is retained here for historical URL compatibility. String replacePiece = URLEncoder.encode(piece); // Convert the +'s back to %20's int j = 0; while (j < replacePiece.length()) { int plusPos = replacePiece.indexOf("+",j); if (plusPos == -1) plusPos = replacePiece.length(); output.append(replacePiece.substring(j,plusPos)); if (plusPos < replacePiece.length()) { output.append("%20"); plusPos++; } j = plusPos; } if (pos < serverPath.length()) { output.append("/"); pos++; } i = pos; } return "file://///"+output.toString(); } } /** Request arbitrary connector information. * This method is called directly from the API in order to allow API users to perform any one of several connector-specific * queries. *@param output is the response object, to be filled in by this method. *@param command is the command, which is taken directly from the API request. *@return true if the resource is found, false if not. In either case, output may be filled in. */ @Override public boolean requestInfo(Configuration output, String command) throws ManifoldCFException { if (command.startsWith("folders/")) { String parentFolder = command.substring("folders/".length()); try { String[] folders = getChildFolderNames(parentFolder); int i = 0; while (i < folders.length) { String folder = folders[i++]; ConfigurationNode node = new ConfigurationNode("folder"); node.setValue(folder); output.addChild(output.getChildCount(),node); } } catch (ManifoldCFException e) { ManifoldCF.createErrorNode(output,e); } } else if (command.startsWith("folder/")) { String folder = command.substring("folder/".length()); try { String canonicalFolder = validateFolderName(folder); if (canonicalFolder != null) { ConfigurationNode node = new ConfigurationNode("folder"); node.setValue(canonicalFolder); output.addChild(output.getChildCount(),node); } } catch (ManifoldCFException e) { ManifoldCF.createErrorNode(output,e); } } else return super.requestInfo(output,command); return true; } /** Queue "seed" documents. Seed documents are the starting places for crawling activity. Documents * are seeded when this method calls appropriate methods in the passed-in ISeedingActivity object. * * This method can choose to find repository changes that happen only during the specified time interval. * The seeds recorded by this method will be viewed by the framework based on what the * getConnectorModel() method returns. * * It is not a big problem if the connector chooses to create more seeds than are * strictly necessary; it is merely a question of overall work required. * * The end time and seeding version string passed to this method may be interpreted for greatest efficiency.
* For continuous crawling jobs, this method will * be called once, when the job starts, and at various periodic intervals as the job executes. * * When a job's specification is changed, the framework automatically resets the seeding version string to null. The * seeding version string may also be set to null on each job run, depending on the connector model returned by * getConnectorModel(). * * Note that it is always acceptable to send MORE documents rather than fewer to this method. * The connector will be connected before this method can be called. *@param activities is the interface this method should use to perform whatever framework actions are desired. *@param spec is a document specification (that comes from the job). *@param seedTime is the end of the time range of documents to consider, exclusive. *@param lastSeedVersion is the last seeding version string for this job, or null if the job has no previous seeding version string. *@param jobMode is an integer describing how the job is being run, whether continuous or once-only. *@return an updated seeding version string, to be stored with the job. */ @Override public String addSeedDocuments(ISeedingActivity activities, Specification spec, String lastSeedVersion, long seedTime, int jobMode) throws ManifoldCFException, ServiceInterruption { getSession(); try { for (int i = 0; i < spec.getChildCount(); i++) { SpecificationNode n = spec.getChild(i); if (n.getType().equals(NODE_STARTPOINT)) { // The id returned MUST be in canonical form!!! String seed = mapToIdentifier(n.getAttributeValue(ATTRIBUTE_PATH)); if (Logging.connectors.isDebugEnabled()) { Logging.connectors.debug("Seed = '"+seed+"'"); } activities.addSeedDocument(seed); } } } catch (MalformedURLException e) { throw new ManifoldCFException("Could not get a canonical path: "+e.getMessage(),e); } catch (UnknownHostException e) { throw new ManifoldCFException("Could not get a canonical path: "+e.getMessage(),e); } return ""; } /** Process a set of documents. * This is the method that should cause each document to be fetched, processed, and the results added * to the queue of documents for the current job and/or entered into the incremental ingestion manager. * The document specification allows this class to filter what is done based on the job. * The connector will be connected before this method can be called. *@param documentIdentifiers is the set of document identifiers to process. *@param statuses are the currently-stored document versions for each document in the set of document identifiers * passed in above. *@param activities is the interface this method should use to queue up new document references * and ingest documents. *@param jobMode is an integer describing how the job is being run, whether continuous or once-only. *@param usesDefaultAuthority will be true only if the authority in use for these documents is the default one. */ @Override public void processDocuments(String[] documentIdentifiers, IExistingVersions statuses, Specification spec, IProcessActivity activities, int jobMode, boolean usesDefaultAuthority) throws ManifoldCFException, ServiceInterruption { // Read the forced acls. A null return indicates that security is disabled!!! // A zero-length return indicates that the native acls should be used. // All of this is germane to how we ingest the document, so we need to note it in // the version string completely.
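// A restated sketch of the three-state contract above (values are illustrative, not from a real job):
//   String[] acls = null;             -> security disabled; nothing ACL-related enters the version string
//   String[] acls = new String[0];    -> use the native ACLs read from the file/share itself
//   String[] acls = { "some-token" }; -> force exactly the listed access tokens
// Any transition between these states must change the version string, so that the framework
// reindexes the affected documents.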
String[] acls = getForcedAcls(spec); String[] shareAcls = getForcedShareAcls(spec); String[] parentFolderAcls = getForcedParentFolderAcls(spec); String pathAttributeName = null; MatchMap matchMap = new MatchMap(); MatchMap fileMap = new MatchMap(); MatchMap uriMap = new MatchMap(); int i = 0; while (i < spec.getChildCount()) { SpecificationNode n = spec.getChild(i++); if (n.getType().equals(NODE_PATHNAMEATTRIBUTE)) pathAttributeName = n.getAttributeValue(ATTRIBUTE_VALUE); else if (n.getType().equals(NODE_PATHMAP)) { // Path mapping info also needs to be looked at, because it affects what is // ingested. String pathMatch = n.getAttributeValue(ATTRIBUTE_MATCH); String pathReplace = n.getAttributeValue(ATTRIBUTE_REPLACE); matchMap.appendMatchPair(pathMatch,pathReplace); } else if (n.getType().equals(NODE_FILEMAP)) { String pathMatch = n.getAttributeValue(ATTRIBUTE_MATCH); String pathReplace = n.getAttributeValue(ATTRIBUTE_REPLACE); fileMap.appendMatchPair(pathMatch,pathReplace); } else if (n.getType().equals(NODE_URIMAP)) { String pathMatch = n.getAttributeValue(ATTRIBUTE_MATCH); String pathReplace = n.getAttributeValue(ATTRIBUTE_REPLACE); uriMap.appendMatchPair(pathMatch,pathReplace); } } for (String documentIdentifier : documentIdentifiers) { getSession(); if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: Processing '"+documentIdentifier+"'"); String versionString; SmbFile file; String ingestionURI = null; String pathAttributeValue = null; String[] shareAllow = null; String[] shareDeny = null; boolean shareSecurityOn = false; String[] parentAllow = null; String[] parentDeny = null; boolean parentSecurityOn = false; String[] documentAllow = null; String[] documentDeny = null; boolean documentSecurityOn = false; // Common info we really need to fetch only once long fileLength = 0L; long lastModified = 0L; boolean fileExists = false; boolean fileIsDirectory = false; try { file = new SmbFile(documentIdentifier,SingletonContext.getInstance().withCredentials(pa)); fileExists = fileExists(file); // File has to exist AND have a non-null canonical path to be readable. If the canonical path is // null, it means that the windows permissions are not right and directory/file is not readable!!! String newPath = getFileCanonicalPath(file); // We MUST check the specification here, otherwise a recrawl may not delete what it's supposed to! if (fileExists && newPath != null) { fileIsDirectory = fileIsDirectory(file); if (checkInclude(fileIsDirectory,newPath,spec)) { if (fileIsDirectory) { // Hmm, this is not correct; version string should be empty for windows directories, since // they are not hierarchical in modified date propagation. // It's a directory. The version ID will be the // last modified date. //long lastModified = fileLastModified(file); //versionString = new Long(lastModified).toString(); versionString = ""; } else { fileLength = fileLength(file); if (checkIncludeFile(fileLength,newPath,spec,activities)) { // It's a file of acceptable length. // The ability to get ACLs, list files, and an inputstream under DFS all work now. // The SmbFile for parentFolder acls. 
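// (For reference, a sketch of the jcifs-ng idiom used throughout this connector; the URL is
// illustrative, the names are the ones this file already uses:
//   CIFSContext ctx = SingletonContext.getInstance().withCredentials(pa);
//   SmbFile f = new SmbFile("smb://server/share/folder/", ctx);
// Reusing one authenticated context is what lets ACL reads and DFS referrals work for the
// parent folder as well as for the file itself.)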
SmbFile parentFolder = new SmbFile(file.getParent(),SingletonContext.getInstance().withCredentials(pa)); // Compute the security information String[] modelArray = new String[0]; List<String> allowList = new ArrayList<String>(); List<String> denyList = new ArrayList<String>(); shareSecurityOn = getFileShareSecuritySet(allowList, denyList, file, shareAcls); shareAllow = allowList.toArray(modelArray); shareDeny = denyList.toArray(modelArray); allowList.clear(); denyList.clear(); parentSecurityOn = getFileSecuritySet(allowList, denyList, parentFolder, parentFolderAcls); parentAllow = allowList.toArray(modelArray); parentDeny = denyList.toArray(modelArray); allowList.clear(); denyList.clear(); documentSecurityOn = getFileSecuritySet(allowList, denyList, file, acls); documentAllow = allowList.toArray(modelArray); documentDeny = denyList.toArray(modelArray); // This is stuff we need for computing the version string AND for indexing lastModified = fileLastModified(file); // The format of this string changed on 11/8/2006 to be conformant with the standard way // acls and metadata descriptions are being stuffed into the version string across connectors. // The format of this string changed again on 7/3/2009 to permit the ingestion uri/iri to be included. // This was to support filename/uri mapping functionality. StringBuilder sb = new StringBuilder(); addSecuritySet(sb,shareSecurityOn,shareAllow,shareDeny); addSecuritySet(sb,parentSecurityOn,parentAllow,parentDeny); addSecuritySet(sb,documentSecurityOn,documentAllow,documentDeny); // Include the path attribute name and value in the parseable area. if (pathAttributeName != null) { sb.append('+'); pack(sb,pathAttributeName,'+'); // Calculate path string; we'll include that wholesale in the version pathAttributeValue = documentIdentifier; // 3/13/2008 // In looking at what comes into the path metadata attribute by default, and cogitating a bit, I've concluded that // the smb:// and the server/domain name at the start of the path are just plain old noise, and should be stripped. // This changes a behavior that has been around for a while, so there is a risk, but a quick back-and-forth with the // SE's leads me to believe that this is safe. if (pathAttributeValue.startsWith("smb://")) { int index = pathAttributeValue.indexOf("/","smb://".length()); if (index == -1) index = pathAttributeValue.length(); pathAttributeValue = pathAttributeValue.substring(index); } // Now, translate pathAttributeValue = matchMap.translate(pathAttributeValue); pack(sb,pathAttributeValue,'+'); } else sb.append('-'); // Calculate the ingestion IRI/URI, and include that in the parseable area. ingestionURI = convertToURI(documentIdentifier,fileMap,uriMap); pack(sb,ingestionURI,'+'); // The stuff from here on down is non-parseable. sb.append(Long.toString(lastModified)).append(":").append(Long.toString(fileLength)); // Also include the specification-based answer for the question of whether fingerprinting is // going to be done. Although we may not consider this to truly be "version" information, the // specification does affect whether anything is ingested or not, so it really is. The alternative // is to fingerprint right here, in the version part of the world, but that's got a performance // downside, because it means that we'd have to suck over pretty much everything just to determine // what we wanted to ingest.
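// Sketch of the fingerprinting flag appended below (a restatement of the convention, not new behavior):
//   wouldFileBeIncluded(path, spec, true) == wouldFileBeIncluded(path, spec, false)
//       -> append 'I'  (fingerprinting cannot change the in/out decision)
//   otherwise
//       -> append 'Y' or 'N', the answer under the pretend-indexable assumption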
boolean ifIndexable = wouldFileBeIncluded(newPath,spec,true); boolean ifNotIndexable = wouldFileBeIncluded(newPath,spec,false); if (ifIndexable == ifNotIndexable) sb.append("I"); else sb.append(ifIndexable?"Y":"N"); versionString = sb.toString(); } else { activities.deleteDocument(documentIdentifier); continue; } } } else { activities.deleteDocument(documentIdentifier); continue; } } else { activities.deleteDocument(documentIdentifier); continue; } } catch (jcifs.smb.SmbAuthException e) { Logging.connectors.warn("JCIFS: Authorization exception reading version information for "+documentIdentifier+" - skipping"); if(e.getMessage().equals("Logon failure: unknown user name or bad password.")) throw new ManifoldCFException( "SmbAuthException thrown: " + e.getMessage(), e ); else { activities.deleteDocument(documentIdentifier ); continue; } } catch (MalformedURLException mue) { Logging.connectors.error("JCIFS: MalformedURLException thrown: "+mue.getMessage(),mue); throw new ManifoldCFException("MalformedURLException thrown: "+mue.getMessage(),mue); } catch (SmbException se) { processSMBException(se,documentIdentifier,"getting document version","fetching share security"); activities.deleteDocument(documentIdentifier); continue; } catch (java.net.SocketTimeoutException e) { long currentTime = System.currentTimeMillis(); Logging.connectors.warn("JCIFS: Socket timeout reading version information for document "+documentIdentifier+": "+e.getMessage(),e); throw new ServiceInterruption("Timeout or other service interruption: "+e.getMessage(),e,currentTime + 300000L, currentTime + 3 * 60 * 60000L,-1,false); } catch (InterruptedIOException e) { throw new ManifoldCFException("Interrupted: "+e.getMessage(),e,ManifoldCFException.INTERRUPTED); } catch (IOException e) { long currentTime = System.currentTimeMillis(); Logging.connectors.warn("JCIFS: I/O error reading version information for document "+documentIdentifier+": "+e.getMessage(),e); throw new ServiceInterruption("Timeout or other service interruption: "+e.getMessage(),e,currentTime + 300000L, currentTime + 3 * 60 * 60000L,-1,false); } if (versionString.length() == 0 || activities.checkDocumentNeedsReindexing(documentIdentifier,versionString)) { String errorCode = null; String errorDesc = null; Long fileLengthLong = null; long startFetchTime = System.currentTimeMillis(); try { byte[] transferBuffer = null; try { if (fileExists) { if (fileIsDirectory) { if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: '"+documentIdentifier+"' is a directory"); // Queue up stuff for directory // DFS special support no longer needed, because JCifs now does the right thing. // This is the string we replace in the child canonical paths. // String matchPrefix = ""; // This is what we replace it with, to get back to a DFS path. // String matchReplace = ""; // DFS resolved. // Use a filter to actually do the work here. This prevents large arrays from being // created when there are big directories. ProcessDocumentsFilter filter = new ProcessDocumentsFilter(activities,spec); fileListFiles(file,filter); filter.checkAndThrow(); } else { if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: '"+documentIdentifier+"' is a file"); // We've already avoided queuing documents that we // don't want, based on file specifications. // We still need to check based on file data. // DFS support is now implicit in JCifs. 
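// The gates below run cheapest-first (URL, then mime type, then modified date), and each
// rejection records noDocument() with the version string, so the negative decision is
// remembered until the document actually changes. The ordering is an observation of the
// code that follows, not a framework requirement.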
String fileName = getFileCanonicalPath(file); if (fileName != null && !file.isHidden()) { String uri = ingestionURI; String fileNameString = file.getName(); Date lastModifiedDate = new Date(lastModified); Date creationDate = new Date(file.createTime()); Long originalLength = new Long(fileLength); String contentType = mapExtensionToMimeType(fileNameString); if (!activities.checkURLIndexable(uri)) { Logging.connectors.debug("JCIFS: Skipping file because output connector cannot accept URL ('"+uri+"')"); errorCode = activities.EXCLUDED_URL; errorDesc = "Rejected due to URL ('"+uri+"')"; activities.noDocument(documentIdentifier,versionString); continue; } if (!activities.checkMimeTypeIndexable(contentType)) { Logging.connectors.debug("JCIFS: Skipping file because output connector cannot accept content type ('"+contentType+"')"); errorCode = activities.EXCLUDED_MIMETYPE; errorDesc = "Rejected due to mime type ("+contentType+")"; activities.noDocument(documentIdentifier,versionString); continue; } if (!activities.checkDateIndexable(lastModifiedDate)) { Logging.connectors.debug("JCIFS: Skipping file because output connector cannot accept date ("+lastModifiedDate+")"); errorCode = activities.EXCLUDED_DATE; errorDesc = "Rejected due to date ("+lastModifiedDate+")"; activities.noDocument(documentIdentifier,versionString); continue; } // Initialize repository document with common stuff, and find the URI RepositoryDocument rd = new RepositoryDocument(); //If using the lastAccess patched/Google version of jcifs then this can be uncommented //Date lastAccessDate = new Date(file.lastAccess()); Integer attributes = file.getAttributes(); String shareName = file.getShare(); rd.setFileName(fileNameString); rd.setOriginalSize(originalLength); if (contentType != null) rd.setMimeType(contentType); rd.addField("lastModified", lastModifiedDate.toString()); rd.addField("fileLastModified",DateParser.formatISO8601Date(lastModifiedDate)); rd.setModifiedDate(lastModifiedDate); // Add extra obtainable fields to the field map rd.addField("createdOn", creationDate.toString()); rd.addField("fileCreatedOn",DateParser.formatISO8601Date(creationDate)); rd.setCreatedDate(creationDate); //rd.addField("lastAccess", lastModifiedDate.toString()); rd.addField("attributes", Integer.toString(attributes)); rd.addField("shareName", shareName); setDocumentSecurity(rd,shareAllow,shareDeny,parentAllow,parentDeny,documentAllow,documentDeny); setPathMetadata(rd,pathAttributeName,pathAttributeValue); // manipulate path to include the DFS alias, not the literal path // String newPath = matchPrefix + fileName.substring(matchReplace.length()); String newPath = fileName; if (checkNeedFileData(newPath, spec)) { if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: Local file data needed for '"+documentIdentifier+"'"); // Create a temporary file, and use that for the check and then the ingest File tempFile = File.createTempFile("_sdc_",null); try { FileOutputStream os = new FileOutputStream(tempFile); try { // Now, make a local copy so we can fingerprint InputStream inputStream = getFileInputStream(file); try { // Copy! 
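// Classic fixed-buffer copy loop: 64K chunks until read() returns -1 (end of stream).
// The buffer is allocated lazily, so code paths that never copy anything (directories,
// rejected files) never pay for it.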
if (transferBuffer == null) transferBuffer = new byte[65536]; while (true) { int amt = inputStream.read(transferBuffer,0,transferBuffer.length); if (amt == -1) break; os.write(transferBuffer,0,amt); } } finally { inputStream.close(); } } finally { os.close(); } if (checkIngest(tempFile, newPath, spec, activities)) { // Not needed; fetched earlier: long fileLength = tempFile.length(); if (!activities.checkLengthIndexable(fileLength)) { Logging.connectors.debug("JCIFS: Skipping file because output connector cannot accept length ("+fileLength+")"); errorCode = activities.EXCLUDED_LENGTH; errorDesc = "Rejected due to length ("+fileLength+")"; activities.noDocument(documentIdentifier,versionString); continue; } if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: Decided to ingest '"+documentIdentifier+"'"); // OK, do ingestion itself! InputStream inputStream = new FileInputStream(tempFile); try { rd.setBinary(inputStream, fileLength); activities.ingestDocumentWithException(documentIdentifier, versionString, uri, rd); errorCode = "OK"; fileLengthLong = new Long(fileLength); } finally { inputStream.close(); } } else { // We must actively remove the document here, because the getDocumentVersions() // method has no way of signalling this, since it does not do the fingerprinting. if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: Decided to remove '"+documentIdentifier+"'"); activities.noDocument(documentIdentifier, versionString); errorCode = "NOWORKNEEDED"; errorDesc = "No indexing needed for document at this time"; } } finally { tempFile.delete(); } } else { if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: Local file data not needed for '"+documentIdentifier+"'"); // Not needed; fetched earlier: long fileLength = fileLength(file); if (!activities.checkLengthIndexable(fileLength)) { Logging.connectors.debug("JCIFS: Skipping file because output connector cannot accept length ("+fileLength+")"); errorCode = activities.EXCLUDED_LENGTH; errorDesc = "Rejected because of length ("+fileLength+")"; activities.noDocument(documentIdentifier,versionString); continue; } // Presume that since the file was queued that it fulfilled the needed criteria. // Go off and ingest the fast way. // Ingest the document. 
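// Fast path: stream straight off the share, with no temp file. A sketch of the contract
// ("in" is shorthand, not a variable in this method):
//   rd.setBinary(in, fileLength);   // declared length must match the stream's byte count
//   activities.ingestDocumentWithException(documentIdentifier, versionString, uri, rd);
// This mirrors the temp-file path above; only the origin of the stream differs.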
InputStream inputStream = getFileInputStream(file); try { rd.setBinary(inputStream, fileLength); activities.ingestDocumentWithException(documentIdentifier, versionString, uri, rd); errorCode = "OK"; fileLengthLong = new Long(fileLength); } finally { inputStream.close(); } } } else { Logging.connectors.debug("JCIFS: Skipping file because canonical path is null, or because file is hidden"); errorCode = "NULLORHIDDEN"; errorDesc = "Null canonical path or hidden file"; activities.noDocument(documentIdentifier,versionString); continue; } } } } catch (MalformedURLException mue) { Logging.connectors.error("MalformedURLException tossed: "+mue.getMessage(),mue); errorCode = mue.getClass().getSimpleName().toUpperCase(Locale.ROOT); errorDesc = "Malformed URL: "+mue.getMessage(); throw new ManifoldCFException("MalformedURLException tossed: "+mue.getMessage(),mue); } catch (jcifs.smb.SmbAuthException e) { Logging.connectors.warn("JCIFS: Authorization exception reading document/directory "+documentIdentifier+" - skipping"); errorCode = e.getClass().getSimpleName().toUpperCase(Locale.ROOT); errorDesc = "Authorization: "+e.getMessage(); if(e.getMessage().equals("Logon failure: unknown user name or bad password.")) throw new ManifoldCFException( "SmbAuthException thrown: " + e.getMessage(), e ); else { activities.noDocument(documentIdentifier, versionString); continue; } } catch (SmbException se) { // At least some of these are transport errors, and should be treated as service // interruptions. long currentTime = System.currentTimeMillis(); Throwable cause = se.getRootCause(); if (cause != null && (cause instanceof jcifs.util.transport.TransportException)) { // See if it's an interruption jcifs.util.transport.TransportException te = (jcifs.util.transport.TransportException)cause; if (te.getRootCause() != null && te.getRootCause() instanceof java.lang.InterruptedException) throw new ManifoldCFException(te.getRootCause().getMessage(),te.getRootCause(),ManifoldCFException.INTERRUPTED); Logging.connectors.warn("JCIFS: Timeout processing document/directory "+documentIdentifier+": retrying...",se); errorCode = cause.getClass().getSimpleName().toUpperCase(Locale.ROOT); errorDesc = "Transport: "+cause.getMessage(); throw new ServiceInterruption("Timeout or other service interruption: "+cause.getMessage(),cause,currentTime + 300000L, currentTime + 12 * 60 * 60000L,-1,false); } if (se.getMessage().toLowerCase(Locale.ROOT).indexOf("reset by peer") != -1 || se.getMessage().toLowerCase(Locale.ROOT).indexOf("busy") != -1 || se.getMessage().toLowerCase(Locale.ROOT).indexOf("file in use") != -1 || se.getMessage().toLowerCase(Locale.ROOT).indexOf("is being used") != -1 || se.getMessage().indexOf("0xC0000054") != -1) { Logging.connectors.warn("JCIFS: 'Busy' response when processing document/directory for "+documentIdentifier+": retrying...",se); errorCode = se.getClass().getSimpleName().toUpperCase(Locale.ROOT); errorDesc = "Busy: "+se.getMessage(); throw new ServiceInterruption("Timeout or other service interruption: "+se.getMessage(),se,currentTime + 300000L, currentTime + 3 * 60 * 60000L,-1,false); } else if (se.getMessage().indexOf("handle is invalid") != -1) { Logging.connectors.warn("JCIFS: 'Handle is invalid' response when processing document/directory for "+documentIdentifier+": retrying...",se); errorCode = se.getClass().getSimpleName().toUpperCase(Locale.ROOT); errorDesc = "Expiration: "+se.getMessage(); throw new ServiceInterruption("Timeout or other service interruption: "+se.getMessage(),se,currentTime + 
300000L, currentTime + 3 * 60 * 60000L,-1,false); } else if (se.getMessage().indexOf("parameter is incorrect") != -1) { Logging.connectors.warn("JCIFS: 'Parameter is incorrect' response when processing document/directory for "+documentIdentifier+": retrying...",se); errorCode = se.getClass().getSimpleName().toUpperCase(Locale.ROOT); errorDesc = "Expiration: "+se.getMessage(); throw new ServiceInterruption("Timeout or other service interruption: "+se.getMessage(),se,currentTime + 300000L, currentTime + 3 * 60 * 60000L,-1,false); } else if (se.getMessage().indexOf("no longer available") != -1) { Logging.connectors.warn("JCIFS: 'No longer available' response when processing document/directory for "+documentIdentifier+": retrying...",se); errorCode = se.getClass().getSimpleName().toUpperCase(Locale.ROOT); errorDesc = "Expiration: "+se.getMessage(); throw new ServiceInterruption("Timeout or other service interruption: "+se.getMessage(),se,currentTime + 300000L, currentTime + 3 * 60 * 60000L,-1,false); } else if (se.getMessage().indexOf("cannot find") != -1 || se.getMessage().indexOf("cannot be found") != -1) { if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: Skipping document/directory "+documentIdentifier+" because it cannot be found"); errorCode = se.getClass().getSimpleName().toUpperCase(Locale.ROOT); errorDesc = "Not found: "+se.getMessage(); activities.noDocument(documentIdentifier, versionString); } else if (se.getMessage().indexOf("0xC0000205") != -1) { Logging.connectors.warn("JCIFS: Out of resources exception reading document/directory "+documentIdentifier+" - skipping"); // We call the delete even if it's a directory; this is harmless and it cleans up the jobqueue row. errorCode = se.getClass().getSimpleName().toUpperCase(Locale.ROOT); errorDesc = "Resources: "+se.getMessage(); activities.noDocument(documentIdentifier, versionString); } else if (se.getMessage().indexOf("is denied") != -1) { Logging.connectors.warn("JCIFS: Access exception reading document/directory "+documentIdentifier+" - skipping"); // We call the delete even if it's a directory; this is harmless and it cleans up the jobqueue row. 
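// ("Delete" here is the noDocument() call below: as used in this connector, it removes any
// previously indexed output for the document while still recording the version string, which
// is what leaves the jobqueue row in a clean state.)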
errorCode = se.getClass().getSimpleName().toUpperCase(Locale.ROOT); errorDesc = "Authorization: "+se.getMessage(); activities.noDocument(documentIdentifier, versionString); } else { Logging.connectors.error("JCIFS: SmbException tossed processing "+documentIdentifier,se); errorCode = se.getClass().getSimpleName().toUpperCase(Locale.ROOT); errorDesc = "Unknown: "+se.getMessage(); throw new ServiceInterruption("Unknown SMBException thrown: "+se.getMessage(),se,currentTime + 3 * 60 * 60000L, -1L,1,true); } } catch (IOException e) { errorCode = e.getClass().getSimpleName().toUpperCase(Locale.ROOT); errorDesc = e.getMessage(); handleIOException(documentIdentifier,e); } } catch (ManifoldCFException e) { if (e.getErrorCode() == ManifoldCFException.INTERRUPTED) errorCode = null; throw e; } finally { if (errorCode != null) activities.recordActivity(new Long(startFetchTime),ACTIVITY_ACCESS, fileLengthLong,documentIdentifier,errorCode,errorDesc,null); } } } } protected static void handleIOException(String documentIdentifier, IOException e) throws ManifoldCFException, ServiceInterruption { if (e instanceof java.net.SocketTimeoutException) { long currentTime = System.currentTimeMillis(); Logging.connectors.warn("JCIFS: Socket timeout processing "+documentIdentifier+": "+e.getMessage(),e); throw new ServiceInterruption("Timeout or other service interruption: "+e.getMessage(),e,currentTime + 300000L, currentTime + 3 * 60 * 60000L,-1,false); } else if (e instanceof InterruptedIOException) { throw new ManifoldCFException("Interrupted: "+e.getMessage(),e,ManifoldCFException.INTERRUPTED); } else { long currentTime = System.currentTimeMillis(); Logging.connectors.warn("JCIFS: IO error processing "+documentIdentifier+": "+e.getMessage(),e); throw new ServiceInterruption("Timeout or other service interruption: "+e.getMessage(),e,currentTime + 300000L, currentTime + 3 * 60 * 60000L,-1,false); } } /** Map an extension to a mime type */ protected static String mapExtensionToMimeType(String fileName) { int slashIndex = fileName.lastIndexOf("/"); if (slashIndex != -1) fileName = fileName.substring(slashIndex+1); int dotIndex = fileName.lastIndexOf("."); if (dotIndex == -1) return null; return ExtensionMimeMap.mapToMimeType(fileName.substring(dotIndex+1).toLowerCase(java.util.Locale.ROOT)); } protected static void addSecuritySet(StringBuilder description, boolean enabled, String[] allowTokens, String[] denyTokens) { if (enabled) { description.append("+"); java.util.Arrays.sort(allowTokens); java.util.Arrays.sort(denyTokens); // Stuff the acls into the description string. 
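// Sorting first makes the packed fragment canonical: the same ACL set always produces an
// identical version-string fragment no matter what order the ACEs arrived in, avoiding
// spurious reindexing. (packList() is the framework's delimiter-escaped list encoding;
// its exact format is a framework detail.)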
packList(description,allowTokens,'+'); packList(description,denyTokens,'+'); } else description.append("-"); } protected boolean getFileSecuritySet(List<String> allowList, List<String> denyList, SmbFile file, String[] forced) throws ManifoldCFException, IOException { if (forced != null) { if (forced.length == 0) { convertACEs(allowList,denyList,getFileSecurity(file, useSIDs)); } else { for (String forcedToken : forced) { allowList.add(forcedToken); } denyList.add(defaultAuthorityDenyToken); } return true; } else return false; } protected boolean getFileShareSecuritySet(List<String> allowList, List<String> denyList, SmbFile file, String[] forced) throws ManifoldCFException, IOException { if (forced != null) { if (forced.length == 0) { convertACEs(allowList,denyList,getFileShareSecurity(file, useSIDs)); } else { for (String forcedToken : forced) { allowList.add(forcedToken); } denyList.add(defaultAuthorityDenyToken); } return true; } else return false; } protected void convertACEs(List<String> allowList, List<String> denyList, ACE[] aces) { if (aces == null) { // "Public" share: S-1-1-0 allowList.add("S-1-1-0"); denyList.add(defaultAuthorityDenyToken); } else { denyList.add(defaultAuthorityDenyToken); for (ACE ace : aces) { if ((ace.getAccessMask() & ACE.FILE_READ_DATA) != 0) { if (ace.isAllow()) allowList.add(useSIDs ? ace.getSID().toString() : ace.getSID().getAccountName()); else denyList.add(useSIDs ? ace.getSID().toString() : ace.getSID().getAccountName()); } } } } protected static void processSMBException(SmbException se, String documentIdentifier, String activity, String operation) throws ManifoldCFException, ServiceInterruption { // At least some of these are transport errors, and should be treated as service // interruptions. long currentTime = System.currentTimeMillis(); Throwable cause = se.getRootCause(); if (cause != null && (cause instanceof jcifs.util.transport.TransportException)) { // See if it's an interruption jcifs.util.transport.TransportException te = (jcifs.util.transport.TransportException)cause; if (te.getRootCause() != null && te.getRootCause() instanceof java.lang.InterruptedException) throw new ManifoldCFException(te.getRootCause().getMessage(),te.getRootCause(),ManifoldCFException.INTERRUPTED); Logging.connectors.warn("JCIFS: Timeout "+activity+" for "+documentIdentifier+": retrying...",se); // Transport exceptions no longer abort when they give up, so we can't get notified that there is a problem. 
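// ServiceInterruption argument sketch for the throw below (signature: message, cause,
// retryTime, failTime, failRetryCount, abortOnFail):
//   first retry about 5 minutes out (currentTime + 300000L),
//   keep retrying for up to about 12 hours (currentTime + 12 * 60 * 60000L),
//   -1 = no fixed retry-count limit, false = do not abort the job when retries run out.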
throw new ServiceInterruption("Timeout or other service interruption: "+cause.getMessage(),cause,currentTime + 300000L, currentTime + 12 * 60 * 60000L,-1,false); } if (se.getMessage().indexOf("busy") != -1) { Logging.connectors.warn("JCIFS: 'Busy' response when "+activity+" for "+documentIdentifier+": retrying...",se); // Busy exceptions just skip the document and keep going throw new ServiceInterruption("Timeout or other service interruption: "+se.getMessage(),se,currentTime + 300000L, currentTime + 3 * 60 * 60000L,-1,false); } else if (se.getMessage().indexOf("0xC0000054") != -1) { Logging.connectors.warn("JCIFS: 'Busy' response when "+activity+" for "+documentIdentifier+": retrying...",se); // Busy exceptions just skip the document and keep going throw new ServiceInterruption("Timeout or other service interruption: "+se.getMessage(),se,currentTime + 300000L, currentTime + 3 * 60 * 60000L,-1,false); } else if (se.getMessage().indexOf("handle is invalid") != -1) { Logging.connectors.warn("JCIFS: 'Handle is invalid' response when "+activity+" for "+documentIdentifier+": retrying...",se); // Invalid handle errors treated like "busy" throw new ServiceInterruption("Timeout or other service interruption: "+se.getMessage(),se,currentTime + 300000L, currentTime + 3 * 60 * 60000L,-1,false); } else if (se.getMessage().indexOf("parameter is incorrect") != -1) { Logging.connectors.warn("JCIFS: 'Parameter is incorrect' response when "+activity+" for "+documentIdentifier+": retrying...",se); // Invalid handle errors treated like "busy" throw new ServiceInterruption("Timeout or other service interruption: "+se.getMessage(),se,currentTime + 300000L, currentTime + 3 * 60 * 60000L,-1,false); } else if (se.getMessage().indexOf("no longer available") != -1) { Logging.connectors.warn("JCIFS: 'No longer available' response when "+activity+" for "+documentIdentifier+": retrying...",se); // No longer available == busy throw new ServiceInterruption("Timeout or other service interruption: "+se.getMessage(),se,currentTime + 300000L, currentTime + 3 * 60 * 60000L,-1,false); } else if(se.getMessage().indexOf("No process is on the other end of the pipe") != -1) { Logging.connectors.warn("JCIFS: 'No process is on the other end of the pipe' response when "+activity+" for "+documentIdentifier+": retrying...",se); // 'No process is on the other end of the pipe' skip the document and keep going throw new ServiceInterruption("Timeout or other service interruption: "+se.getMessage(),se,currentTime + 300000L, currentTime + 3 * 60 * 60000L,-1,false); } else if (se.getMessage().toLowerCase(Locale.ROOT).indexOf("busy") != -1 || se.getMessage().toLowerCase(Locale.ROOT).indexOf("file in use") != -1 || se.getMessage().toLowerCase(Locale.ROOT).indexOf("is being used") != -1) { Logging.connectors.warn("JCIFS: 'File in Use' response when "+activity+" for "+documentIdentifier+": retrying...",se); // 'File in Use' skip the document and keep going throw new ServiceInterruption("Timeout or other service interruption: "+se.getMessage(),se,currentTime + 300000L, currentTime + 3 * 60 * 60000L,-1,false); } else if (se.getMessage().indexOf("cannot find") != -1 || se.getMessage().indexOf("cannot be found") != -1) { return; } else if (se.getMessage().indexOf("is denied") != -1) { Logging.connectors.warn("JCIFS: Access exception when "+activity+" for "+documentIdentifier+" - skipping"); return; } else if (se.getMessage().indexOf("Incorrect function") != -1) { Logging.connectors.error("JCIFS: Server does not support a required operation 
("+operation+"?) for "+documentIdentifier); throw new ManifoldCFException("Server does not support a required operation ("+operation+", possibly?) accessing document "+documentIdentifier,se); } else { Logging.connectors.error("Unrecognized SmbException thrown "+activity+" for "+documentIdentifier,se); throw new ServiceInterruption("Timeout or other service interruption: "+se.getMessage(),se,currentTime + 3 * 60 * 60000L, -1,1,true); } } protected static void setDocumentSecurity(RepositoryDocument rd, String[] shareAllow, String[] shareDeny, String[] parentAllow, String[] parentDeny, String[] allow, String[] deny) { // set share acls if (shareAllow.length > 0 || shareDeny.length > 0) rd.setSecurity(RepositoryDocument.SECURITY_TYPE_SHARE,shareAllow,shareDeny); // set parent folder acls if (parentAllow.length > 0 || parentDeny.length > 0) rd.setSecurity(RepositoryDocument.SECURITY_TYPE_PARENT,parentAllow,parentDeny); // set native file acls if (allow.length > 0 || deny.length > 0) rd.setSecurity(RepositoryDocument.SECURITY_TYPE_DOCUMENT,allow,deny); } protected static void setPathMetadata(RepositoryDocument rd, String pathAttributeName, String pathAttributeValue) throws ManifoldCFException { if (pathAttributeName != null && pathAttributeValue != null) { if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: Path attribute name is '"+pathAttributeName+"'"); if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: Path attribute value is '"+pathAttributeValue+"'"); rd.addField(pathAttributeName,pathAttributeValue); } else Logging.connectors.debug("JCIFS: Path attribute name is null"); } /** Check status of connection. */ @Override public String check() throws ManifoldCFException { getSession(); String serverURI = smbconnectionPath; SmbFile server = null; try { server = new SmbFile(serverURI,SingletonContext.getInstance().withCredentials(pa)); } catch (MalformedURLException e1) { return "Malformed URL: '"+serverURI+"': "+e1.getMessage(); } try { // check to make sure it's a server or a folder int type = getFileType(server); if (type==SmbFile.TYPE_SERVER || type==SmbFile.TYPE_SHARE || type==SmbFile.TYPE_FILESYSTEM) { try { server.connect(); if (!server.exists()) return "Server or path does not exist"; } catch (java.net.SocketTimeoutException e) { return "Timeout connecting to server: "+e.getMessage(); } catch (InterruptedIOException e) { throw new ManifoldCFException("Interrupted: "+e.getMessage(),e,ManifoldCFException.INTERRUPTED); } catch (IOException e) { return "Couldn't connect to server: "+e.getMessage(); } return super.check(); } else return "URI is not a server URI: '"+serverURI+"'"; } catch (SmbException e) { return "Could not connect: "+e.getMessage(); } } // Protected methods /** Check if a file's stats are OK for inclusion. 
*/ protected static boolean checkIncludeFile(long fileLength, String fileName, Specification documentSpecification, IFingerprintActivity activities) throws ManifoldCFException, ServiceInterruption { // If it's a file, make sure the maximum length is not exceeded if (!activities.checkLengthIndexable(fileLength) || !activities.checkMimeTypeIndexable(mapExtensionToMimeType(fileName))) return false; long maxFileLength = Long.MAX_VALUE; for (int i = 0; i < documentSpecification.getChildCount(); i++) { SpecificationNode sn = documentSpecification.getChild(i); if (sn.getType().equals(NODE_MAXLENGTH)) { try { String value = sn.getAttributeValue(ATTRIBUTE_VALUE); if (value != null && value.length() > 0) maxFileLength = new Long(value).longValue(); } catch (NumberFormatException e) { throw new ManifoldCFException("Bad number: "+e.getMessage(),e); } } } if (fileLength > maxFileLength) return false; return true; } /** Check if a file or directory should be included, given a document specification. *@param isDirectory is true if the file is a directory. *@param fileName is the canonical file name. *@param documentSpecification is the specification. *@return true if it should be included. */ protected boolean checkInclude(boolean isDirectory, String fileName, Specification documentSpecification) throws ManifoldCFException { if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: In checkInclude for '"+fileName+"'"); // This method does not attempt to do any fingerprinting. Instead, it will opt to include any // file that may depend on fingerprinting, and exclude everything else. The actual setup for // the fingerprinting test is in checkNeedFileData(), while the actual code that determines in vs. // out using the file data is in checkIngest(). try { String pathPart; String filePart; if (isDirectory) { pathPart = fileName; filePart = null; } else { int lastSlash = fileName.lastIndexOf("/"); if (lastSlash == -1) { pathPart = ""; filePart = fileName; } else { // Pathpart has to include the slash pathPart = fileName.substring(0,lastSlash+1); filePart = fileName.substring(lastSlash+1); } } int i; // Scan until we match a startpoint i = 0; while (i < documentSpecification.getChildCount()) { SpecificationNode sn = documentSpecification.getChild(i++); if (sn.getType().equals(NODE_STARTPOINT)) { // Prepend the server URL to the path, since that's what pathpart will have. String path = mapToIdentifier(sn.getAttributeValue(ATTRIBUTE_PATH)); // Compare with filename if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: Matching startpoint '"+path+"' against actual '"+pathPart+"'"); int matchEnd = matchSubPath(path,pathPart); if (matchEnd == -1) { Logging.connectors.debug("JCIFS: No match"); continue; } Logging.connectors.debug("JCIFS: Startpoint found!"); // If this is the root, it's always included. if (matchEnd == fileName.length()) { Logging.connectors.debug("JCIFS: Startpoint: always included"); return true; } // matchEnd is the start of the rest of the path (after the match) in fileName. // We need to walk through the rules and see whether it's in or out. 
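// First matching include/exclude rule wins. A hypothetical specification fragment:
//   <include type="file" filespec="*.doc"/>
//   <exclude type="file" filespec="*"/>
// sends "report.doc" out at the first rule (included), while everything else falls through
// to the catch-all exclude. Rules whose outcome hinges on fingerprinting are handled via
// the isKnown flag below, erring on the side of inclusion.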
int j = 0; while (j < sn.getChildCount()) { SpecificationNode node = sn.getChild(j++); String flavor = node.getType(); if (flavor.equals(NODE_INCLUDE) || flavor.equals(NODE_EXCLUDE)) { String type = node.getAttributeValue(ATTRIBUTE_TYPE); if (type == null) type = ""; String indexable = node.getAttributeValue(ATTRIBUTE_INDEXABLE); if (indexable == null) indexable = ""; String match = node.getAttributeValue(ATTRIBUTE_FILESPEC); // Check if there's a match against the filespec if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: Checking '"+match+"' against '"+fileName.substring(matchEnd-1)+"'"); boolean isMatch = checkMatch(fileName,matchEnd-1,match); boolean isKnown = true; // Check the directory/file criteria if (isMatch) { Logging.connectors.debug("JCIFS: Match found."); isMatch = type.length() == 0 || (type.equals(VALUE_DIRECTORY) && isDirectory) || (type.equals(VALUE_FILE) && !isDirectory); } else Logging.connectors.debug("JCIFS: No match!"); // Check the indexable criteria if (isMatch) { if (indexable.length() != 0) { // Directories are never considered indexable. // But if this is not a directory, things become ambiguous. boolean isIndexable; if (isDirectory) { isIndexable = false; isMatch = (indexable.equals("yes") && isIndexable) || (indexable.equals("no") && !isIndexable); } else isKnown = false; } } if (isKnown) { if (isMatch) { if (flavor.equals(NODE_INCLUDE)) return true; else return false; } } else { // Not known // What we do depends on whether this is an include rule or an exclude one. // We want to err on the side of inclusion, which means for include rules // we return true, and for exclude rules we simply continue. if (flavor.equals(NODE_INCLUDE)) return true; // Continue } } } } } return false; } catch (MalformedURLException e) { throw new ManifoldCFException("Couldn't map to canonical path: "+e.getMessage(),e); } catch (UnknownHostException e) { throw new ManifoldCFException("Couldn't map to canonical path: "+e.getMessage(),e); } finally { if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: Leaving checkInclude for '"+fileName+"'"); } } /** Pretend that a file is either indexable or not, and return whether or not it would be ingested. * This is only ever called for files. *@param fileName is the canonical file name. *@param documentSpecification is the specification. *@param pretendIndexable should be set to true if the document's contents would be fingerprinted as "indexable", * or false otherwise. *@return true if the file would be ingested given the parameters. */ protected boolean wouldFileBeIncluded(String fileName, Specification documentSpecification, boolean pretendIndexable) throws ManifoldCFException { if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: In wouldFileBeIncluded for '"+fileName+"', pretendIndexable="+(pretendIndexable?"true":"false")); // This file was flagged as needing file data. However, that doesn't tell us *for what* we need it. // So we need to redo the decision tree, but this time do everything completely. 
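// "Completely" means the indexable attribute is evaluated too, with pretendIndexable
// standing in for the real fingerprint answer. checkNeedFileData() below then asks:
//   wouldFileBeIncluded(name, spec, true) != wouldFileBeIncluded(name, spec, false)
// and only when the two answers differ is the file's content actually fetched and
// fingerprinted.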
try { String pathPart; String filePart; boolean isDirectory = false; int lastSlash = fileName.lastIndexOf("/"); if (lastSlash == -1) { pathPart = ""; filePart = fileName; } else { pathPart = fileName.substring(0,lastSlash+1); filePart = fileName.substring(lastSlash+1); } // Scan until we match a startpoint int i = 0; while (i < documentSpecification.getChildCount()) { SpecificationNode sn = documentSpecification.getChild(i++); if (sn.getType().equals(NODE_STARTPOINT)) { // Prepend the server URL to the path, since that's what pathpart will have. String path = mapToIdentifier(sn.getAttributeValue(ATTRIBUTE_PATH)); // Compare with filename int matchEnd = matchSubPath(path,pathPart); if (matchEnd == -1) { continue; } // matchEnd is the start of the rest of the path (after the match) in fileName. // We need to walk through the rules and see whether it's in or out. int j = 0; while (j < sn.getChildCount()) { SpecificationNode node = sn.getChild(j++); String flavor = node.getType(); if (flavor.equals(NODE_INCLUDE) || flavor.equals(NODE_EXCLUDE)) { String type = node.getAttributeValue(ATTRIBUTE_TYPE); if (type == null) type = ""; String indexable = node.getAttributeValue(ATTRIBUTE_INDEXABLE); if (indexable == null) indexable = ""; String match = node.getAttributeValue(ATTRIBUTE_FILESPEC); // Check if there's a match against the filespec boolean isMatch = checkMatch(fileName,matchEnd-1,match); // Check the directory/file criteria if (isMatch) { isMatch = type.length() == 0 || (type.equals(VALUE_DIRECTORY) && isDirectory) || (type.equals(VALUE_FILE) && !isDirectory); } // Check the indexable criteria if (isMatch) { if (indexable.length() != 0) { // Directories are never considered indexable. // But if this is not a directory, things become ambiguous. boolean isIndexable; if (isDirectory) isIndexable = false; else { // Evaluate the parts of being indexable that are based on the filename, mime type, and url isIndexable = pretendIndexable; } isMatch = (indexable.equals("yes") && isIndexable) || (indexable.equals("no") && !isIndexable); } } if (isMatch) { if (flavor.equals(NODE_INCLUDE)) return true; else return false; } } } } } return false; } catch (MalformedURLException e) { throw new ManifoldCFException("Couldn't map to canonical path: "+e.getMessage(),e); } catch (UnknownHostException e) { throw new ManifoldCFException("Couldn't map to canonical path: "+e.getMessage(),e); } finally { if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: Leaving wouldFileBeIncluded for '"+fileName+"'"); } } /** Check to see whether we need the contents of the file for anything. We do this by assuming that * the file is indexable, and assuming that it's not, and seeing if the same thing would happen. *@param fileName is the name of the file. *@param documentSpecification is the document specification. *@return true if the file needs to be fingerprinted. */ protected boolean checkNeedFileData(String fileName, Specification documentSpecification) throws ManifoldCFException { return wouldFileBeIncluded(fileName,documentSpecification,true) != wouldFileBeIncluded(fileName,documentSpecification,false); } /** Check if a file should be ingested, given a document specification and a local copy of the * file. It is presumed that only files that passed checkInclude() and were also flagged as needing * file data by checkNeedFileData() will be checked by this method. *@param localFile is the file. *@param fileName is the JCIFS file name. *@param documentSpecification is the specification. 
*@param activities are the activities available to determine indexability. *@return true if the file should be ingested. */ protected boolean checkIngest(File localFile, String fileName, Specification documentSpecification, IFingerprintActivity activities) throws ManifoldCFException, ServiceInterruption { if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: In checkIngest for '"+fileName+"'"); // This file was flagged as needing file data. However, that doesn't tell us *for what* we need it. // So we need to redo the decision tree, but this time do everything completely. try { String pathPart; String filePart; boolean isDirectory = false; int lastSlash = fileName.lastIndexOf("/"); if (lastSlash == -1) { pathPart = ""; filePart = fileName; } else { pathPart = fileName.substring(0,lastSlash+1); filePart = fileName.substring(lastSlash+1); } // Scan until we match a startpoint int i = 0; while (i < documentSpecification.getChildCount()) { SpecificationNode sn = documentSpecification.getChild(i++); if (sn.getType().equals(NODE_STARTPOINT)) { // Prepend the server URL to the path, since that's what pathpart will have. String path = mapToIdentifier(sn.getAttributeValue(ATTRIBUTE_PATH)); // Compare with filename int matchEnd = matchSubPath(path,pathPart); if (matchEnd == -1) { continue; } // matchEnd is the start of the rest of the path (after the match) in fileName. // We need to walk through the rules and see whether it's in or out. int j = 0; while (j < sn.getChildCount()) { SpecificationNode node = sn.getChild(j++); String flavor = node.getType(); if (flavor.equals(NODE_INCLUDE) || flavor.equals(NODE_EXCLUDE)) { String type = node.getAttributeValue(ATTRIBUTE_TYPE); if (type == null) type = ""; String indexable = node.getAttributeValue(ATTRIBUTE_INDEXABLE); if (indexable == null) indexable = ""; String match = node.getAttributeValue(ATTRIBUTE_FILESPEC); // Check if there's a match against the filespec boolean isMatch = checkMatch(fileName,matchEnd-1,match); // Check the directory/file criteria if (isMatch) { isMatch = type.length() == 0 || (type.equals(VALUE_DIRECTORY) && isDirectory) || (type.equals(VALUE_FILE) && !isDirectory); } // Check the indexable criteria if (isMatch) { if (indexable.length() != 0) { // Directories are never considered indexable. // But if this is not a directory, things become ambiguous. boolean isIndexable; if (isDirectory) isIndexable = false; else { isIndexable = activities.checkDocumentIndexable(localFile); } isMatch = (indexable.equals("yes") && isIndexable) || (indexable.equals("no") && !isIndexable); } } if (isMatch) { if (flavor.equals(NODE_INCLUDE)) return true; else return false; } } } } } return false; } catch (MalformedURLException e) { throw new ManifoldCFException("Couldn't map to canonical path: "+e.getMessage(),e); } catch (UnknownHostException e) { throw new ManifoldCFException("Couldn't map to canonical path: "+e.getMessage(),e); } finally { if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: Leaving checkIngest for '"+fileName+"'"); } } /** Match a sub-path. The sub-path must match the complete starting part of the full path, in a path * sense. The returned value should point into the file name beyond the end of the matched path, or * be -1 if there is no match. *@param subPath is the sub path. *@param fullPath is the full path. *@return the index of the start of the remaining part of the full path, or -1. 
*/ protected static int matchSubPath(String subPath, String fullPath) { if (subPath.length() > fullPath.length()) return -1; if (!fullPath.startsWith(subPath)) return -1; int rval = subPath.length(); if (fullPath.length() == rval) return rval; char x = fullPath.charAt(rval); if (x == File.separatorChar) rval++; return rval; } /** Check a match between two strings with wildcards. *@param sourceMatch is the expanded string (no wildcards) *@param sourceIndex is the starting point in the expanded string. *@param match is the wildcard-based string. *@return true if there is a match. */ protected static boolean checkMatch(String sourceMatch, int sourceIndex, String match) { // Note: The java regex stuff looks pretty heavyweight for this purpose. // I've opted to try to do a simple recursive version myself, which is not compiled. // Basically, the match proceeds by recursive descent through the string, so that all *'s cause // recursion. boolean caseSensitive = false; return processCheck(caseSensitive, sourceMatch, sourceIndex, match, 0); } /** Recursive worker method for checkMatch. Returns 'true' if there is a path that consumes both * strings in their entirety in a matched way. *@param caseSensitive is true if file names are case sensitive. *@param sourceMatch is the source string (w/o wildcards) *@param sourceIndex is the current point in the source string. *@param match is the match string (w/wildcards) *@param matchIndex is the current point in the match string. *@return true if there is a match. */ protected static boolean processCheck(boolean caseSensitive, String sourceMatch, int sourceIndex, String match, int matchIndex) { // Logging.connectors.debug("Matching '"+sourceMatch+"' position "+Integer.toString(sourceIndex)+ // " against '"+match+"' position "+Integer.toString(matchIndex)); // Match up through the next * we encounter while (true) { // If we've reached the end, it's a match. if (sourceMatch.length() == sourceIndex && match.length() == matchIndex) return true; // If one has reached the end but the other hasn't, no match if (match.length() == matchIndex) return false; if (sourceMatch.length() == sourceIndex) { if (match.charAt(matchIndex) != '*') return false; matchIndex++; continue; } char x = sourceMatch.charAt(sourceIndex); char y = match.charAt(matchIndex); if (!caseSensitive) { // Fold both characters to lower case before comparing. if (x >= 'A' && x <= 'Z') x += 'a' - 'A'; if (y >= 'A' && y <= 'Z') y += 'a' - 'A'; } if (y == '*') { // Wildcard! // We will recurse at this point. // Basically, we want to combine the results for leaving the "*" in the match string // at this point and advancing the source index, with skipping the "*" and leaving the source // string alone. return processCheck(caseSensitive,sourceMatch,sourceIndex+1,match,matchIndex) || processCheck(caseSensitive,sourceMatch,sourceIndex,match,matchIndex+1); } if (y == '?' || x == y) { sourceIndex++; matchIndex++; } else return false; } } /** Grab forced acl out of document specification. *@param spec is the document specification. *@return the acls.
*/ protected static String[] getForcedAcls(Specification spec) { HashMap map = new HashMap(); int i = 0; boolean securityOn = true; while (i < spec.getChildCount()) { SpecificationNode sn = spec.getChild(i++); if (sn.getType().equals(NODE_ACCESS)) { String token = sn.getAttributeValue(ATTRIBUTE_TOKEN); map.put(token,token); } else if (sn.getType().equals(NODE_SECURITY)) { String value = sn.getAttributeValue(ATTRIBUTE_VALUE); if (value.equals("on")) securityOn = true; else if (value.equals("off")) securityOn = false; } } if (!securityOn) return null; String[] rval = new String[map.size()]; Iterator iter = map.keySet().iterator(); i = 0; while (iter.hasNext()) { rval[i++] = (String)iter.next(); } return rval; } /** Grab forced share acls out of document specification. *@param spec is the document specification. *@return the acls. */ protected static String[] getForcedShareAcls(Specification spec) { HashMap map = new HashMap(); int i = 0; boolean securityOn = true; while (i < spec.getChildCount()) { SpecificationNode sn = spec.getChild(i++); if (sn.getType().equals(NODE_SHAREACCESS)) { String token = sn.getAttributeValue(ATTRIBUTE_TOKEN); map.put(token,token); } else if (sn.getType().equals(NODE_SHARESECURITY)) { String value = sn.getAttributeValue(ATTRIBUTE_VALUE); if (value.equals("on")) securityOn = true; else if (value.equals("off")) securityOn = false; } } if (!securityOn) return null; String[] rval = new String[map.size()]; Iterator iter = map.keySet().iterator(); i = 0; while (iter.hasNext()) { rval[i++] = (String)iter.next(); } return rval; } /** Grab forced parent folder acls out of document specification. *@param spec is the document specification. *@return the acls. */ protected static String[] getForcedParentFolderAcls(Specification spec) { HashMap map = new HashMap(); int i = 0; boolean securityOn = false; while (i < spec.getChildCount()) { SpecificationNode sn = spec.getChild(i++); if (sn.getType().equals(NODE_PARENTFOLDERACCESS)) { String token = sn.getAttributeValue(ATTRIBUTE_TOKEN); map.put(token,token); } else if (sn.getType().equals(NODE_PARENTFOLDERSECURITY)) { String value = sn.getAttributeValue(ATTRIBUTE_VALUE); if (value.equals("on")) securityOn = true; else if (value.equals("off")) securityOn = false; } } if (!securityOn) return null; String[] rval = new String[map.size()]; Iterator iter = map.keySet().iterator(); i = 0; while (iter.hasNext()) { rval[i++] = (String)iter.next(); } return rval; } /** Map a "path" specification to a full identifier. */ protected String mapToIdentifier(String path) throws MalformedURLException, UnknownHostException { String smburi = smbconnectionPath; String uri = smburi + path + "/"; return getFileCanonicalPath(new SmbFile(uri,SingletonContext.getInstance().withCredentials(pa))); } // These methods allow me to experiment with cluster-mandated error handling on an entirely local level. They correspond to individual SMBFile methods. /** Get canonical path */ protected static String getFileCanonicalPath(SmbFile file) { return file.getCanonicalPath(); } /** Check for file/directory existence */ protected static boolean fileExists(SmbFile file) throws SmbException { int totalTries = 0; int retriesRemaining = 3; SmbException currentException = null; while (retriesRemaining > 0 && totalTries < 5) { retriesRemaining--; totalTries++; try { return file.exists(); } catch (SmbException e) { // If it's an interruption, throw it right away. 
Throwable cause = e.getRootCause(); if (cause != null && (cause instanceof jcifs.util.transport.TransportException)) { // See if it's an interruption jcifs.util.transport.TransportException te = (jcifs.util.transport.TransportException)cause; if (te.getRootCause() != null && te.getRootCause() instanceof java.lang.InterruptedException) throw e; } Logging.connectors.warn("JCIFS: Possibly transient exception detected on attempt "+Integer.toString(totalTries)+" while checking if file exists: "+e.getMessage(),e); if (currentException != null) { // Compare exceptions. If they differ, reset the retry count. if (!equivalentSmbExceptions(currentException,e)) retriesRemaining = 3; } currentException = e; } } throw currentException; } /** Check if file is a directory */ protected static boolean fileIsDirectory(SmbFile file) throws SmbException { int totalTries = 0; int retriesRemaining = 3; SmbException currentException = null; while (retriesRemaining > 0 && totalTries < 5) { retriesRemaining--; totalTries++; try { return file.isDirectory(); } catch (SmbException e) { // If it's an interruption, throw it right away. Throwable cause = e.getRootCause(); if (cause != null && (cause instanceof jcifs.util.transport.TransportException)) { // See if it's an interruption jcifs.util.transport.TransportException te = (jcifs.util.transport.TransportException)cause; if (te.getRootCause() != null && te.getRootCause() instanceof java.lang.InterruptedException) throw e; } Logging.connectors.warn("JCIFS: Possibly transient exception detected on attempt "+Integer.toString(totalTries)+" while seeing if file is a directory: "+e.getMessage(),e); if (currentException != null) { // Compare exceptions. If they differ, reset the retry count. if (!equivalentSmbExceptions(currentException,e)) retriesRemaining = 3; } currentException = e; } } throw currentException; } /** Get last modified date for file */ protected static long fileLastModified(SmbFile file) throws SmbException { int totalTries = 0; int retriesRemaining = 3; SmbException currentException = null; while (retriesRemaining > 0 && totalTries < 5) { retriesRemaining--; totalTries++; try { return file.lastModified(); } catch (SmbException e) { // If it's an interruption, throw it right away. Throwable cause = e.getRootCause(); if (cause != null && (cause instanceof jcifs.util.transport.TransportException)) { // See if it's an interruption jcifs.util.transport.TransportException te = (jcifs.util.transport.TransportException)cause; if (te.getRootCause() != null && te.getRootCause() instanceof java.lang.InterruptedException) throw e; } Logging.connectors.warn("JCIFS: Possibly transient exception detected on attempt "+Integer.toString(totalTries)+" while getting file last-modified date: "+e.getMessage(),e); if (currentException != null) { // Compare exceptions. If they differ, reset the retry count. if (!equivalentSmbExceptions(currentException,e)) retriesRemaining = 3; } currentException = e; } } throw currentException; } /** Get file length */ protected static long fileLength(SmbFile file) throws SmbException { int totalTries = 0; int retriesRemaining = 3; SmbException currentException = null; while (retriesRemaining > 0 && totalTries < 5) { retriesRemaining--; totalTries++; try { return file.length(); } catch (SmbException e) { // If it's an interruption, throw it right away. 
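// Retry budget (same policy as the other SmbFile wrappers here): at most 5 attempts in total,
// and at most 3 consecutive attempts failing with an equivalent (same-message) exception;
// a different failure resets retriesRemaining to 3.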
Throwable cause = e.getRootCause(); if (cause != null && (cause instanceof jcifs.util.transport.TransportException)) { // See if it's an interruption jcifs.util.transport.TransportException te = (jcifs.util.transport.TransportException)cause; if (te.getRootCause() != null && te.getRootCause() instanceof java.lang.InterruptedException) throw e; } Logging.connectors.warn("JCIFS: Possibly transient exception detected on attempt "+Integer.toString(totalTries)+" while getting file length: "+e.getMessage(),e); if (currentException != null) { // Compare exceptions. If they differ, reset the retry count. if (!equivalentSmbExceptions(currentException,e)) retriesRemaining = 3; } currentException = e; } } throw currentException; } /** List files */ protected static SmbFile[] fileListFiles(SmbFile file, SmbFileFilter filter) throws SmbException { int totalTries = 0; int retriesRemaining = 3; SmbException currentException = null; while (retriesRemaining > 0 && totalTries < 5) { retriesRemaining--; totalTries++; try { return file.listFiles(filter); } catch (SmbException e) { // If it's an interruption, throw it right away. Throwable cause = e.getRootCause(); if (cause != null && (cause instanceof jcifs.util.transport.TransportException)) { // See if it's an interruption jcifs.util.transport.TransportException te = (jcifs.util.transport.TransportException)cause; if (te.getRootCause() != null && te.getRootCause() instanceof java.lang.InterruptedException) throw e; } if (e.getMessage().equals("0x8000002D")) { // Symlink Logging.connectors.warn("JCIFS: Symlink detected: "+file); return new SmbFile[0]; } Logging.connectors.warn("JCIFS: Possibly transient exception detected on attempt "+Integer.toString(totalTries)+" while listing files: "+e.getMessage(),e); if (currentException != null) { // Compare exceptions. If they differ, reset the retry count. if (!equivalentSmbExceptions(currentException,e)) retriesRemaining = 3; } currentException = e; } } throw currentException; } /** Get input stream for file */ protected static InputStream getFileInputStream(SmbFile file) throws IOException { int totalTries = 0; int retriesRemaining = 3; IOException currentException = null; while (retriesRemaining > 0 && totalTries < 5) { retriesRemaining--; totalTries++; try { return file.getInputStream(); } catch (java.net.SocketTimeoutException e) { throw e; } catch (InterruptedIOException e) { throw e; } catch (IOException e) { Logging.connectors.warn("JCIFS: Possibly transient exception detected on attempt "+Integer.toString(totalTries)+" while getting file input stream: "+e.getMessage(),e); if (currentException != null) { // Compare exceptions. If they differ, reset the retry count. if (!equivalentIOExceptions(currentException,e)) retriesRemaining = 3; } currentException = e; } } throw currentException; } /** Get file security */ protected static ACE[] getFileSecurity(SmbFile file, boolean useSIDs) throws IOException { int totalTries = 0; int retriesRemaining = 3; IOException currentException = null; while (retriesRemaining > 0 && totalTries < 5) { retriesRemaining--; totalTries++; try { return file.getSecurity(!useSIDs); } catch (java.net.SocketTimeoutException e) { throw e; } catch (InterruptedIOException e) { throw e; } catch (IOException e) { Logging.connectors.warn("JCIFS: Possibly transient exception detected on attempt "+Integer.toString(totalTries)+" while getting file security: "+e.getMessage(),e); if (currentException != null) { // Compare exceptions. If they differ, reset the retry count. 
if (!equivalentIOExceptions(currentException,e)) retriesRemaining = 3; } currentException = e; } } throw currentException; } /** Get share security */ protected static ACE[] getFileShareSecurity(SmbFile file, boolean useSIDs) throws IOException { int totalTries = 0; int retriesRemaining = 3; IOException currentException = null; while (retriesRemaining > 0 && totalTries < 5) { retriesRemaining--; totalTries++; try { return file.getShareSecurity(!useSIDs); } catch (java.net.SocketTimeoutException e) { throw e; } catch (InterruptedIOException e) { throw e; } catch (IOException e) { Logging.connectors.warn("JCIFS: Possibly transient exception detected on attempt "+Integer.toString(totalTries)+" while getting share security: "+e.getMessage(),e); if (currentException != null) { // Compare exceptions. If they differ, reset the retry count. if (!equivalentIOExceptions(currentException,e)) retriesRemaining = 3; } currentException = e; } } throw currentException; } /** Get file type */ protected static int getFileType(SmbFile file) throws SmbException { int totalTries = 0; int retriesRemaining = 3; SmbException currentException = null; while (retriesRemaining > 0 && totalTries < 5) { retriesRemaining--; totalTries++; try { return file.getType(); } catch (SmbException e) { // If it's an interruption, throw it right away. Throwable cause = e.getRootCause(); if (cause != null && (cause instanceof jcifs.util.transport.TransportException)) { // See if it's an interruption jcifs.util.transport.TransportException te = (jcifs.util.transport.TransportException)cause; if (te.getRootCause() != null && te.getRootCause() instanceof java.lang.InterruptedException) throw e; } Logging.connectors.warn("JCIFS: Possibly transient exception detected on attempt "+Integer.toString(totalTries)+" while getting file type: "+e.getMessage(),e); if (currentException != null) { // Compare exceptions. If they differ, reset the retry count. if (!equivalentSmbExceptions(currentException,e)) retriesRemaining = 3; } currentException = e; } } throw currentException; } /** Check if two SmbExceptions are equivalent */ protected static boolean equivalentSmbExceptions(SmbException e1, SmbException e2) { // The thing we want to compare is the message. This is a little risky in that if there are (for example) object addresses in the message, the comparison will always fail. // However, I don't think we expect any such thing in this case. String e1m = e1.getMessage(); String e2m = e2.getMessage(); if (e1m == null) e1m = ""; if (e2m == null) e2m = ""; return e1m.equals(e2m); } /** Check if two IOExceptions are equivalent */ protected static boolean equivalentIOExceptions(IOException e1, IOException e2) { // The thing we want to compare is the message. This is a little risky in that if there are (for example) object addresses in the message, the comparison will always fail. // However, I don't think we expect any such thing in this case. String e1m = e1.getMessage(); String e2m = e2.getMessage(); if (e1m == null) e1m = ""; if (e2m == null) e2m = ""; return e1m.equals(e2m); } // UI support methods. // // These support methods come in two varieties. The first bunch is involved in setting up connection configuration information. The second bunch // is involved in presenting and editing document specification information for a job. The two kinds of methods are accordingly treated differently, // in that the first bunch cannot assume that the current connector object is connected, while the second bunch can. 
That is why the first bunch // receives a thread context argument for all UI methods, while the second bunch does not need one (since it has already been applied via the connect() // method, above). /** Output the configuration header section. * This method is called in the head section of the connector's configuration page. Its purpose is to add the required tabs to the list, and to output any * javascript methods that might be needed by the configuration editing HTML. *@param threadContext is the local thread context. *@param out is the output to which any HTML should be sent. *@param parameters are the configuration parameters, as they currently exist, for this connection being configured. *@param tabsArray is an array of tab names. Add to this array any tab names that are specific to the connector. */ @Override public void outputConfigurationHeader(IThreadContext threadContext, IHTTPOutput out, Locale locale, ConfigParams parameters, List<String> tabsArray) throws ManifoldCFException, IOException { tabsArray.add(Messages.getString(locale,"SharedDriveConnector.Server")); out.print( "<script type=\"text/javascript\">\n"+ "<!--\n"+ "function checkConfigForSave()\n"+ "{\n"+ " if (editconnection.server.value == \"\")\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.NeedAServerName") + "\");\n"+ " SelectTab(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.Server2") + "\");\n"+ " editconnection.server.focus();\n"+ " return false;\n"+ " }\n"+ "\n"+ " if (editconnection.server.value.indexOf(\"/\") != -1)\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.ServerNameCannotIncludePathInformation") + "\");\n"+ " SelectTab(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.Server2") + "\");\n"+ " editconnection.server.focus();\n"+ " return false;\n"+ " }\n"+ " \n"+ " if (editconnection.username.value == \"\")\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.NeedAUserName") + "\");\n"+ " SelectTab(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.Server2") + "\");\n"+ " editconnection.username.focus();\n"+ " return false;\n"+ " }\n"+ "\n"+ " return true;\n"+ "}\n"+ "\n"+ "//-->\n"+ "</script>\n" ); } /** Output the configuration body section. * This method is called in the body section of the connector's configuration page. Its purpose is to present the required form elements for editing. * The coder can presume that the HTML that is output from this configuration will be within appropriate &lt;html&gt;, &lt;body&gt;, and &lt;form&gt; tags. The name of the * form is "editconnection". *@param threadContext is the local thread context. *@param out is the output to which any HTML should be sent. *@param parameters are the configuration parameters, as they currently exist, for this connection being configured. *@param tabName is the current tab name. 
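* Tabs other than the one currently selected are rendered as hidden form fields, so their values
* survive the form post when the user switches tabs.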
*/ @Override public void outputConfigurationBody(IThreadContext threadContext, IHTTPOutput out, Locale locale, ConfigParams parameters, String tabName) throws ManifoldCFException, IOException { String server = parameters.getParameter(SharedDriveParameters.server); if (server==null) server = ""; String domain = parameters.getParameter(SharedDriveParameters.domain); if (domain==null) domain = ""; String username = parameters.getParameter(SharedDriveParameters.username); if (username==null) username = ""; String password = parameters.getObfuscatedParameter(SharedDriveParameters.password); if (password==null) password = ""; else password = out.mapPasswordToKey(password); String resolvesids = parameters.getParameter(SharedDriveParameters.useSIDs); if (resolvesids==null) resolvesids = "true"; String binName = parameters.getParameter(SharedDriveParameters.binName); if (binName == null) binName = ""; // "Server" tab if (tabName.equals(Messages.getString(locale,"SharedDriveConnector.Server"))) { out.print( "<table class=\"displaytable\">\n"+ " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.Server3") + "</nobr></td>\n"+ " <td class=\"value\"><input type=\"text\" size=\"32\" name=\"server\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(server)+"\"/></td>\n"+ " </tr>\n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.AuthenticationDomain") + "</nobr></td>\n"+ " <td class=\"value\"><input type=\"text\" size=\"32\" name=\"domain\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(domain)+"\"/></td>\n"+ " </tr>\n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.UserName") + "</nobr></td>\n"+ " <td class=\"value\"><input type=\"text\" size=\"32\" name=\"username\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(username)+"\"/></td>\n"+ " </tr>\n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.Password") + "</nobr></td>\n"+ " <td class=\"value\"><input type=\"password\" size=\"32\" name=\"password\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(password)+"\"/></td>\n"+ " </tr>\n"+ " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.UseSIDSForSecurity") + "</nobr></td>\n"+ " <td class=\"value\"><input type=\"hidden\" name=\"resolvesidspresent\" value=\"true\"/><input type=\"checkbox\" value=\"true\" name=\"resolvesids\" "+("true".equals(resolvesids)?"checked=\"true\"":"")+"/></td>\n"+ " </tr>\n"+ " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.BinName") + "</nobr></td>\n"+ " <td class=\"value\"><input type=\"text\" size=\"32\" name=\"binname\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(binName)+"\"/></td>\n"+ " </tr>\n"+ "</table>\n" ); } else { out.print( "<input type=\"hidden\" name=\"server\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(server)+"\"/>\n"+ "<input type=\"hidden\" name=\"domain\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(domain)+"\"/>\n"+ "<input type=\"hidden\" name=\"username\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(username)+"\"/>\n"+ 
"<input type=\"hidden\" name=\"password\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(password)+"\"/>\n"+ "<input type=\"hidden\" name=\"resolvesidspresent\" value=\"true\"/>\n"+ "<input type=\"hidden\" name=\"resolvesids\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(resolvesids)+"\"/>\n"+ "<input type=\"hidden\" name=\"binname\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(binName)+"\"/>\n" ); } } /** Process a configuration post. * This method is called at the start of the connector's configuration page, whenever there is a possibility that form data for a connection has been * posted. Its purpose is to gather form information and modify the configuration parameters accordingly. * The name of the posted form is "editconnection". *@param threadContext is the local thread context. *@param variableContext is the set of variables available from the post, including binary file post information. *@param parameters are the configuration parameters, as they currently exist, for this connection being configured. *@return null if all is well, or a string error message if there is an error that should prevent saving of the connection (and cause a redirection to an error page). */ @Override public String processConfigurationPost(IThreadContext threadContext, IPostParameters variableContext, Locale locale, ConfigParams parameters) throws ManifoldCFException { String server = variableContext.getParameter("server"); if (server != null) parameters.setParameter(SharedDriveParameters.server,server); String domain = variableContext.getParameter("domain"); if (domain != null) parameters.setParameter(SharedDriveParameters.domain,domain); String username = variableContext.getParameter("username"); if (username != null) parameters.setParameter(SharedDriveParameters.username,username); String password = variableContext.getParameter("password"); if (password != null) parameters.setObfuscatedParameter(SharedDriveParameters.password,variableContext.mapKeyToPassword(password)); String resolvesidspresent = variableContext.getParameter("resolvesidspresent"); if (resolvesidspresent != null) { parameters.setParameter(SharedDriveParameters.useSIDs,"false"); String resolvesids = variableContext.getParameter("resolvesids"); if (resolvesids != null) parameters.setParameter(SharedDriveParameters.useSIDs, resolvesids); } String binName = variableContext.getParameter("binname"); if (binName != null) parameters.setParameter(SharedDriveParameters.binName, binName); return null; } /** View configuration. * This method is called in the body section of the connector's view configuration page. Its purpose is to present the connection information to the user. * The coder can presume that the HTML that is output from this configuration will be within appropriate &lt;html&gt; and &lt;body&gt;tags. *@param threadContext is the local thread context. *@param out is the output to which any HTML should be sent. *@param parameters are the configuration parameters, as they currently exist, for this connection being configured. 
*/ @Override public void viewConfiguration(IThreadContext threadContext, IHTTPOutput out, Locale locale, ConfigParams parameters) throws ManifoldCFException, IOException { out.print( "<table class=\"displaytable\">\n"+ " <tr>\n"+ " <td class=\"description\" colspan=\"1\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.Parameters") + "</nobr></td>\n"+ " <td class=\"value\" colspan=\"3\">\n" ); Iterator iter = parameters.listParameters(); while (iter.hasNext()) { String param = (String)iter.next(); String value = parameters.getParameter(param); if (param.length() >= "password".length() && param.substring(param.length()-"password".length()).equalsIgnoreCase("password")) { out.print( " <nobr>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(param)+"=********</nobr><br/>\n" ); } else if (param.length() >="keystore".length() && param.substring(param.length()-"keystore".length()).equalsIgnoreCase("keystore")) { IKeystoreManager kmanager = KeystoreManagerFactory.make("",value); out.print( " <nobr>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(param)+"=&lt;"+Integer.toString(kmanager.getContents().length)+ Messages.getBodyString(locale,"SharedDriveConnector.certificate") + "&gt;</nobr><br/>\n" ); } else { out.print( " <nobr>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(param)+"="+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(value)+"</nobr><br/>\n" ); } } out.print( " </td>\n"+ " </tr>\n"+ "</table>\n" ); } /** Output the specification header section. * This method is called in the head section of a job page which has selected a repository connection of the * current type. Its purpose is to add the required tabs to the list, and to output any javascript methods * that might be needed by the job editing HTML. * The connector will be connected before this method can be called. *@param out is the output to which any HTML should be sent. *@param locale is the locale the output is preferred to be in. *@param ds is the current document specification for this job. *@param connectionSequenceNumber is the unique number of this connection within the job. *@param tabsArray is an array of tab names. Add to this array any tab names that are specific to the connector. 
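* This connector contributes the Paths, Security, Metadata, Content Length, File Mapping, and
* URL Mapping tabs, along with the javascript validators the specification form relies on.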
*/ @Override public void outputSpecificationHeader(IHTTPOutput out, Locale locale, Specification ds, int connectionSequenceNumber, List<String> tabsArray) throws ManifoldCFException, IOException { tabsArray.add(Messages.getString(locale,"SharedDriveConnector.Paths")); tabsArray.add(Messages.getString(locale,"SharedDriveConnector.Security")); tabsArray.add(Messages.getString(locale,"SharedDriveConnector.Metadata")); tabsArray.add(Messages.getString(locale,"SharedDriveConnector.ContentLength")); tabsArray.add(Messages.getString(locale,"SharedDriveConnector.FileMapping")); tabsArray.add(Messages.getString(locale,"SharedDriveConnector.URLMapping")); String seqPrefix = "s"+connectionSequenceNumber+"_"; out.print( "<script type=\"text/javascript\">\n"+ "//<!--\n"+ "\n"+ "function "+seqPrefix+"checkSpecification()\n"+ "{\n"+ " if (editjob."+seqPrefix+"specmaxlength.value != \"\" && !isInteger(editjob."+seqPrefix+"specmaxlength.value))\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.NeedAValidNumberForMaximumDocumentLength") + "\");\n"+ " editjob."+seqPrefix+"specmaxlength.focus();\n"+ " return false;\n"+ " }\n"+ " return true;\n"+ "}\n"+ "\n"+ "function "+seqPrefix+"SpecOp(n, opValue, anchorvalue)\n"+ "{\n"+ " eval(\"editjob.\"+n+\".value = \\\"\"+opValue+\"\\\"\");\n"+ " postFormSetAnchor(anchorvalue);\n"+ "}\n"+ "\n"+ "function "+seqPrefix+"SpecAddToPath(anchorvalue)\n"+ "{\n"+ " if (editjob."+seqPrefix+"pathaddon.value == \"\" && editjob."+seqPrefix+"pathtypein.value == \"\")\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.SelectAFolderOrTypeInAPathFirst") + "\");\n"+ " editjob."+seqPrefix+"pathaddon.focus();\n"+ " return;\n"+ " }\n"+ " if (editjob."+seqPrefix+"pathaddon.value != \"\" && editjob."+seqPrefix+"pathtypein.value != \"\")\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.EitherSelectAFolderORTypeInAPath") + "\");\n"+ " editjob."+seqPrefix+"pathaddon.focus();\n"+ " return;\n"+ " }\n"+ " "+seqPrefix+"SpecOp(\""+seqPrefix+"pathop\",\"AddToPath\",anchorvalue);\n"+ "}\n"+ "\n"+ "function "+seqPrefix+"SpecAddSpec(suffix,anchorvalue)\n"+ "{\n"+ " if (eval(\"editjob."+seqPrefix+"specfile\"+suffix+\".value\") == \"\")\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.EnterAFileSpecificationFirst") + "\");\n"+ " eval(\"editjob."+seqPrefix+"specfile\"+suffix+\".focus()\");\n"+ " return;\n"+ " }\n"+ " "+seqPrefix+"SpecOp(\""+seqPrefix+"pathop\"+suffix,\"Add\",anchorvalue);\n"+ "}\n"+ "\n"+ "function "+seqPrefix+"SpecInsertSpec(postfix,anchorvalue)\n"+ "{\n"+ " if (eval(\"editjob."+seqPrefix+"specfile_i\"+postfix+\".value\") == \"\")\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.EnterAFileSpecificationFirst") + "\");\n"+ " eval(\"editjob."+seqPrefix+"specfile_i\"+postfix+\".focus()\");\n"+ " return;\n"+ " }\n"+ " "+seqPrefix+"SpecOp(\""+seqPrefix+"specop\"+postfix,\"Insert Here\",anchorvalue);\n"+ "}\n"+ "\n"+ "function "+seqPrefix+"SpecAddToken(anchorvalue)\n"+ "{\n"+ " if (editjob."+seqPrefix+"spectoken.value == \"\")\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.NullAccessTokensNotAllowed") + "\");\n"+ " editjob."+seqPrefix+"spectoken.focus();\n"+ " return;\n"+ " }\n"+ " "+seqPrefix+"SpecOp(\""+seqPrefix+"accessop\",\"Add\",anchorvalue);\n"+ "}\n"+ "\n"+ "function "+seqPrefix+"SpecAddMapping(anchorvalue)\n"+ "{\n"+ " if 
(editjob."+seqPrefix+"specmatch.value == \"\")\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.MatchStringCannotBeEmpty") + "\");\n"+ " editjob."+seqPrefix+"specmatch.focus();\n"+ " return;\n"+ " }\n"+ " if (!isRegularExpression(editjob."+seqPrefix+"specmatch.value))\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.MatchStringMustBeValidRegularExpression") + "\");\n"+ " editjob."+seqPrefix+"specmatch.focus();\n"+ " return;\n"+ " }\n"+ " "+seqPrefix+"SpecOp(\""+seqPrefix+"specmappingop\",\"Add\",anchorvalue);\n"+ "}\n"+ "\n"+ "function "+seqPrefix+"SpecAddFMap(anchorvalue)\n"+ "{\n"+ " if (editjob."+seqPrefix+"specfmapmatch.value == \"\")\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.MatchStringCannotBeEmpty") + "\");\n"+ " editjob."+seqPrefix+"specfmapmatch.focus();\n"+ " return;\n"+ " }\n"+ " if (!isRegularExpression(editjob."+seqPrefix+"specfmapmatch.value))\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.MatchStringMustBeValidRegularExpression") + "\");\n"+ " editjob."+seqPrefix+"specfmapmatch.focus();\n"+ " return;\n"+ " }\n"+ " "+seqPrefix+"SpecOp(\""+seqPrefix+"specfmapop\",\"Add\",anchorvalue);\n"+ "}\n"+ "\n"+ "function "+seqPrefix+"SpecAddUMap(anchorvalue)\n"+ "{\n"+ " if (editjob."+seqPrefix+"specumapmatch.value == \"\")\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.MatchStringCannotBeEmpty") + "\");\n"+ " editjob."+seqPrefix+"specumapmatch.focus();\n"+ " return;\n"+ " }\n"+ " if (!isRegularExpression(editjob."+seqPrefix+"specumapmatch.value))\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.MatchStringMustBeValidRegularExpression") + "\");\n"+ " editjob."+seqPrefix+"specumapmatch.focus();\n"+ " return;\n"+ " }\n"+ " "+seqPrefix+"SpecOp(\""+seqPrefix+"specumapop\",\"Add\",anchorvalue);\n"+ "}\n"+ "\n"+ "//-->\n"+ "</script>\n" ); } /** Output the specification body section. * This method is called in the body section of a job page which has selected a repository connection of the * current type. Its purpose is to present the required form elements for editing. * The coder can presume that the HTML that is output from this configuration will be within appropriate * &lt;html&gt;, &lt;body&gt;, and &lt;form&gt; tags. The name of the form is always "editjob". * The connector will be connected before this method can be called. *@param out is the output to which any HTML should be sent. *@param locale is the locale the output is preferred to be in. *@param ds is the current document specification for this job. *@param connectionSequenceNumber is the unique number of this connection within the job. *@param actualSequenceNumber is the connection within the job that has currently been selected. *@param tabName is the current tab name. (actualSequenceNumber, tabName) form a unique tuple within * the job. 
*/ @Override public void outputSpecificationBody(IHTTPOutput out, Locale locale, Specification ds, int connectionSequenceNumber, int actualSequenceNumber, String tabName) throws ManifoldCFException, IOException { String seqPrefix = "s"+connectionSequenceNumber+"_"; int i; int k; // "Content Length" tab i = 0; String maxLength = null; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i++); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_MAXLENGTH)) maxLength = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_VALUE); } if (maxLength == null) maxLength = ""; if (tabName.equals(Messages.getString(locale,"SharedDriveConnector.ContentLength")) && connectionSequenceNumber == actualSequenceNumber) { out.print( "<table class=\"displaytable\">\n"+ " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.MaximumDocumentLength") + "</nobr></td>\n"+ " <td class=\"value\"><input type=\"text\" name=\""+seqPrefix+"specmaxlength\" size=\"10\" value=\""+maxLength+"\"/></td>\n"+ " </tr>\n"+ "</table>\n" ); } else { out.print( "<input type=\"hidden\" name=\""+seqPrefix+"specmaxlength\" value=\""+maxLength+"\"/>\n" ); } // Check for Paths tab if (tabName.equals(Messages.getString(locale,"SharedDriveConnector.Paths")) && connectionSequenceNumber == actualSequenceNumber) { out.print( "<table class=\"displaytable\">\n"+ " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n" ); // Now, loop through paths. There will be a row in the current table for each one. // The row will contain a delete button on the left. On the right will be the startpoint itself at the top, // and underneath it the table where the filter criteria are edited. i = 0; k = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i++); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_STARTPOINT)) { String pathDescription = "_"+Integer.toString(k); String pathOpName = seqPrefix+"pathop"+pathDescription; String startPath = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_PATH); out.print( " <tr>\n"+ " <td class=\"value\">\n"+ " <a name=\""+seqPrefix+"path_"+Integer.toString(k)+"\">\n"+ " <input type=\"button\" value=\"Delete\" alt=\""+Messages.getAttributeString(locale,"SharedDriveConnector.DeletePath")+Integer.toString(k)+"\" onClick='Javascript:"+seqPrefix+"SpecOp(\""+pathOpName+"\",\"Delete\",\""+seqPrefix+"path_"+Integer.toString(k)+"\")'/>\n"+ " </a>&nbsp;\n"+ " </td>\n"+ " <td class=\"value\">\n"+ " <table class=\"displaytable\">\n"+ " <tr>\n"+ " <td class=\"value\">\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"specpath"+pathDescription+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_PATH))+"\"/>\n"+ " <input type=\"hidden\" name=\""+pathOpName+"\" value=\"\"/>\n"+ " <nobr>"+((startPath.length() == 0)?"(root)":org.apache.manifoldcf.ui.util.Encoder.bodyEscape(startPath))+"</nobr>\n"+ " </td>\n"+ " </tr>\n"+ " <tr>\n"+ " <td class=\"boxcell\">\n"+ " <table class=\"displaytable\">\n" ); // Now go through the include/exclude children of this node, and display one line per node, followed // an "add" line. 
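// Each existing rule row carries hidden fields (specfl/specty/specin/specfile) plus Insert and
// Delete buttons keyed by the path index k and the rule index j.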
int j = 0; while (j < sn.getChildCount()) { SpecificationNode excludeNode = sn.getChild(j); String instanceDescription = "_"+Integer.toString(k)+"_"+Integer.toString(j); String instanceOpName = seqPrefix + "specop" + instanceDescription; String nodeFlavor = excludeNode.getType(); String nodeType = excludeNode.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_TYPE); if (nodeType == null) nodeType = ""; String filespec = excludeNode.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_FILESPEC); String indexable = excludeNode.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_INDEXABLE); if (indexable == null) indexable = ""; out.print( " <tr>\n"+ " <td class=\"value\">\n"+ " <input type=\"button\" value=\"Insert\" onClick='Javascript:"+seqPrefix+"SpecInsertSpec(\""+instanceDescription+"\",\""+seqPrefix+"filespec_"+Integer.toString(k)+"_"+Integer.toString(j+1)+"\")' alt=\""+Messages.getAttributeString(locale,"SharedDriveConnector.InsertNewMatchForPath")+Integer.toString(k)+" before position #"+Integer.toString(j)+"\"/>\n"+ " </td>\n"+ " <td class=\"value\">\n"+ " <nobr>\n"+ " <select name=\""+seqPrefix+"specfl_i"+instanceDescription+"\">\n"+ " <option value=\"include\">" + Messages.getBodyString(locale,"SharedDriveConnector.Include") + "</option>\n"+ " <option value=\"exclude\">" + Messages.getBodyString(locale,"SharedDriveConnector.Exclude") + "</option>\n"+ " </select>&nbsp;\n"+ " <select name=\""+seqPrefix+"spectin_i"+instanceDescription+"\">\n"+ " <option value=\"\" selected=\"selected\">" + Messages.getBodyString(locale,"SharedDriveConnector.AnyFileOrDirectory") + "</option>\n"+ " <option value=\"file\">" + Messages.getBodyString(locale,"SharedDriveConnector.files") + "</option>\n"+ " <option value=\"indexable-file\">" + Messages.getBodyString(locale,"SharedDriveConnector.indexableFiles") + "</option>\n"+ " <option value=\"unindexable-file\">" + Messages.getBodyString(locale,"SharedDriveConnector.unindexableFiles") + "</option>\n"+ " <option value=\"directory\">" + Messages.getBodyString(locale,"SharedDriveConnector.directorys") + "</option>\n"+ " </select>&nbsp;" + Messages.getBodyString(locale,"SharedDriveConnector.matching") + "&nbsp;\n"+ " <input type=\"text\" size=\"20\" name=\""+seqPrefix+"specfile_i"+instanceDescription+"\" value=\"\"/>\n"+ " </nobr>\n"+ " </td>\n"+ "\n"+ " </tr>\n"+ " <tr>\n"+ " <td class=\"value\">\n"+ " <a name=\""+seqPrefix+"filespec_"+Integer.toString(k)+"_"+Integer.toString(j)+"\">\n"+ " <input type=\"button\" value=\"Delete\" onClick='Javascript:"+seqPrefix+"SpecOp(\""+instanceOpName+"\",\"Delete\",\""+seqPrefix+"filespec_"+Integer.toString(k)+"_"+Integer.toString(j)+"\")' alt=\""+Messages.getAttributeString(locale,"SharedDriveConnector.DeletePath")+Integer.toString(k)+Messages.getAttributeString(locale,"SharedDriveConnector.matchSpec")+Integer.toString(j)+"\"/>\n"+ " </a>\n"+ " </td>\n"+ " <td class=\"value\">\n"+ " <nobr>\n"+ " <input type=\"hidden\" name=\""+instanceOpName+"\" value=\"\"/>\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"specfl"+instanceDescription+"\" value=\""+nodeFlavor+"\"/>\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"specty"+instanceDescription+"\" value=\""+nodeType+"\"/>\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"specin"+instanceDescription+"\" value=\""+indexable+"\"/>\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"specfile"+instanceDescription+"\" 
value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(filespec)+"\"/>\n"+ " "+Integer.toString(j+1)+".&nbsp;"+(nodeFlavor.equals("include")?"Include":"")+""+(nodeFlavor.equals("exclude")?"Exclude":"")+""+(indexable.equals("yes")?"&nbsp;indexable":"")+""+(indexable.equals("no")?"&nbsp;un-indexable":"")+""+(nodeType.equals("file")?"&nbsp;file(s)":"")+""+(nodeType.equals("directory")?"&nbsp;directory(s)":"")+""+(nodeType.equals("")?"&nbsp;file(s)&nbsp;or&nbsp;directory(s)":"")+"&nbsp;matching&nbsp;"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(filespec)+"\n"+ " </nobr>\n"+ " </td>\n"+ " </tr>\n" ); j++; } if (j == 0) { out.print( " <tr><td class=\"message\" colspan=\"2\">" + Messages.getBodyString(locale,"SharedDriveConnector.NoRulesDefined") + "</td></tr>\n" ); } out.print( " <tr><td class=\"lightseparator\" colspan=\"2\"><hr/></td></tr>\n"+ " <tr>\n"+ " <td class=\"value\">\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"specchildcount"+pathDescription+"\" value=\""+Integer.toString(j)+"\"/>\n"+ " <a name=\""+seqPrefix+"filespec_"+Integer.toString(k)+"_"+Integer.toString(j)+"\">\n"+ " <input type=\"button\" value=\"Add\" onClick='Javascript:"+seqPrefix+"SpecAddSpec(\""+pathDescription+"\",\""+seqPrefix+"filespec_"+Integer.toString(k)+"_"+Integer.toString(j+1)+"\")' alt=\""+Messages.getAttributeString(locale,"SharedDriveConnector.AddNewMatchForPath")+Integer.toString(k)+"\"/>\n"+ " </a>\n"+ " </td>\n"+ " <td class=\"value\">\n"+ " <nobr>\n"+ " <select name=\""+seqPrefix+"specfl"+pathDescription+"\">\n"+ " <option value=\"include\">" + Messages.getBodyString(locale,"SharedDriveConnector.Include") + "</option>\n"+ " <option value=\"exclude\">" + Messages.getBodyString(locale,"SharedDriveConnector.Exclude") + "</option>\n"+ " </select>&nbsp;\n"+ " <select name=\""+seqPrefix+"spectin"+pathDescription+"\">\n"+ " <option value=\"\">" + Messages.getBodyString(locale,"SharedDriveConnector.AnyFileOrDirectory") + "</option>\n"+ " <option value=\"file\">" + Messages.getBodyString(locale,"SharedDriveConnector.files") + "</option>\n"+ " <option value=\"indexable-file\">" + Messages.getBodyString(locale,"SharedDriveConnector.indexableFiles") + "</option>\n"+ " <option value=\"unindexable-file\">" + Messages.getBodyString(locale,"SharedDriveConnector.unindexableFiles") + "</option>\n"+ " <option value=\"directory\">" + Messages.getBodyString(locale,"SharedDriveConnector.directorys") + "</option>\n"+ " </select>&nbsp;" + Messages.getBodyString(locale,"SharedDriveConnector.matching") + "&nbsp;\n"+ " <input type=\"text\" size=\"20\" name=\""+seqPrefix+"specfile"+pathDescription+"\" value=\"\"/>\n"+ " </nobr>\n"+ " </td>\n"+ " </tr>\n"+ " </table>\n"+ " </td>\n"+ " </tr>\n"+ " </table>\n"+ " </td>\n"+ " </tr>\n" ); k++; } } if (k == 0) { out.print( " <tr>\n"+ " <td class=\"message\" colspan=\"2\">" + Messages.getBodyString(locale,"SharedDriveConnector.NoStartingPointsDefined") + "</td>\n"+ " </tr>\n" ); } out.print( " <tr><td class=\"lightseparator\" colspan=\"2\"><hr/></td></tr>\n"+ " <tr>\n"+ " <td class=\"value\" colspan=\"2\">\n"+ " <nobr>\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"pathcount\" value=\""+Integer.toString(k)+"\"/>\n"+ " <a name=\""+seqPrefix+"path_"+Integer.toString(k)+"\">\n" ); String pathSoFar = (String)currentContext.get(seqPrefix+"specpath"); if (pathSoFar == null) pathSoFar = ""; // Grab next folder/project list try { String[] childList; childList = getChildFolderNames(pathSoFar); if (childList == null) { // Illegal path - set it back pathSoFar = ""; childList = 
getChildFolderNames(""); if (childList == null) throw new ManifoldCFException("Can't find any children for root folder"); } out.print( " <input type=\"hidden\" name=\""+seqPrefix+"specpath\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(pathSoFar)+"\"/>\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"pathop\" value=\"\"/>\n"+ " <input type=\"button\" value=\"Add\" alt=\"" + Messages.getAttributeString(locale,"SharedDriveConnector.AddPath") + "\" onClick='Javascript:"+seqPrefix+"SpecOp(\""+seqPrefix+"pathop\",\"Add\",\""+seqPrefix+"path_"+Integer.toString(k+1)+"\")'/>\n"+ " &nbsp;"+((pathSoFar.length()==0)?"(root)":org.apache.manifoldcf.ui.util.Encoder.bodyEscape(pathSoFar))+"\n" ); if (pathSoFar.length() > 0) { out.print( " <input type=\"button\" value=\"-\" alt=\"" + Messages.getAttributeString(locale,"SharedDriveConnector.RemoveFromPath") + "\" onClick='Javascript:"+seqPrefix+"SpecOp(\""+seqPrefix+"pathop\",\"Up\",\""+seqPrefix+"path_"+Integer.toString(k)+"\")'/>\n" ); } if (childList.length > 0) { out.print( " <nobr>\n"+ " <input type=\"button\" value=\"+\" alt=\"" + Messages.getAttributeString(locale,"SharedDriveConnector.AddPath") + "\" onClick='Javascript:"+seqPrefix+"SpecAddToPath(\""+seqPrefix+"path_"+Integer.toString(k)+"\")'/>&nbsp;\n"+ " <select name=\""+seqPrefix+"pathaddon\">\n"+ " <option value=\"\" selected=\"selected\">" + Messages.getBodyString(locale,"SharedDriveConnector.PickAFolder") + "</option>\n" ); int j = 0; while (j < childList.length) { String folder = org.apache.manifoldcf.ui.util.Encoder.attributeEscape(childList[j]); out.print( " <option value=\""+folder+"\">"+folder+"</option>\n" ); j++; } out.print( " </select>" + Messages.getBodyString(locale,"SharedDriveConnector.orTypeAPath") + " <input type=\"text\" name=\""+seqPrefix+"pathtypein\" size=\"16\" value=\"\"/>\n"+ " </nobr>\n" ); } } catch (ManifoldCFException e) { e.printStackTrace(); out.println(org.apache.manifoldcf.ui.util.Encoder.bodyEscape(e.getMessage())); } out.print( " </a>\n"+ " </nobr>\n"+ " </td>\n"+ " </tr>\n"+ "</table>\n" ); } else { // Generate hiddens for the pathspec tab i = 0; k = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i++); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_STARTPOINT)) { String pathDescription = "_"+Integer.toString(k); String startPath = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_PATH); out.print( "<input type=\"hidden\" name=\""+seqPrefix+"specpath"+pathDescription+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(startPath)+"\"/>\n" ); // Now go through the include/exclude children of this node. 
int j = 0; while (j < sn.getChildCount()) { SpecificationNode excludeNode = sn.getChild(j); String instanceDescription = "_"+Integer.toString(k)+"_"+Integer.toString(j); String nodeFlavor = excludeNode.getType(); String nodeType = excludeNode.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_TYPE); if (nodeType == null) nodeType = ""; String filespec = excludeNode.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_FILESPEC); String indexable = excludeNode.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_INDEXABLE); if (indexable == null) indexable = ""; out.print( "<input type=\"hidden\" name=\""+seqPrefix+"specfl"+instanceDescription+"\" value=\""+nodeFlavor+"\"/>\n"+ "<input type=\"hidden\" name=\""+seqPrefix+"specty"+instanceDescription+"\" value=\""+nodeType+"\"/>\n"+ "<input type=\"hidden\" name=\""+seqPrefix+"specin"+instanceDescription+"\" value=\""+indexable+"\"/>\n"+ "<input type=\"hidden\" name=\""+seqPrefix+"specfile"+instanceDescription+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(filespec)+"\"/>\n" ); j++; } k++; out.print( "<input type=\"hidden\" name=\""+seqPrefix+"specchildcount"+pathDescription+"\" value=\""+Integer.toString(j)+"\"/>\n" ); } } out.print( "<input type=\"hidden\" name=\""+seqPrefix+"pathcount\" value=\""+Integer.toString(k)+"\"/>\n" ); } // Security tab // Find whether security is on or off i = 0; boolean securityOn = true; boolean shareSecurityOn = true; boolean parentFolderSecurityOn = false; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i++); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_SECURITY)) { String securityValue = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_VALUE); if (securityValue.equals("off")) securityOn = false; else if (securityValue.equals("on")) securityOn = true; } if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_SHARESECURITY)) { String securityValue = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_VALUE); if (securityValue.equals("off")) shareSecurityOn = false; else if (securityValue.equals("on")) shareSecurityOn = true; } if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_PARENTFOLDERSECURITY)) { String securityValue = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_VALUE); if (securityValue.equals("off")) parentFolderSecurityOn = false; else if (securityValue.equals("on")) parentFolderSecurityOn = true; } } if (tabName.equals(Messages.getString(locale,"SharedDriveConnector.Security")) && connectionSequenceNumber == actualSequenceNumber) { out.print( "<table class=\"displaytable\">\n"+ " <tr><td class=\"separator\" colspan=\"4\"><hr/></td></tr>\n"+ "\n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.FileSecurity") + "</nobr></td>\n"+ " <td colspan=\"3\" class=\"value\">\n"+ " <nobr>\n"+ " <input type=\"radio\" name=\""+seqPrefix+"specsecurity\" value=\"on\" "+(securityOn?"checked=\"true\"":"")+" />" + Messages.getBodyString(locale,"SharedDriveConnector.Enabled") + "&nbsp;\n"+ " <input type=\"radio\" name=\""+seqPrefix+"specsecurity\" value=\"off\" 
"+((securityOn==false)?"checked=\"true\"":"")+" />" + Messages.getBodyString(locale,"SharedDriveConnector.Disabled") + "\n"+ " </nobr>\n"+ " </td>\n"+ " </tr>\n"+ "\n"+ " <tr><td class=\"separator\" colspan=\"4\"><hr/></td></tr>\n"+ "\n" ); // Finally, go through forced ACL i = 0; k = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i++); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_ACCESS)) { String accessDescription = "_"+Integer.toString(k); String accessOpName = seqPrefix+"accessop"+accessDescription; String token = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_TOKEN); out.print( " <tr>\n"+ " <td class=\"description\" colspan=\"1\">\n"+ " <input type=\"hidden\" name=\""+accessOpName+"\" value=\"\"/>\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"spectoken"+accessDescription+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(token)+"\"/>\n"+ " <a name=\""+seqPrefix+"token_"+Integer.toString(k)+"\">\n"+ " <input type=\"button\" value=\"Delete\" alt=\""+Messages.getAttributeString(locale,"SharedDriveConnector.DeleteToken")+Integer.toString(k)+"\" onClick='Javascript:"+seqPrefix+"SpecOp(\""+accessOpName+"\",\"Delete\",\""+seqPrefix+"token_"+Integer.toString(k)+"\")'/>\n"+ " </a>\n"+ " </td>\n"+ " <td class=\"value\" colspan=\"3\">\n"+ " <nobr>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(token)+"</nobr>\n"+ " </td>\n"+ " </tr>\n" ); k++; } } if (k == 0) { out.print( " <tr>\n"+ " <td class=\"message\" colspan=\"4\">" + Messages.getBodyString(locale,"SharedDriveConnector.NoFileAccessTokensPresent") + "</td>\n"+ " </tr>\n" ); } out.print( " <tr><td class=\"lightseparator\" colspan=\"4\"><hr/></td></tr>\n"+ " <tr>\n"+ " <td class=\"description\" colspan=\"1\">\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"tokencount\" value=\""+Integer.toString(k)+"\"/>\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"accessop\" value=\"\"/>\n"+ " <a name=\""+seqPrefix+"token_"+Integer.toString(k)+"\">\n"+ " <input type=\"button\" value=\"Add\" alt=\"" + Messages.getAttributeString(locale,"SharedDriveConnector.AddToken") + "\" onClick='Javascript:"+seqPrefix+"SpecAddToken(\""+seqPrefix+"token_"+Integer.toString(k+1)+"\")'/>\n"+ " </a>\n"+ " </td>\n"+ " <td class=\"value\" colspan=\"3\">\n"+ " <nobr><input type=\"text\" size=\"30\" name=\""+seqPrefix+"spectoken\" value=\"\"/></nobr>\n"+ " </td>\n"+ " </tr>\n"+ "\n"+ " <tr><td class=\"separator\" colspan=\"4\"><hr/></td></tr>\n"+ "\n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.ShareSecurity") + "</nobr></td>\n"+ " <td colspan=\"3\" class=\"value\">\n"+ " <nobr>\n"+ " <input type=\"radio\" name=\""+seqPrefix+"specsharesecurity\" value=\"on\" "+(shareSecurityOn?"checked=\"true\"":"")+" />" + Messages.getBodyString(locale,"SharedDriveConnector.Enabled") + "&nbsp;\n"+ " <input type=\"radio\" name=\""+seqPrefix+"specsharesecurity\" value=\"off\" "+((shareSecurityOn==false)?"checked=\"true\"":"")+" />" + Messages.getBodyString(locale,"SharedDriveConnector.Disabled") + "\n"+ " </nobr>\n"+ " </td>\n"+ " </tr>\n"+ "\n"+ " <tr><td class=\"separator\" colspan=\"4\"><hr/></td></tr>\n"+ "\n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.ParentFolderSecurity") + "</nobr></td>\n"+ " <td colspan=\"3\" class=\"value\">\n"+ " <nobr>\n"+ " <input type=\"radio\" name=\""+seqPrefix+"specparentfoldersecurity\" 
value=\"on\" "+(parentFolderSecurityOn?"checked=\"true\"":"")+" />" + Messages.getBodyString(locale,"SharedDriveConnector.Enabled") + "&nbsp;\n"+ " <input type=\"radio\" name=\""+seqPrefix+"specparentfoldersecurity\" value=\"off\" "+((parentFolderSecurityOn==false)?"checked=\"true\"":"")+" />" + Messages.getBodyString(locale,"SharedDriveConnector.Disabled") + "\n"+ " </nobr>\n"+ " </td>\n"+ " </tr>\n"+ "\n"+ "</table>\n" ); } else { out.print( "<input type=\"hidden\" name=\""+seqPrefix+"specsecurity\" value=\""+(securityOn?"on":"off")+"\"/>\n" ); // Finally, go through forced ACL i = 0; k = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i++); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_ACCESS)) { String accessDescription = "_"+Integer.toString(k); String token = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_TOKEN); out.print( "<input type=\"hidden\" name=\""+seqPrefix+"spectoken"+accessDescription+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(token)+"\"/>\n" ); k++; } } out.print( "<input type=\"hidden\" name=\""+seqPrefix+"tokencount\" value=\""+Integer.toString(k)+"\"/>\n"+ "<input type=\"hidden\" name=\""+seqPrefix+"specsharesecurity\" value=\""+(shareSecurityOn?"on":"off")+"\"/>\n"+ "<input type=\"hidden\" name=\""+seqPrefix+"specparentfoldersecurity\" value=\""+(parentFolderSecurityOn?"on":"off")+"\"/>\n" ); } // Metadata tab // Find the path-value metadata attribute name // Find the path-value mapping data i = 0; String pathNameAttribute = ""; org.apache.manifoldcf.crawler.connectors.sharedrive.MatchMap matchMap = new org.apache.manifoldcf.crawler.connectors.sharedrive.MatchMap(); while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i++); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_PATHNAMEATTRIBUTE)) { pathNameAttribute = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_VALUE); } else if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_PATHMAP)) { String pathMatch = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_MATCH); String pathReplace = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_REPLACE); matchMap.appendMatchPair(pathMatch,pathReplace); } } if (tabName.equals(Messages.getString(locale,"SharedDriveConnector.Metadata")) && connectionSequenceNumber == actualSequenceNumber) { out.print( "<input type=\"hidden\" name=\""+seqPrefix+"specmappingcount\" value=\""+Integer.toString(matchMap.getMatchCount())+"\"/>\n"+ "<input type=\"hidden\" name=\""+seqPrefix+"specmappingop\" value=\"\"/>\n"+ "<table class=\"displaytable\">\n"+ " <tr><td class=\"separator\" colspan=\"4\"><hr/></td></tr>\n"+ "\n"+ " <tr>\n"+ " <td class=\"description\" colspan=\"1\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.PathAttributeName") + "</nobr></td>\n"+ " <td class=\"value\" colspan=\"3\">\n"+ " <input type=\"text\" name=\""+seqPrefix+"specpathnameattribute\" size=\"20\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(pathNameAttribute)+"\"/>\n"+ " </td>\n"+ " </tr>\n"+ "\n"+ " <tr><td class=\"separator\" colspan=\"4\"><hr/></td></tr>\n"+ "\n" ); i = 0; while (i < matchMap.getMatchCount()) { String matchString = 
matchMap.getMatchString(i); String replaceString = matchMap.getReplaceString(i); out.print( " <tr>\n"+ " <td class=\"value\">\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"specmappingop_"+Integer.toString(i)+"\" value=\"\"/>\n"+ " <a name=\""+seqPrefix+"mapping_"+Integer.toString(i)+"\">\n"+ " <input type=\"button\" onClick='Javascript:"+seqPrefix+"SpecOp(\""+seqPrefix+"specmappingop_"+Integer.toString(i)+"\",\"Delete\",\""+seqPrefix+"mapping_"+Integer.toString(i)+"\")' alt=\""+Messages.getAttributeString(locale,"SharedDriveConnector.DeleteMapping")+Integer.toString(i)+"\" value=\"Delete\"/>\n"+ " </a>\n"+ " </td>\n"+ " <td class=\"value\"><input type=\"hidden\" name=\""+seqPrefix+"specmatch_"+Integer.toString(i)+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(matchString)+"\"/>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(matchString)+"</td>\n"+ " <td class=\"value\">==></td>\n"+ " <td class=\"value\"><input type=\"hidden\" name=\""+seqPrefix+"specreplace_"+Integer.toString(i)+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(replaceString)+"\"/>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(replaceString)+"</td>\n"+ " </tr>\n" ); i++; } if (i == 0) { out.print( " <tr><td colspan=\"4\" class=\"message\">" + Messages.getBodyString(locale,"SharedDriveConnector.NoMappingsSpecified") + "</td></tr>\n" ); } out.print( " <tr><td class=\"lightseparator\" colspan=\"4\"><hr/></td></tr>\n"+ "\n"+ " <tr>\n"+ " <td class=\"value\">\n"+ " <a name=\""+seqPrefix+"mapping_"+Integer.toString(i)+"\">\n"+ " <input type=\"button\" onClick='Javascript:"+seqPrefix+"SpecAddMapping(\""+seqPrefix+"mapping_"+Integer.toString(i+1)+"\")' alt=\"" + Messages.getAttributeString(locale,"SharedDriveConnector.AddToMappings") + "\" value=\"Add\"/>\n"+ " </a>\n"+ " </td>\n"+ " <td class=\"value\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.MatchRegexp") + "<input type=\"text\" name=\""+seqPrefix+"specmatch\" size=\"32\" value=\"\"/></nobr></td>\n"+ " <td class=\"value\">==></td>\n"+ " <td class=\"value\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.ReplaceString") + "<input type=\"text\" name=\""+seqPrefix+"specreplace\" size=\"32\" value=\"\"/></nobr></td>\n"+ " </tr>\n"+ "</table>\n" ); } else { out.print( "<input type=\"hidden\" name=\""+seqPrefix+"specpathnameattribute\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(pathNameAttribute)+"\"/>\n"+ "<input type=\"hidden\" name=\""+seqPrefix+"specmappingcount\" value=\""+Integer.toString(matchMap.getMatchCount())+"\"/>\n" ); i = 0; while (i < matchMap.getMatchCount()) { String matchString = matchMap.getMatchString(i); String replaceString = matchMap.getReplaceString(i); out.print( "<input type=\"hidden\" name=\""+seqPrefix+"specmatch_"+Integer.toString(i)+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(matchString)+"\"/>\n"+ "<input type=\"hidden\" name=\""+seqPrefix+"specreplace_"+Integer.toString(i)+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(replaceString)+"\"/>\n" ); i++; } } // File and URL Mapping tabs // Find the filename mapping data // Find the URL mapping data org.apache.manifoldcf.crawler.connectors.sharedrive.MatchMap fileMap = new org.apache.manifoldcf.crawler.connectors.sharedrive.MatchMap(); org.apache.manifoldcf.crawler.connectors.sharedrive.MatchMap uriMap = new org.apache.manifoldcf.crawler.connectors.sharedrive.MatchMap(); i = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i++); if 
(sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_FILEMAP)) { String pathMatch = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_MATCH); String pathReplace = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_REPLACE); fileMap.appendMatchPair(pathMatch,pathReplace); } else if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_URIMAP)) { String pathMatch = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_MATCH); String pathReplace = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_REPLACE); uriMap.appendMatchPair(pathMatch,pathReplace); } } if (tabName.equals(Messages.getString(locale,"SharedDriveConnector.FileMapping")) && connectionSequenceNumber == actualSequenceNumber) { out.print( "<input type=\"hidden\" name=\""+seqPrefix+"specfmapcount\" value=\""+Integer.toString(fileMap.getMatchCount())+"\"/>\n"+ "<input type=\"hidden\" name=\""+seqPrefix+"specfmapop\" value=\"\"/>\n"+ "<table class=\"displaytable\">\n"+ " <tr><td class=\"separator\" colspan=\"4\"><hr/></td></tr>\n" ); i = 0; while (i < fileMap.getMatchCount()) { String matchString = fileMap.getMatchString(i); String replaceString = fileMap.getReplaceString(i); out.print( " <tr>\n"+ " <td class=\"value\">\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"specfmapop_"+Integer.toString(i)+"\" value=\"\"/>\n"+ " <a name=\""+seqPrefix+"fmap_"+Integer.toString(i)+"\">\n"+ " <input type=\"button\" onClick='Javascript:"+seqPrefix+"SpecOp(\""+seqPrefix+"specfmapop_"+Integer.toString(i)+"\",\"Delete\",\""+seqPrefix+"fmap_"+Integer.toString(i)+"\")' alt=\""+Messages.getAttributeString(locale,"SharedDriveConnector.DeleteFileMapping")+Integer.toString(i)+"\" value=\"Delete\"/>\n"+ " </a>\n"+ " </td>\n"+ " <td class=\"value\"><input type=\"hidden\" name=\""+seqPrefix+"specfmapmatch_"+Integer.toString(i)+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(matchString)+"\"/>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(matchString)+"</td>\n"+ " <td class=\"value\">==></td>\n"+ " <td class=\"value\"><input type=\"hidden\" name=\""+seqPrefix+"specfmapreplace_"+Integer.toString(i)+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(replaceString)+"\"/>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(replaceString)+"</td>\n"+ " </tr>\n" ); i++; } if (i == 0) { out.print( " <tr><td colspan=\"4\" class=\"message\">" + Messages.getBodyString(locale,"SharedDriveConnector.NoFileMappingsSpecified") + "</td></tr>\n" ); } out.print( " <tr><td class=\"lightseparator\" colspan=\"4\"><hr/></td></tr>\n"+ "\n"+ " <tr>\n"+ " <td class=\"value\">\n"+ " <a name=\""+seqPrefix+"fmap_"+Integer.toString(i)+"\">\n"+ " <input type=\"button\" onClick='Javascript:"+seqPrefix+"SpecAddFMap(\""+seqPrefix+"fmap_"+Integer.toString(i+1)+"\")' alt=\"" + Messages.getAttributeString(locale,"SharedDriveConnector.AddToFileMappings") + "\" value=\"Add\"/>\n"+ " </a>\n"+ " </td>\n"+ " <td class=\"value\">" + Messages.getBodyString(locale,"SharedDriveConnector.MatchRegexp") + "<input type=\"text\" name=\""+seqPrefix+"specfmapmatch\" size=\"32\" value=\"\"/></td>\n"+ " <td class=\"value\">==></td>\n"+ " <td class=\"value\">" + Messages.getBodyString(locale,"SharedDriveConnector.ReplaceString") + "<input type=\"text\" 
name=\""+seqPrefix+"specfmapreplace\" size=\"32\" value=\"\"/></td>\n"+ " </tr>\n"+ "</table>\n" ); } else { out.print( "<input type=\"hidden\" name=\""+seqPrefix+"specfmapcount\" value=\""+Integer.toString(fileMap.getMatchCount())+"\"/>\n" ); i = 0; while (i < fileMap.getMatchCount()) { String matchString = fileMap.getMatchString(i); String replaceString = fileMap.getReplaceString(i); out.print( "<input type=\"hidden\" name=\""+seqPrefix+"specfmapmatch_"+Integer.toString(i)+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(matchString)+"\"/>\n"+ "<input type=\"hidden\" name=\""+seqPrefix+"specfmapreplace_"+Integer.toString(i)+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(replaceString)+"\"/>\n" ); i++; } } if (tabName.equals(Messages.getString(locale,"SharedDriveConnector.URLMapping")) && connectionSequenceNumber == actualSequenceNumber) { out.print( "<input type=\"hidden\" name=\""+seqPrefix+"specumapcount\" value=\""+Integer.toString(uriMap.getMatchCount())+"\"/>\n"+ "<input type=\"hidden\" name=\""+seqPrefix+"specumapop\" value=\"\"/>\n"+ "<table class=\"displaytable\">\n"+ " <tr><td class=\"separator\" colspan=\"4\"><hr/></td></tr>\n" ); i = 0; while (i < uriMap.getMatchCount()) { String matchString = uriMap.getMatchString(i); String replaceString = uriMap.getReplaceString(i); out.print( " <tr>\n"+ " <td class=\"value\">\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"specumapop_"+Integer.toString(i)+"\" value=\"\"/>\n"+ " <a name=\""+seqPrefix+"umap_"+Integer.toString(i)+"\">\n"+ " <input type=\"button\" onClick='Javascript:"+seqPrefix+"SpecOp(\""+seqPrefix+"specumapop_"+Integer.toString(i)+"\",\"Delete\",\""+seqPrefix+"umap_"+Integer.toString(i)+"\")' alt=\""+Messages.getAttributeString(locale,"SharedDriveConnector.DeleteUrlMapping")+Integer.toString(i)+"\" value=\"Delete\"/>\n"+ " </a>\n"+ " </td>\n"+ " <td class=\"value\">\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"specumapmatch_"+Integer.toString(i)+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(matchString)+"\"/>\n"+ " "+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(matchString)+"\n"+ " </td>\n"+ " <td class=\"value\">==></td>\n"+ " <td class=\"value\">\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"specumapreplace_"+Integer.toString(i)+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(replaceString)+"\"/>\n"+ " "+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(replaceString)+"\n"+ " </td>\n"+ " </tr>\n" ); i++; } if (i == 0) { out.print( " <tr><td colspan=\"4\" class=\"message\">" + Messages.getBodyString(locale,"SharedDriveConnector.NoURLMappingsSpecifiedWillProduceAFileIRI") + "</td></tr>\n" ); } out.print( " <tr><td class=\"lightseparator\" colspan=\"4\"><hr/></td></tr>\n"+ " \n"+ " <tr>\n"+ " <td class=\"value\">\n"+ " <a name=\""+seqPrefix+"umap_"+Integer.toString(i)+"\">\n"+ " <input type=\"button\" onClick='Javascript:"+seqPrefix+"SpecAddUMap(\""+seqPrefix+"umap_"+Integer.toString(i+1)+"\")' alt=\"" + Messages.getAttributeString(locale,"SharedDriveConnector.AddToURLMappings") + "\" value=\"Add\"/>\n"+ " </a>\n"+ " </td>\n"+ " <td class=\"value\">" + Messages.getBodyString(locale,"SharedDriveConnector.MatchRegexp") + "<input type=\"text\" name=\""+seqPrefix+"specumapmatch\" size=\"32\" value=\"\"/></td>\n"+ " <td class=\"value\">==></td>\n"+ " <td class=\"value\">" + Messages.getBodyString(locale,"SharedDriveConnector.ReplaceString") + "<input type=\"text\" name=\""+seqPrefix+"specumapreplace\" size=\"32\" value=\"\"/></td>\n"+ " 
</tr>\n"+ "</table>\n" ); } else { out.print( "<input type=\"hidden\" name=\""+seqPrefix+"specumapcount\" value=\""+Integer.toString(uriMap.getMatchCount())+"\"/>\n" ); i = 0; while (i < uriMap.getMatchCount()) { String matchString = uriMap.getMatchString(i); String replaceString = uriMap.getReplaceString(i); out.print( "<input type=\"hidden\" name=\""+seqPrefix+"specumapmatch_"+Integer.toString(i)+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(matchString)+"\"/>\n"+ "<input type=\"hidden\" name=\""+seqPrefix+"specumapreplace_"+Integer.toString(i)+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(replaceString)+"\"/>\n" ); i++; } } } /** Process a specification post. * This method is called at the start of job's edit or view page, whenever there is a possibility that form * data for a connection has been posted. Its purpose is to gather form information and modify the * document specification accordingly. The name of the posted form is always "editjob". * The connector will be connected before this method can be called. *@param variableContext contains the post data, including binary file-upload information. *@param locale is the locale the output is preferred to be in. *@param ds is the current document specification for this job. *@param connectionSequenceNumber is the unique number of this connection within the job. *@return null if all is well, or a string error message if there is an error that should prevent saving of * the job (and cause a redirection to an error page). */ @Override public String processSpecificationPost(IPostParameters variableContext, Locale locale, Specification ds, int connectionSequenceNumber) throws ManifoldCFException { String seqPrefix = "s"+connectionSequenceNumber+"_"; String x = variableContext.getParameter(seqPrefix+"pathcount"); if (x != null) { // Delete all path specs first int i = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_STARTPOINT)) ds.removeChild(i); else i++; } // Find out how many children were sent int pathCount = Integer.parseInt(x); // Gather up these i = 0; while (i < pathCount) { String pathDescription = "_"+Integer.toString(i); String pathOpName = seqPrefix+"pathop"+pathDescription; x = variableContext.getParameter(pathOpName); if (x != null && x.equals("Delete")) { // Skip to the next i++; continue; } // Path inserts won't happen until the very end String path = variableContext.getParameter(seqPrefix+"specpath"+pathDescription); SpecificationNode node = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_STARTPOINT); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_PATH,path); // Now, get the number of children String y = variableContext.getParameter(seqPrefix+"specchildcount"+pathDescription); int childCount = Integer.parseInt(y); int j = 0; int w = 0; while (j < childCount) { String instanceDescription = "_"+Integer.toString(i)+"_"+Integer.toString(j); // Look for an insert or a delete at this point String instanceOp = seqPrefix+"specop"+instanceDescription; String z = variableContext.getParameter(instanceOp); String flavor; String type; String indexable; String match; SpecificationNode sn; if (z != null && z.equals("Delete")) { // Process the deletion as we gather j++; continue; } if (z != null && z.equals("Insert Here")) { // Process the insertion as we gather. 
flavor = variableContext.getParameter(seqPrefix+"specfl_i"+instanceDescription); indexable = ""; type = ""; String xxx = variableContext.getParameter(seqPrefix+"spectin_i"+instanceDescription); if (xxx.equals("file") || xxx.equals("directory")) type = xxx; else if (xxx.equals("indexable-file")) { indexable = "yes"; type = "file"; } else if (xxx.equals("unindexable-file")) { indexable = "no"; type = "file"; } match = variableContext.getParameter(seqPrefix+"specfile_i"+instanceDescription); sn = new SpecificationNode(flavor); if (type != null && type.length() > 0) sn.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_TYPE,type); if (indexable != null && indexable.length() > 0) sn.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_INDEXABLE,indexable); sn.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_FILESPEC,match); node.addChild(w++,sn); } flavor = variableContext.getParameter(seqPrefix+"specfl"+instanceDescription); type = variableContext.getParameter(seqPrefix+"specty"+instanceDescription); match = variableContext.getParameter(seqPrefix+"specfile"+instanceDescription); indexable = variableContext.getParameter(seqPrefix+"specin"+instanceDescription); sn = new SpecificationNode(flavor); if (type != null && type.length() > 0) sn.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_TYPE,type); if (indexable != null && indexable.length() > 0) sn.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_INDEXABLE,indexable); sn.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_FILESPEC,match); node.addChild(w++,sn); j++; } if (x != null && x.equals("Add")) { // Process adds to the end of the rules in-line String match = variableContext.getParameter(seqPrefix+"specfile"+pathDescription); String indexable = ""; String type = ""; String xxx = variableContext.getParameter(seqPrefix+"spectin"+pathDescription); if (xxx.equals("file") || xxx.equals("directory")) type = xxx; else if (xxx.equals("indexable-file")) { indexable = "yes"; type = "file"; } else if (xxx.equals("unindexable-file")) { indexable = "no"; type = "file"; } String flavor = variableContext.getParameter(seqPrefix+"specfl"+pathDescription); SpecificationNode sn = new SpecificationNode(flavor); if (type != null && type.length() > 0) sn.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_TYPE,type); if (indexable != null && indexable.length() > 0) sn.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_INDEXABLE,indexable); sn.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_FILESPEC,match); node.addChild(w,sn); } ds.addChild(ds.getChildCount(),node); i++; } // See if there's a global add operation String op = variableContext.getParameter(seqPrefix+"pathop"); if (op != null && op.equals("Add")) { String path = variableContext.getParameter(seqPrefix+"specpath"); SpecificationNode node = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_STARTPOINT); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_PATH,path); ds.addChild(ds.getChildCount(),node); // Now add in the defaults; these will be "include all directories" and "include all indexable 
files". SpecificationNode sn = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_INCLUDE); sn.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_TYPE,"file"); sn.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_INDEXABLE,"yes"); sn.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_FILESPEC,"*"); node.addChild(node.getChildCount(),sn); sn = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_INCLUDE); sn.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_TYPE,"directory"); sn.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_FILESPEC,"*"); node.addChild(node.getChildCount(),sn); } else if (op != null && op.equals("Up")) { // Strip off end String path = variableContext.getParameter(seqPrefix+"specpath"); int k = path.lastIndexOf("/"); if (k == -1) path = ""; else path = path.substring(0,k); currentContext.save(seqPrefix+"specpath",path); } else if (op != null && op.equals("AddToPath")) { String path = variableContext.getParameter(seqPrefix+"specpath"); String addon = variableContext.getParameter(seqPrefix+"pathaddon"); String typein = variableContext.getParameter(seqPrefix+"pathtypein"); if (addon != null && addon.length() > 0) { if (path.length() == 0) path = addon; else path += "/" + addon; } else if (typein != null && typein.length() > 0) { String trialPath = path; if (trialPath.length() == 0) trialPath = typein; else trialPath += "/" + typein; // Validate trial path try { trialPath = validateFolderName(trialPath); if (trialPath != null) path = trialPath; } catch (ManifoldCFException e) { // Effectively, this just means we can't add a typein to the path right now. 
} } currentContext.save(seqPrefix+"specpath",path); } } x = variableContext.getParameter(seqPrefix+"specmaxlength"); if (x != null) { // Delete max length entry int i = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_MAXLENGTH)) ds.removeChild(i); else i++; } if (x.length() > 0) { SpecificationNode node = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_MAXLENGTH); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_VALUE,x); ds.addChild(ds.getChildCount(),node); } } x = variableContext.getParameter(seqPrefix+"specsecurity"); if (x != null) { // Delete all security entries first int i = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_SECURITY)) ds.removeChild(i); else i++; } SpecificationNode node = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_SECURITY); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_VALUE,x); ds.addChild(ds.getChildCount(),node); } x = variableContext.getParameter(seqPrefix+"tokencount"); if (x != null) { // Delete all file specs first int i = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_ACCESS)) ds.removeChild(i); else i++; } int accessCount = Integer.parseInt(x); i = 0; while (i < accessCount) { String accessDescription = "_"+Integer.toString(i); String accessOpName = seqPrefix+"accessop"+accessDescription; x = variableContext.getParameter(accessOpName); if (x != null && x.equals("Delete")) { // Next row i++; continue; } // Get the stuff we need String accessSpec = variableContext.getParameter(seqPrefix+"spectoken"+accessDescription); SpecificationNode node = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_ACCESS); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_TOKEN,accessSpec); ds.addChild(ds.getChildCount(),node); i++; } String op = variableContext.getParameter(seqPrefix+"accessop"); if (op != null && op.equals("Add")) { String accessspec = variableContext.getParameter(seqPrefix+"spectoken"); SpecificationNode node = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_ACCESS); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_TOKEN,accessspec); ds.addChild(ds.getChildCount(),node); } } x = variableContext.getParameter(seqPrefix+"specsharesecurity"); if (x != null) { // Delete all security entries first int i = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_SHARESECURITY)) ds.removeChild(i); else i++; } SpecificationNode node = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_SHARESECURITY); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_VALUE,x); ds.addChild(ds.getChildCount(),node); } x = 
variableContext.getParameter(seqPrefix+"specparentfoldersecurity"); if (x != null) { // Delete all security entries first int i = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_PARENTFOLDERSECURITY)) ds.removeChild(i); else i++; } SpecificationNode node = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_PARENTFOLDERSECURITY); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_VALUE,x); ds.addChild(ds.getChildCount(),node); } String xc = variableContext.getParameter(seqPrefix+"specpathnameattribute"); if (xc != null) { // Delete old one int i = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_PATHNAMEATTRIBUTE)) ds.removeChild(i); else i++; } if (xc.length() > 0) { SpecificationNode node = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_PATHNAMEATTRIBUTE); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_VALUE,xc); ds.addChild(ds.getChildCount(),node); } } xc = variableContext.getParameter(seqPrefix+"specmappingcount"); if (xc != null) { // Delete old spec int i = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_PATHMAP)) ds.removeChild(i); else i++; } // Now, go through the data and assemble a new list. int mappingCount = Integer.parseInt(xc); // Gather up these i = 0; while (i < mappingCount) { String pathDescription = "_"+Integer.toString(i); String pathOpName = seqPrefix+"specmappingop"+pathDescription; xc = variableContext.getParameter(pathOpName); if (xc != null && xc.equals("Delete")) { // Skip to the next i++; continue; } // Inserts won't happen until the very end String match = variableContext.getParameter(seqPrefix+"specmatch"+pathDescription); String replace = variableContext.getParameter(seqPrefix+"specreplace"+pathDescription); SpecificationNode node = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_PATHMAP); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_MATCH,match); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_REPLACE,replace); ds.addChild(ds.getChildCount(),node); i++; } // Check for add xc = variableContext.getParameter(seqPrefix+"specmappingop"); if (xc != null && xc.equals("Add")) { String match = variableContext.getParameter(seqPrefix+"specmatch"); String replace = variableContext.getParameter(seqPrefix+"specreplace"); SpecificationNode node = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_PATHMAP); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_MATCH,match); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_REPLACE,replace); ds.addChild(ds.getChildCount(),node); } } xc = variableContext.getParameter(seqPrefix+"specfmapcount"); if (xc != null) { // Delete old spec int i = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i); if 
(sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_FILEMAP)) ds.removeChild(i); else i++; } // Now, go through the data and assemble a new list. int mappingCount = Integer.parseInt(xc); // Gather up these i = 0; while (i < mappingCount) { String pathDescription = "_"+Integer.toString(i); String pathOpName = seqPrefix+"specfmapop"+pathDescription; xc = variableContext.getParameter(pathOpName); if (xc != null && xc.equals("Delete")) { // Skip to the next i++; continue; } // Inserts won't happen until the very end String match = variableContext.getParameter(seqPrefix+"specfmapmatch"+pathDescription); String replace = variableContext.getParameter(seqPrefix+"specfmapreplace"+pathDescription); SpecificationNode node = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_FILEMAP); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_MATCH,match); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_REPLACE,replace); ds.addChild(ds.getChildCount(),node); i++; } // Check for add xc = variableContext.getParameter(seqPrefix+"specfmapop"); if (xc != null && xc.equals("Add")) { String match = variableContext.getParameter(seqPrefix+"specfmapmatch"); String replace = variableContext.getParameter(seqPrefix+"specfmapreplace"); SpecificationNode node = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_FILEMAP); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_MATCH,match); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_REPLACE,replace); ds.addChild(ds.getChildCount(),node); } } xc = variableContext.getParameter(seqPrefix+"specumapcount"); if (xc != null) { // Delete old spec int i = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_URIMAP)) ds.removeChild(i); else i++; } // Now, go through the data and assemble a new list. 
int mappingCount = Integer.parseInt(xc); // Gather up these i = 0; while (i < mappingCount) { String pathDescription = "_"+Integer.toString(i); String pathOpName = seqPrefix+"specumapop"+pathDescription; xc = variableContext.getParameter(pathOpName); if (xc != null && xc.equals("Delete")) { // Skip to the next i++; continue; } // Inserts won't happen until the very end String match = variableContext.getParameter(seqPrefix+"specumapmatch"+pathDescription); String replace = variableContext.getParameter(seqPrefix+"specumapreplace"+pathDescription); SpecificationNode node = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_URIMAP); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_MATCH,match); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_REPLACE,replace); ds.addChild(ds.getChildCount(),node); i++; } // Check for add xc = variableContext.getParameter(seqPrefix+"specumapop"); if (xc != null && xc.equals("Add")) { String match = variableContext.getParameter(seqPrefix+"specumapmatch"); String replace = variableContext.getParameter(seqPrefix+"specumapreplace"); SpecificationNode node = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_URIMAP); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_MATCH,match); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_REPLACE,replace); ds.addChild(ds.getChildCount(),node); } } return null; } /** View specification. * This method is called in the body section of a job's view page. Its purpose is to present the document * specification information to the user. The coder can presume that the HTML that is output from * this configuration will be within appropriate &lt;html&gt; and &lt;body&gt;tags. * The connector will be connected before this method can be called. *@param out is the output to which any HTML should be sent. *@param locale is the locale the output is preferred to be in. *@param ds is the current document specification for this job. *@param connectionSequenceNumber is the unique number of this connection within the job. */ @Override public void viewSpecification(IHTTPOutput out, Locale locale, Specification ds, int connectionSequenceNumber) throws ManifoldCFException, IOException { out.print( "<table class=\"displaytable\">\n" ); int i = 0; boolean seenAny = false; while (i < ds.getChildCount()) { SpecificationNode spn = ds.getChild(i++); if (spn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_STARTPOINT)) { if (seenAny == false) { seenAny = true; } out.print( " <tr>\n"+ " <td class=\"description\">\n"+ " <nobr>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(spn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_PATH))+":"+"</nobr>\n"+ " </td>\n"+ " <td class=\"value\">\n" ); int j = 0; while (j < spn.getChildCount()) { SpecificationNode sn = spn.getChild(j++); // This is "include" or "exclude" String nodeFlavor = sn.getType(); // This is the file/directory name match String filespec = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_FILESPEC); // This has a value of null, "", "file", or "directory". 
String nodeType = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_TYPE); if (nodeType == null) nodeType = ""; // This has a value of null, "", "yes", or "no". String ingestableFlag = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_INDEXABLE); if (ingestableFlag == null) ingestableFlag = ""; out.print( " <nobr>\n"+ " "+Integer.toString(j)+".\n"+ " "+(nodeFlavor.equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_INCLUDE)?"Include":"")+"\n"+ " "+(nodeFlavor.equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_EXCLUDE)?"Exclude":"")+"\n"+ " "+(ingestableFlag.equals("yes")?"&nbsp;indexable":"")+"\n"+ " "+(ingestableFlag.equals("no")?"&nbsp;un-indexable":"")+"\n"+ " "+(nodeType.equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.VALUE_FILE)?"&nbsp;file(s)":"")+"\n"+ " "+(nodeType.equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.VALUE_DIRECTORY)?"&nbsp;directory(s)":"")+"\n"+ " "+(nodeType.equals("")?"&nbsp;file(s)&nbsp;or&nbsp;directory(s)":"")+"&nbsp;matching&nbsp;\n"+ " "+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(filespec)+"\n"+ " </nobr>\n"+ " <br/>\n" ); } out.print( " </td>\n"+ " </tr>\n" ); } } if (seenAny == false) { out.print( " <tr><td class=\"message\" colspan=\"2\">" + Messages.getBodyString(locale,"SharedDriveConnector.NoDocumentsSpecified") + "</td></tr>\n" ); } out.print( " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n"+ "\n" ); // Find whether security is on or off i = 0; boolean securityOn = true; boolean shareSecurityOn = true; boolean parentFolderSecurityOn = false; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i++); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_SECURITY)) { String securityValue = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_VALUE); if (securityValue.equals("off")) securityOn = false; else if (securityValue.equals("on")) securityOn = true; } if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_SHARESECURITY)) { String securityValue = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_VALUE); if (securityValue.equals("off")) shareSecurityOn = false; else if (securityValue.equals("on")) shareSecurityOn = true; } if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_PARENTFOLDERSECURITY)) { String securityValue = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_VALUE); if (securityValue.equals("off")) parentFolderSecurityOn = false; else if (securityValue.equals("on")) parentFolderSecurityOn = true; } } out.print( "\n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.FileSecurity") + "</nobr></td>\n"+ " <td class=\"value\"><nobr>"+(securityOn?Messages.getBodyString(locale,"SharedDriveConnector.Enabled"):Messages.getBodyString(locale,"SharedDriveConnector.Disabled"))+"</nobr></td>\n"+ " </tr>\n"+ "\n"+ " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n" ); // Go through looking for access tokens seenAny = false; i = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i++); if 
(sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_ACCESS)) { if (seenAny == false) { out.print( " <tr><td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.FileAccessTokens") + "</nobr></td>\n"+ " <td class=\"value\">\n" ); seenAny = true; } String token = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_TOKEN); out.print( " <nobr>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(token)+"</nobr><br/>\n" ); } } if (seenAny) { out.print( " </td>\n"+ " </tr>\n" ); } else { out.print( " <tr><td class=\"message\" colspan=\"2\">" + Messages.getBodyString(locale,"SharedDriveConnector.NoFileAccessTokensSpecified") + "</td></tr>\n" ); } out.print( " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n"+ " \n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.ShareSecurity") + "</nobr></td>\n"+ " <td class=\"value\"><nobr>"+(shareSecurityOn?Messages.getBodyString(locale,"SharedDriveConnector.Enabled"):Messages.getBodyString(locale,"SharedDriveConnector.Disabled"))+"</nobr></td>\n"+ " </tr>\n"+ "\n"+ " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n"+ " \n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.ParentFolderSecurity") + "</nobr></td>\n"+ " <td class=\"value\"><nobr>"+(parentFolderSecurityOn?Messages.getBodyString(locale,"SharedDriveConnector.Enabled"):Messages.getBodyString(locale,"SharedDriveConnector.Disabled"))+"</nobr></td>\n"+ " </tr>\n"+ "\n"+ " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n" ); // Find the path-name metadata attribute name i = 0; String pathNameAttribute = ""; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i++); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_PATHNAMEATTRIBUTE)) { pathNameAttribute = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_VALUE); } } out.print( " <tr>\n" ); if (pathNameAttribute.length() > 0) { out.print( " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.PathNameMetadataAttribute") + "</nobr></td>\n"+ " <td class=\"value\"><nobr>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(pathNameAttribute)+"</nobr></td>\n" ); } else { out.print( " <td class=\"message\" colspan=\"2\">" + Messages.getBodyString(locale,"SharedDriveConnector.NoPathNameMetadataAttributeSpecified") + "</td>\n" ); } out.print( " </tr>\n"+ "\n"+ " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n"+ "\n"+ " <tr>\n"+ "\n" ); // Find the path-value mapping data i = 0; org.apache.manifoldcf.crawler.connectors.sharedrive.MatchMap matchMap = new org.apache.manifoldcf.crawler.connectors.sharedrive.MatchMap(); while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i++); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_PATHMAP)) { String pathMatch = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_MATCH); String pathReplace = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_REPLACE); matchMap.appendMatchPair(pathMatch,pathReplace); } } if (matchMap.getMatchCount() > 0) { out.print( " <td class=\"description\"><nobr>" + 
Messages.getBodyString(locale,"SharedDriveConnector.PathValueMapping") + "</nobr></td>\n"+ " <td class=\"value\">\n"+ " <table class=\"displaytable\">\n" ); i = 0; while (i < matchMap.getMatchCount()) { String matchString = matchMap.getMatchString(i); String replaceString = matchMap.getReplaceString(i); out.print( " <tr>\n"+ " <td class=\"value\"><nobr>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(matchString)+"</nobr></td>\n"+ " <td class=\"value\">==></td>\n"+ " <td class=\"value\"><nobr>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(replaceString)+"</nobr></td>\n"+ " </tr>\n" ); i++; } out.print( " </table>\n"+ " </td>\n" ); } else { out.print( " <td class=\"message\" colspan=\"2\">" + Messages.getBodyString(locale,"SharedDriveConnector.NoMappingsSpecified") + "</td>\n" ); } out.print( " </tr>\n"+ "\n"+ " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n"+ "\n"+ " <tr>\n" ); // Find the file name mapping data i = 0; org.apache.manifoldcf.crawler.connectors.sharedrive.MatchMap fileMap = new org.apache.manifoldcf.crawler.connectors.sharedrive.MatchMap(); while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i++); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_FILEMAP)) { String pathMatch = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_MATCH); String pathReplace = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_REPLACE); fileMap.appendMatchPair(pathMatch,pathReplace); } } if (fileMap.getMatchCount() > 0) { out.print( " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.FileNameMapping") + "</nobr></td>\n"+ " <td class=\"value\">\n"+ " <table class=\"displaytable\">\n" ); i = 0; while (i < fileMap.getMatchCount()) { String matchString = fileMap.getMatchString(i); String replaceString = fileMap.getReplaceString(i); out.print( " <tr>\n"+ " <td class=\"value\"><nobr>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(matchString)+"</nobr></td>\n"+ " <td class=\"value\">==></td>\n"+ " <td class=\"value\"><nobr>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(replaceString)+"</nobr></td>\n"+ " </tr>\n" ); i++; } out.print( " </table>\n"+ " </td>\n" ); } else { out.print( " <td class=\"message\" colspan=\"2\">" + Messages.getBodyString(locale,"SharedDriveConnector.NoFileNameMappingsSpecified") + "</td>\n" ); } out.print( " </tr>\n"+ "\n"+ " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n"+ "\n"+ " <tr>\n" ); // Find the url mapping data i = 0; org.apache.manifoldcf.crawler.connectors.sharedrive.MatchMap uriMap = new org.apache.manifoldcf.crawler.connectors.sharedrive.MatchMap(); while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i++); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_URIMAP)) { String pathMatch = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_MATCH); String pathReplace = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_REPLACE); uriMap.appendMatchPair(pathMatch,pathReplace); } } if (uriMap.getMatchCount() > 0) { out.print( " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.URLMappingColon") + "</nobr></td>\n"+ " <td class=\"value\">\n"+ " <table class=\"displaytable\">\n" ); i = 0; while (i < uriMap.getMatchCount()) 
{ String matchString = uriMap.getMatchString(i); String replaceString = uriMap.getReplaceString(i); out.print( " <tr>\n"+ " <td class=\"value\"><nobr>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(matchString)+"</nobr></td>\n"+ " <td class=\"value\">==></td>\n"+ " <td class=\"value\"><nobr>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(replaceString)+"</nobr></td>\n"+ " </tr>\n" ); i++; } out.print( " </table>\n"+ " </td>\n" ); } else { out.print( " <td class=\"message\" colspan=\"2\">" + Messages.getBodyString(locale,"SharedDriveConnector.NoURLMappingsSpecifiedWillProduceAFileIRI") + "</td>\n" ); } out.print( " </tr>\n"+ "\n"+ " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n"+ "\n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.MaximumDocumentLength") + "</nobr></td>\n"+ " <td class=\"value\">\n"+ " <nobr>\n" ); // Find the path-value mapping data i = 0; String maxLength = null; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i++); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_MAXLENGTH)) { maxLength = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_VALUE); } } if (maxLength == null || maxLength.length() == 0) maxLength = "Unlimited"; out.print( " "+maxLength+"\n"+ " </nobr>\n"+ " </td>\n"+ " </tr>\n"+ "</table>\n" ); } /* The following are additional methods used by the UI */ /** * given a server uri, return all shares * * @param serverURI - * @return an array of SmbFile */ public SmbFile[] getShareNames(String serverURI) throws ManifoldCFException { getSession(); SmbFile server = null; try { server = new SmbFile(serverURI,SingletonContext.getInstance().withCredentials(pa)); } catch (MalformedURLException e1) { throw new ManifoldCFException("MalformedURLException tossed",e1); } SmbFile[] shares = null; try { // check to make sure it's a server if (getFileType(server)==SmbFile.TYPE_SERVER) { shares = fileListFiles(server,new ShareFilter()); } } catch (SmbException e) { throw new ManifoldCFException("SmbException tossed: "+e.getMessage(),e); } return shares; } /** * Given a folder path, determine if the folder is in fact legal and accessible (and is a folder). * @param folder is the relative folder from the network root * @return the canonical folder name if valid, or null if not. 
* @throws ManifoldCFException */ public String validateFolderName(String folder) throws ManifoldCFException { getSession(); //create new connection by appending to the old connection String smburi = smbconnectionPath; String uri = smburi; if (folder.length() > 0) { uri = smburi + folder + "/"; } SmbFile currentDirectory = null; try { currentDirectory = new SmbFile(uri,SingletonContext.getInstance().withCredentials(pa)); } catch (MalformedURLException e1) { throw new ManifoldCFException("validateFolderName: Can't get parent file: " + uri,e1); } try { currentDirectory.connect(); if (fileIsDirectory(currentDirectory) == false) return null; String newCanonicalPath = currentDirectory.getCanonicalPath(); String rval = newCanonicalPath.substring(smburi.length()); if (rval.endsWith("/")) rval = rval.substring(0,rval.length()-1); return rval; } catch (SmbException se) { try { processSMBException(se, folder, "checking folder", "getting canonical path"); return null; } catch (ServiceInterruption si) { throw new ManifoldCFException("Service interruption: "+si.getMessage(),si); } } catch (MalformedURLException e) { throw new ManifoldCFException("MalformedURLException tossed: "+e.getMessage(),e); } catch (java.net.SocketTimeoutException e) { throw new ManifoldCFException("IOException tossed: "+e.getMessage(),e); } catch (InterruptedIOException e) { throw new ManifoldCFException("Interrupted: "+e.getMessage(),e,ManifoldCFException.INTERRUPTED); } catch (IOException e) { throw new ManifoldCFException("IOException tossed: "+e.getMessage(),e); } } /** * given a smb uri, return all children directories * * @param folder is the relative folder from the network root * @return array of child folder names * @throws ManifoldCFException */ public String[] getChildFolderNames(String folder) throws ManifoldCFException { getSession(); //create new connection by appending to the old connection String smburi = smbconnectionPath; String uri = smburi; if (folder.length() > 0) { uri = smburi + folder + "/"; } SmbFile currentDirectory = null; try { currentDirectory = new SmbFile(uri,SingletonContext.getInstance().withCredentials(pa)); } catch (MalformedURLException e1) { throw new ManifoldCFException("getChildFolderNames: Can't get parent file: " + uri,e1); } // add DFS support SmbFile[] children = null; try { currentDirectory.connect(); children = currentDirectory.listFiles(new DirectoryFilter()); } catch (SmbException se) { try { processSMBException(se, folder, "getting child folder names", "listing files"); children = new SmbFile[0]; } catch (ServiceInterruption si) { throw new ManifoldCFException("Service interruption: "+si.getMessage(),si); } } catch (MalformedURLException e) { throw new ManifoldCFException("MalformedURLException tossed: "+e.getMessage(),e); } catch (java.net.SocketTimeoutException e) { throw new ManifoldCFException("IOException tossed: "+e.getMessage(),e); } catch (InterruptedIOException e) { throw new ManifoldCFException("Interrupted: "+e.getMessage(),e,ManifoldCFException.INTERRUPTED); } catch (IOException e) { throw new ManifoldCFException("IOException tossed: "+e.getMessage(),e); } // populate a String array String[] directories = new String[children.length]; for (int i=0;i < children.length;i++){ String directoryName = children[i].getName(); // strip the trailing slash directoryName = directoryName.replaceAll("/",""); directories[i] = directoryName; } java.util.Arrays.sort(directories); return directories; } /** * inner class which returns only shares. 
 * used by listFiles(SmbFileFilter)
 *
 * @author James Maupin
 */
class ShareFilter implements SmbFileFilter
{
  /* (non-Javadoc)
   * @see jcifs.smb.SmbFileFilter#accept(jcifs.smb.SmbFile)
   */
  public boolean accept(SmbFile arg0) throws SmbException
  {
    // Accept only entries that represent shares
    return getFileType(arg0) == SmbFile.TYPE_SHARE;
  }
}

/**
 * inner class which returns only directories. used by listFiles(SmbFileFilter)
 *
 * @author James Maupin
 */
class DirectoryFilter implements SmbFileFilter
{
  /* (non-Javadoc)
   * @see jcifs.smb.SmbFileFilter#accept(jcifs.smb.SmbFile)
   */
  public boolean accept(SmbFile arg0) throws SmbException
  {
    int type = getFileType(arg0);
    // Accept shares, and filesystem entries that are directories
    return type == SmbFile.TYPE_SHARE ||
      (type == SmbFile.TYPE_FILESYSTEM && fileIsDirectory(arg0));
  }
}

/** This is the filter class that actually receives the files in batches.  We do it this way
* so that the client won't run out of memory loading a huge directory.
*/
protected class ProcessDocumentsFilter implements SmbFileFilter
{
  /** This is the activities object, where matching references will be logged */
  protected final IProcessActivity activities;
  /** Document specification */
  protected final Specification spec;
  /** Exceptions that we saw.  These are saved here so that they can be rethrown when done */
  protected ManifoldCFException lcfException = null;
  protected ServiceInterruption serviceInterruption = null;

  /** Constructor */
  public ProcessDocumentsFilter(IProcessActivity activities, Specification spec)
  {
    this.activities = activities;
    this.spec = spec;
  }

  /** Decide if we accept the file.  This is where we will actually do the work. */
  public boolean accept(SmbFile f) throws SmbException
  {
    // Once an exception has been recorded, skip all further children
    if (lcfException != null || serviceInterruption != null)
      return false;

    try
    {
      int type = f.getType();
      if (type != SmbFile.TYPE_SERVER && type != SmbFile.TYPE_FILESYSTEM && type != SmbFile.TYPE_SHARE)
        return false;
      String canonicalPath = getFileCanonicalPath(f);
      if (canonicalPath != null)
      {
        // manipulate path to include the DFS alias, not the literal path
        // String newPath = matchPrefix + canonicalPath.substring(matchReplace.length());
        String newPath = canonicalPath;

        // Check against the current specification.  This is a nicety to avoid queuing
        // documents that we will immediately turn around and remove.  However, if this
        // check was not here, everything should still function, provided the getDocumentVersions()
        // method does the right thing.
        boolean fileIsDirectory = fileIsDirectory(f);
        if (checkInclude(fileIsDirectory, newPath, spec))
        {
          if (fileIsDirectory)
          {
            if (Logging.connectors.isDebugEnabled())
              Logging.connectors.debug("JCIFS: Recorded path is '" + newPath + "' and is included.");
            activities.addDocumentReference(newPath);
          }
          else
          {
            long fileLength = fileLength(f);
            if (checkIncludeFile(fileLength, newPath, spec, activities))
            {
              if (Logging.connectors.isDebugEnabled())
                Logging.connectors.debug("JCIFS: Recorded path is '" + newPath + "' and is included.");
              activities.addDocumentReference(newPath);
            }
            else
            {
              if (Logging.connectors.isDebugEnabled())
                Logging.connectors.debug("JCIFS: Recorded path '" + newPath + "' is excluded!");
            }
          }
        }
        else
        {
          if (Logging.connectors.isDebugEnabled())
            Logging.connectors.debug("JCIFS: Recorded path '" + newPath + "' is excluded!");
        }
      }
      else
        Logging.connectors.debug("JCIFS: Excluding a child file because canonical path is null");

      return false;
    }
    catch (ManifoldCFException e)
    {
      if (lcfException == null)
        lcfException = e;
      return false;
    }
    catch (ServiceInterruption e)
    {
      if (serviceInterruption == null)
        serviceInterruption = e;
      return false;
    }
  }

  /** Check for exception, and throw if there is one */
  public void checkAndThrow() throws ServiceInterruption, ManifoldCFException
  {
    if (lcfException != null)
      throw lcfException;
    if (serviceInterruption != null)
      throw serviceInterruption;
  }
}

}
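// Illustrative sketch (not part of the connector): how a deferred-exception
// filter like ProcessDocumentsFilter above is typically driven. jcifs'
// listFiles() cannot propagate ManifoldCF exceptions out of accept(), so the
// filter records the first one and rethrows it once the listing completes.
// The 'directory' variable and the enclosing method are hypothetical.
//
//   ProcessDocumentsFilter filter = new ProcessDocumentsFilter(activities, spec);
//   directory.listFiles(filter); // accept() runs once per child, in batches
//   filter.checkAndThrow();      // rethrow any deferred exception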
apache/manifoldcf
connectors/jcifs/connector/src/main/java/org/apache/manifoldcf/crawler/connectors/sharedrive/SharedDriveConnector.java
Java
apache-2.0
211,260
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import unicode_literals

from six.moves.urllib import parse as urlparse

from keystoneclient import base
from keystoneclient.v3.contrib.oauth1 import utils

try:
    from oauthlib import oauth1
except ImportError:
    oauth1 = None


class RequestToken(base.Resource):
    def authorize(self, roles):
        try:
            retval = self.manager.authorize(self.id, roles)
        except Exception:
            # Authorization failed; surface that to the caller as None.
            retval = None
        return retval


class RequestTokenManager(base.CrudManager):
    """Manager class for manipulating identity OAuth request tokens."""

    resource_class = RequestToken

    def authorize(self, request_token, roles):
        """Authorize a request token with specific roles.

        Utilize Identity API operation:
        PUT /OS-OAUTH1/authorize/$request_token_id

        :param request_token: a request token that will be authorized, and
            can be exchanged for an access token.
        :param roles: a list of roles, that will be delegated to the user.
        """
        request_id = urlparse.quote(base.getid(request_token))
        endpoint = utils.OAUTH_PATH + '/authorize/%s' % request_id
        body = {'roles': [{'id': base.getid(r_id)} for r_id in roles]}
        return self._put(endpoint, body, "token")

    def create(self, consumer_key, consumer_secret, project):
        if oauth1 is None:
            # oauthlib is an optional dependency; fail loudly if it is absent
            # rather than with an AttributeError below.
            raise NotImplementedError('oauthlib package is required to '
                                      'create request tokens')
        endpoint = utils.OAUTH_PATH + '/request_token'
        headers = {'requested_project_id': base.getid(project)}
        oauth_client = oauth1.Client(consumer_key,
                                     client_secret=consumer_secret,
                                     signature_method=oauth1.SIGNATURE_HMAC,
                                     callback_uri="oob")
        # Sign against the full URL, then POST the signed headers to the
        # endpoint path.
        url = self.client.auth_url.rstrip("/") + endpoint
        url, headers, body = oauth_client.sign(url, http_method='POST',
                                               headers=headers)
        resp, body = self.client.post(endpoint, headers=headers)
        token = utils.get_oauth_token_from_body(resp.content)
        return self.resource_class(self, token)
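# Illustrative usage sketch (not part of this module): obtaining and then
# authorizing a request token. The 'keystone' client object and the
# consumer/role/project identifiers below are hypothetical placeholders.
#
#   request_token = keystone.oauth1.request_tokens.create(
#       consumer.id, consumer.secret, project_id)
#   authorized = request_token.authorize([role_id])
#   # 'authorized' is the authorized token resource, or None if the
#   # PUT /OS-OAUTH1/authorize request failed.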
alexpilotti/python-keystoneclient
keystoneclient/v3/contrib/oauth1/request_tokens.py
Python
apache-2.0
2,659
public class Tetrahedron extends ThreeDimensionalShapes {

    public Tetrahedron(double s) {
        super(s);
    }

    @Override
    public double areaOf3DShapes() {
        // Surface area of a regular tetrahedron: sqrt(3) * s^2
        return Math.sqrt(3) * getSide() * getSide();
    }

    @Override
    public double volumeOf3DShapes() {
        // Volume of a regular tetrahedron: s^3 / (6 * sqrt(2))
        return Math.pow(getSide(), 3) / (6 * Math.sqrt(2));
    }

    @Override
    public String toString() {
        return super.toString() + "=> Tetrahedron";
    }
}
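// Illustrative usage sketch (not part of the exercise; assumes the
// ThreeDimensionalShapes base class stores the side and exposes getSide()):
//
//   ThreeDimensionalShapes shape = new Tetrahedron(2.0);
//   System.out.println(shape);                    // "...=> Tetrahedron"
//   System.out.println(shape.areaOf3DShapes());   // sqrt(3)*2*2    ~ 6.93
//   System.out.println(shape.volumeOf3DShapes()); // 8/(6*sqrt(2)) ~ 0.94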
Kinza59/educate-for-oop
java/book-dietel/kinza/10_9_h_Tetrahedron.java
Java
apache-2.0
453
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pdfbox.examples.interactive.form; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.apache.pdfbox.cos.COSDictionary; import org.apache.pdfbox.cos.COSName; import org.apache.pdfbox.pdmodel.PDAppearanceContentStream; import org.apache.pdfbox.pdmodel.PDDocument; import org.apache.pdfbox.pdmodel.PDPage; import org.apache.pdfbox.pdmodel.PDPageContentStream; import org.apache.pdfbox.pdmodel.common.PDRectangle; import org.apache.pdfbox.pdmodel.font.PDType1Font; import org.apache.pdfbox.pdmodel.font.Standard14Fonts.FontName; import org.apache.pdfbox.pdmodel.graphics.color.PDColor; import org.apache.pdfbox.pdmodel.graphics.color.PDDeviceRGB; import org.apache.pdfbox.pdmodel.interactive.annotation.PDAnnotationWidget; import org.apache.pdfbox.pdmodel.interactive.annotation.PDAppearanceCharacteristicsDictionary; import org.apache.pdfbox.pdmodel.interactive.annotation.PDAppearanceDictionary; import org.apache.pdfbox.pdmodel.interactive.annotation.PDAppearanceEntry; import org.apache.pdfbox.pdmodel.interactive.annotation.PDAppearanceStream; import org.apache.pdfbox.pdmodel.interactive.annotation.PDBorderStyleDictionary; import org.apache.pdfbox.pdmodel.interactive.form.PDAcroForm; import org.apache.pdfbox.pdmodel.interactive.form.PDRadioButton; /** * Example to create radio buttons. 
* * @author Tilman Hausherr */ public class CreateRadioButtons { private CreateRadioButtons() { } public static void main(String[] args) throws IOException { try (PDDocument document = new PDDocument()) { PDPage page = new PDPage(PDRectangle.A4); document.addPage(page); PDAcroForm acroForm = new PDAcroForm(document); // if you want to see what Adobe does, activate this, open with Adobe // save the file, and then open it with PDFDebugger //acroForm.setNeedAppearances(true) document.getDocumentCatalog().setAcroForm(acroForm); List<String> options = Arrays.asList("a", "b", "c"); PDRadioButton radioButton = new PDRadioButton(acroForm); radioButton.setPartialName("MyRadioButton"); radioButton.setExportValues(options); PDAppearanceCharacteristicsDictionary appearanceCharacteristics = new PDAppearanceCharacteristicsDictionary(new COSDictionary()); appearanceCharacteristics.setBorderColour(new PDColor(new float[] { 1, 0, 0 }, PDDeviceRGB.INSTANCE)); appearanceCharacteristics.setBackground(new PDColor(new float[]{0, 1, 0.3f}, PDDeviceRGB.INSTANCE)); // no caption => round // with caption => see checkbox example List<PDAnnotationWidget> widgets = new ArrayList<>(); for (int i = 0; i < options.size(); i++) { PDAnnotationWidget widget = new PDAnnotationWidget(); widget.setRectangle(new PDRectangle(30, PDRectangle.A4.getHeight() - 40 - i * 35, 30, 30)); widget.setAppearanceCharacteristics(appearanceCharacteristics); PDBorderStyleDictionary borderStyleDictionary = new PDBorderStyleDictionary(); borderStyleDictionary.setWidth(2); borderStyleDictionary.setStyle(PDBorderStyleDictionary.STYLE_SOLID); widget.setBorderStyle(borderStyleDictionary); widget.setPage(page); COSDictionary apNDict = new COSDictionary(); apNDict.setItem(COSName.Off, createAppearanceStream(document, widget, false)); apNDict.setItem(options.get(i), createAppearanceStream(document, widget, true)); PDAppearanceDictionary appearance = new PDAppearanceDictionary(); PDAppearanceEntry appearanceNEntry = new PDAppearanceEntry(apNDict); appearance.setNormalAppearance(appearanceNEntry); widget.setAppearance(appearance); widget.setAppearanceState("Off"); // don't forget this, or button will be invisible widgets.add(widget); page.getAnnotations().add(widget); } radioButton.setWidgets(widgets); acroForm.getFields().add(radioButton); // Set the texts PDType1Font helvetica = new PDType1Font(FontName.HELVETICA); try (PDPageContentStream contents = new PDPageContentStream(document, page)) { for (int i = 0; i < options.size(); i++) { contents.beginText(); contents.setFont(helvetica, 15); contents.newLineAtOffset(70, PDRectangle.A4.getHeight() - 30 - i * 35); contents.showText(options.get(i)); contents.endText(); } } radioButton.setValue("c"); document.save("target/RadioButtonsSample.pdf"); } } private static PDAppearanceStream createAppearanceStream( final PDDocument document, PDAnnotationWidget widget, boolean on) throws IOException { PDRectangle rect = widget.getRectangle(); PDAppearanceStream onAP = new PDAppearanceStream(document); onAP.setBBox(new PDRectangle(rect.getWidth(), rect.getHeight())); try (PDAppearanceContentStream onAPCS = new PDAppearanceContentStream(onAP)) { PDAppearanceCharacteristicsDictionary appearanceCharacteristics = widget.getAppearanceCharacteristics(); PDColor backgroundColor = appearanceCharacteristics.getBackground(); PDColor borderColor = appearanceCharacteristics.getBorderColour(); float lineWidth = getLineWidth(widget); onAPCS.setBorderLine(lineWidth, widget.getBorderStyle(), widget.getBorder()); 
onAPCS.setNonStrokingColor(backgroundColor); float radius = Math.min(rect.getWidth() / 2, rect.getHeight() / 2); drawCircle(onAPCS, rect.getWidth() / 2, rect.getHeight() / 2, radius); onAPCS.fill(); onAPCS.setStrokingColor(borderColor); drawCircle(onAPCS, rect.getWidth() / 2, rect.getHeight() / 2, radius - lineWidth / 2); onAPCS.stroke(); if (on) { onAPCS.setNonStrokingColor(0f); drawCircle(onAPCS, rect.getWidth() / 2, rect.getHeight() / 2, (radius - lineWidth) / 2); onAPCS.fill(); } } return onAP; } static float getLineWidth(PDAnnotationWidget widget) { PDBorderStyleDictionary bs = widget.getBorderStyle(); if (bs != null) { return bs.getWidth(); } return 1; } static void drawCircle(PDAppearanceContentStream cs, float x, float y, float r) throws IOException { // http://stackoverflow.com/a/2007782/535646 float magic = r * 0.551784f; cs.moveTo(x, y + r); cs.curveTo(x + magic, y + r, x + r, y + magic, x + r, y); cs.curveTo(x + r, y - magic, x + magic, y - r, x, y - r); cs.curveTo(x - magic, y - r, x - r, y - magic, x - r, y); cs.curveTo(x - r, y + magic, x - magic, y + r, x, y + r); cs.closePath(); } }
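Editor's note on the example above: the 0.551784 factor in drawCircle is the cubic-Bézier "circle constant"; the textbook value is kappa = 4*(sqrt(2) - 1)/3 ≈ 0.5523, which makes each quarter-arc touch the circle at its midpoint, and the slightly smaller value trades that exact fit for a lower maximum radial error. The sketch below is a minimal companion (not part of the original example; the class name is illustrative) showing how the saved form can be read back, assuming PDFBox 3.x:

import java.io.File;
import org.apache.pdfbox.Loader;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.interactive.form.PDAcroForm;
import org.apache.pdfbox.pdmodel.interactive.form.PDRadioButton;

public class ReadRadioButtons
{
    public static void main(String[] args) throws Exception
    {
        // Load the file written by CreateRadioButtons above.
        try (PDDocument document = Loader.loadPDF(new File("target/RadioButtonsSample.pdf")))
        {
            PDAcroForm acroForm = document.getDocumentCatalog().getAcroForm();
            PDRadioButton radioButton = (PDRadioButton) acroForm.getField("MyRadioButton");
            // getValue() returns the export value of the selected widget ("c" here);
            // getOnValues() lists every state the group can take besides "Off".
            System.out.println("selected: " + radioButton.getValue());
            System.out.println("possible: " + radioButton.getOnValues());
        }
    }
}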
apache/pdfbox
examples/src/main/java/org/apache/pdfbox/examples/interactive/form/CreateRadioButtons.java
Java
apache-2.0
8,207
/* * Copyright 2018 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.auto.value.processor; import static com.google.common.truth.Truth.assertThat; import static com.google.testing.compile.CompilationSubject.assertThat; import static com.google.testing.compile.CompilationSubject.compilations; import static com.google.testing.compile.Compiler.javac; import static java.util.stream.Collectors.joining; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.truth.Expect; import com.google.testing.compile.Compilation; import com.google.testing.compile.JavaFileObjects; import java.io.IOException; import java.io.PrintWriter; import java.io.UncheckedIOException; import java.io.Writer; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.util.Arrays; import java.util.Set; import javax.annotation.processing.AbstractProcessor; import javax.annotation.processing.RoundEnvironment; import javax.annotation.processing.SupportedAnnotationTypes; import javax.lang.model.SourceVersion; import javax.lang.model.element.Element; import javax.lang.model.element.TypeElement; import javax.lang.model.element.TypeParameterElement; import javax.lang.model.util.ElementFilter; import javax.tools.JavaFileObject; import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** @author emcmanus@google.com (Éamonn McManus) */ @RunWith(JUnit4.class) public class AutoValueCompilationTest { @Rule public final Expect expect = Expect.create(); @Test public void simpleSuccess() { // Positive test case that ensures we generate the expected code for at least one case. // Most AutoValue code-generation tests are functional, meaning that we check that the generated // code does the right thing rather than checking what it looks like, but this test checks that // we are not generating correct but weird code. 
JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz {", " public abstract long buh();", "", " public static Baz create(long buh) {", " return new AutoValue_Baz(buh);", " }", "}"); JavaFileObject expectedOutput = JavaFileObjects.forSourceLines( "foo.bar.AutoValue_Baz", "package foo.bar;", "", GeneratedImport.importGeneratedAnnotationType(), "", "@Generated(\"" + AutoValueProcessor.class.getName() + "\")", "final class AutoValue_Baz extends Baz {", " private final long buh;", "", " AutoValue_Baz(long buh) {", " this.buh = buh;", " }", "", " @Override public long buh() {", " return buh;", " }", "", " @Override public String toString() {", " return \"Baz{\"", " + \"buh=\" + buh", " + \"}\";", " }", "", " @Override public boolean equals(Object o) {", " if (o == this) {", " return true;", " }", " if (o instanceof Baz) {", " Baz that = (Baz) o;", " return this.buh == that.buh();", " }", " return false;", " }", "", " @Override public int hashCode() {", " int h$ = 1;", " h$ *= 1000003;", " h$ ^= (int) ((buh >>> 32) ^ buh);", " return h$;", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor()) .withOptions("-A" + Nullables.NULLABLE_OPTION + "=") .compile(javaFileObject); assertThat(compilation) .generatedSourceFile("foo.bar.AutoValue_Baz") .hasSourceEquivalentTo(expectedOutput); } @Test public void importTwoWays() { // Test that referring to the same class in two different ways does not confuse the import logic // into thinking it is two different classes and that therefore it can't import. The code here // is nonsensical but successfully reproduces a real problem, which is that a TypeMirror that is // extracted using Elements.getTypeElement(name).asType() does not compare equal to one that is // extracted from ExecutableElement.getReturnType(), even though Types.isSameType considers them // equal. So unless we are careful, the java.util.Arrays that we import explicitly to use its // methods will appear different from the java.util.Arrays that is the return type of the // arrays() method here. 
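    // Editor's note (illustrative sketch; `elements`, `types`, and `arraysMethod` are
    // hypothetical names): the reliable comparison for the situation described above is
    // Types.isSameType, not TypeMirror.equals():
    //
    //   Elements elements = processingEnv.getElementUtils();
    //   Types types = processingEnv.getTypeUtils();
    //   TypeMirror byName = elements.getTypeElement("java.util.Arrays").asType();
    //   TypeMirror byReturn = arraysMethod.getReturnType(); // from an ExecutableElement
    //   boolean unreliable = byName.equals(byReturn);        // may be false for the same type
    //   boolean reliable = types.isSameType(byName, byReturn); // what import logic must use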
JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "import java.util.Arrays;", "", "@AutoValue", "public abstract class Baz {", " @SuppressWarnings(\"mutable\")", " public abstract int[] ints();", " public abstract Arrays arrays();", "", " public static Baz create(int[] ints, Arrays arrays) {", " return new AutoValue_Baz(ints, arrays);", " }", "}"); JavaFileObject expectedOutput = JavaFileObjects.forSourceLines( "foo.bar.AutoValue_Baz", "package foo.bar;", "", "import java.util.Arrays;", GeneratedImport.importGeneratedAnnotationType(), "", "@Generated(\"" + AutoValueProcessor.class.getName() + "\")", "final class AutoValue_Baz extends Baz {", " private final int[] ints;", " private final Arrays arrays;", "", " AutoValue_Baz(int[] ints, Arrays arrays) {", " if (ints == null) {", " throw new NullPointerException(\"Null ints\");", " }", " this.ints = ints;", " if (arrays == null) {", " throw new NullPointerException(\"Null arrays\");", " }", " this.arrays = arrays;", " }", "", " @SuppressWarnings(\"mutable\")", " @Override public int[] ints() {", " return ints;", " }", "", " @Override public Arrays arrays() {", " return arrays;", " }", "", " @Override public String toString() {", " return \"Baz{\"", " + \"ints=\" + Arrays.toString(ints) + \", \"", " + \"arrays=\" + arrays", " + \"}\";", " }", "", " @Override public boolean equals(Object o) {", " if (o == this) {", " return true;", " }", " if (o instanceof Baz) {", " Baz that = (Baz) o;", " return Arrays.equals(this.ints, (that instanceof AutoValue_Baz) " + "? ((AutoValue_Baz) that).ints : that.ints())", " && this.arrays.equals(that.arrays());", " }", " return false;", " }", "", " @Override public int hashCode() {", " int h$ = 1;", " h$ *= 1000003;", " h$ ^= Arrays.hashCode(ints);", " h$ *= 1000003;", " h$ ^= arrays.hashCode();", " return h$;", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor()) .withOptions("-A" + Nullables.NULLABLE_OPTION + "=") .compile(javaFileObject); assertThat(compilation) .generatedSourceFile("foo.bar.AutoValue_Baz") .hasSourceEquivalentTo(expectedOutput); } @Test public void testNoWarningsFromGenerics() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "import com.google.auto.value.AutoValue;", "@AutoValue", "public abstract class Baz<T extends Number, U extends T> {", " public abstract T t();", " public abstract U u();", " public static <T extends Number, U extends T> Baz<T, U> create(T t, U u) {", " return new AutoValue_Baz<T, U>(t, u);", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor()) .withOptions("-Xlint:-processing", "-implicit:none") .compile(javaFileObject); assertThat(compilation).succeededWithoutWarnings(); } @Test public void testNestedParameterizedTypesWithTypeAnnotations() { JavaFileObject annotFileObject = JavaFileObjects.forSourceLines( "foo.bar.Annot", "package foo.bar;", "", "import java.lang.annotation.ElementType;", "import java.lang.annotation.Target;", "", "@Target(ElementType.TYPE_USE)", "public @interface Annot {", " int value();", "}"); JavaFileObject outerFileObject = JavaFileObjects.forSourceLines( "foo.baz.OuterWithTypeParam", "package foo.baz;", "", "public class OuterWithTypeParam<T extends Number> {", " public class InnerWithTypeParam<U> {}", "}"); JavaFileObject nestyFileObject = JavaFileObjects.forSourceLines( "com.example.Nesty", "package com.example;", "", "import 
com.google.auto.value.AutoValue;", "import foo.bar.Annot;", "import foo.baz.OuterWithTypeParam;", "", "@AutoValue", "abstract class Nesty {", " abstract @Annot(1) OuterWithTypeParam<@Annot(2) Double>", " .@Annot(3) InnerWithTypeParam<@Annot(4) String> inner();", "", " static Nesty of(", " @Annot(1) OuterWithTypeParam<@Annot(2) Double>", " .@Annot(3) InnerWithTypeParam<@Annot(4) String> inner) {", " return new AutoValue_Nesty(inner);", " }", "}"); JavaFileObject expectedOutput = JavaFileObjects.forSourceLines( "com.example.AutoValue_Nesty", "package com.example;", "", "import foo.bar.Annot;", "import foo.baz.OuterWithTypeParam;", GeneratedImport.importGeneratedAnnotationType(), "", "@Generated(\"com.google.auto.value.processor.AutoValueProcessor\")", "final class AutoValue_Nesty extends Nesty {", " private final @Annot(1) OuterWithTypeParam<@Annot(2) Double>" + ".@Annot(3) InnerWithTypeParam<@Annot(4) String> inner;", "", " AutoValue_Nesty(", " @Annot(1) OuterWithTypeParam<@Annot(2) Double>" + ".@Annot(3) InnerWithTypeParam<@Annot(4) String> inner) {", " if (inner == null) {", " throw new NullPointerException(\"Null inner\");", " }", " this.inner = inner;", " }", "", " @Override", " @Annot(1) OuterWithTypeParam<@Annot(2) Double>" + ".@Annot(3) InnerWithTypeParam<@Annot(4) String> inner() {", " return inner;", " }", "", " @Override", " public String toString() {", " return \"Nesty{\"", " + \"inner=\" + inner", " + \"}\";", " }", "", " @Override", " public boolean equals(Object o) {", " if (o == this) {", " return true;", " }", " if (o instanceof Nesty) {", " Nesty that = (Nesty) o;", " return this.inner.equals(that.inner());", " }", " return false;", " }", "", " @Override", " public int hashCode() {", " int h$ = 1;", " h$ *= 1000003;", " h$ ^= inner.hashCode();", " return h$;", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor()) .withOptions( "-Xlint:-processing", "-implicit:none", "-A" + Nullables.NULLABLE_OPTION + "=") .compile(annotFileObject, outerFileObject, nestyFileObject); assertThat(compilation).succeededWithoutWarnings(); assertThat(compilation) .generatedSourceFile("com.example.AutoValue_Nesty") .hasSourceEquivalentTo(expectedOutput); } // Tests that type annotations are correctly copied from the bounds of type parameters in the // @AutoValue class to the bounds of the corresponding parameters in the generated class. For // example, if we have `@AutoValue abstract class Foo<T extends @NullableType Object>`, then the // generated class should be `class AutoValue_Foo<T extends @NullableType Object> extends Foo<T>`. // Some buggy versions of javac do not report type annotations correctly in this context. // AutoValue can't copy them if it can't see them, so we make a special annotation processor to // detect if we are in the presence of this bug and if so we don't fail. @Test public void testTypeParametersWithAnnotationsOnBounds() { @SupportedAnnotationTypes("*") class CompilerBugProcessor extends AbstractProcessor { boolean checkedAnnotationsOnTypeBounds; boolean reportsAnnotationsOnTypeBounds; @Override public SourceVersion getSupportedSourceVersion() { return SourceVersion.latestSupported(); } @Override public boolean process(Set<? 
extends TypeElement> annotations, RoundEnvironment roundEnv) { if (roundEnv.processingOver()) { TypeElement test = processingEnv.getElementUtils().getTypeElement("com.example.Test"); TypeParameterElement t = test.getTypeParameters().get(0); this.checkedAnnotationsOnTypeBounds = true; this.reportsAnnotationsOnTypeBounds = !t.getBounds().get(0).getAnnotationMirrors().isEmpty(); } return false; } } CompilerBugProcessor compilerBugProcessor = new CompilerBugProcessor(); JavaFileObject nullableTypeFileObject = JavaFileObjects.forSourceLines( "foo.bar.NullableType", "package foo.bar;", "", "import java.lang.annotation.ElementType;", "import java.lang.annotation.Target;", "", "@Target(ElementType.TYPE_USE)", "public @interface NullableType {}"); JavaFileObject autoValueFileObject = JavaFileObjects.forSourceLines( "com.example.Test", "package com.example;", "", "import com.google.auto.value.AutoValue;", "import foo.bar.NullableType;", "", "@AutoValue", "abstract class Test<T extends @NullableType Object & @NullableType Cloneable> {}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), compilerBugProcessor) .withOptions("-Xlint:-processing", "-implicit:none") .compile(nullableTypeFileObject, autoValueFileObject); assertThat(compilation).succeededWithoutWarnings(); assertThat(compilerBugProcessor.checkedAnnotationsOnTypeBounds).isTrue(); if (compilerBugProcessor.reportsAnnotationsOnTypeBounds) { assertThat(compilation) .generatedSourceFile("com.example.AutoValue_Test") .contentsAsUtf8String() .contains( "class AutoValue_Test<T extends @NullableType Object & @NullableType Cloneable>" + " extends Test<T> {"); } } // In the following few tests, see AutoValueProcessor.validateMethods for why unrecognized // abstract methods provoke only a warning rather than an error. Compilation will fail anyway // because the generated class is not abstract and does not implement the unrecognized methods. 
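  // Editor's note: a "recognized" abstract method here is either a property getter
  // (no parameters, non-void return, e.g. `abstract int foo();`) or a Builder
  // converter (e.g. `abstract Builder toBuilder();`). The methods in the next two
  // tests match neither shape, so AutoValue itself only warns, and javac then fails
  // because the generated concrete subclass leaves them unimplemented.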
@Test public void testAbstractVoid() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "import com.google.auto.value.AutoValue;", "@AutoValue", "public abstract class Baz {", " public abstract void foo();", "}"); Compilation compilation = javac().withProcessors(new AutoValueProcessor()).compile(javaFileObject); assertThat(compilation).failed(); assertThat(compilation) .hadWarningContaining( "Abstract method is neither a property getter nor a Builder converter") .inFile(javaFileObject) .onLineContaining("void foo()"); } @Test public void testAbstractWithParams() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "import com.google.auto.value.AutoValue;", "@AutoValue", "public abstract class Baz {", " public abstract int foo(int bar);", "}"); Compilation compilation = javac().withProcessors(new AutoValueProcessor()).compile(javaFileObject); assertThat(compilation).failed(); assertThat(compilation) .hadWarningContaining( "Abstract method is neither a property getter nor a Builder converter") .inFile(javaFileObject) .onLineContaining("int foo(int bar)"); } @Test public void testPrimitiveArrayWarning() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "import com.google.auto.value.AutoValue;", "@AutoValue", "public abstract class Baz {", " public abstract byte[] bytes();", " public static Baz create(byte[] bytes) {", " return new AutoValue_Baz(bytes);", " }", "}"); Compilation compilation = javac().withProcessors(new AutoValueProcessor()).compile(javaFileObject); assertThat(compilation).succeeded(); assertThat(compilation) .hadWarningContaining( "An @AutoValue property that is a primitive array returns the original array") .inFile(javaFileObject) .onLineContaining("byte[] bytes()"); } @Test public void testPrimitiveArrayWarningFromParent() { // If the array-valued property is defined by an ancestor then we shouldn't try to attach // the warning to the method that defined it, but rather to the @AutoValue class itself. 
JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "import com.google.auto.value.AutoValue;", "public abstract class Baz {", " public abstract byte[] bytes();", "", " @AutoValue", " public abstract static class BazChild extends Baz {", " public static BazChild create(byte[] bytes) {", " return new AutoValue_Baz_BazChild(bytes);", " }", " }", "}"); Compilation compilation = javac().withProcessors(new AutoValueProcessor()).compile(javaFileObject); assertThat(compilation).succeeded(); assertThat(compilation) .hadWarningContainingMatch( "An @AutoValue property that is a primitive array returns the original array" + ".*foo\\.bar\\.Baz\\.bytes") .inFile(javaFileObject) .onLineContaining("BazChild extends Baz"); } @Test public void testPrimitiveArrayWarningSuppressed() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "import com.google.auto.value.AutoValue;", "@AutoValue", "public abstract class Baz {", " @SuppressWarnings(\"mutable\")", " public abstract byte[] bytes();", " public static Baz create(byte[] bytes) {", " return new AutoValue_Baz(bytes);", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor()) .withOptions("-Xlint:-processing", "-implicit:none") .compile(javaFileObject); assertThat(compilation).succeededWithoutWarnings(); } @Test public void autoValueMustBeClass() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public interface Baz {", " String buh();", "}"); Compilation compilation = javac().withProcessors(new AutoValueProcessor()).compile(javaFileObject); assertThat(compilation) .hadErrorContaining("@AutoValue only applies to classes") .inFile(javaFileObject) .onLineContaining("interface Baz"); } @Test public void autoValueMustNotBeFinal() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public final class Baz {", " public Baz create() {", " return new AutoValue_Baz();", " }", "}"); Compilation compilation = javac().withProcessors(new AutoValueProcessor()).compile(javaFileObject); assertThat(compilation) .hadErrorContaining("@AutoValue class must not be final") .inFile(javaFileObject) .onLineContaining("class Baz"); } @Test public void autoValueMustBeStatic() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "public class Baz {", " @AutoValue", " public abstract class NotStatic {", " public abstract String buh();", " public NotStatic create(String buh) {", " return new AutoValue_Baz_NotStatic(buh);", " }", " }", "}"); Compilation compilation = javac().withProcessors(new AutoValueProcessor()).compile(javaFileObject); assertThat(compilation) .hadErrorContaining("Nested @AutoValue class must be static") .inFile(javaFileObject) .onLineContaining("abstract class NotStatic"); } @Test public void autoValueMustNotBePrivate() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "public class Baz {", " @AutoValue", " private abstract static class Private {", " public abstract String buh();", " public Private create(String buh) {", " return new AutoValue_Baz_Private(buh);", " }", " }", "}"); Compilation compilation = 
javac().withProcessors(new AutoValueProcessor()).compile(javaFileObject); assertThat(compilation) .hadErrorContaining("@AutoValue class must not be private") .inFile(javaFileObject) .onLineContaining("class Private"); } @Test public void autoValueMustNotBeNestedInPrivate() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "public class Baz {", " private static class Private {", " @AutoValue", " abstract static class Nested {", " public abstract String buh();", " public Nested create(String buh) {", " return new AutoValue_Baz_Private_Nested(buh);", " }", " }", " }", "}"); Compilation compilation = javac().withProcessors(new AutoValueProcessor()).compile(javaFileObject); assertThat(compilation) .hadErrorContaining("@AutoValue class must not be nested in a private class") .inFile(javaFileObject) .onLineContaining("class Nested"); } @Test public void autoValueMustHaveNoArgConstructor() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz {", " Baz(int buh) {}", "", " public abstract int buh();", "}"); Compilation compilation = javac().withProcessors(new AutoValueProcessor()).compile(javaFileObject); assertThat(compilation) .hadErrorContaining("@AutoValue class must have a non-private no-arg constructor") .inFile(javaFileObject) .onLineContaining("class Baz"); } @Test public void autoValueMustHaveVisibleNoArgConstructor() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz {", " private Baz() {}", "", " public abstract int buh();", "}"); Compilation compilation = javac().withProcessors(new AutoValueProcessor()).compile(javaFileObject); assertThat(compilation) .hadErrorContaining("@AutoValue class must have a non-private no-arg constructor") .inFile(javaFileObject) .onLineContaining("class Baz"); } @Test public void noMultidimensionalPrimitiveArrays() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz {", " public abstract int[][] ints();", "", " public static Baz create(int[][] ints) {", " return new AutoValue_Baz(ints);", " }", "}"); Compilation compilation = javac().withProcessors(new AutoValueProcessor()).compile(javaFileObject); assertThat(compilation) .hadErrorContaining( "@AutoValue class cannot define an array-valued property " + "unless it is a primitive array") .inFile(javaFileObject) .onLineContaining("int[][] ints()"); } @Test public void noObjectArrays() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz {", " public abstract String[] strings();", "", " public static Baz create(String[] strings) {", " return new AutoValue_Baz(strings);", " }", "}"); Compilation compilation = javac().withProcessors(new AutoValueProcessor()).compile(javaFileObject); assertThat(compilation) .hadErrorContaining( "@AutoValue class cannot define an array-valued property " + "unless it is a primitive array") .inFile(javaFileObject) .onLineContaining("String[] strings()"); } @Test public void annotationOnInterface() { JavaFileObject javaFileObject = 
JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public interface Baz {}"); Compilation compilation = javac().withProcessors(new AutoValueProcessor()).compile(javaFileObject); assertThat(compilation) .hadErrorContaining("AutoValue only applies to classes") .inFile(javaFileObject) .onLineContaining("interface Baz"); } @Test public void annotationOnEnum() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public enum Baz {}"); Compilation compilation = javac().withProcessors(new AutoValueProcessor()).compile(javaFileObject); assertThat(compilation) .hadErrorContaining("AutoValue only applies to classes") .inFile(javaFileObject) .onLineContaining("enum Baz"); } @Test public void extendAutoValue() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Outer", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "public class Outer {", " @AutoValue", " static abstract class Parent {", " static Parent create(int randomProperty) {", " return new AutoValue_Outer_Parent(randomProperty);", " }", "", " abstract int randomProperty();", " }", "", " @AutoValue", " static abstract class Child extends Parent {", " static Child create(int randomProperty) {", " return new AutoValue_Outer_Child(randomProperty);", " }", "", " abstract int randomProperty();", " }", "}"); Compilation compilation = javac().withProcessors(new AutoValueProcessor()).compile(javaFileObject); assertThat(compilation) .hadErrorContaining("may not extend") .inFile(javaFileObject) .onLineContaining("Child extends Parent"); } @Test public void bogusSerialVersionUID() { String[] mistakes = { "final long serialVersionUID = 1234L", // not static "static long serialVersionUID = 1234L", // not final "static final Long serialVersionUID = 1234L", // not long "static final long serialVersionUID = (Long) 1234L", // not a compile-time constant }; for (String mistake : mistakes) { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz implements java.io.Serializable {", " " + mistake + ";", "", " public abstract int foo();", "}"); Compilation compilation = javac().withProcessors(new AutoValueProcessor()).compile(javaFileObject); expect .about(compilations()) .that(compilation) .hadErrorContaining("serialVersionUID must be a static final long compile-time constant") .inFile(javaFileObject) .onLineContaining(mistake); } } @Test public void nonExistentSuperclass() { // The main purpose of this test is to check that AutoValueProcessor doesn't crash the // compiler in this case. 
JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Existent extends NonExistent {", "}"); Compilation compilation = javac().withProcessors(new AutoValueProcessor()).compile(javaFileObject); assertThat(compilation) .hadErrorContaining("NonExistent") .inFile(javaFileObject) .onLineContaining("NonExistent"); } @Test public void cannotImplementAnnotation() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.RetentionImpl", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "import java.lang.annotation.Retention;", "import java.lang.annotation.RetentionPolicy;", "", "@AutoValue", "public abstract class RetentionImpl implements Retention {", " public static Retention create(RetentionPolicy policy) {", " return new AutoValue_RetentionImpl(policy);", " }", "", " @Override public Class<? extends Retention> annotationType() {", " return Retention.class;", " }", "", " @Override public boolean equals(Object o) {", " return (o instanceof Retention && value().equals(((Retention) o).value()));", " }", "", " @Override public int hashCode() {", " return (\"value\".hashCode() * 127) ^ value().hashCode();", " }", "}"); Compilation compilation = javac().withProcessors(new AutoValueProcessor()).compile(javaFileObject); assertThat(compilation) .hadErrorContaining("may not be used to implement an annotation interface") .inFile(javaFileObject) .onLineContaining("RetentionImpl implements Retention"); } @Test public void missingPropertyType() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz {", " public abstract MissingType missingType();", "}"); Compilation compilation = javac().withProcessors(new AutoValueProcessor()).compile(javaFileObject); assertThat(compilation) .hadErrorContaining("MissingType") .inFile(javaFileObject) .onLineContaining("MissingType"); } @Test public void missingGenericPropertyType() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz {", " public abstract MissingType<?> missingType();", "}"); Compilation compilation = javac().withProcessors(new AutoValueProcessor()).compile(javaFileObject); assertThat(compilation) .hadErrorContaining("MissingType") .inFile(javaFileObject) .onLineContaining("MissingType"); } @Test public void missingComplexGenericPropertyType() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "import java.util.Map;", "import java.util.Set;", "", "@AutoValue", "public abstract class Baz {", " public abstract Map<Set<?>, MissingType<?>> missingType();", "}"); Compilation compilation = javac().withProcessors(new AutoValueProcessor()).compile(javaFileObject); assertThat(compilation) .hadErrorContaining("MissingType") .inFile(javaFileObject) .onLineContaining("MissingType"); } @Test public void missingSuperclassGenericParameter() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz<T extends MissingType<?>> {", " public abstract int foo();", "}"); Compilation compilation = 
javac().withProcessors(new AutoValueProcessor()).compile(javaFileObject); assertThat(compilation) .hadErrorContaining("MissingType") .inFile(javaFileObject) .onLineContaining("MissingType"); } @Test public void nullablePrimitive() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz {", " @interface Nullable {}", " public abstract @Nullable int foo();", "}"); Compilation compilation = javac().withProcessors(new AutoValueProcessor()).compile(javaFileObject); assertThat(compilation) .hadErrorContaining("Primitive types cannot be @Nullable") .inFile(javaFileObject) .onLineContaining("@Nullable int"); } @Test public void correctBuilder() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "import com.google.common.base.Optional;", "import com.google.common.collect.ImmutableMap;", "", "import java.util.ArrayList;", "import java.util.List;", "import java.util.Map;", "import javax.annotation.Nullable;", "", "@AutoValue", "public abstract class Baz<T extends Number> {", " public abstract int anInt();", " @SuppressWarnings(\"mutable\")", " public abstract byte[] aByteArray();", " @SuppressWarnings(\"mutable\")", " @Nullable public abstract int[] aNullableIntArray();", " public abstract List<T> aList();", " public abstract ImmutableMap<T, String> anImmutableMap();", " public abstract Optional<String> anOptionalString();", " public abstract NestedAutoValue<T> aNestedAutoValue();", "", " public abstract Builder<T> toBuilder();", "", " @AutoValue.Builder", " public abstract static class Builder<T extends Number> {", " public abstract Builder<T> anInt(int x);", " public abstract Builder<T> aByteArray(byte[] x);", " public abstract Builder<T> aNullableIntArray(@Nullable int[] x);", " public abstract Builder<T> aList(List<T> x);", " public abstract Builder<T> anImmutableMap(Map<T, String> x);", " public abstract ImmutableMap.Builder<T, String> anImmutableMapBuilder();", " public abstract Builder<T> anOptionalString(Optional<String> s);", " public abstract Builder<T> anOptionalString(String s);", " public abstract NestedAutoValue.Builder<T> aNestedAutoValueBuilder();", "", " public Builder<T> aList(ArrayList<T> x) {", // ArrayList should not be imported in the generated class. 
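            // Editor's note: this concrete overload stays on Baz.Builder and is not
            // overridden in the generated AutoValue_Baz.Builder, so java.util.ArrayList
            // never appears in the generated source; the expected output below therefore
            // must not contain an ArrayList import.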
" return aList((List<T>) x);", " }", "", " public abstract Optional<Integer> anInt();", " public abstract List<T> aList();", " public abstract ImmutableMap<T, String> anImmutableMap();", "", " public abstract Baz<T> build();", " }", "", " public static <T extends Number> Builder<T> builder() {", " return AutoValue_Baz.builder();", " }", "}"); JavaFileObject nestedJavaFileObject = JavaFileObjects.forSourceLines( "foo.bar.NestedAutoValue", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class NestedAutoValue<T extends Number> {", " public abstract T t();", "", " public abstract Builder<T> toBuilder();", "", " @AutoValue.Builder", " public abstract static class Builder<T extends Number> {", " public abstract Builder<T> t(T t);", " public abstract NestedAutoValue<T> build();", " }", "", " public static <T extends Number> Builder<T> builder() {", " return AutoValue_NestedAutoValue.builder();", " }", "}"); JavaFileObject expectedOutput = JavaFileObjects.forSourceLines( "foo.bar.AutoValue_Baz", "package foo.bar;", "", "import com.google.common.base.Optional;", "import com.google.common.collect.ImmutableMap;", "import java.util.Arrays;", "import java.util.List;", "import java.util.Map;", sorted( GeneratedImport.importGeneratedAnnotationType(), "import javax.annotation.Nullable;"), "", "@Generated(\"" + AutoValueProcessor.class.getName() + "\")", "final class AutoValue_Baz<T extends Number> extends Baz<T> {", " private final int anInt;", " private final byte[] aByteArray;", " private final int[] aNullableIntArray;", " private final List<T> aList;", " private final ImmutableMap<T, String> anImmutableMap;", " private final Optional<String> anOptionalString;", " private final NestedAutoValue<T> aNestedAutoValue;", "", " private AutoValue_Baz(", " int anInt,", " byte[] aByteArray,", " @Nullable int[] aNullableIntArray,", " List<T> aList,", " ImmutableMap<T, String> anImmutableMap,", " Optional<String> anOptionalString,", " NestedAutoValue<T> aNestedAutoValue) {", " this.anInt = anInt;", " this.aByteArray = aByteArray;", " this.aNullableIntArray = aNullableIntArray;", " this.aList = aList;", " this.anImmutableMap = anImmutableMap;", " this.anOptionalString = anOptionalString;", " this.aNestedAutoValue = aNestedAutoValue;", " }", "", " @Override public int anInt() {", " return anInt;", " }", "", " @SuppressWarnings(\"mutable\")", " @Override public byte[] aByteArray() {", " return aByteArray;", " }", "", " @SuppressWarnings(\"mutable\")", " @Nullable", " @Override public int[] aNullableIntArray() {", " return aNullableIntArray;", " }", "", " @Override public List<T> aList() {", " return aList;", " }", "", " @Override public ImmutableMap<T, String> anImmutableMap() {", " return anImmutableMap;", " }", "", " @Override public Optional<String> anOptionalString() {", " return anOptionalString;", " }", "", " @Override public NestedAutoValue<T> aNestedAutoValue() {", " return aNestedAutoValue;", " }", "", " @Override public String toString() {", " return \"Baz{\"", " + \"anInt=\" + anInt + \", \"", " + \"aByteArray=\" + Arrays.toString(aByteArray) + \", \"", " + \"aNullableIntArray=\" + Arrays.toString(aNullableIntArray) + \", \"", " + \"aList=\" + aList + \", \"", " + \"anImmutableMap=\" + anImmutableMap + \", \"", " + \"anOptionalString=\" + anOptionalString + \", \"", " + \"aNestedAutoValue=\" + aNestedAutoValue", " + \"}\";", " }", "", " @Override public boolean equals(Object o) {", " if (o == this) {", " return true;", " }", " if (o instanceof Baz) {", 
" Baz<?> that = (Baz<?>) o;", " return this.anInt == that.anInt()", " && Arrays.equals(this.aByteArray, " + "(that instanceof AutoValue_Baz) " + "? ((AutoValue_Baz<?>) that).aByteArray : that.aByteArray())", " && Arrays.equals(this.aNullableIntArray, " + "(that instanceof AutoValue_Baz) " + "? ((AutoValue_Baz<?>) that).aNullableIntArray : that.aNullableIntArray())", " && this.aList.equals(that.aList())", " && this.anImmutableMap.equals(that.anImmutableMap())", " && this.anOptionalString.equals(that.anOptionalString())", " && this.aNestedAutoValue.equals(that.aNestedAutoValue());", " }", " return false;", " }", "", " @Override public int hashCode() {", " int h$ = 1;", " h$ *= 1000003;", " h$ ^= anInt;", " h$ *= 1000003;", " h$ ^= Arrays.hashCode(aByteArray);", " h$ *= 1000003;", " h$ ^= Arrays.hashCode(aNullableIntArray);", " h$ *= 1000003;", " h$ ^= aList.hashCode();", " h$ *= 1000003;", " h$ ^= anImmutableMap.hashCode();", " h$ *= 1000003;", " h$ ^= anOptionalString.hashCode();", " h$ *= 1000003;", " h$ ^= aNestedAutoValue.hashCode();", " return h$;", " }", "", " @Override public Baz.Builder<T> toBuilder() {", " return new Builder<T>(this);", " }", "", " static final class Builder<T extends Number> extends Baz.Builder<T> {", " private int anInt;", " private byte[] aByteArray;", " private int[] aNullableIntArray;", " private List<T> aList;", " private ImmutableMap.Builder<T, String> anImmutableMapBuilder$;", " private ImmutableMap<T, String> anImmutableMap;", " private Optional<String> anOptionalString = Optional.absent();", " private NestedAutoValue.Builder<T> aNestedAutoValueBuilder$;", " private NestedAutoValue<T> aNestedAutoValue;", " private byte set$0;", "", " Builder() {", " }", "", " private Builder(Baz<T> source) {", " this.anInt = source.anInt();", " this.aByteArray = source.aByteArray();", " this.aNullableIntArray = source.aNullableIntArray();", " this.aList = source.aList();", " this.anImmutableMap = source.anImmutableMap();", " this.anOptionalString = source.anOptionalString();", " this.aNestedAutoValue = source.aNestedAutoValue();", " set$0 = (byte) 0x1;", " }", "", " @Override", " public Baz.Builder<T> anInt(int anInt) {", " this.anInt = anInt;", " set$0 |= 0x1", " return this;", " }", "", " @Override", " public Optional<Integer> anInt() {", " if ((set$0 & 0x1) == 0) {", " return Optional.absent();", " }", " return Optional.of(anInt);", " }", "", " @Override", " public Baz.Builder<T> aByteArray(byte[] aByteArray) {", " if (aByteArray == null) {", " throw new NullPointerException(\"Null aByteArray\");", " }", " this.aByteArray = aByteArray;", " return this;", " }", "", " @Override", " public Baz.Builder<T> aNullableIntArray(@Nullable int[] aNullableIntArray) {", " this.aNullableIntArray = aNullableIntArray;", " return this;", " }", "", " @Override", " public Baz.Builder<T> aList(List<T> aList) {", " if (aList == null) {", " throw new NullPointerException(\"Null aList\");", " }", " this.aList = aList;", " return this;", " }", "", " @Override", " public List<T> aList() {", " if (this.aList == null) {", " throw new IllegalStateException(\"Property \\\"aList\\\" has not been set\");", " }", " return aList;", " }", "", " @Override", " public Baz.Builder<T> anImmutableMap(Map<T, String> anImmutableMap) {", " if (anImmutableMapBuilder$ != null) {", " throw new IllegalStateException(" + "\"Cannot set anImmutableMap after calling anImmutableMapBuilder()\");", " }", " this.anImmutableMap = ImmutableMap.copyOf(anImmutableMap);", " return this;", " }", "", " @Override", " public 
ImmutableMap.Builder<T, String> anImmutableMapBuilder() {", " if (anImmutableMapBuilder$ == null) {", " if (anImmutableMap == null) {", " anImmutableMapBuilder$ = ImmutableMap.builder();", " } else {", " anImmutableMapBuilder$ = ImmutableMap.builder();", " anImmutableMapBuilder$.putAll(anImmutableMap);", " anImmutableMap = null;", " }", " }", " return anImmutableMapBuilder$;", " }", "", " @Override", " public ImmutableMap<T, String> anImmutableMap() {", " if (anImmutableMapBuilder$ != null) {", " return anImmutableMapBuilder$.buildOrThrow();", " }", " if (anImmutableMap == null) {", " anImmutableMap = ImmutableMap.of();", " }", " return anImmutableMap;", " }", "", " @Override", " public Baz.Builder<T> anOptionalString(Optional<String> anOptionalString) {", " if (anOptionalString == null) {", " throw new NullPointerException(\"Null anOptionalString\");", " }", " this.anOptionalString = anOptionalString;", " return this;", " }", "", " @Override", " public Baz.Builder<T> anOptionalString(String anOptionalString) {", " this.anOptionalString = Optional.of(anOptionalString);", " return this;", " }", "", " @Override", " public NestedAutoValue.Builder<T> aNestedAutoValueBuilder() {", " if (aNestedAutoValueBuilder$ == null) {", " if (aNestedAutoValue == null) {", " aNestedAutoValueBuilder$ = NestedAutoValue.builder();", " } else {", " aNestedAutoValueBuilder$ = aNestedAutoValue.toBuilder();", " aNestedAutoValue = null;", " }", " }", " return aNestedAutoValueBuilder$;", " }", "", " @Override", " public Baz<T> build() {", " if (anImmutableMapBuilder$ != null) {", " this.anImmutableMap = anImmutableMapBuilder$.buildOrThrow();", " } else if (this.anImmutableMap == null) {", " this.anImmutableMap = ImmutableMap.of();", " }", " if (aNestedAutoValueBuilder$ != null) {", " this.aNestedAutoValue = aNestedAutoValueBuilder$.build();", " } else if (this.aNestedAutoValue == null) {", " NestedAutoValue.Builder<T> aNestedAutoValue$builder = " + "NestedAutoValue.builder();", " this.aNestedAutoValue = aNestedAutoValue$builder.build();", " }", " if (set$0 != 0x1", " || this.aByteArray == null", " || this.aList == null) {", " StringBuilder missing = new StringBuilder();", " if ((set$0 & 0x1) == 0) {", " missing.append(\" anInt\");", " }", " if (this.aByteArray == null) {", " missing.append(\" aByteArray\");", " }", " if (this.aList == null) {", " missing.append(\" aList\");", " }", " throw new IllegalStateException(\"Missing required properties:\" + missing);", " }", " return new AutoValue_Baz<T>(", " this.anInt,", " this.aByteArray,", " this.aNullableIntArray,", " this.aList,", " this.anImmutableMap,", " this.anOptionalString,", " this.aNestedAutoValue);", " }", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor()) .withOptions( "-Xlint:-processing", "-implicit:none", "-A" + Nullables.NULLABLE_OPTION + "=") .compile(javaFileObject, nestedJavaFileObject); assertThat(compilation).succeededWithoutWarnings(); assertThat(compilation) .generatedSourceFile("foo.bar.AutoValue_Baz") .hasSourceEquivalentTo(expectedOutput); } @Test public void autoValueBuilderOnTopLevelClass() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Builder", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue.Builder", "public interface Builder {", " Builder foo(int x);", " Object build();", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) 
.hadErrorContaining("can only be applied to a class or interface inside") .inFile(javaFileObject) .onLineContaining("public interface Builder"); } @Test public void autoValueBuilderNotInsideAutoValue() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "public abstract class Baz {", " abstract int foo();", "", " static Builder builder() {", " return new AutoValue_Baz.Builder();", " }", "", " @AutoValue.Builder", " public interface Builder {", " Builder foo(int x);", " Baz build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining("can only be applied to a class or interface inside") .inFile(javaFileObject) .onLineContaining("public interface Builder"); } @Test public void autoValueBuilderNotStatic() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Example", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "class Example {", " @AutoValue", " abstract static class Baz {", " abstract int foo();", "", " static Builder builder() {", " return new AutoValue_Example_Baz.Builder();", " }", "", " @AutoValue.Builder", " abstract class Builder {", " abstract Builder foo(int x);", " abstract Baz build();", " }", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining("@AutoValue.Builder cannot be applied to a non-static class") .inFile(javaFileObject) .onLineContaining("abstract class Builder"); } @Test public void autoValueBuilderMustHaveNoArgConstructor() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Example", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "class Example {", " @AutoValue", " abstract static class Baz {", " abstract int foo();", "", " static Builder builder() {", " return new AutoValue_Example_Baz.Builder();", " }", "", " @AutoValue.Builder", " abstract static class Builder {", " Builder(int defaultFoo) {}", " abstract Builder foo(int x);", " abstract Baz build();", " }", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining("@AutoValue.Builder class must have a non-private no-arg constructor") .inFile(javaFileObject) .onLineContaining("class Builder"); } @Test public void autoValueBuilderOnEnum() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz {", " abstract int foo();", "", " static Builder builder() {", " return null;", " }", "", " @AutoValue.Builder", " public enum Builder {}", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining("can only apply to a class or an interface") .inFile(javaFileObject) .onLineContaining("public enum Builder"); } @Test public void autoValueBuilderDuplicate() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz {", " @AutoValue.Builder", " 
public interface Builder1 {", " Baz build();", " }", "", " @AutoValue.Builder", " public interface Builder2 {", " Baz build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining("already has a Builder: foo.bar.Baz.Builder1") .inFile(javaFileObject) .onLineContaining("public interface Builder2"); } @Test public void autoValueBuilderMissingSetter() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz {", " abstract int blim();", " abstract String blam();", "", " @AutoValue.Builder", " public interface Builder {", " Builder blam(String x);", " Baz build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining("with this signature: foo.bar.Baz.Builder blim(int)") .inFile(javaFileObject) .onLineContaining("public interface Builder"); } @Test public void autoValueBuilderMissingSetterUsingSetPrefix() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz {", " abstract int blim();", " abstract String blam();", "", " @AutoValue.Builder", " public interface Builder {", " Builder setBlam(String x);", " Baz build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining("with this signature: foo.bar.Baz.Builder setBlim(int)") .inFile(javaFileObject) .onLineContaining("public interface Builder"); } @Test public void autoValueBuilderWrongTypeSetter() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz {", " abstract int blim();", " abstract String blam();", "", " @AutoValue.Builder", " public interface Builder {", " Builder blim(String x);", " Builder blam(String x);", " Baz build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining( "Parameter type java.lang.String of setter method should be int " + "to match property method foo.bar.Baz.blim()") .inFile(javaFileObject) .onLineContaining("Builder blim(String x)"); } @Test public void autoValueBuilderWrongTypeSetterWithCopyOf() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "import com.google.common.collect.ImmutableList;", "", "@AutoValue", "public abstract class Baz {", " abstract String blim();", " abstract ImmutableList<String> blam();", "", " @AutoValue.Builder", " public interface Builder {", " Builder blim(String x);", " Builder blam(String x);", " Baz build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining( "Parameter type java.lang.String of setter method should be" + " com.google.common.collect.ImmutableList<java.lang.String> to match 
property" + " method foo.bar.Baz.blam(), or it should be a type that can be passed to" + " ImmutableList.copyOf") .inFile(javaFileObject) .onLineContaining("Builder blam(String x)"); } @Test public void autoValueBuilderWrongTypeSetterWithCopyOfGenericallyWrong() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "import com.google.common.collect.ImmutableList;", "import java.util.Collection;", "", "@AutoValue", "public abstract class Baz {", " abstract String blim();", " abstract ImmutableList<String> blam();", "", " @AutoValue.Builder", " public interface Builder {", " Builder blim(String x);", " Builder blam(Collection<Integer> x);", " Baz build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining( "Parameter type java.util.Collection<java.lang.Integer> of setter method should be" + " com.google.common.collect.ImmutableList<java.lang.String> to match property" + " method foo.bar.Baz.blam(), or it should be a type that can be passed to" + " ImmutableList.copyOf to produce" + " com.google.common.collect.ImmutableList<java.lang.String>") .inFile(javaFileObject) .onLineContaining("Builder blam(Collection<Integer> x)"); } @Test public void autoValueBuilderWrongTypeSetterWithGetPrefix() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz {", " abstract int getBlim();", " abstract String getBlam();", "", " @AutoValue.Builder", " public interface Builder {", " Builder blim(String x);", " Builder blam(String x);", " Baz build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining( "Parameter type java.lang.String of setter method should be int " + "to match property method foo.bar.Baz.getBlim()") .inFile(javaFileObject) .onLineContaining("Builder blim(String x)"); } @Test public void autoValueBuilderNullableSetterForNonNullable() { JavaFileObject nullableFileObject = JavaFileObjects.forSourceLines( "foo.bar.Nullable", "package foo.bar;", "", "import java.lang.annotation.ElementType;", "import java.lang.annotation.Target;", "", "@Target(ElementType.TYPE_USE)", "public @interface Nullable {}"); JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz {", " abstract String notNull();", "", " @AutoValue.Builder", " public interface Builder {", " Builder setNotNull(@Nullable String x);", " Baz build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject, nullableFileObject); assertThat(compilation) .hadErrorContaining( "Parameter of setter method is @Nullable but property method" + " foo.bar.Baz.notNull() is not") .inFile(javaFileObject) .onLineContaining("setNotNull"); } // Check that we get a helpful error message if some of your properties look like getters but // others don't. 
@Test public void autoValueBuilderBeansConfusion() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Item", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Item {", " abstract String getTitle();", " abstract boolean hasThumbnail();", "", " @AutoValue.Builder", " public interface Builder {", " Builder setTitle(String title);", " Builder setHasThumbnail(boolean t);", " Item build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining( "Method setTitle does not correspond to a property method of foo.bar.Item") .inFile(javaFileObject) .onLineContaining("Builder setTitle(String title)"); assertThat(compilation) .hadNoteContaining("hasThumbnail") .inFile(javaFileObject) .onLineContaining("Builder setTitle(String title)"); } @Test public void autoValueBuilderExtraSetter() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz {", " abstract String blam();", "", " @AutoValue.Builder", " public interface Builder {", " Builder blim(int x);", " Builder blam(String x);", " Baz build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining("Method blim does not correspond to a property method of foo.bar.Baz") .inFile(javaFileObject) .onLineContaining("Builder blim(int x)"); } @Test public void autoValueBuilderSetPrefixAndNoSetPrefix() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz {", " abstract int blim();", " abstract String blam();", "", " @AutoValue.Builder", " public interface Builder {", " Builder blim(int x);", " Builder setBlam(String x);", " Baz build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining("If any setter methods use the setFoo convention then all must") .inFile(javaFileObject) .onLineContaining("Builder blim(int x)"); } @Test public void autoValueBuilderSetterReturnType() { // We do allow the return type of a setter to be a supertype of the builder type, to support // step builders. But we don't allow it to be Object. 
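    // Editor's note (illustrative sketch, hypothetical names): an accepted supertype
    // return looks like this, which is what makes step builders possible:
    //
    //   interface Buildable { Baz build(); }
    //
    //   @AutoValue.Builder
    //   public interface Builder extends Buildable {
    //     Buildable blim(int x); // returns a supertype of Builder: allowed
    //   }
    //
    // Plain Object, as in the test below, carries no such relationship to the builder
    // and is rejected.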
JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz {", " abstract int blim();", "", " @AutoValue.Builder", " public interface Builder {", " Object blim(int x);", " Baz build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining("Setter methods must return foo.bar.Baz.Builder") .inFile(javaFileObject) .onLineContaining("Object blim(int x)"); } @Test public void autoValueBuilderWrongTypeGetter() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz<T, U> {", " abstract T blim();", " abstract U blam();", "", " @AutoValue.Builder", " public interface Builder<T, U> {", " Builder<T, U> blim(T x);", " Builder<T, U> blam(U x);", " T blim();", " T blam();", " Baz<T, U> build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContainingMatch( "Method matches a property of foo\\.bar\\.Baz<T, ?U> but has return type T instead of" + " U") .inFile(javaFileObject) .onLineContaining("T blam()"); // The <T, ?U> is because we're depending on TypeMirror.toString(), and the JDK actually spells // this as <T,U> with no space. While it's not completely sound to expect a given string from // TypeMirror.toString(), in practice it's hard to imagine that it would be anything other // than "foo.bar.Baz<T,U>" or "foo.bar.Baz<T, U>" given the specification. 
} @Test public void autoValueBuilderPropertyBuilderInvalidType() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz<T, U> {", " abstract String blim();", "", " @AutoValue.Builder", " public interface Builder<T, U> {", " StringBuilder blimBuilder();", " Baz<T, U> build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining( "Method looks like a property builder, but it returns java.lang.StringBuilder which " + "does not have a non-static build() or buildOrThrow() method") .inFile(javaFileObject) .onLineContaining("StringBuilder blimBuilder()"); } @Test public void autoValueBuilderPropertyBuilderNullable() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "import com.google.common.collect.ImmutableList;", "", "@AutoValue", "public abstract class Baz<T, U> {", " @interface Nullable {}", " abstract @Nullable ImmutableList<String> strings();", "", " @AutoValue.Builder", " public interface Builder<T, U> {", " ImmutableList.Builder<String> stringsBuilder();", " Baz<T, U> build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining("Property strings is @Nullable so it cannot have a property builder") .inFile(javaFileObject) .onLineContaining("stringsBuilder()"); } @Test public void autoValueBuilderPropertyBuilderNullableType() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "import com.google.common.collect.ImmutableList;", "import java.lang.annotation.ElementType;", "import java.lang.annotation.Target;", "", "@AutoValue", "public abstract class Baz<T, U> {", " @Target(ElementType.TYPE_USE)", " @interface Nullable {}", " abstract @Nullable ImmutableList<String> strings();", "", " @AutoValue.Builder", " public interface Builder<T, U> {", " ImmutableList.Builder<String> stringsBuilder();", " Baz<T, U> build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining("Property strings is @Nullable so it cannot have a property builder") .inFile(javaFileObject) .onLineContaining("stringsBuilder()"); } @Test public void autoValueBuilderPropertyBuilderWrongCollectionType() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "import com.google.common.collect.ImmutableList;", "import com.google.common.collect.ImmutableSet;", "", "@AutoValue", "public abstract class Baz<T, U> {", " abstract ImmutableList<T> blim();", "", " @AutoValue.Builder", " public interface Builder<T, U> {", " ImmutableSet.Builder<T> blimBuilder();", " Baz<T, U> build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining( "Property builder for blim has type com.google.common.collect.ImmutableSet.Builder " + "whose build() method returns 
com.google.common.collect.ImmutableSet<T> " + "instead of com.google.common.collect.ImmutableList<T>") .inFile(javaFileObject) .onLineContaining("ImmutableSet.Builder<T> blimBuilder()"); } @Test public void autoValueBuilderPropertyBuilderWeirdBuilderType() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "import com.google.common.collect.ImmutableSet;", "", "@AutoValue", "public abstract class Baz<T, U> {", " abstract Integer blim();", "", " @AutoValue.Builder", " public interface Builder<T, U> {", " int blimBuilder();", " Baz<T, U> build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining( "Method looks like a property builder, but its return type is not a class or interface") .inFile(javaFileObject) .onLineContaining("int blimBuilder()"); } @Test public void autoValueBuilderPropertyBuilderWeirdBuiltType() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "import com.google.common.collect.ImmutableSet;", "", "@AutoValue", "public abstract class Baz<T, U> {", " abstract int blim();", "", " @AutoValue.Builder", " public interface Builder<T, U> {", " Integer blimBuilder();", " Baz<T, U> build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining( "Method looks like a property builder, but the type of property blim is not a class " + "or interface") .inFile(javaFileObject) .onLineContaining("Integer blimBuilder()"); } @Test public void autoValueBuilderPropertyBuilderHasNoBuild() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "import com.google.common.collect.ImmutableSet;", "", "@AutoValue", "public abstract class Baz<T, U> {", " abstract String blim();", "", " @AutoValue.Builder", " public interface Builder<T, U> {", " StringBuilder blimBuilder();", " Baz<T, U> build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining( "Method looks like a property builder, but it returns java.lang.StringBuilder which " + "does not have a non-static build() or buildOrThrow() method") .inFile(javaFileObject) .onLineContaining("StringBuilder blimBuilder()"); } @Test public void autoValueBuilderPropertyBuilderHasStaticBuild() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "import com.google.common.collect.ImmutableSet;", "", "@AutoValue", "public abstract class Baz<T, U> {", " abstract String blim();", "", " public static class StringFactory {", " public static String build() {", " return null;", " }", " }", "", " @AutoValue.Builder", " public interface Builder<T, U> {", " StringFactory blimBuilder();", " Baz<T, U> build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining( "Method looks like a property builder, but it returns foo.bar.Baz.StringFactory which 
" + "does not have a non-static build() or buildOrThrow() method") .inFile(javaFileObject) .onLineContaining("StringFactory blimBuilder()"); } @Test public void autoValueBuilderPropertyBuilderReturnsWrongType() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "import com.google.common.collect.ImmutableSet;", "import java.util.List;", "", "@AutoValue", "public abstract class Baz<E> {", " abstract List<E> blim();", "", " public static class ListFactory<E> {", " public List<? extends E> build() {", " return null;", " }", " }", "", " @AutoValue.Builder", " public interface Builder<E> {", " ListFactory<E> blimBuilder();", " Baz<E> build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining( "Property builder for blim has type foo.bar.Baz.ListFactory whose build() method " + "returns java.util.List<? extends E> instead of java.util.List<E>") .inFile(javaFileObject) .onLineContaining("ListFactory<E> blimBuilder()"); } @Test public void autoValueBuilderPropertyBuilderCantConstruct() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "import com.google.common.collect.ImmutableSet;", "", "@AutoValue", "public abstract class Baz<E> {", " abstract String blim();", "", " public static class StringFactory {", " private StringFactory() {}", "", " public String build() {", " return null;", " }", " }", "", " @AutoValue.Builder", " public interface Builder<E> {", " StringFactory blimBuilder();", " Baz<E> build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining( "Method looks like a property builder, but its type foo.bar.Baz.StringFactory " + "does not have a public constructor and java.lang.String does not have a static " + "builder() or newBuilder() method that returns foo.bar.Baz.StringFactory") .inFile(javaFileObject) .onLineContaining("StringFactory blimBuilder()"); } @Test public void autoValueBuilderPropertyBuilderCantReconstruct() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz<E> {", " abstract String blim();", " abstract Builder<E> toBuilder();", "", " public static class StringFactory {", " public String build() {", " return null;", " }", " }", "", " @AutoValue.Builder", " public interface Builder<E> {", " StringFactory blimBuilder();", " Baz<E> build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining( "Property builder method returns foo.bar.Baz.StringFactory but there is no way to make" + " that type from java.lang.String: java.lang.String does not have a non-static" + " toBuilder() method that returns foo.bar.Baz.StringFactory, and" + " foo.bar.Baz.StringFactory does not have a method addAll or putAll that accepts" + " an argument of type java.lang.String") .inFile(javaFileObject) .onLineContaining("StringFactory blimBuilder()"); } @Test public void autoValueBuilderPropertyBuilderWrongTypeAddAll() { JavaFileObject javaFileObject = 
JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "import com.google.common.collect.ImmutableSet;", "import java.util.Iterator;", "", "@AutoValue", "public abstract class Baz<T> {", " abstract ImmutableSet<String> strings();", " abstract Builder<T> toBuilder();", "", " public static class ImmutableSetBuilder<E> {", " public void addAll(Iterator<? extends E> elements) {}", "", " public ImmutableSet<E> build() {", " return null;", " }", " }", "", " @AutoValue.Builder", " public interface Builder<T> {", " ImmutableSetBuilder<String> stringsBuilder();", " Baz<T> build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining( "Property builder method returns foo.bar.Baz.ImmutableSetBuilder<java.lang.String> but" + " there is no way to make that type from" + " com.google.common.collect.ImmutableSet<java.lang.String>:" + " com.google.common.collect.ImmutableSet<java.lang.String> does not have a" + " non-static toBuilder() method that returns" + " foo.bar.Baz.ImmutableSetBuilder<java.lang.String>, and" + " foo.bar.Baz.ImmutableSetBuilder<java.lang.String> does not have a method" + " addAll or putAll that accepts an argument of type" + " com.google.common.collect.ImmutableSet<java.lang.String>") .inFile(javaFileObject) .onLineContaining("ImmutableSetBuilder<String> stringsBuilder();"); } @Test public void autoValueBuilderPropertyBuilderCantSet() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "import com.google.common.collect.ImmutableSet;", "", "@AutoValue", "public abstract class Baz<E> {", " abstract String blim();", "", " public static class StringFactory {", " public String build() {", " return null;", " }", " }", "", " @AutoValue.Builder", " public interface Builder<E> {", " Builder<E> setBlim(String s);", " StringFactory blimBuilder();", " Baz<E> build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining( "Property builder method returns foo.bar.Baz.StringFactory but there is no way to make " + "that type from java.lang.String: java.lang.String does not have a non-static " + "toBuilder() method that returns foo.bar.Baz.StringFactory") .inFile(javaFileObject) .onLineContaining("StringFactory blimBuilder()"); } @Test public void autoValueBuilderPropertyBuilderWrongTypeToBuilder() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "import com.google.common.collect.ImmutableSet;", "", "@AutoValue", "public abstract class Baz<E> {", " abstract Buh blim();", " abstract Builder<E> toBuilder();", "", " public static class Buh {", " StringBuilder toBuilder() {", " return null;", " }", " }", "", " public static class BuhBuilder {", " public Buh build() {", " return null;", " }", " }", "", " @AutoValue.Builder", " public interface Builder<E> {", " BuhBuilder blimBuilder();", " Baz<E> build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining( "Property builder method returns foo.bar.Baz.BuhBuilder but there is no way to make " + 
"that type from foo.bar.Baz.Buh: foo.bar.Baz.Buh does not have a non-static " + "toBuilder() method that returns foo.bar.Baz.BuhBuilder") .inFile(javaFileObject) .onLineContaining("BuhBuilder blimBuilder()"); } @Test public void autoValueBuilderPropertyBuilderWrongElementType() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "import com.google.common.collect.ImmutableSet;", "", "@AutoValue", "public abstract class Baz<T, U> {", " abstract ImmutableSet<T> blim();", "", " @AutoValue.Builder", " public interface Builder<T, U> {", " ImmutableSet.Builder<U> blimBuilder();", " Baz<T, U> build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining( "Property builder for blim has type com.google.common.collect.ImmutableSet.Builder " + "whose build() method returns com.google.common.collect.ImmutableSet<U> " + "instead of com.google.common.collect.ImmutableSet<T>") .inFile(javaFileObject) .onLineContaining("ImmutableSet.Builder<U> blimBuilder()"); } @Test public void autoValueBuilderAlienMethod0() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz {", " abstract String blam();", "", " @AutoValue.Builder", " public interface Builder {", " Builder blam(String x);", " Builder whut();", " Baz build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining( "Method without arguments should be a build method returning foo.bar.Baz, or a getter" + " method with the same name and type as a property method of foo.bar.Baz, or" + " fooBuilder() where foo() or getFoo() is a property method of foo.bar.Baz") .inFile(javaFileObject) .onLineContaining("Builder whut()"); } @Test public void autoValueBuilderAlienMethod1() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz {", " abstract String blam();", "", " @AutoValue.Builder", " public interface Builder {", " void whut(String x);", " Baz build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining("Method whut does not correspond to a property method of foo.bar.Baz") .inFile(javaFileObject) .onLineContaining("void whut(String x)"); } @Test public void autoValueBuilderAlienMethod2() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz {", " abstract String blam();", "", " @AutoValue.Builder", " public interface Builder {", " Builder blam(String x, String y);", " Baz build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining("Builder methods must have 0 or 1 parameters") .inFile(javaFileObject) .onLineContaining("Builder blam(String x, String y)"); } @Test public void 
autoValueBuilderMissingBuildMethod() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz<T> {", " abstract T blam();", "", " @AutoValue.Builder", " public interface Builder<T> {", " Builder<T> blam(T x);", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining( "Builder must have a single no-argument method, typically called build(), that returns" + " foo.bar.Baz<T>") .inFile(javaFileObject) .onLineContaining("public interface Builder<T>"); } @Test public void autoValueBuilderDuplicateBuildMethods() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz {", " abstract String blam();", "", " @AutoValue.Builder", " public interface Builder {", " Builder blam(String x);", " Baz build();", " Baz create();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining( "Builder must have a single no-argument method, typically called build(), that returns" + " foo.bar.Baz") .inFile(javaFileObject) .onLineContaining("Baz build()"); assertThat(compilation) .hadErrorContaining( "Builder must have a single no-argument method, typically called build(), that returns" + " foo.bar.Baz") .inFile(javaFileObject) .onLineContaining("Baz create()"); } @Test public void autoValueBuilderWrongTypeBuildMethod() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz {", " abstract String blam();", "", " @AutoValue.Builder", " public interface Builder {", " Builder blam(String x);", " String build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining( "Method without arguments should be a build method returning foo.bar.Baz") .inFile(javaFileObject) .onLineContaining("String build()"); } @Test public void autoValueBuilderTypeParametersDontMatch1() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz<T> {", " abstract String blam();", "", " @AutoValue.Builder", " public interface Builder {", " Builder blam(String x);", " Baz build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining( "Type parameters of foo.bar.Baz.Builder must have same names and " + "bounds as type parameters of foo.bar.Baz") .inFile(javaFileObject) .onLineContaining("public interface Builder"); } @Test public void autoValueBuilderTypeParametersDontMatch2() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz<T> {", " abstract T blam();", "", " @AutoValue.Builder", " public interface Builder<E> {", " Builder<E> blam(E x);", 
" Baz build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining( "Type parameters of foo.bar.Baz.Builder must have same names and " + "bounds as type parameters of foo.bar.Baz") .inFile(javaFileObject) .onLineContaining("public interface Builder<E>"); } @Test public void autoValueBuilderTypeParametersDontMatch3() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz<T extends Number & Comparable<T>> {", " abstract T blam();", "", " @AutoValue.Builder", " public interface Builder<T extends Number> {", " Builder<T> blam(T x);", " Baz build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining( "Type parameters of foo.bar.Baz.Builder must have same names and " + "bounds as type parameters of foo.bar.Baz") .inFile(javaFileObject) .onLineContaining("public interface Builder<T extends Number>"); } @Test public void autoValueBuilderToBuilderWrongTypeParameters() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "abstract class Baz<K extends Comparable<K>, V> {", " abstract K key();", " abstract V value();", " abstract Builder<V, K> toBuilder1();", "", " @AutoValue.Builder", " interface Builder<K extends Comparable<K>, V> {", " Builder<K, V> key(K key);", " Builder<K, V> value(V value);", " Baz<K, V> build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining("Builder converter method should return foo.bar.Baz.Builder<K, V>") .inFile(javaFileObject) .onLineContaining("abstract Builder<V, K> toBuilder1()"); } @Test public void autoValueBuilderToBuilderDuplicate() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "abstract class Baz<K extends Comparable<K>, V> {", " abstract K key();", " abstract V value();", " abstract Builder<K, V> toBuilder1();", " abstract Builder<K, V> toBuilder2();", "", " @AutoValue.Builder", " interface Builder<K extends Comparable<K>, V> {", " Builder<K, V> key(K key);", " Builder<K, V> value(V value);", " Baz<K, V> build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining("There can be at most one builder converter method") .inFile(javaFileObject) .onLineContaining("abstract Builder<K, V> toBuilder1()"); } @Test public void getFooIsFoo() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz {", " abstract int getFoo();", " abstract boolean isFoo();", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor()) .compile(javaFileObject); assertThat(compilation) .hadErrorContaining("More than one @AutoValue property called foo") .inFile(javaFileObject) 
.onLineContaining("getFoo"); assertThat(compilation) .hadErrorContaining("More than one @AutoValue property called foo") .inFile(javaFileObject) .onLineContaining("isFoo"); } @Retention(RetentionPolicy.SOURCE) public @interface Foo {} /* Processor that generates an empty class BarFoo every time it sees a class Bar annotated with * @Foo. */ public static class FooProcessor extends AbstractProcessor { @Override public Set<String> getSupportedAnnotationTypes() { return ImmutableSet.of(Foo.class.getCanonicalName()); } @Override public SourceVersion getSupportedSourceVersion() { return SourceVersion.latestSupported(); } @Override public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) { Set<? extends Element> elements = roundEnv.getElementsAnnotatedWith(Foo.class); for (TypeElement type : ElementFilter.typesIn(elements)) { try { generateFoo(type); } catch (IOException e) { throw new AssertionError(e); } } return false; } private void generateFoo(TypeElement type) throws IOException { String pkg = TypeSimplifier.packageNameOf(type); String className = type.getSimpleName().toString(); String generatedClassName = className + "Foo"; JavaFileObject source = processingEnv.getFiler().createSourceFile(pkg + "." + generatedClassName, type); PrintWriter writer = new PrintWriter(source.openWriter()); writer.println("package " + pkg + ";"); writer.println("public class " + generatedClassName + " {}"); writer.close(); } } @Test public void referencingGeneratedClass() { // Test that ensures that a type that does not exist can be the type of an @AutoValue property // as long as it later does come into existence. The BarFoo type referenced here does not exist // when the AutoValueProcessor runs on the first round, but the FooProcessor then generates it. // That generation provokes a further round of annotation processing and AutoValueProcessor // should succeed then. JavaFileObject bazFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz {", " public abstract BarFoo barFoo();", "", " public static Baz create(BarFoo barFoo) {", " return new AutoValue_Baz(barFoo);", " }", "}"); JavaFileObject barFileObject = JavaFileObjects.forSourceLines( "foo.bar.Bar", "package foo.bar;", "", "@" + Foo.class.getCanonicalName(), "public abstract class Bar {", " public abstract BarFoo barFoo();", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new FooProcessor()) .withOptions("-Xlint:-processing", "-implicit:none") .compile(bazFileObject, barFileObject); assertThat(compilation).succeededWithoutWarnings(); } @Test public void referencingGeneratedClassInAnnotation() { // Test that ensures that a type that does not exist can be referenced by a copied annotation // as long as it later does come into existence. The BarFoo type referenced here does not exist // when the AutoValueProcessor runs on the first round, but the FooProcessor then generates it. // That generation provokes a further round of annotation processing and AutoValueProcessor // should succeed then. // We test the three places that a class reference could appear: as the value of a Class // element, as the value of a Class[] element, in a nested annotation. 
JavaFileObject barFileObject = JavaFileObjects.forSourceLines( "foo.bar.Bar", "package foo.bar;", "", "@" + Foo.class.getCanonicalName(), "public abstract class Bar {", "}"); JavaFileObject referenceClassFileObject = JavaFileObjects.forSourceLines( "foo.bar.ReferenceClass", "package foo.bar;", "", "@interface ReferenceClass {", " Class<?> value() default Void.class;", " Class<?>[] values() default {};", " Nested nested() default @Nested;", " @interface Nested {", " Class<?>[] values() default {};", " }", "}"); ImmutableList<String> annotations = ImmutableList.of( "@ReferenceClass(BarFoo.class)", "@ReferenceClass(values = {Void.class, BarFoo.class})", "@ReferenceClass(nested = @ReferenceClass.Nested(values = {Void.class, BarFoo.class}))"); for (String annotation : annotations) { JavaFileObject bazFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "@AutoValue.CopyAnnotations", annotation, "public abstract class Baz {", " public abstract int foo();", "", " public static Baz create(int foo) {", " return new AutoValue_Baz(foo);", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor(), new FooProcessor()) .withOptions("-Xlint:-processing", "-implicit:none") .compile(bazFileObject, barFileObject, referenceClassFileObject); expect.about(compilations()).that(compilation).succeededWithoutWarnings(); if (compilation.status().equals(Compilation.Status.SUCCESS)) { expect.about(compilations()).that(compilation) .generatedSourceFile("foo.bar.AutoValue_Baz") .contentsAsUtf8String() .contains(annotation); } } } @Test public void annotationReferencesUndefined() { // Test that we don't throw an exception if asked to compile @SuppressWarnings(UNDEFINED) // where UNDEFINED is an undefined symbol. JavaFileObject bazFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz {", " @SuppressWarnings(UNDEFINED)", " public abstract int[] buh();", "}"); Compilation compilation1 = javac() .withOptions("-Xlint:-processing") .withProcessors(new AutoValueProcessor()) .compile(bazFileObject); assertThat(compilation1).hadErrorCount(1); assertThat(compilation1) .hadErrorContaining("UNDEFINED") .inFile(bazFileObject) .onLineContaining("UNDEFINED"); assertThat(compilation1).hadWarningCount(1); assertThat(compilation1) .hadWarningContaining("mutable") .inFile(bazFileObject) .onLineContaining("public abstract int[] buh()"); // Same test, except we do successfully suppress the warning despite the UNDEFINED. 
bazFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz {", " @SuppressWarnings({UNDEFINED, \"mutable\"})", " public abstract int[] buh();", "}"); Compilation compilation2 = javac() .withOptions("-Xlint:-processing") .withProcessors(new AutoValueProcessor()) .compile(bazFileObject); assertThat(compilation2).hadErrorCount(1); assertThat(compilation2) .hadErrorContaining("UNDEFINED") .inFile(bazFileObject) .onLineContaining("UNDEFINED"); assertThat(compilation2).hadWarningCount(0); } @Test public void packagePrivateAnnotationFromOtherPackage() { JavaFileObject bazFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz extends otherpackage.Parent {", "}"); JavaFileObject parentFileObject = JavaFileObjects.forSourceLines( "otherpackage.Parent", "package otherpackage;", "", "public abstract class Parent {", " @PackageAnnotation", " public abstract String foo();", "", " @interface PackageAnnotation {}", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor()) .withOptions("-Xlint:-processing", "-implicit:none") .compile(bazFileObject, parentFileObject); assertThat(compilation).succeededWithoutWarnings(); assertThat(compilation).generatedSourceFile("foo.bar.AutoValue_Baz"); } @Test public void visibleProtectedAnnotationFromOtherPackage() { JavaFileObject bazFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz extends otherpackage.Parent {}"); JavaFileObject parentFileObject = JavaFileObjects.forSourceLines( "otherpackage.Parent", "package otherpackage;", "", "public abstract class Parent {", " @ProtectedAnnotation", " public abstract String foo();", "", " protected @interface ProtectedAnnotation {}", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor()) .withOptions("-Xlint:-processing", "-implicit:none") .compile(bazFileObject, parentFileObject); assertThat(compilation).succeededWithoutWarnings(); assertThat(compilation) .generatedSourceFile("foo.bar.AutoValue_Baz") .contentsAsUtf8String() .containsMatch("(?s:@Parent.ProtectedAnnotation\\s*@Override\\s*public String foo\\(\\))"); } @Test public void methodAnnotationsCopiedInLexicographicalOrder() { JavaFileObject bazFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "import com.package1.Annotation1;", "import com.package2.Annotation0;", "", "@AutoValue", "public abstract class Baz extends Parent {", " @Annotation0", " @Annotation1", " @Override", " public abstract String foo();", "}"); JavaFileObject parentFileObject = JavaFileObjects.forSourceLines( "foo.bar.Parent", "package foo.bar;", "", "public abstract class Parent {", " public abstract String foo();", "}"); JavaFileObject annotation1FileObject = JavaFileObjects.forSourceLines( "com.package1.Annotation1", "package com.package1;", "", "import java.lang.annotation.ElementType;", "import java.lang.annotation.Target;", "", "@Target({ElementType.FIELD, ElementType.METHOD})", "public @interface Annotation1 {}"); JavaFileObject annotation0FileObject = JavaFileObjects.forSourceLines( "com.package2.Annotation0", "package com.package2;", "", "public @interface Annotation0 {}"); Compilation compilation = javac() 
.withProcessors(new AutoValueProcessor()) .withOptions("-Xlint:-processing", "-implicit:none") .compile(bazFileObject, parentFileObject, annotation1FileObject, annotation0FileObject); assertThat(compilation).succeededWithoutWarnings(); assertThat(compilation) .generatedSourceFile("foo.bar.AutoValue_Baz") .contentsAsUtf8String() .containsMatch( "(?s:@Annotation1\\s+@Annotation0\\s+@Override\\s+public String foo\\(\\))"); // @Annotation1 precedes @Annotation0 because // @com.package1.Annotation1 precedes @com.package2.Annotation0 } @Test public void nonVisibleProtectedAnnotationFromOtherPackage() { JavaFileObject bazFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Baz extends otherpackage.Parent {", "}"); JavaFileObject parentFileObject = JavaFileObjects.forSourceLines( "otherpackage.Parent", "package otherpackage;", "", "import otherpackage.Annotations.ProtectedAnnotation;", "", "public abstract class Parent {", " @ProtectedAnnotation", " public abstract String foo();", "}"); JavaFileObject annotationsFileObject = JavaFileObjects.forSourceLines( "otherpackage.Annotations", "package otherpackage;", "", "public class Annotations {", " protected @interface ProtectedAnnotation {}", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor()) .withOptions("-Xlint:-processing", "-implicit:none") .compile(bazFileObject, parentFileObject, annotationsFileObject); assertThat(compilation).succeededWithoutWarnings(); assertThat(compilation) .generatedSourceFile("foo.bar.AutoValue_Baz") .contentsAsUtf8String() .doesNotContain("ProtectedAnnotation"); } @Test public void nonVisibleProtectedClassAnnotationFromOtherPackage() { JavaFileObject bazFileObject = JavaFileObjects.forSourceLines( "foo.bar.Outer", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "class Outer extends otherpackage.Parent {", " @AutoValue", " @AutoValue.CopyAnnotations", " @ProtectedAnnotation", " abstract static class Inner {", " abstract String foo();", " }", "}"); JavaFileObject parentFileObject = JavaFileObjects.forSourceLines( "otherpackage.Parent", "package otherpackage;", "", "public abstract class Parent {", " protected @interface ProtectedAnnotation {}", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor()) .withOptions("-Xlint:-processing", "-implicit:none") .compile(bazFileObject, parentFileObject); assertThat(compilation).succeededWithoutWarnings(); assertThat(compilation) .generatedSourceFile("foo.bar.AutoValue_Outer_Inner") .contentsAsUtf8String() .doesNotContain("ProtectedAnnotation"); } @Test public void builderWithVarArgsDoesNotImportJavaUtilArrays() { // Repro from https://github.com/google/auto/issues/373. JavaFileObject testFileObject = JavaFileObjects.forSourceLines( "foo.bar.Test", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "import com.google.common.collect.ImmutableList;", "", "@AutoValue", "public abstract class Test {", " abstract ImmutableList<String> foo();", "", " @AutoValue.Builder", " abstract static class Builder {", " abstract Builder foo(String...
foos);", " abstract Test build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor()) .withOptions("-Xlint:-processing", "-implicit:none") .compile(testFileObject); assertThat(compilation).succeededWithoutWarnings(); assertThat(compilation) .generatedSourceFile("foo.bar.AutoValue_Test") .contentsAsUtf8String() .doesNotContain("java.util.Arrays"); } @Test public void staticBuilderMethodInBuilderClass() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "com.example.Foo", "package com.example;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Foo {", " public abstract String bar();", "", " @AutoValue.Builder", " public abstract static class Builder {", " public static Builder builder() {", " return new AutoValue_Foo.Builder();", " }", "", " public abstract Builder setBar(String s);", " public abstract Foo build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor()) .withOptions("-Xlint:-processing", "-implicit:none") .compile(javaFileObject); assertThat(compilation).succeeded(); assertThat(compilation) .hadWarningContaining("Static builder() method should be in the containing class") .inFile(javaFileObject) .onLineContaining("builder()"); } /** * Tests behaviour when the package containing an {@code @AutoValue} class also has classes with * the same name as classes in {@code java.lang}. If you call a class {@code Object} you are * asking for trouble, but you could innocently call a class {@code Compiler} without realizing * there is a {@code java.lang.Compiler}. * * <p>The case where the class in question is mentioned in the {@code @AutoValue} class is the * easy one, because then our logic can easily see that there is a clash and will use * fully-qualified names. This is the case of the {@code Compiler} class below. The case where the * class is <i>not</i> mentioned is harder. We have to realize that we can't elide the package * name in {@code java.lang.Object} because there is also a {@code foo.bar.Object} in scope, and * in fact it takes precedence. 
*/ @Test public void javaLangClash() { JavaFileObject object = JavaFileObjects.forSourceLines( "foo.bar.Object", // "package foo.bar;", "", "public class Object {}"); JavaFileObject string = JavaFileObjects.forSourceLines( "foo.bar.String", // "package foo.bar;", "", "public class String {}"); JavaFileObject integer = JavaFileObjects.forSourceLines( "foo.bar.Integer", // "package foo.bar;", "", "public class Integer {}"); JavaFileObject thread = JavaFileObjects.forSourceLines( "foo.bar.Thread", // "package foo.bar;", "", "public class Thread {}"); JavaFileObject override = JavaFileObjects.forSourceLines( "foo.bar.Override", // "package foo.bar;", "", "public class Override {}"); JavaFileObject test = JavaFileObjects.forSourceLines( "foo.bar.Test", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "", "@AutoValue", "public abstract class Test {", " public abstract java.lang.Integer integer();", " public abstract java.lang.Thread.State state();", " public static Builder builder() {", " return new AutoValue_Test.Builder();", " }", "", " @AutoValue.Builder", " public abstract static class Builder {", " public abstract Builder setInteger(java.lang.Integer x);", " public abstract Builder setState(java.lang.Thread.State x);", " public abstract Test build();", " }", "}"); Compilation compilation = javac() .withProcessors(new AutoValueProcessor()) .withOptions("-Xlint:-processing", "-implicit:none") .compile(object, string, integer, thread, override, test); assertThat(compilation).succeededWithoutWarnings(); } // This is a regression test for the problem described in // https://github.com/google/auto/issues/847#issuecomment-629857642. @Test public void generatedParentWithGeneratedGetterButSetterInBuilder() { JavaFileObject test = JavaFileObjects.forSourceLines( "foo.bar.Test", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "import foo.baz.GeneratedParent;", "import foo.baz.GeneratedPropertyType;", "import java.util.Optional;", "", "@AutoValue", "public abstract class Test extends GeneratedParent {", " public abstract String string();", "", " public static Builder builder() {", " return new AutoValue_Test.Builder();", " }", "", " @AutoValue.Builder", " public abstract static class Builder extends GeneratedParent.Builder<Builder> {", " public abstract Builder setString(String x);", " public abstract Builder setGenerated(GeneratedPropertyType x);", " public abstract Test build();", " }", "}"); AutoValueProcessor autoValueProcessor = new AutoValueProcessor(); GeneratedParentProcessor generatedParentProcessor = new GeneratedParentProcessor(autoValueProcessor, expect); Compilation compilation = javac() .withProcessors(autoValueProcessor, generatedParentProcessor) .withOptions("-Xlint:-processing", "-implicit:none") .compile(test); assertThat(compilation).succeededWithoutWarnings(); assertThat(compilation) .generatedSourceFile("foo.bar.AutoValue_Test") .contentsAsUtf8String() .contains(" public int integer() {"); } @SupportedAnnotationTypes("*") private static class GeneratedParentProcessor extends AbstractProcessor { private static final String GENERATED_PARENT = String.join( "\n", "package foo.baz;", "", "public abstract class GeneratedParent {", " public abstract int integer();", " public abstract GeneratedPropertyType generated();", "", " public abstract static class Builder<B extends Builder<B>> {", " public abstract B setInteger(int x);", " }", "}"); private static final String GENERATED_PROPERTY_TYPE = String.join( "\n", // "package foo.baz;", "", "public class 
GeneratedPropertyType {}"); private static final ImmutableMap<String, String> GENERATED_TYPES = ImmutableMap.of( "foo.baz.GeneratedParent", GENERATED_PARENT, "foo.baz.GeneratedPropertyType", GENERATED_PROPERTY_TYPE); private final AutoValueProcessor autoValueProcessor; private final Expect expect; GeneratedParentProcessor(AutoValueProcessor autoValueProcessor, Expect expect) { this.autoValueProcessor = autoValueProcessor; this.expect = expect; } private boolean generated; @Override public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) { if (!generated) { generated = true; // Check that AutoValueProcessor has already run and deferred the foo.bar.Test type because // we haven't generated its parent yet. expect.that(autoValueProcessor.deferredTypeNames()).contains("foo.bar.Test"); GENERATED_TYPES.forEach( (typeName, source) -> { try { JavaFileObject generated = processingEnv.getFiler().createSourceFile(typeName); try (Writer writer = generated.openWriter()) { writer.write(source); } } catch (IOException e) { throw new UncheckedIOException(e); } }); } return false; } @Override public SourceVersion getSupportedSourceVersion() { return SourceVersion.latestSupported(); } } // This is a regression test for the problem described in // https://github.com/google/auto/issues/1087. @Test public void kotlinMetadataAnnotationsAreImplicitlyExcludedFromCopying() { JavaFileObject metadata = JavaFileObjects.forSourceLines( "kotlin.Metadata", "package kotlin;", "", "public @interface Metadata {", "}"); JavaFileObject test = JavaFileObjects.forSourceLines( "foo.bar.Test", "package foo.bar;", "", "import com.google.auto.value.AutoValue;", "import kotlin.Metadata;", "", "@AutoValue.CopyAnnotations", "@Metadata", "@AutoValue", "public abstract class Test {", " public abstract String string();", "}"); AutoValueProcessor autoValueProcessor = new AutoValueProcessor(); Compilation compilation = javac() .withProcessors(autoValueProcessor) .withOptions("-Xlint:-processing", "-implicit:none") .compile(test, metadata); assertThat(compilation).succeededWithoutWarnings(); assertThat(compilation) .generatedSourceFile("foo.bar.AutoValue_Test") .contentsAsUtf8String() .doesNotContain("kotlin.Metadata"); } private String sorted(String... imports) { return Arrays.stream(imports).sorted().collect(joining("\n")); } }
google/auto
value/src/test/java/com/google/auto/value/processor/AutoValueCompilationTest.java
Java
apache-2.0
139,312
 // // Copyright 2011 Ekon Benefits // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. using System; using System.Collections.Generic; using System.Linq; using System.Runtime.CompilerServices; using System.Reflection; namespace Dynamitey.Internal.Optimization { internal static partial class InvokeHelper { internal static readonly Type[] FuncKinds; internal static readonly Type[] ActionKinds; internal static readonly Type[] TupleKinds; internal static readonly IDictionary<Type,int> FuncArgs; internal static readonly IDictionary<Type,int> ActionArgs; internal static readonly IDictionary<Type,int> TupleArgs; static InvokeHelper() { FuncKinds = new [] { typeof(Func<>), //0 typeof(Func<,>), //1 typeof(Func<,,>), //2 typeof(Func<,,,>), //3 typeof(Func<,,,,>), //4 typeof(Func<,,,,,>), //5 typeof(Func<,,,,,,>), //6 typeof(Func<,,,,,,,>), //7 typeof(Func<,,,,,,,,>), //8 typeof(Func<,,,,,,,,,>), //9 typeof(Func<,,,,,,,,,,>), //10 typeof(Func<,,,,,,,,,,,>), //11 typeof(Func<,,,,,,,,,,,,>), //12 typeof(Func<,,,,,,,,,,,,,>), //13 typeof(Func<,,,,,,,,,,,,,,>), //14 typeof(Func<,,,,,,,,,,,,,,,>), //15 typeof(Func<,,,,,,,,,,,,,,,,>), //16 }; ActionKinds = new [] { typeof(Action), //0 typeof(Action<>), //1 typeof(Action<,>), //2 typeof(Action<,,>), //3 typeof(Action<,,,>), //4 typeof(Action<,,,,>), //5 typeof(Action<,,,,,>), //6 typeof(Action<,,,,,,>), //7 typeof(Action<,,,,,,,>), //8 typeof(Action<,,,,,,,,>), //9 typeof(Action<,,,,,,,,,>), //10 typeof(Action<,,,,,,,,,,>), //11 typeof(Action<,,,,,,,,,,,>), //12 typeof(Action<,,,,,,,,,,,,>), //13 typeof(Action<,,,,,,,,,,,,,>), //14 typeof(Action<,,,,,,,,,,,,,,>), //15 typeof(Action<,,,,,,,,,,,,,,,>), //16 }; TupleKinds = new [] { typeof(Tuple<>), //1 typeof(Tuple<,>), //2 typeof(Tuple<,,>), //3 typeof(Tuple<,,,>), //4 typeof(Tuple<,,,,>), //5 typeof(Tuple<,,,,,>), //6 typeof(Tuple<,,,,,,>), //7 typeof(Tuple<,,,,,,,>), //8 }; FuncArgs = FuncKinds.Zip(Enumerable.Range(0, FuncKinds.Length), (key, value) => new { key, value }).ToDictionary(k => k.key, v => v.value); ActionArgs = ActionKinds.Zip(Enumerable.Range(0, ActionKinds.Length), (key, value) => new { key, value }).ToDictionary(k => k.key, v => v.value); TupleArgs = TupleKinds.Zip(Enumerable.Range(1, ActionKinds.Length), (key, value) => new { key, value }).ToDictionary(k => k.key, v => v.value); } internal static dynamic TupleItem(dynamic tuple, int index){ switch(index){ case 1: return tuple.Item1; case 2: return tuple.Item2; case 3: return tuple.Item3; case 4: return tuple.Item4; case 5: return tuple.Item5; case 6: return tuple.Item6; case 7: return tuple.Item7; default: return tuple.Rest; } } internal static void InvokeMemberAction(ref CallSite callsite, Type binderType, int knownType, LazyBinder binder, InvokeMemberName name, bool staticContext, Type context, string[] argNames, object target, params object [] args) { var tSwitch = args.Length; switch (tSwitch) { #region Optimizations case 0: { var tCallSite = (CallSite<Action<CallSite, object>>)callsite; if(tCallSite == null){ tCallSite = 
CreateCallSite<Action<CallSite, object>>(binderType,knownType, binder, name, context, argNames, staticContext); callsite=tCallSite; } tCallSite.Target(tCallSite, target); break; } case 1: { var tCallSite = (CallSite<Action<CallSite, object, object>>)callsite; if(tCallSite == null){ tCallSite = CreateCallSite<Action<CallSite, object, object>>(binderType,knownType, binder, name, context, argNames, staticContext); callsite=tCallSite; } tCallSite.Target(tCallSite, target, args[0]); break; } case 2: { var tCallSite = (CallSite<Action<CallSite, object, object, object>>)callsite; if(tCallSite == null){ tCallSite = CreateCallSite<Action<CallSite, object, object, object>>(binderType,knownType, binder, name, context, argNames, staticContext); callsite=tCallSite; } tCallSite.Target(tCallSite, target, args[0], args[1]); break; } case 3: { var tCallSite = (CallSite<Action<CallSite, object, object, object, object>>)callsite; if(tCallSite == null){ tCallSite = CreateCallSite<Action<CallSite, object, object, object, object>>(binderType,knownType, binder, name, context, argNames, staticContext); callsite=tCallSite; } tCallSite.Target(tCallSite, target, args[0], args[1], args[2]); break; } case 4: { var tCallSite = (CallSite<Action<CallSite, object, object, object, object, object>>)callsite; if(tCallSite == null){ tCallSite = CreateCallSite<Action<CallSite, object, object, object, object, object>>(binderType,knownType, binder, name, context, argNames, staticContext); callsite=tCallSite; } tCallSite.Target(tCallSite, target, args[0], args[1], args[2], args[3]); break; } case 5: { var tCallSite = (CallSite<Action<CallSite, object, object, object, object, object, object>>)callsite; if(tCallSite == null){ tCallSite = CreateCallSite<Action<CallSite, object, object, object, object, object, object>>(binderType,knownType, binder, name, context, argNames, staticContext); callsite=tCallSite; } tCallSite.Target(tCallSite, target, args[0], args[1], args[2], args[3], args[4]); break; } case 6: { var tCallSite = (CallSite<Action<CallSite, object, object, object, object, object, object, object>>)callsite; if(tCallSite == null){ tCallSite = CreateCallSite<Action<CallSite, object, object, object, object, object, object, object>>(binderType,knownType, binder, name, context, argNames, staticContext); callsite=tCallSite; } tCallSite.Target(tCallSite, target, args[0], args[1], args[2], args[3], args[4], args[5]); break; } case 7: { var tCallSite = (CallSite<Action<CallSite, object, object, object, object, object, object, object, object>>)callsite; if(tCallSite == null){ tCallSite = CreateCallSite<Action<CallSite, object, object, object, object, object, object, object, object>>(binderType,knownType, binder, name, context, argNames, staticContext); callsite=tCallSite; } tCallSite.Target(tCallSite, target, args[0], args[1], args[2], args[3], args[4], args[5], args[6]); break; } case 8: { var tCallSite = (CallSite<Action<CallSite, object, object, object, object, object, object, object, object, object>>)callsite; if(tCallSite == null){ tCallSite = CreateCallSite<Action<CallSite, object, object, object, object, object, object, object, object, object>>(binderType,knownType, binder, name, context, argNames, staticContext); callsite=tCallSite; } tCallSite.Target(tCallSite, target, args[0], args[1], args[2], args[3], args[4], args[5], args[6], args[7]); break; } case 9: { var tCallSite = (CallSite<Action<CallSite, object, object, object, object, object, object, object, object, object, object>>)callsite; if(tCallSite == null){ tCallSite = 
CreateCallSite<Action<CallSite, object, object, object, object, object, object, object, object, object, object>>(binderType,knownType, binder, name, context, argNames, staticContext); callsite=tCallSite; } tCallSite.Target(tCallSite, target, args[0], args[1], args[2], args[3], args[4], args[5], args[6], args[7], args[8]); break; } case 10: { var tCallSite = (CallSite<Action<CallSite, object, object, object, object, object, object, object, object, object, object, object>>)callsite; if(tCallSite == null){ tCallSite = CreateCallSite<Action<CallSite, object, object, object, object, object, object, object, object, object, object, object>>(binderType,knownType, binder, name, context, argNames, staticContext); callsite=tCallSite; } tCallSite.Target(tCallSite, target, args[0], args[1], args[2], args[3], args[4], args[5], args[6], args[7], args[8], args[9]); break; } case 11: { var tCallSite = (CallSite<Action<CallSite, object, object, object, object, object, object, object, object, object, object, object, object>>)callsite; if(tCallSite == null){ tCallSite = CreateCallSite<Action<CallSite, object, object, object, object, object, object, object, object, object, object, object, object>>(binderType,knownType, binder, name, context, argNames, staticContext); callsite=tCallSite; } tCallSite.Target(tCallSite, target, args[0], args[1], args[2], args[3], args[4], args[5], args[6], args[7], args[8], args[9], args[10]); break; } case 12: { var tCallSite = (CallSite<Action<CallSite, object, object, object, object, object, object, object, object, object, object, object, object, object>>)callsite; if(tCallSite == null){ tCallSite = CreateCallSite<Action<CallSite, object, object, object, object, object, object, object, object, object, object, object, object, object>>(binderType,knownType, binder, name, context, argNames, staticContext); callsite=tCallSite; } tCallSite.Target(tCallSite, target, args[0], args[1], args[2], args[3], args[4], args[5], args[6], args[7], args[8], args[9], args[10], args[11]); break; } case 13: { var tCallSite = (CallSite<Action<CallSite, object, object, object, object, object, object, object, object, object, object, object, object, object, object>>)callsite; if(tCallSite == null){ tCallSite = CreateCallSite<Action<CallSite, object, object, object, object, object, object, object, object, object, object, object, object, object, object>>(binderType,knownType, binder, name, context, argNames, staticContext); callsite=tCallSite; } tCallSite.Target(tCallSite, target, args[0], args[1], args[2], args[3], args[4], args[5], args[6], args[7], args[8], args[9], args[10], args[11], args[12]); break; } case 14: { var tCallSite = (CallSite<Action<CallSite, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object>>)callsite; if(tCallSite == null){ tCallSite = CreateCallSite<Action<CallSite, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object>>(binderType,knownType, binder, name, context, argNames, staticContext); callsite=tCallSite; } tCallSite.Target(tCallSite, target, args[0], args[1], args[2], args[3], args[4], args[5], args[6], args[7], args[8], args[9], args[10], args[11], args[12], args[13]); break; } #endregion default: var tArgTypes = Enumerable.Repeat(typeof(object), tSwitch); var tDelagateType = EmitCallSiteFuncType(tArgTypes, typeof(void)); Dynamic.InvokeCallSite(CreateCallSite(tDelagateType, binderType,knownType, binder, name, context, argNames), target, args); 
break; } } internal static TReturn InvokeMemberTargetType<TTarget,TReturn>( ref CallSite callsite, Type binderType, int knownType, LazyBinder binder, InvokeMemberName name, bool staticContext, Type context, string[] argNames, TTarget target, params object [] args) { var tSwitch = args.Length; switch (tSwitch) { #region Optimizations case 0: { var tCallSite = (CallSite<Func<CallSite, TTarget, TReturn>>)callsite; if(tCallSite==null){ tCallSite = CreateCallSite<Func<CallSite, TTarget, TReturn>>(binderType,knownType,binder, name, context, argNames, staticContext); callsite =tCallSite; } return tCallSite.Target(tCallSite, target); } case 1: { var tCallSite = (CallSite<Func<CallSite, TTarget, object,TReturn>>)callsite; if(tCallSite==null){ tCallSite = CreateCallSite<Func<CallSite, TTarget, object,TReturn>>(binderType,knownType,binder, name, context, argNames, staticContext); callsite =tCallSite; } return tCallSite.Target(tCallSite, target, args[0]); } case 2: { var tCallSite = (CallSite<Func<CallSite, TTarget, object, object,TReturn>>)callsite; if(tCallSite==null){ tCallSite = CreateCallSite<Func<CallSite, TTarget, object, object,TReturn>>(binderType,knownType,binder, name, context, argNames, staticContext); callsite =tCallSite; } return tCallSite.Target(tCallSite, target, args[0], args[1]); } case 3: { var tCallSite = (CallSite<Func<CallSite, TTarget, object, object, object,TReturn>>)callsite; if(tCallSite==null){ tCallSite = CreateCallSite<Func<CallSite, TTarget, object, object, object,TReturn>>(binderType,knownType,binder, name, context, argNames, staticContext); callsite =tCallSite; } return tCallSite.Target(tCallSite, target, args[0], args[1], args[2]); } case 4: { var tCallSite = (CallSite<Func<CallSite, TTarget, object, object, object, object,TReturn>>)callsite; if(tCallSite==null){ tCallSite = CreateCallSite<Func<CallSite, TTarget, object, object, object, object,TReturn>>(binderType,knownType,binder, name, context, argNames, staticContext); callsite =tCallSite; } return tCallSite.Target(tCallSite, target, args[0], args[1], args[2], args[3]); } case 5: { var tCallSite = (CallSite<Func<CallSite, TTarget, object, object, object, object, object,TReturn>>)callsite; if(tCallSite==null){ tCallSite = CreateCallSite<Func<CallSite, TTarget, object, object, object, object, object,TReturn>>(binderType,knownType,binder, name, context, argNames, staticContext); callsite =tCallSite; } return tCallSite.Target(tCallSite, target, args[0], args[1], args[2], args[3], args[4]); } case 6: { var tCallSite = (CallSite<Func<CallSite, TTarget, object, object, object, object, object, object,TReturn>>)callsite; if(tCallSite==null){ tCallSite = CreateCallSite<Func<CallSite, TTarget, object, object, object, object, object, object,TReturn>>(binderType,knownType,binder, name, context, argNames, staticContext); callsite =tCallSite; } return tCallSite.Target(tCallSite, target, args[0], args[1], args[2], args[3], args[4], args[5]); } case 7: { var tCallSite = (CallSite<Func<CallSite, TTarget, object, object, object, object, object, object, object,TReturn>>)callsite; if(tCallSite==null){ tCallSite = CreateCallSite<Func<CallSite, TTarget, object, object, object, object, object, object, object,TReturn>>(binderType,knownType,binder, name, context, argNames, staticContext); callsite =tCallSite; } return tCallSite.Target(tCallSite, target, args[0], args[1], args[2], args[3], args[4], args[5], args[6]); } case 8: { var tCallSite = (CallSite<Func<CallSite, TTarget, object, object, object, object, object, object, object, 
object,TReturn>>)callsite; if(tCallSite==null){ tCallSite = CreateCallSite<Func<CallSite, TTarget, object, object, object, object, object, object, object, object,TReturn>>(binderType,knownType,binder, name, context, argNames, staticContext); callsite =tCallSite; } return tCallSite.Target(tCallSite, target, args[0], args[1], args[2], args[3], args[4], args[5], args[6], args[7]); } case 9: { var tCallSite = (CallSite<Func<CallSite, TTarget, object, object, object, object, object, object, object, object, object,TReturn>>)callsite; if(tCallSite==null){ tCallSite = CreateCallSite<Func<CallSite, TTarget, object, object, object, object, object, object, object, object, object,TReturn>>(binderType,knownType,binder, name, context, argNames, staticContext); callsite =tCallSite; } return tCallSite.Target(tCallSite, target, args[0], args[1], args[2], args[3], args[4], args[5], args[6], args[7], args[8]); } case 10: { var tCallSite = (CallSite<Func<CallSite, TTarget, object, object, object, object, object, object, object, object, object, object,TReturn>>)callsite; if(tCallSite==null){ tCallSite = CreateCallSite<Func<CallSite, TTarget, object, object, object, object, object, object, object, object, object, object,TReturn>>(binderType,knownType,binder, name, context, argNames, staticContext); callsite =tCallSite; } return tCallSite.Target(tCallSite, target, args[0], args[1], args[2], args[3], args[4], args[5], args[6], args[7], args[8], args[9]); } case 11: { var tCallSite = (CallSite<Func<CallSite, TTarget, object, object, object, object, object, object, object, object, object, object, object,TReturn>>)callsite; if(tCallSite==null){ tCallSite = CreateCallSite<Func<CallSite, TTarget, object, object, object, object, object, object, object, object, object, object, object,TReturn>>(binderType,knownType,binder, name, context, argNames, staticContext); callsite =tCallSite; } return tCallSite.Target(tCallSite, target, args[0], args[1], args[2], args[3], args[4], args[5], args[6], args[7], args[8], args[9], args[10]); } case 12: { var tCallSite = (CallSite<Func<CallSite, TTarget, object, object, object, object, object, object, object, object, object, object, object, object,TReturn>>)callsite; if(tCallSite==null){ tCallSite = CreateCallSite<Func<CallSite, TTarget, object, object, object, object, object, object, object, object, object, object, object, object,TReturn>>(binderType,knownType,binder, name, context, argNames, staticContext); callsite =tCallSite; } return tCallSite.Target(tCallSite, target, args[0], args[1], args[2], args[3], args[4], args[5], args[6], args[7], args[8], args[9], args[10], args[11]); } case 13: { var tCallSite = (CallSite<Func<CallSite, TTarget, object, object, object, object, object, object, object, object, object, object, object, object, object,TReturn>>)callsite; if(tCallSite==null){ tCallSite = CreateCallSite<Func<CallSite, TTarget, object, object, object, object, object, object, object, object, object, object, object, object, object,TReturn>>(binderType,knownType,binder, name, context, argNames, staticContext); callsite =tCallSite; } return tCallSite.Target(tCallSite, target, args[0], args[1], args[2], args[3], args[4], args[5], args[6], args[7], args[8], args[9], args[10], args[11], args[12]); } case 14: { var tCallSite = (CallSite<Func<CallSite, TTarget, object, object, object, object, object, object, object, object, object, object, object, object, object, object,TReturn>>)callsite; if(tCallSite==null){ tCallSite = CreateCallSite<Func<CallSite, TTarget, object, object, object, 
object, object, object, object, object, object, object, object, object, object, object,TReturn>>(binderType,knownType,binder, name, context, argNames, staticContext); callsite =tCallSite; } return tCallSite.Target(tCallSite, target, args[0], args[1], args[2], args[3], args[4], args[5], args[6], args[7], args[8], args[9], args[10], args[11], args[12], args[13]); } #endregion default: var tArgTypes = Enumerable.Repeat(typeof(object), tSwitch); var tDelagateType = EmitCallSiteFuncType(tArgTypes, typeof(TTarget)); return Dynamic.InvokeCallSite(CreateCallSite(tDelagateType, binderType,knownType, binder, name, context, argNames), target, args); } } #if !__MonoCS__ internal static Delegate WrapFuncHelper<TReturn>(dynamic invokable, int length) { switch(length){ #region Optimizations case 0: return new Func< TReturn>(()=> invokable()); case 1: return new Func< object, TReturn>((a1)=> invokable(a1)); case 2: return new Func< object, object, TReturn>((a1,a2)=> invokable(a1,a2)); case 3: return new Func< object, object, object, TReturn>((a1,a2,a3)=> invokable(a1,a2,a3)); case 4: return new Func< object, object, object, object, TReturn>((a1,a2,a3,a4)=> invokable(a1,a2,a3,a4)); case 5: return new Func< object, object, object, object, object, TReturn>((a1,a2,a3,a4,a5)=> invokable(a1,a2,a3,a4,a5)); case 6: return new Func< object, object, object, object, object, object, TReturn>((a1,a2,a3,a4,a5,a6)=> invokable(a1,a2,a3,a4,a5,a6)); case 7: return new Func< object, object, object, object, object, object, object, TReturn>((a1,a2,a3,a4,a5,a6,a7)=> invokable(a1,a2,a3,a4,a5,a6,a7)); case 8: return new Func< object, object, object, object, object, object, object, object, TReturn>((a1,a2,a3,a4,a5,a6,a7,a8)=> invokable(a1,a2,a3,a4,a5,a6,a7,a8)); case 9: return new Func< object, object, object, object, object, object, object, object, object, TReturn>((a1,a2,a3,a4,a5,a6,a7,a8,a9)=> invokable(a1,a2,a3,a4,a5,a6,a7,a8,a9)); case 10: return new Func< object, object, object, object, object, object, object, object, object, object, TReturn>((a1,a2,a3,a4,a5,a6,a7,a8,a9,a10)=> invokable(a1,a2,a3,a4,a5,a6,a7,a8,a9,a10)); case 11: return new Func< object, object, object, object, object, object, object, object, object, object, object, TReturn>((a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11)=> invokable(a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11)); case 12: return new Func< object, object, object, object, object, object, object, object, object, object, object, object, TReturn>((a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12)=> invokable(a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12)); case 13: return new Func< object, object, object, object, object, object, object, object, object, object, object, object, object, TReturn>((a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12,a13)=> invokable(a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12,a13)); case 14: return new Func< object, object, object, object, object, object, object, object, object, object, object, object, object, object, TReturn>((a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12,a13,a14)=> invokable(a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12,a13,a14)); case 15: return new Func< object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, TReturn>((a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12,a13,a14,a15)=> invokable(a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12,a13,a14,a15)); case 16: return new Func< object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, TReturn>((a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12,a13,a14,a15,a16)=> 
invokable(a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12,a13,a14,a15,a16)); #endregion default: return new DynamicFunc<TReturn>(args=>(TReturn)Dynamic.Invoke((object)invokable,args)); } } #endif internal static class MonoConvertCallSite<T>{ internal static CallSite CallSite; } internal static Delegate WrapFuncHelperMono<TReturn>(dynamic invokable, int length) { switch(length){ #region Optimizations case 0: return new Func< TReturn>(()=>{ object tResult= invokable(); return (TReturn) InvokeConvertCallSite(tResult, true, typeof(TReturn), typeof(object), ref MonoConvertCallSite<TReturn>.CallSite); }); case 1: return new Func< object, TReturn>((a1)=>{ object tResult= invokable(a1); return (TReturn) InvokeConvertCallSite(tResult, true, typeof(TReturn), typeof(object), ref MonoConvertCallSite<TReturn>.CallSite); }); case 2: return new Func< object, object, TReturn>((a1,a2)=>{ object tResult= invokable(a1,a2); return (TReturn) InvokeConvertCallSite(tResult, true, typeof(TReturn), typeof(object), ref MonoConvertCallSite<TReturn>.CallSite); }); case 3: return new Func< object, object, object, TReturn>((a1,a2,a3)=>{ object tResult= invokable(a1,a2,a3); return (TReturn) InvokeConvertCallSite(tResult, true, typeof(TReturn), typeof(object), ref MonoConvertCallSite<TReturn>.CallSite); }); case 4: return new Func< object, object, object, object, TReturn>((a1,a2,a3,a4)=>{ object tResult= invokable(a1,a2,a3,a4); return (TReturn) InvokeConvertCallSite(tResult, true, typeof(TReturn), typeof(object), ref MonoConvertCallSite<TReturn>.CallSite); }); case 5: return new Func< object, object, object, object, object, TReturn>((a1,a2,a3,a4,a5)=>{ object tResult= invokable(a1,a2,a3,a4,a5); return (TReturn) InvokeConvertCallSite(tResult, true, typeof(TReturn), typeof(object), ref MonoConvertCallSite<TReturn>.CallSite); }); case 6: return new Func< object, object, object, object, object, object, TReturn>((a1,a2,a3,a4,a5,a6)=>{ object tResult= invokable(a1,a2,a3,a4,a5,a6); return (TReturn) InvokeConvertCallSite(tResult, true, typeof(TReturn), typeof(object), ref MonoConvertCallSite<TReturn>.CallSite); }); case 7: return new Func< object, object, object, object, object, object, object, TReturn>((a1,a2,a3,a4,a5,a6,a7)=>{ object tResult= invokable(a1,a2,a3,a4,a5,a6,a7); return (TReturn) InvokeConvertCallSite(tResult, true, typeof(TReturn), typeof(object), ref MonoConvertCallSite<TReturn>.CallSite); }); case 8: return new Func< object, object, object, object, object, object, object, object, TReturn>((a1,a2,a3,a4,a5,a6,a7,a8)=>{ object tResult= invokable(a1,a2,a3,a4,a5,a6,a7,a8); return (TReturn) InvokeConvertCallSite(tResult, true, typeof(TReturn), typeof(object), ref MonoConvertCallSite<TReturn>.CallSite); }); case 9: return new Func< object, object, object, object, object, object, object, object, object, TReturn>((a1,a2,a3,a4,a5,a6,a7,a8,a9)=>{ object tResult= invokable(a1,a2,a3,a4,a5,a6,a7,a8,a9); return (TReturn) InvokeConvertCallSite(tResult, true, typeof(TReturn), typeof(object), ref MonoConvertCallSite<TReturn>.CallSite); }); case 10: return new Func< object, object, object, object, object, object, object, object, object, object, TReturn>((a1,a2,a3,a4,a5,a6,a7,a8,a9,a10)=>{ object tResult= invokable(a1,a2,a3,a4,a5,a6,a7,a8,a9,a10); return (TReturn) InvokeConvertCallSite(tResult, true, typeof(TReturn), typeof(object), ref MonoConvertCallSite<TReturn>.CallSite); }); case 11: return new Func< object, object, object, object, object, object, object, object, object, object, object, TReturn>((a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11)=>{ 
object tResult= invokable(a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11); return (TReturn) InvokeConvertCallSite(tResult, true, typeof(TReturn), typeof(object), ref MonoConvertCallSite<TReturn>.CallSite); }); case 12: return new Func< object, object, object, object, object, object, object, object, object, object, object, object, TReturn>((a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12)=>{ object tResult= invokable(a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12); return (TReturn) InvokeConvertCallSite(tResult, true, typeof(TReturn), typeof(object), ref MonoConvertCallSite<TReturn>.CallSite); }); case 13: return new Func< object, object, object, object, object, object, object, object, object, object, object, object, object, TReturn>((a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12,a13)=>{ object tResult= invokable(a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12,a13); return (TReturn) InvokeConvertCallSite(tResult, true, typeof(TReturn), typeof(object), ref MonoConvertCallSite<TReturn>.CallSite); }); case 14: return new Func< object, object, object, object, object, object, object, object, object, object, object, object, object, object, TReturn>((a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12,a13,a14)=>{ object tResult= invokable(a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12,a13,a14); return (TReturn) InvokeConvertCallSite(tResult, true, typeof(TReturn), typeof(object), ref MonoConvertCallSite<TReturn>.CallSite); }); case 15: return new Func< object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, TReturn>((a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12,a13,a14,a15)=>{ object tResult= invokable(a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12,a13,a14,a15); return (TReturn) InvokeConvertCallSite(tResult, true, typeof(TReturn), typeof(object), ref MonoConvertCallSite<TReturn>.CallSite); }); case 16: return new Func< object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, TReturn>((a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12,a13,a14,a15,a16)=>{ object tResult= invokable(a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12,a13,a14,a15,a16); return (TReturn) InvokeConvertCallSite(tResult, true, typeof(TReturn), typeof(object), ref MonoConvertCallSite<TReturn>.CallSite); }); #endregion default: return new DynamicFunc<TReturn>(args=>{ object tResult= Dynamic.Invoke((object)invokable,args); return (TReturn) InvokeConvertCallSite(tResult, true, typeof(TReturn), typeof(object), ref MonoConvertCallSite<TReturn>.CallSite); }); } } internal static Delegate WrapAction(dynamic invokable, int length) { switch(length){ #region Optimizations case 0: return new Action(()=>invokable()); case 1: return new Action< object>((a1)=> invokable(a1)); case 2: return new Action< object, object>((a1,a2)=> invokable(a1,a2)); case 3: return new Action< object, object, object>((a1,a2,a3)=> invokable(a1,a2,a3)); case 4: return new Action< object, object, object, object>((a1,a2,a3,a4)=> invokable(a1,a2,a3,a4)); case 5: return new Action< object, object, object, object, object>((a1,a2,a3,a4,a5)=> invokable(a1,a2,a3,a4,a5)); case 6: return new Action< object, object, object, object, object, object>((a1,a2,a3,a4,a5,a6)=> invokable(a1,a2,a3,a4,a5,a6)); case 7: return new Action< object, object, object, object, object, object, object>((a1,a2,a3,a4,a5,a6,a7)=> invokable(a1,a2,a3,a4,a5,a6,a7)); case 8: return new Action< object, object, object, object, object, object, object, object>((a1,a2,a3,a4,a5,a6,a7,a8)=> invokable(a1,a2,a3,a4,a5,a6,a7,a8)); case 9: return new Action< object, object, 
object, object, object, object, object, object, object>((a1,a2,a3,a4,a5,a6,a7,a8,a9)=> invokable(a1,a2,a3,a4,a5,a6,a7,a8,a9)); case 10: return new Action< object, object, object, object, object, object, object, object, object, object>((a1,a2,a3,a4,a5,a6,a7,a8,a9,a10)=> invokable(a1,a2,a3,a4,a5,a6,a7,a8,a9,a10)); case 11: return new Action< object, object, object, object, object, object, object, object, object, object, object>((a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11)=> invokable(a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11)); case 12: return new Action< object, object, object, object, object, object, object, object, object, object, object, object>((a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12)=> invokable(a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12)); case 13: return new Action< object, object, object, object, object, object, object, object, object, object, object, object, object>((a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12,a13)=> invokable(a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12,a13)); case 14: return new Action< object, object, object, object, object, object, object, object, object, object, object, object, object, object>((a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12,a13,a14)=> invokable(a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12,a13,a14)); case 15: return new Action< object, object, object, object, object, object, object, object, object, object, object, object, object, object, object>((a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12,a13,a14,a15)=> invokable(a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12,a13,a14,a15)); case 16: return new Action< object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object>((a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12,a13,a14,a15,a16)=> invokable(a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12,a13,a14,a15,a16)); #endregion default: return new DynamicAction(args=>Dynamic.InvokeAction((object)invokable,args)); } } internal static object FastDynamicInvokeReturn(Delegate del, dynamic [] args) { dynamic tDel =del; switch(args.Length){ default: try { return del.DynamicInvoke(args); } catch (TargetInvocationException ex) { throw ex.InnerException; } #region Optimization case 1: return tDel(args[0]); case 2: return tDel(args[0],args[1]); case 3: return tDel(args[0],args[1],args[2]); case 4: return tDel(args[0],args[1],args[2],args[3]); case 5: return tDel(args[0],args[1],args[2],args[3],args[4]); case 6: return tDel(args[0],args[1],args[2],args[3],args[4],args[5]); case 7: return tDel(args[0],args[1],args[2],args[3],args[4],args[5],args[6]); case 8: return tDel(args[0],args[1],args[2],args[3],args[4],args[5],args[6],args[7]); case 9: return tDel(args[0],args[1],args[2],args[3],args[4],args[5],args[6],args[7],args[8]); case 10: return tDel(args[0],args[1],args[2],args[3],args[4],args[5],args[6],args[7],args[8],args[9]); case 11: return tDel(args[0],args[1],args[2],args[3],args[4],args[5],args[6],args[7],args[8],args[9],args[10]); case 12: return tDel(args[0],args[1],args[2],args[3],args[4],args[5],args[6],args[7],args[8],args[9],args[10],args[11]); case 13: return tDel(args[0],args[1],args[2],args[3],args[4],args[5],args[6],args[7],args[8],args[9],args[10],args[11],args[12]); case 14: return tDel(args[0],args[1],args[2],args[3],args[4],args[5],args[6],args[7],args[8],args[9],args[10],args[11],args[12],args[13]); case 15: return tDel(args[0],args[1],args[2],args[3],args[4],args[5],args[6],args[7],args[8],args[9],args[10],args[11],args[12],args[13],args[14]); case 16: return 
tDel(args[0],args[1],args[2],args[3],args[4],args[5],args[6],args[7],args[8],args[9],args[10],args[11],args[12],args[13],args[14],args[15]); #endregion } } internal static void FastDynamicInvokeAction(Delegate del, params dynamic [] args) { dynamic tDel =del; switch(args.Length){ default: try { del.DynamicInvoke(args); } catch (TargetInvocationException ex) { throw ex.InnerException; } return; #region Optimization case 1: tDel(args[0]); return; case 2: tDel(args[0],args[1]); return; case 3: tDel(args[0],args[1],args[2]); return; case 4: tDel(args[0],args[1],args[2],args[3]); return; case 5: tDel(args[0],args[1],args[2],args[3],args[4]); return; case 6: tDel(args[0],args[1],args[2],args[3],args[4],args[5]); return; case 7: tDel(args[0],args[1],args[2],args[3],args[4],args[5],args[6]); return; case 8: tDel(args[0],args[1],args[2],args[3],args[4],args[5],args[6],args[7]); return; case 9: tDel(args[0],args[1],args[2],args[3],args[4],args[5],args[6],args[7],args[8]); return; case 10: tDel(args[0],args[1],args[2],args[3],args[4],args[5],args[6],args[7],args[8],args[9]); return; case 11: tDel(args[0],args[1],args[2],args[3],args[4],args[5],args[6],args[7],args[8],args[9],args[10]); return; case 12: tDel(args[0],args[1],args[2],args[3],args[4],args[5],args[6],args[7],args[8],args[9],args[10],args[11]); return; case 13: tDel(args[0],args[1],args[2],args[3],args[4],args[5],args[6],args[7],args[8],args[9],args[10],args[11],args[12]); return; case 14: tDel(args[0],args[1],args[2],args[3],args[4],args[5],args[6],args[7],args[8],args[9],args[10],args[11],args[12],args[13]); return; case 15: tDel(args[0],args[1],args[2],args[3],args[4],args[5],args[6],args[7],args[8],args[9],args[10],args[11],args[12],args[13],args[14]); return; case 16: tDel(args[0],args[1],args[2],args[3],args[4],args[5],args[6],args[7],args[8],args[9],args[10],args[11],args[12],args[13],args[14],args[15]); return; #endregion } } } }
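// The arity-keyed switch ladders above exist so hot paths hit a typed,
// cached CallSite instead of Delegate.DynamicInvoke, which boxes and
// reflects on every call. A minimal sketch of that call-site caching
// idea using only public DLR APIs -- CallSiteCacheSketch and
// InvokeToString are illustrative names, not part of Dynamitey:
//
//   using System;
//   using System.Runtime.CompilerServices;
//   using Microsoft.CSharp.RuntimeBinder;
//   using Binder = Microsoft.CSharp.RuntimeBinder.Binder;
//
//   static class CallSiteCacheSketch
//   {
//       // Created once, then reused: the DLR stores its bound rules in
//       // the site, so repeated calls skip binding entirely.
//       private static CallSite<Func<CallSite, object, object>> _toStringSite;
//
//       public static object InvokeToString(object target)
//       {
//           if (_toStringSite == null)
//           {
//               _toStringSite = CallSite<Func<CallSite, object, object>>.Create(
//                   Binder.InvokeMember(
//                       CSharpBinderFlags.None, "ToString", null,
//                       typeof(CallSiteCacheSketch),
//                       new[] { CSharpArgumentInfo.Create(CSharpArgumentInfoFlags.None, null) }));
//           }
//           return _toStringSite.Target(_toStringSite, target);
//       }
//   }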
curit/dynamitey
Dynamitey/Internal/Optimization/InvokeHelper.cs
C#
apache-2.0
42,310
/* * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights * Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.kms.model; import java.io.Serializable; /** * */ public class ReEncryptResult implements Serializable, Cloneable { /** * <p> * The re-encrypted data. If you are using the CLI, the value is Base64 * encoded. Otherwise, it is not encoded. * </p> */ private java.nio.ByteBuffer ciphertextBlob; /** * <p> * Unique identifier of the key used to originally encrypt the data. * </p> */ private String sourceKeyId; /** * <p> * Unique identifier of the key used to re-encrypt the data. * </p> */ private String keyId; /** * <p> * The re-encrypted data. If you are using the CLI, the value is Base64 * encoded. Otherwise, it is not encoded. * </p> * <p> * AWS SDK for Java performs a Base64 encoding on this field before sending * this request to AWS service by default. Users of the SDK should not * perform Base64 encoding on this field. * </p> * <p> * Warning: ByteBuffers returned by the SDK are mutable. Changes to the * content or position of the byte buffer will be seen by all objects that * have a reference to this object. It is recommended to call * ByteBuffer.duplicate() or ByteBuffer.asReadOnlyBuffer() before using or * reading from the buffer. This behavior will be changed in a future major * version of the SDK. * </p> * * @param ciphertextBlob * The re-encrypted data. If you are using the CLI, the value is * Base64 encoded. Otherwise, it is not encoded. */ public void setCiphertextBlob(java.nio.ByteBuffer ciphertextBlob) { this.ciphertextBlob = ciphertextBlob; } /** * <p> * The re-encrypted data. If you are using the CLI, the value is Base64 * encoded. Otherwise, it is not encoded. * </p> * <p> * {@code ByteBuffer}s are stateful. Calling their {@code get} methods * changes their {@code position}. We recommend using * {@link java.nio.ByteBuffer#asReadOnlyBuffer()} to create a read-only view * of the buffer with an independent {@code position}, and calling * {@code get} methods on this rather than directly on the returned * {@code ByteBuffer}. Doing so will ensure that anyone else using the * {@code ByteBuffer} will not be affected by changes to the {@code position} * . * </p> * * @return The re-encrypted data. If you are using the CLI, the value is * Base64 encoded. Otherwise, it is not encoded. */ public java.nio.ByteBuffer getCiphertextBlob() { return this.ciphertextBlob; } /** * <p> * The re-encrypted data. If you are using the CLI, the value is Base64 * encoded. Otherwise, it is not encoded. * </p> * * @param ciphertextBlob * The re-encrypted data. If you are using the CLI, the value is * Base64 encoded. Otherwise, it is not encoded. * @return Returns a reference to this object so that method calls can be * chained together. */ public ReEncryptResult withCiphertextBlob(java.nio.ByteBuffer ciphertextBlob) { setCiphertextBlob(ciphertextBlob); return this; } /** * <p> * Unique identifier of the key used to originally encrypt the data. 
* </p> * * @param sourceKeyId * Unique identifier of the key used to originally encrypt the data. */ public void setSourceKeyId(String sourceKeyId) { this.sourceKeyId = sourceKeyId; } /** * <p> * Unique identifier of the key used to originally encrypt the data. * </p> * * @return Unique identifier of the key used to originally encrypt the data. */ public String getSourceKeyId() { return this.sourceKeyId; } /** * <p> * Unique identifier of the key used to originally encrypt the data. * </p> * * @param sourceKeyId * Unique identifier of the key used to originally encrypt the data. * @return Returns a reference to this object so that method calls can be * chained together. */ public ReEncryptResult withSourceKeyId(String sourceKeyId) { setSourceKeyId(sourceKeyId); return this; } /** * <p> * Unique identifier of the key used to re-encrypt the data. * </p> * * @param keyId * Unique identifier of the key used to re-encrypt the data. */ public void setKeyId(String keyId) { this.keyId = keyId; } /** * <p> * Unique identifier of the key used to re-encrypt the data. * </p> * * @return Unique identifier of the key used to re-encrypt the data. */ public String getKeyId() { return this.keyId; } /** * <p> * Unique identifier of the key used to re-encrypt the data. * </p> * * @param keyId * Unique identifier of the key used to re-encrypt the data. * @return Returns a reference to this object so that method calls can be * chained together. */ public ReEncryptResult withKeyId(String keyId) { setKeyId(keyId); return this; } /** * Returns a string representation of this object; useful for testing and * debugging. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getCiphertextBlob() != null) sb.append("CiphertextBlob: " + getCiphertextBlob() + ","); if (getSourceKeyId() != null) sb.append("SourceKeyId: " + getSourceKeyId() + ","); if (getKeyId() != null) sb.append("KeyId: " + getKeyId()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof ReEncryptResult == false) return false; ReEncryptResult other = (ReEncryptResult) obj; if (other.getCiphertextBlob() == null ^ this.getCiphertextBlob() == null) return false; if (other.getCiphertextBlob() != null && other.getCiphertextBlob().equals(this.getCiphertextBlob()) == false) return false; if (other.getSourceKeyId() == null ^ this.getSourceKeyId() == null) return false; if (other.getSourceKeyId() != null && other.getSourceKeyId().equals(this.getSourceKeyId()) == false) return false; if (other.getKeyId() == null ^ this.getKeyId() == null) return false; if (other.getKeyId() != null && other.getKeyId().equals(this.getKeyId()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getCiphertextBlob() == null) ? 0 : getCiphertextBlob() .hashCode()); hashCode = prime * hashCode + ((getSourceKeyId() == null) ? 0 : getSourceKeyId().hashCode()); hashCode = prime * hashCode + ((getKeyId() == null) ? 0 : getKeyId().hashCode()); return hashCode; } @Override public ReEncryptResult clone() { try { return (ReEncryptResult) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException( "Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
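// The Javadoc above warns that ByteBuffers returned by the SDK are
// mutable and shared. Reading the ciphertext through a read-only view
// keeps every other holder's position intact. A small usage sketch --
// kmsClient and request are assumed to exist and are not part of this
// file:
//
//   ReEncryptResult result = kmsClient.reEncrypt(request);
//   java.nio.ByteBuffer view = result.getCiphertextBlob().asReadOnlyBuffer();
//   byte[] ciphertext = new byte[view.remaining()];
//   view.get(ciphertext); // advances only the read-only view's position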
flofreud/aws-sdk-java
aws-java-sdk-kms/src/main/java/com/amazonaws/services/kms/model/ReEncryptResult.java
Java
apache-2.0
8,596
define(["jquery", "bootstrap", "d3","jnj_chart", "ohdsi_common", "datatables", "datatables-colvis", "colorbrewer", "tabletools"], function ($, bootstrap, d3, jnj_chart, common, DataTables, DataTablesColvis, colorbrewer, TableTools) { function ObservationsRenderer() {} ObservationsRenderer.prototype = {}; ObservationsRenderer.prototype.constructor = ObservationsRenderer; ObservationsRenderer.render = function(cohort) { d3.selectAll("svg").remove(); var id = cohort.id; this.baseUrl = getSourceSpecificWebApiUrl() + 'cohortresults/' + id; var threshold; var datatable; // bind to all matching elements upon creation $(document).on('click', '#observation_table tbody tr', function () { $('#observation_table tbody tr.selected').removeClass('selected'); $(this).addClass('selected'); var data = datatable.data()[datatable.row(this)[0]]; if (data) { var did = data.concept_id; var concept_name = data.snomed; ObservationsRenderer.drilldown(did, concept_name); } }); $(document).on( 'shown.bs.tab', 'a[data-toggle="tab"]', function (e) { $(window).trigger("resize"); // Version 1. $('table:visible').each(function() { var oTableTools = TableTools.fnGetInstance(this); if (oTableTools && oTableTools.fnResizeRequired()) { oTableTools.fnResizeButtons(); } }); }); ObservationsRenderer.drilldown = function (concept_id, concept_name) { $('#loading-text').text("Querying Database..."); $('#spinner-modal').modal('show'); $('.drilldown svg').remove(); $('#observationDrilldownTitle').text(concept_name); $('#reportObservationDrilldown').removeClass('hidden'); $.ajax({ type: "GET", url: ObservationsRenderer.baseUrl + '/observation/' + concept_id, success: function (data) { $('#loading-text').text("Rendering Visualizations..."); if (data) { // age at first diagnosis visualization var firstDiagnosis = common.normalizeArray(data.ageAtFirstOccurrence); if (!firstDiagnosis.empty) { var ageAtFirstOccurrence = new jnj_chart.boxplot(); var bpseries = []; var bpdata = common.normalizeDataframe(firstDiagnosis); for (var i = 0; i < bpdata.category.length; i++) { bpseries.push({ Category: bpdata.category[i], min: bpdata.minValue[i], max: bpdata.maxValue[i], median: bpdata.medianValue[i], LIF: bpdata.p10Value[i], q1: bpdata.p25Value[i], q3: bpdata.p75Value[i], UIF: bpdata.p90Value[i] }); } ageAtFirstOccurrence.render(bpseries, "#ageAtFirstOccurrence", 500, 300, { xLabel: 'Gender', yLabel: 'Age at First Occurrence' }); } common.generateCSVDownload($("#ageAtFirstOccurrence"), data.ageAtFirstOccurrence, "ageAtFirstOccurrence"); // prevalence by month var prevData = common.normalizeArray(data.prevalenceByMonth); if (!prevData.empty) { var byMonthSeries = common.mapMonthYearDataToSeries(prevData, { dateField: 'xCalendarMonth', yValue: 'yPrevalence1000Pp', yPercent: 'yPrevalence1000Pp' }); var prevalenceByMonth = new jnj_chart.line(); prevalenceByMonth.render(byMonthSeries, "#observationPrevalenceByMonth", 1000, 300, { xScale: d3.time.scale().domain(d3.extent(byMonthSeries[0].values, function (d) { return d.xValue; })), xFormat: d3.time.format("%m/%Y"), tickFormat: function (d) { var monthFormat = d3.time.format("%m/%Y"); var yearFormat = d3.time.format("%Y"); return (d.getMonth() === 0) ? 
yearFormat(d) : monthFormat(d); }, xLabel: "Date", yLabel: "Prevalence per 1000 People" }); } common.generateCSVDownload($("#observationPrevalenceByMonth"), data.prevalenceByMonth, "observationPrevalenceByMonth"); // observation type visualization if (data.observationsByType && data.observationsByType.length > 0) { var observationsByType = new jnj_chart.donut(); observationsByType.render(common.mapConceptData(data.observationsByType), "#observationsByType", 500, 300, { margin: { top: 5, left: 5, right: 220, bottom: 5 } }); } common.generateCSVDownload($("#observationsByType"), data.observationsByType, "observationsByType"); // render trellis var trellisData = common.normalizeArray(data.prevalenceByGenderAgeYear, true); if (!trellisData.empty) { var allDeciles = ["0-9", "10-19", "20-29", "30-39", "40-49", "50-59", "60-69", "70-79", "80-89", "90-99"]; var allSeries = ["MALE", "FEMALE"]; var minYear = d3.min(trellisData.xCalendarYear), maxYear = d3.max(trellisData.xCalendarYear); var seriesInitializer = function (tName, sName, x, y) { return { trellisName: tName, seriesName: sName, xCalendarYear: x, yPrevalence1000Pp: y }; }; var nestByDecile = d3.nest() .key(function (d) { return d.trellisName; }) .key(function (d) { return d.seriesName; }) .sortValues(function (a, b) { return a.xCalendarYear - b.xCalendarYear; }); // map data into chartable form var normalizedSeries = trellisData.trellisName.map(function (d, i) { var item = {}; var container = this; d3.keys(container).forEach(function (p) { item[p] = container[p][i]; }); return item; }, trellisData); var dataByDecile = nestByDecile.entries(normalizedSeries); // fill in gaps var yearRange = d3.range(minYear, maxYear, 1); dataByDecile.forEach(function (trellis) { trellis.values.forEach(function (series) { series.values = yearRange.map(function (year) { yearData = series.values.filter(function (f) { return f.xCalendarYear === year; })[0] || seriesInitializer(trellis.key, series.key, year, 0); yearData.date = new Date(year, 0, 1); return yearData; }); }); }); // create svg with range bands based on the trellis names var chart = new jnj_chart.trellisline(); chart.render(dataByDecile, "#trellisLinePlot", 1000, 300, { trellisSet: allDeciles, trellisLabel: "Age Decile", seriesLabel: "Year of Observation", yLabel: "Prevalence Per 1000 People", xFormat: d3.time.format("%Y"), yFormat: d3.format("0.2f"), tickPadding: 20, colors: d3.scale.ordinal() .domain(["MALE", "FEMALE", "UNKNOWN",]) .range(["#1F78B4", "#FB9A99", "#33A02C"]) }); } common.generateCSVDownload($("#trellisLinePlot"), data.prevalenceByGenderAgeYear, "prevalenceByGenderAgeYear"); // Records by Unit var recordsByUnit = new jnj_chart.donut(); var datdaRecordsByUnit = []; var recordsByUnitData = common.normalizeArray(data.recordsByUnit); if (!recordsByUnitData.empty) { if (recordsByUnitData.conceptName instanceof Array) { datdaRecordsByUnit = recordsByUnitData.conceptName.map(function (d, i) { var item = { id: this.conceptName[i], label: this.conceptName[i], value: this.countValue[i] }; return item; }, recordsByUnitData); } else { datdaRecordsByUnit.push( { id: recordsByUnitData.conceptName, label: recordsByUnitData.conceptName, value: recordsByUnitData.countValue }); } datdaRecordsByUnit.sort(function (a, b) { var nameA = a.label.toLowerCase(), nameB = b.label.toLowerCase(); if (nameA < nameB) { //sort string ascending return -1; } if (nameA > nameB) { return 1; } return 0; //default return value (no sorting) }); recordsByUnit.render(datdaRecordsByUnit, "#recordsByUnit", 500, 300, { margin: 
{ top: 5, left: 5, right: 200, bottom: 5 } }); } common.generateCSVDownload($("#recordsByUnit"), data.recordsByUnit, "recordsByUnit"); // Observation Value Distribution var obsValueDist = common.normalizeArray(data.observationValueDistribution); if (!obsValueDist.empty) { var observationValues = new jnj_chart.boxplot(); var obpseries = []; obpdata = common.normalizeDataframe(obsValueDist); obpseries = obpdata.category.map(function (d, i) { var item = { Category: obpdata.category[i], min: obpdata.minValue[i], max: obpdata.maxValue[i], median: obpdata.medianValue[i], LIF: obpdata.p10Value[i], q1: obpdata.p25Value[i], q3: obpdata.p75Value[i], UIF: obpdata.p90Value[i] }; return item; }, obpdata); observationValues.render(obpseries, "#observationValues", 500, 300, { yMax: d3.max(obpdata.p90Value) || obpdata.p90Value, // handle when dataframe is not array of values xLabel: 'Unit', yLabel: 'Observation Value' }); } common.generateCSVDownload($("#observationValues"), data.observationValueDistribution, "observationValues"); } $('#spinner-modal').modal('hide'); }, error : function() { $('#spinner-modal').modal('hide'); } }); }; function getColors(data) { /* console.log(data); if (data.length <= 3) { var colors = []; $.each(data, function() { var lbl = this.label.toLowerCase(); if (lbl.indexOf("above") >= 0 || lbl.indexOf("high") >= 0) { colors.push("#e31a1c"); } else if (lbl.indexOf("below") >= 0 || lbl.indexOf("low") >= 0) { colors.push("#1f78b4"); } else if (lbl.indexOf("normal") >= 0 || lbl.indexOf("within") >= 0) { colors.push("#33a02c"); } else { colors.push("#6a3d9a"); } }); console.log(colors); return colors; } */ return colorbrewer.Dark2[3]; } function buildHierarchyFromJSON(data, threshold) { var total = 0; var root = { "name": "root", "children": [] }; for (i = 0; i < data.percentPersons.length; i++) { total += data.percentPersons[i]; } for (var i = 0; i < data.conceptPath.length; i++) { var parts = data.conceptPath[i].split("||"); var currentNode = root; for (var j = 0; j < parts.length; j++) { var children = currentNode.children; var nodeName = parts[j]; var childNode; if (j + 1 < parts.length) { // Not yet at the end of the path; move down the tree. var foundChild = false; for (var k = 0; k < children.length; k++) { if (children[k].name === nodeName) { childNode = children[k]; foundChild = true; break; } } // If we don't already have a child node for this branch, create it. if (!foundChild) { childNode = { "name": nodeName, "children": [] }; children.push(childNode); } currentNode = childNode; } else { // Reached the end of the path; create a leaf node. 
childNode = { "name": nodeName, "num_persons": data.numPersons[i], "id": data.conceptId[i], "path": data.conceptPath[i], "pct_persons": data.percentPersons[i], "records_per_person": data.recordsPerPerson[i] }; // we only include nodes with sufficient size in the treemap display // sufficient size is configurable in the calculation of threshold // which is a function of the number of pixels in the treemap display if ((data.percentPersons[i] / total) > threshold) { children.push(childNode); } } } } return root; } // show the treemap $('#loading-text').text("Querying Database..."); $('#spinner-modal').modal('show'); var format_pct = d3.format('.2%'); var format_fixed = d3.format('.2f'); var format_comma = d3.format(','); $('#reportObservationOccurrences svg').remove(); var width = 1000; var height = 250; var minimum_area = 50; threshold = minimum_area / (width * height); $.ajax({ type: "GET", url: ObservationsRenderer.baseUrl + '/observation', contentType: "application/json; charset=utf-8", success: function (data) { $('#loading-text').text("Rendering Visualizations..."); var normalizedData = common.normalizeDataframe(common.normalizeArray(data, true)); data = normalizedData; if (!data.empty) { var table_data = normalizedData.conceptPath.map(function (d, i) { conceptDetails = this.conceptPath[i].split('||'); return { concept_id: this.conceptId[i], level_4: conceptDetails[0], level_3: conceptDetails[1], level_2: conceptDetails[2], observation_name: conceptDetails[3], num_persons: format_comma(this.numPersons[i]), percent_persons: format_pct(this.percentPersons[i]), records_per_person: format_fixed(this.recordsPerPerson[i]) }; }, data); datatable = $('#observation_table').DataTable({ order: [6, 'desc'], dom: 'T<"clear">lfrtip', data: table_data, columns: [ { data: 'concept_id' }, { data: 'level_4' }, { data: 'level_3', visible: false }, { data: 'level_2' }, { data: 'observation_name' }, { data: 'num_persons', className: 'numeric' }, { data: 'percent_persons', className: 'numeric' }, { data: 'records_per_person', className: 'numeric' } ], pageLength: 5, lengthChange: false, deferRender: true, destroy: true }); $('#reportObservationOccurrences').show(); tree = buildHierarchyFromJSON(data, threshold); var treemap = new jnj_chart.treemap(); treemap.render(tree, '#treemap_container', width, height, { onclick: function (node) { ObservationsRenderer.drilldown(node.id, node.name); }, getsizevalue: function (node) { return node.num_persons; }, getcolorvalue: function (node) { return node.records_per_person; }, getcolorrange: function() { return colorbrewer.Paired[3]; }, getcontent: function (node) { var result = '', steps = node.path.split('||'), i = steps.length - 1; result += '<div class="pathleaf">' + steps[i] + '</div>'; result += '<div class="pathleafstat">Prevalence: ' + format_pct(node.pct_persons) + '</div>'; result += '<div class="pathleafstat">Number of People: ' + format_comma(node.num_persons) + '</div>'; result += '<div class="pathleafstat">Records per Person: ' + format_fixed(node.records_per_person) + '</div>'; return result; }, gettitle: function (node) { var title = '', steps = node.path.split('||'); for (i = 0; i < steps.length - 1; i++) { title += ' <div class="pathstep">' + Array(i + 1).join('&nbsp;&nbsp') + steps[i] + ' </div>'; } return title; } }); $('[data-toggle="popover"]').popover(); } $('#spinner-modal').modal('hide'); }, error : function(data) { $('#spinner-modal').modal('hide'); } }); return ObservationsRenderer; }; return ObservationsRenderer; });
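// buildHierarchyFromJSON() above turns the flat, '||'-delimited concept
// paths returned by the WebAPI into the nested {name, children} tree
// that jnj_chart.treemap expects, dropping leaves whose share of persons
// falls below `threshold` -- the fraction of the 1000x250 plot that a
// 50-pixel cell covers, i.e. 50 / 250000 = 0.0002. An illustrative
// input/output pair with made-up concept data:
//
//   var sample = {
//       conceptPath: ["Social history||Tobacco use||Never smoker"],
//       conceptId: [1234],
//       numPersons: [900],
//       percentPersons: [0.45],
//       recordsPerPerson: [1.2]
//   };
//   var tree = buildHierarchyFromJSON(sample, 0.0002);
//   // => { name: "root", children: [ { name: "Social history", children: [
//   //      { name: "Tobacco use", children: [ { name: "Never smoker",
//   //        num_persons: 900, id: 1234, pct_persons: 0.45,
//   //        records_per_person: 1.2, ... } ] } ] } ] }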
OHDSI/Olympus
src/main/webapp/Heracles/src/js/charts/observations.js
JavaScript
apache-2.0
25,745
import responses import unittest from unittest import skipIf from unittest.mock import mock_open, patch, ANY, call from docopt import docopt, DocoptExit from io import StringIO from collections import OrderedDict import sys import nio_cli.cli as cli from nio_cli.commands.base import Base from nio.block.terminals import input try: import niocore niocore_installed = True except: niocore_installed = False class TestCLI(unittest.TestCase): def parse_args(self, command): return docopt(cli.__doc__, command.split(' ')) def test_new_arguments(self): """'new' requires a project-name""" args = self.parse_args('new project') self.assertEqual(args['<project-name>'], 'project') with self.assertRaises(DocoptExit): self.parse_args('new') def test_buildpsec_arguments(self): """'buildspec' requires a repo-name""" args = self.parse_args('buildspec repo') self.assertEqual(args['<repo-name>'], 'repo') with self.assertRaises(DocoptExit): self.parse_args('buildspec') def test_buildreadme_arguments(self): """'buildreadme' take no args""" args = self.parse_args('buildreadme') with self.assertRaises(DocoptExit): self.parse_args('buildreadme some-args') def test_new_command(self): """Clone the project template from GitHub""" with patch('nio_cli.commands.new.os.path.isdir', return_value=True), \ patch('nio_cli.commands.new.subprocess.call') as call, \ patch('nio_cli.commands.new.config_project') as config: self._patched_new_command(call, config) def _patched_new_command(self, call, config): self._main('new', **{ '<project-name>': 'project', }) config.assert_called_once_with(name='project', niohost='127.0.0.1', nioport='8181', pubkeeper_hostname=None, pubkeeper_token=None, ssl=True, instance_id=None) self.assertEqual(call.call_args_list[0][0][0], ( 'git clone ' 'git://github.com/niolabs/project_template.git project' )) self.assertEqual(call.call_args_list[1][0][0], ( 'cd ./project ' '&& git submodule update --init --recursive' )) self.assertEqual(call.call_args_list[2][0][0], ( 'cd ./project ' '&& git remote remove origin ' '&& git commit --amend --reset-author --quiet -m "Initial commit"' )) def test_new_command_set_user(self): """Clone the project template from GitHub""" with patch('nio_cli.commands.new.os.path.isdir', return_value=True), \ patch('nio_cli.commands.new.subprocess.call') as call, \ patch('nio_cli.commands.new.set_user') as user, \ patch('nio_cli.commands.new.config_project') as config: self._patched_new_command_set_user(call, user, config) def _patched_new_command_set_user(self, call, user, config): self._main('new', **{ '<project-name>': 'project', '--username': 'new_user', '--password': 'new_password', }) user.assert_called_once_with('project', 'new_user', 'new_password', True) config.assert_called_once_with(name='project', niohost='127.0.0.1', nioport='8181', pubkeeper_hostname=None, pubkeeper_token=None, ssl=True, instance_id=None) self.assertEqual(call.call_args_list[0][0][0], ( 'git clone ' 'git://github.com/niolabs/project_template.git project' )) self.assertEqual(call.call_args_list[1][0][0], ( 'cd ./project ' '&& git submodule update --init --recursive' )) self.assertEqual(call.call_args_list[2][0][0], ( 'cd ./project ' '&& git remote remove origin ' '&& git commit --amend --reset-author --quiet -m "Initial commit"' )) def test_new_command_template(self): """Clone the project template from GitHub""" with patch('nio_cli.commands.new.os.path.isdir', return_value=True), \ patch('nio_cli.commands.new.subprocess.call') as call, \ patch('nio_cli.commands.new.config_project') as config: 
self._patched_new_command_template(call, config) def _patched_new_command_template(self, call, config): with patch('nio_cli.commands.new.os.walk') as patched_os_walk: join_module = 'nio_cli.commands.new.os.path.join' with patch(join_module, return_value='join'): patched_os_walk.return_value = [ ('root', ('dirs'), ['requirements.txt'])] self._main('new', **{ '<project-name>': 'project', '<template>': 'my_template', '--pubkeeper-hostname': 'pkhost', '--pubkeeper-token': 'pktoken', '--instance-id': 'abc-123', }) config.assert_called_once_with(name='project', niohost='127.0.0.1', nioport='8181', pubkeeper_hostname='pkhost', pubkeeper_token='pktoken', ssl=True, instance_id='abc-123') self.assertEqual(call.call_args_list[0][0][0], ( 'git clone ' 'git://github.com/niolabs/my_template.git project' )) self.assertEqual(call.call_args_list[1][0][0], ( 'cd ./project ' '&& git submodule update --init --recursive' )) self.assertEqual(call.call_args_list[2][0][0], ( [sys.executable, '-m', 'pip', 'install', '-r', 'join'] )) self.assertEqual(call.call_args_list[3][0][0], ( 'cd ./project ' '&& git remote remove origin ' '&& git commit --amend --reset-author --quiet ' '-m "Initial commit"' )) def test_new_command_with_failed_clone(self): """Cleanly handle new command when 'git clone' fails""" isdir_path = 'nio_cli.commands.new.os.path.isdir' with patch(isdir_path, return_value=False) as isdir, \ patch('nio_cli.commands.new.subprocess.call') as call: self._main('new', **{ '<project-name>': 'project', '--username': 'user', '--password': 'pwd' }) self.assertEqual(call.call_count, 1) isdir.assert_called_once_with('project') @responses.activate def test_add_command(self): """Clone specified blocks as submodules""" responses.add(responses.POST, 'http://127.0.0.1:8181/project/blocks') self._main('add', **{ '<block-repo>': ['block1'], '--project': '.' 
}) self.assertEqual(len(responses.calls), 1) self._main('add', **{ '<block-repo>': ['block1'], '--project': '.', '--upgrade': True }) self.assertEqual(len(responses.calls), 3) @responses.activate def test_list_command(self): """List blocks or services from the rest api""" service_response = [{'api': 'response'}, {'another': 'service'}] responses.add(responses.GET, 'http://127.0.0.1:8181/services', json=service_response) with patch('builtins.print') as print: self._main('list', **{ "services": True, '--username': 'user', '--password': 'pwd' }) self.assertEqual(len(responses.calls), 1) self.assertEqual(print.call_count, len(service_response)) for index, service in enumerate(service_response): self.assertDictEqual( print.call_args_list[index][0][0], service) @responses.activate def test_list_command_with_id(self): """List blocks or services from the rest api""" blk_response = {'id1': {'name': 'name1', 'id': 'id1'}, 'id2': {'name': 'name2', 'id': 'id2'}} responses.add(responses.GET, 'http://127.0.0.1:8181/blocks', json=blk_response) with patch('builtins.print') as mock_print: self._main('list', **{ "services": False, '--username': 'user', '--password': 'pwd' }) self.assertEqual(len(responses.calls), 1) self.assertEqual(mock_print.call_count, 2) call_args = [arg[0] for arg in mock_print.call_args_list] for blk in blk_response: # the order of responses is not guaranteed self.assertTrue( (blk_response[blk]['id'], blk_response[blk]['name']) in call_args) @responses.activate def test_shutdown_command(self): """Shutdown nio through the rest api""" responses.add(responses.GET, 'http://127.0.0.1:8181/shutdown') self._main('shutdown', **{ '--username': 'user', '--password': 'pwd' }) self.assertEqual(len(responses.calls), 1) @responses.activate def test_command_command(self): """Command a nio block through the rest api""" responses.add(responses.POST, 'http://127.0.0.1:8181/services/service/block/command') self._main('command', **{ '<command-name>': 'command', '<service-name>': 'service', '<block-name>': 'block', '--username': 'user', '--password': 'pwd' }) self.assertEqual(len(responses.calls), 1) def test_publishblock_command(self): """Create spec.json file from block class""" from nio.block.base import Block from nio.properties import StringProperty, VersionProperty from nio.command import command @command('commandit') @command('commander') @input("testInput") @input("testInput2") class SampleBlock1(Block): version = VersionProperty('1.2.3') str_prop = StringProperty( title='String Prop', default='default string', ) another = StringProperty( title='Another Prop', ) get_block_class_path = 'nio_cli.utils.spec._get_block_class' requests_path = 'nio_cli.commands.publishblock.requests' sample_spec = """ { "nio/SampleBlock1": { "description": "This is the description", "outputs": "The original output", "from_python": "myfile.SampleBlock1" } } """ sample_release = """ { "nio/SampleBlock1": { "language": "Python", "from_python": "myfile.SampleBlock1", "url": "git://myblock" } } """ with patch('builtins.open', new_callable=mock_open) as open_calls, \ patch(get_block_class_path) as mock_get_class, \ patch(requests_path) as mock_requests: open_calls.side_effect = [ mock_open(read_data=sample_spec).return_value, mock_open(read_data=sample_release).return_value ] # mocks to load existing spec.json and to discover blocks mock_get_class.return_value = SampleBlock1 # Exectute on repo 'myblocks' self._main('publishblock', **{ '--api-url': 'http://fake', '--api-token': 'token'}) 
mock_get_class.assert_called_with('myfile.SampleBlock1') self.maxDiff = None # One POST for spec and one for release self.assertEqual(mock_requests.post.call_count, 2) spec_call_args = mock_requests.post.call_args_list[0][1]['json'] release_call_args = mock_requests.post.call_args_list[1][1]['json'] self.assertDictEqual(spec_call_args, { 'nio/SampleBlock1': { 'description': 'This is the description', 'commands': { 'commander': {'params': {}}, 'commandit': {'params': {}} }, 'inputs': { 'testInput': {'description': ''}, 'testInput2': {'description': ''} }, 'outputs': 'The original output', # orig output preserved 'properties': { 'another': { 'default': None, 'title': 'Another Prop', 'type': 'StringType' }, 'str_prop': { 'default': 'default string', 'title': 'String Prop', 'type': 'StringType' } }, 'version': '1.2.0' # Make sure only major.minor } }) self.assertDictEqual(release_call_args, { 'nio/SampleBlock1': { "language": "Python", "version": "1.2.3", "url": "git://myblock" } }) @skipIf(not niocore_installed, 'niocore required for buildrelease') def test_buildrelease_command(self): """create release.json from block class""" from nio.block.base import Block from nio.properties import StringProperty, VersionProperty from nio.command import command @command('commandit') @command('commander') class SampleBlock1(Block): version = VersionProperty('0.1.0') str_prop = StringProperty( title='String Prop', default='default string', ) another = StringProperty( title='Another Prop', ) class SampleBlock2(Block): # if a block has no configured version prop, the version is 0.0.0 # by default pass discover_path = \ 'nio_cli.commands.buildrelease.Discover.discover_classes' json_dump_path = 'nio_cli.commands.buildrelease.json.dump' file_exists_path = 'nio_cli.commands.buildrelease.os.path.exists' subprocess_call_path = \ 'nio_cli.commands.buildrelease.subprocess.check_output' with patch(discover_path) as discover_classes, \ patch('builtins.open', mock_open()) as mock_file, \ patch(file_exists_path) as mock_file_exists, \ patch(json_dump_path) as mock_json_dump, \ patch(subprocess_call_path) as check_output: # mocks to load existing spec.json and to discover blocks mock_file_exists.return_value = True discover_classes.return_value = [SampleBlock1, SampleBlock2] check_output.return_value = \ b'origin git@github.com:niolabs/myblocks.git (fetch)' # Exectute on repo 'myblocks' self._main('buildrelease', **{'<repo-name>': 'myblocks'}) discover_classes.assert_called_once_with( 'blocks.myblocks', ANY, ANY) # json dump to file with formatting mock_json_dump.assert_called_once_with( { 'nio/SampleBlock2': { 'version': '0.0.0', 'language': 'Python', 'url': 'git://github.com/niolabs/myblocks.git' }, 'nio/SampleBlock1': { 'version': '0.1.0', 'language': 'Python', 'url': 'git://github.com/niolabs/myblocks.git'} }, mock_file(), indent=2, sort_keys=True) def test_newblock_command(self): """Clone the block template from GitHub""" with patch('nio_cli.commands.new.subprocess.call') as call, \ patch('builtins.open', mock_open( read_data='Example ..example_block TestExample') ) as mock_file, \ patch("nio_cli.commands.newblock.os") as os_mock, \ patch("nio_cli.commands.newblock.move") as move_mock: self._main('newblock', **{'<block-name>': 'yaba_daba'}) self.assertEqual(call.call_args_list[0][0][0], ( 'git clone ' 'git://github.com/nio-blocks/block_template.git yaba_daba' )) self.assertEqual(mock_file.call_args_list[0][0], ('./yaba_daba/yaba_daba_block.py',)) self.assertEqual( mock_file.return_value.write.call_args_list[0][0][0], 
'YabaDaba ..example_block TestYabaDaba') # assert calls to rename block files self.assertEqual(os_mock.remove.call_count, 1) self.assertEqual(move_mock.call_count, 3) def test_blockcheck_command(self): self.maxDiff = None file_exists_path = 'nio_cli.commands.blockcheck.os.path.exists' getcwd_path = 'nio_cli.commands.blockcheck.os.getcwd' listdir_path = 'nio_cli.commands.blockcheck.os.listdir' subprocess_path = 'nio_cli.commands.blockcheck.subprocess.call' sys_exit_path = 'nio_cli.commands.blockcheck.sys.exit' print_path = 'nio_cli.commands.blockcheck.sys.stdout' json_load_path = 'nio_cli.commands.blockcheck.json.load' with patch('builtins.open', mock_open()) as mock_file, \ patch(file_exists_path) as mock_file_exists, \ patch(getcwd_path) as mock_getcwd, \ patch(listdir_path) as mock_listdir, \ patch(subprocess_path) as mock_subprocess_call, \ patch(sys_exit_path) as mock_sys_exit, \ patch(print_path, new_callable=StringIO) as mock_print, \ patch(json_load_path) as mock_json_load: mock_file_exists.return_value = True mock_getcwd.return_value = 'nio_lmnopio_block' mock_listdir.return_value = ['nio_lmnopio_block.py'] mock_json_load.side_effect = [ # json.load() for spec.json (prop1 missing description) { 'nio/nioLmnopio': { 'version': '0.1.0', 'description': 'spec description', 'properties': { 'prop1': { 'description': '' } }, 'inputs': {}, 'outputs': {}, 'commands': {}, } }, # json.load() for release.json (versions do not match) { 'nio/nioLmnopio': { 'language': 'Python', 'url': 'release url', 'version': '0.2.0', } } ] mock_file.return_value.readlines.side_effect = [ # .readlines() for nio_lmnopio_block.py [ 'class nioLmnopio(Block):', "version = VersionProperty('0.1.0')" ], # .readlines() for README.md (missing 'Outputs') [ 'nioLmnopio', 'Properties', 'Inputs', 'Commands', 'Dependencies' ] ] self._main('blockcheck') self.assertEqual( 'pycodestyle .', mock_subprocess_call.call_args_list[0][0][0]) # Check that print statements are run what_was_printed = mock_print.getvalue() self.assertIn('Checking PEP8 formatting ...', what_was_printed) self.assertIn('Checking spec.json formatting ...', what_was_printed) self.assertIn('Fill in the description for the "prop1" property ', what_was_printed) self.assertIn('in the nioLmnopio block', what_was_printed) self.assertIn('Checking README.md formatting ...', what_was_printed) self.assertIn('Add "Outputs" to the nioLmnopio block', what_was_printed) self.assertIn('Checking release.json formatting ...', what_was_printed) self.assertIn('Checking version formatting ...', what_was_printed) self.assertIn('The nioLmnopio version in the release file does not match ', what_was_printed) self.assertIn('the version in its block file', what_was_printed) self.assertIn('Spec.json and release.json versions do not match ', what_was_printed) self.assertIn('for nioLmnopio block', what_was_printed) self.assertIn('Checking class and file name formatting ...', what_was_printed) def test_add_user_command(self): """ Adds a user through the rest api""" with patch("nio_cli.commands.add_user.set_user") as set_user_patch: self._main('add_user', **{ '--project': 'testing_project', '<username>': 'user', '<password>': 'pwd' }) self.assertEqual(set_user_patch.call_count, 1) self.assertEqual(set_user_patch.call_args_list[0], call('testing_project', 'user', 'pwd')) from nio_cli.utils.users import set_user, _hash_password, \ _set_permissions with patch(set_user.__module__ + '.os') as mock_os, \ patch(set_user.__module__ + '.json') as mock_json, \ patch('builtins.open') as mock_open, \ 
patch('nio_cli.utils.users._hash_password') as mock_hash, \ patch('nio_cli.utils.users._set_permissions'): mock_os.path.isfile.return_value = True mock_hash.return_value = "AdminPwdHash" mock_json.load.return_value = {"Admin": "AdminPwd"} username = "user1" password = "pwd1" self._main('add_user', **{ '--project': 'testing_project', '<username>': username, '<password>': password }) # one call to read users.json and one to save users.json self.assertEqual(mock_open.call_count, 2) print(mock_json.dump.call_args_list) users, _ = mock_json.dump.call_args_list[0][0] self.assertIn(username, users) self.assertDictEqual(users[username], {"password": "AdminPwdHash"}) _set_permissions('testing_project', username, False) # make sure we open permissions.json two times # to read and write new permissions self.assertEqual(mock_open.call_count, 4) print(mock_json.dump.call_args_list) permissions, _ = mock_json.dump.call_args_list[0][0] self.assertIn(username, permissions) self.assertDictEqual(permissions[username], {".*": "rwx"}) def test_remove_user_command(self): """ Adds a user through the rest api""" with patch("nio_cli.commands.remove_user.remove_user") as \ remove_user_patch: self._main('remove_user', **{ '--project': 'testing_project', '<username>': 'user' }) self.assertEqual(remove_user_patch.call_count, 1) self.assertEqual(remove_user_patch.call_args_list[0], call('testing_project', 'user')) from nio_cli.commands.remove_user import RemoveUser, _remove_permission with patch(RemoveUser.__module__ + '.os') as mock_os, \ patch(RemoveUser.__module__ + '.json') as mock_json, \ patch('builtins.open') as mock_open, \ patch('nio_cli.commands.remove_user._remove_permission'): mock_os.path.isfile.return_value = True mock_json.load.return_value = {"Admin": "AdminPwd"} username = "Admin" self._main('remove_user', **{ '--project': 'testing_project', '<username>': username }) # one call to read users.json and one to save users.json self.assertEqual(mock_open.call_count, 2) users, _ = mock_json.dump.call_args_list[0][0] self.assertNotIn(username, users) self.assertEqual(len(users), 0) # make sure we open permissions.json two times # to read and write new permissions mock_json.load.return_value = {"Admin": {".*": "rwx"}} _remove_permission('testing_project', username) self.assertEqual(mock_open.call_count, 4) permissions, _ = mock_json.dump.call_args_list[0][0] self.assertNotIn(username, permissions) self.assertEqual(len(permissions), 0) def test_cleanup_host(self): cli_command = Base({}) self.assertEqual( cli_command._cleanup_host('localhost'), 'https://localhost') self.assertEqual( cli_command._cleanup_host('http://localhost'), 'http://localhost') self.assertEqual( cli_command._cleanup_host('https://localhost'), 'https://localhost') self.assertEqual( cli_command._cleanup_host('https://localhost:8181'), 'https://localhost:8181') self.assertEqual( cli_command._cleanup_host('https://localhost:8181/'), 'https://localhost:8181') def _main(self, command, **kwargs): args = { '--daemon': False, '--upgrade': False, '-u': False, '--template': False, '-t': False, } if command in ('new', 'config'): args['--ip'] = '127.0.0.1' args['--port'] = '8181' else: args['--instance-host'] = 'http://127.0.0.1:8181' args[command] = True for k, v in kwargs.items(): args[k] = v with patch('nio_cli.cli.docopt') as docopt: docopt.return_value = args cli.main()
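
# The REST-facing tests above share one pattern: register a fake endpoint
# with `responses`, exercise the code, then assert on responses.calls.
# A standalone distillation of that pattern -- this sketch test is
# illustrative and not part of the original suite:
import requests


@responses.activate
def test_responses_pattern_sketch():
    responses.add(responses.GET, 'http://127.0.0.1:8181/services',
                  json=[{'name': 'svc'}], status=200)
    resp = requests.get('http://127.0.0.1:8181/services')
    assert resp.json() == [{'name': 'svc'}]
    assert len(responses.calls) == 1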
nioinnovation/nio-cli
tests/test_cli.py
Python
apache-2.0
27,669
using System; using System.ComponentModel.Composition; using System.Diagnostics.CodeAnalysis; using System.Linq; using EnvDTE; using EnvDTE80; using Microsoft.VisualStudio.ComponentModelHost; namespace NuGet.VisualStudio { [Export(typeof(IFileSystemProvider))] public class VsFileSystemProvider : IFileSystemProvider { private readonly DTE _dte; private readonly IComponentModel _componentModel; private readonly ISettings _settings; public VsFileSystemProvider() : this(ServiceLocator.GetInstance<DTE>(), ServiceLocator.GetGlobalService<SComponentModel, IComponentModel>(), ServiceLocator.GetInstance<ISettings>()) { } public VsFileSystemProvider(DTE dte, IComponentModel componentModel, ISettings settings) { if (dte == null) { throw new ArgumentNullException("dte"); } if (componentModel == null) { throw new ArgumentNullException("componentModel"); } if (settings == null) { throw new ArgumentNullException("settings"); } _componentModel = componentModel; _dte = dte; _settings = settings; } public IFileSystem GetFileSystem(string path) { // Get the source control providers var physicalFileSystem = new PhysicalFileSystem(path); if (_settings.IsSourceControlDisabled()) { return physicalFileSystem; } var providers = _componentModel.GetExtensions<ISourceControlFileSystemProvider>(); // Get the repository path IFileSystem fileSystem = null; var sourceControl = (SourceControl2)_dte.SourceControl; if (providers.Any() && sourceControl != null) { SourceControlBindings binding = null; try { // Get the binding for this solution binding = sourceControl.GetBindings(_dte.Solution.FullName); } catch (NotImplementedException) { // Some source control providers don't bother to implement this. // TFS might be the only one using it } if (binding != null) { fileSystem = providers.Select(provider => GetFileSystemFromProvider(provider, path, binding)) .FirstOrDefault(fs => fs != null); } } return fileSystem ?? physicalFileSystem; } [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "We should never fail")] private static IFileSystem GetFileSystemFromProvider(ISourceControlFileSystemProvider provider, string path, SourceControlBindings binding) { try { return provider.GetFileSystem(path, binding); } catch (Exception exception) { ExceptionHelper.WriteToActivityLog(exception); // Ignore exceptions that can happen when some binaries are missing. e.g. TfsSourceControlFileSystemProvider // would throw a jitting error if TFS is not installed } return null; } } }
mdavid/nuget
src/VisualStudio/VsFileSystemProvider.cs
C#
apache-2.0
3,640
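The interesting piece of VsFileSystemProvider.GetFileSystem above is the provider-fallback chain: each ISourceControlFileSystemProvider is asked in turn, the first non-null answer wins, and PhysicalFileSystem is the default. A minimal, JDK-only Java sketch of that pattern follows (Java is used for all illustrative snippets in this collection); FileSystem, Physical, and the Function-typed providers are illustrative stand-ins, not the NuGet types.

import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.function.Function;

// Sketch of the first-non-null provider chain used by GetFileSystem.
public class ProviderFallbackSketch {
    interface FileSystem {}
    record Physical() implements FileSystem {}    // stand-in for PhysicalFileSystem

    static FileSystem resolve(List<Function<String, FileSystem>> providers, String path) {
        Optional<FileSystem> fromProvider = providers.stream()
            .map(p -> p.apply(path))      // like GetFileSystemFromProvider(provider, path, binding)
            .filter(Objects::nonNull)     // providers that cannot bind return null
            .findFirst();
        return fromProvider.orElseGet(Physical::new); // fall back to the physical file system
    }

    public static void main(String[] args) {
        FileSystem fs = resolve(List.of(p -> null, p -> null), "C:\\src\\project");
        System.out.println(fs); // Physical[] -- no provider claimed the path
    }
}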
require 'spec_helper' describe 'duplicity::profile' do let(:title) { 'default' } let(:facts) { {:concat_basedir => '/path/to/dir'} } let(:default_config_file) { '/etc/duply/default/conf' } let(:default_filelist) { '/etc/duply/default/exclude' } describe 'by default' do let(:params) { {} } it { should contain_file('/etc/duply/default').with( 'ensure' => 'directory', 'owner' => 'root', 'group' => 'root', 'mode' => '0700' ) } it { should contain_file('/etc/duply/default/conf').with( 'ensure' => 'file', 'owner' => 'root', 'group' => 'root', 'mode' => '0400' ) } it { should contain_concat('/etc/duply/default/exclude').with( 'ensure' => 'present', 'owner' => 'root', 'group' => 'root', 'mode' => '0400' ) } it { should contain_concat('/etc/duply/default/pre').with( 'ensure' => 'present', 'owner' => 'root', 'group' => 'root', 'mode' => '0700' ) } it { should contain_concat('/etc/duply/default/post').with( 'ensure' => 'present', 'owner' => 'root', 'group' => 'root', 'mode' => '0700' ) } it { should contain_file(default_config_file).with_content(/^# GPG_KEY='disabled'/) } it { should contain_file(default_config_file).with_content(/^GPG_KEYS_ENC=''$/) } it { should contain_file(default_config_file).with_content(/^GPG_KEY_SIGN='disabled'$/) } it { should contain_file(default_config_file).with_content(/^GPG_PW=''$/) } it { should contain_file(default_config_file).with_content(/^GPG_OPTS=''$/) } it { should contain_file(default_config_file).with_content(/^TARGET_USER=''$/) } it { should contain_file(default_config_file).with_content(/^TARGET_PASS=''$/) } it { should contain_file(default_config_file).without_content(/^MAX_FULLBKP_AGE=.*$/) } it { should contain_file(default_config_file).with_content(/^VOLSIZE=50$/) } it { should contain_concat__fragment("#{default_filelist}/exclude-by-default").with_content(/^\n\- \*\*$/) } it { should_not contain_concat__fragment("#{default_filelist}/include") } it { should_not contain_concat__fragment("#{default_filelist}/exclude") } specify { should contain_cron("backup-default").with_ensure('absent') } specify { should contain_file(default_config_file).with_content(/^SOURCE='\/'$/) } specify { should contain_file(default_config_file).with_content(/^TARGET='\/default'$/) } end describe 'with ensure absent' do let(:params) { {:ensure => 'absent'} } it { should contain_file('/etc/duply/default').with_ensure('absent') } it { should contain_file('/etc/duply/default/conf').with_ensure('absent') } it { should contain_file('/etc/duply/default/exclude').with_ensure('absent') } it { should contain_file('/etc/duply/default/pre').with_ensure('absent') } it { should contain_file('/etc/duply/default/post').with_ensure('absent') } end describe 'with invalid ensure' do let(:params) { {:ensure => 'foobar'} } it do expect { should contain_file(default_config_file) }.to raise_error(Puppet::Error, /ensure/) end end describe 'with gpg_encryption => false' do let(:params) { {:gpg_encryption => false} } it { should contain_file(default_config_file).with_content(/^GPG_KEY='disabled'$/) } end describe 'with gpg_encryption => true' do let(:params) { {:gpg_encryption => true} } it { should contain_file(default_config_file).with_content(/^# GPG_KEY='disabled'/) } end describe 'with empty gpg_encryption_keys' do let(:params) { {:gpg_encryption_keys => ''} } it { should contain_file(default_config_file).with_content(/^GPG_KEYS_ENC=''$/) } end describe 'with gpg_encryption_keys => key1' do let(:params) { {:gpg_encryption_keys => 'key1'} } it { should 
contain_file(default_config_file).with_content(/^GPG_KEYS_ENC='key1'$/) } it { should contain_duplicity__public_key_link('default/key1') } end describe 'with gpg_encryption_keys => [key1]' do let(:params) { {:gpg_encryption_keys => ['key1']} } it { should contain_file(default_config_file).with_content(/^GPG_KEYS_ENC='key1'$/) } it { should contain_duplicity__public_key_link('default/key1') } end describe 'with gpg_encryption_keys => [key1,key2]' do let(:params) { {:gpg_encryption_keys => ['key1', 'key2']} } it { should contain_file(default_config_file).with_content(/^GPG_KEYS_ENC='key1,key2'$/) } it { should contain_duplicity__public_key_link('default/key1') } it { should contain_duplicity__public_key_link('default/key2') } end describe 'with invalid gpg_signing_key' do let(:params) { {:gpg_signing_key => 'invalid-key-id'} } it do expect { should contain_file(default_config_file) }.to raise_error(Puppet::Error, /signing_key/) end end describe 'with gpg_signing_key => key1' do let(:params) { {:gpg_signing_key => 'key1'} } it { should contain_file(default_config_file).with_content(/^GPG_KEY_SIGN='key1'$/) } it { should contain_duplicity__private_key_link('default/key1') } end describe 'with gpg_passphrase => secret' do let(:params) { {:gpg_passphrase => 'secret'} } it { should contain_file(default_config_file).with_content(/^GPG_PW='secret'$/) } end describe 'with empty gpg_options' do let(:params) { {:gpg_options => ''} } specify { should contain_file(default_config_file).with_content(/^GPG_OPTS=''$/) } end describe 'with gpg_options => --switch' do let(:params) { {:gpg_options => '--switch'} } specify { should contain_file(default_config_file).with_content(/^GPG_OPTS='--switch'$/) } end describe 'with gpg_options => [--switch]' do let(:params) { {:gpg_options => ['--switch']} } it { should contain_file(default_config_file).with_content(/^GPG_OPTS='--switch'$/) } end describe 'with gpg_options => [--switch, --key=value]' do let(:params) { {:gpg_options => ['--switch', '--key=value']} } it { should contain_file(default_config_file).with_content(/^GPG_OPTS='--switch --key=value'$/) } end describe 'with empty source' do let(:params) { {:source => '' } } it do expect { should contain_file(default_config_file) }.to raise_error(Puppet::Error, /source/) end end describe 'with source => /path/of/source' do let(:params) { {:source => '/path/of/source', } } it { should contain_file(default_config_file).with_content(/^SOURCE='\/path\/of\/source'$/) } end describe 'with empty target' do let(:params) { {:target => '', } } it do expect { should contain_file(default_config_file) }.to raise_error(Puppet::Error, /target/) end end describe 'with target => http://example.com' do let(:params) { {:target => 'http://example.com', } } it { should contain_file(default_config_file).with_content(/^TARGET='http:\/\/example.com'$/) } end describe 'with target_username => johndoe' do let(:params) { {:target_username => 'johndoe'} } it { should contain_file(default_config_file).with_content(/^TARGET_USER='johndoe'$/) } end describe 'with target_password => secret' do let(:params) { {:target_password => 'secret'} } it { should contain_file(default_config_file).with_content(/^TARGET_PASS='secret'$/) } end describe 'should accept max_full_backups as integer' do let(:params) { {:max_full_backups => 5} } it { should contain_file(default_config_file).with_content(/^MAX_FULL_BACKUPS=5$/) } end describe 'should accept max_full_backups as string' do let(:params) { {:max_full_backups => '5'} } it { should 
contain_file(default_config_file).with_content(/^MAX_FULL_BACKUPS=5$/) }
  end

  describe 'should not accept any string as max_full_backups' do
    let(:params) { {:max_full_backups => 'invalid'} }

    specify {
      expect { should contain_file(default_config_file) }.to raise_error(Puppet::Error, /max_full_backups/)
    }
  end

  describe 'with full_if_older_than => 1M' do
    let(:params) { {:full_if_older_than => '1M'} }

    it { should contain_file(default_config_file).with_content(/^MAX_FULLBKP_AGE=1M$/) }
    it { should contain_file(default_config_file).with_content(/^DUPL_PARAMS="\$DUPL_PARAMS --full-if-older-than \$MAX_FULLBKP_AGE "$/) }
  end

  describe 'with invalid volsize' do
    let(:params) { {:volsize => 'invalid'} }

    specify {
      expect { should contain_file(default_config_file) }.to raise_error(Puppet::Error, /volsize/)
    }
  end

  describe 'with volsize => 25' do
    let(:params) { {:volsize => 25} }

    it { should contain_file(default_config_file).with_content(/^VOLSIZE=25$/) }
    it { should contain_file(default_config_file).with_content(/^DUPL_PARAMS="\$DUPL_PARAMS --volsize \$VOLSIZE "$/) }
  end

  describe 'with include_filelist => ["/a/b"]' do
    let(:params) { {:include_filelist => ['/a/b']} }

    it { should contain_concat__fragment("#{default_filelist}/include").with_content(/^\+ \/a\/b$/) }
  end

  describe 'with invalid include_filelist' do
    let(:params) { {:include_filelist => 'invalid'} }

    specify {
      expect { should contain_concat__fragment("#{default_filelist}/include") }.to raise_error(Puppet::Error, /include_filelist/)
    }
  end

  describe 'with exclude_filelist => ["/a/b"]' do
    let(:params) { {:exclude_filelist => ['/a/b']} }

    it { should contain_concat__fragment("#{default_filelist}/exclude").with_content(/^\- \/a\/b$/) }
  end

  describe 'with invalid exclude_filelist' do
    let(:params) { {:exclude_filelist => 'invalid'} }

    specify {
      expect { should contain_concat__fragment("#{default_filelist}/exclude") }.to raise_error(Puppet::Error, /exclude_filelist/)
    }
  end

  describe 'with exclude_by_default => false' do
    let(:params) { {:exclude_by_default => false} }

    it { should contain_concat__fragment("#{default_filelist}/exclude-by-default").with_ensure('absent') }
  end

  describe 'with cron_enabled and cron_hour and cron_minute set' do
    let(:params) { {:cron_enabled => true, :cron_hour => '1', :cron_minute => '2'} }

    specify do
      should contain_cron("backup-default").with(
        'ensure' => 'present',
        'hour'   => '1',
        'minute' => '2'
      )
    end
  end
end
adamcrews/puppet-duplicity
spec/defines/profile_spec.rb
Ruby
apache-2.0
10,421
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.calcite.prepare; import org.apache.calcite.jdbc.CalciteSchema; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.materialize.Lattice; import org.apache.calcite.plan.RelOptSchema; import org.apache.calcite.plan.RelOptTable; import org.apache.calcite.rel.RelCollation; import org.apache.calcite.rel.RelDistribution; import org.apache.calcite.rel.RelDistributionTraitDef; import org.apache.calcite.rel.RelFieldCollation; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.RelReferentialConstraint; import org.apache.calcite.rel.logical.LogicalTableScan; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeFactory; import org.apache.calcite.rel.type.RelDataTypeField; import org.apache.calcite.rel.type.RelProtoDataType; import org.apache.calcite.rel.type.RelRecordType; import org.apache.calcite.schema.ColumnStrategy; import org.apache.calcite.schema.FilterableTable; import org.apache.calcite.schema.ModifiableTable; import org.apache.calcite.schema.Path; import org.apache.calcite.schema.ProjectableFilterableTable; import org.apache.calcite.schema.QueryableTable; import org.apache.calcite.schema.ScannableTable; import org.apache.calcite.schema.Schema; import org.apache.calcite.schema.SchemaPlus; import org.apache.calcite.schema.SchemaVersion; import org.apache.calcite.schema.Schemas; import org.apache.calcite.schema.StreamableTable; import org.apache.calcite.schema.Table; import org.apache.calcite.schema.TemporalTable; import org.apache.calcite.schema.TranslatableTable; import org.apache.calcite.schema.Wrapper; import org.apache.calcite.sql.SqlAccessType; import org.apache.calcite.sql.validate.SqlModality; import org.apache.calcite.sql.validate.SqlMonotonicity; import org.apache.calcite.sql2rel.InitializerExpressionFactory; import org.apache.calcite.sql2rel.NullInitializerExpressionFactory; import org.apache.calcite.util.ImmutableBitSet; import org.apache.calcite.util.Pair; import org.apache.calcite.util.Util; import com.google.common.collect.ImmutableList; import java.util.AbstractList; import java.util.Collection; import java.util.List; import java.util.Objects; import java.util.Set; import java.util.function.Function; /** * Implementation of {@link org.apache.calcite.plan.RelOptTable}. */ public class RelOptTableImpl extends Prepare.AbstractPreparingTable { private final RelOptSchema schema; private final RelDataType rowType; private final Table table; private final Function<Class, Expression> expressionFunction; private final ImmutableList<String> names; /** Estimate for the row count, or null. * * <p>If not null, overrides the estimate from the actual table. 
* * <p>Useful when a table that contains a materialized query result is being * used to replace a query expression that wildly underestimates the row * count. Now the materialized table can tell the same lie. */ private final Double rowCount; private RelOptTableImpl( RelOptSchema schema, RelDataType rowType, List<String> names, Table table, Function<Class, Expression> expressionFunction, Double rowCount) { this.schema = schema; this.rowType = Objects.requireNonNull(rowType); this.names = ImmutableList.copyOf(names); this.table = table; // may be null this.expressionFunction = expressionFunction; // may be null this.rowCount = rowCount; // may be null } public static RelOptTableImpl create( RelOptSchema schema, RelDataType rowType, List<String> names, Expression expression) { return new RelOptTableImpl(schema, rowType, names, null, c -> expression, null); } public static RelOptTableImpl create( RelOptSchema schema, RelDataType rowType, List<String> names, Table table, Expression expression) { return new RelOptTableImpl(schema, rowType, names, table, c -> expression, table.getStatistic().getRowCount()); } public static RelOptTableImpl create(RelOptSchema schema, RelDataType rowType, Table table, Path path) { final SchemaPlus schemaPlus = MySchemaPlus.create(path); return new RelOptTableImpl(schema, rowType, Pair.left(path), table, getClassExpressionFunction(schemaPlus, Util.last(path).left, table), table.getStatistic().getRowCount()); } public static RelOptTableImpl create(RelOptSchema schema, RelDataType rowType, final CalciteSchema.TableEntry tableEntry, Double rowCount) { final Table table = tableEntry.getTable(); return new RelOptTableImpl(schema, rowType, tableEntry.path(), table, getClassExpressionFunction(tableEntry, table), rowCount); } /** * Creates a copy of this RelOptTable. The new RelOptTable will have newRowType. 
*/ public RelOptTableImpl copy(RelDataType newRowType) { return new RelOptTableImpl(this.schema, newRowType, this.names, this.table, this.expressionFunction, this.rowCount); } @Override public String toString() { return "RelOptTableImpl{" + "schema=" + schema + ", names= " + names + ", table=" + table + ", rowType=" + rowType + '}'; } private static Function<Class, Expression> getClassExpressionFunction( CalciteSchema.TableEntry tableEntry, Table table) { return getClassExpressionFunction(tableEntry.schema.plus(), tableEntry.name, table); } private static Function<Class, Expression> getClassExpressionFunction( final SchemaPlus schema, final String tableName, final Table table) { if (table instanceof QueryableTable) { final QueryableTable queryableTable = (QueryableTable) table; return clazz -> queryableTable.getExpression(schema, tableName, clazz); } else if (table instanceof ScannableTable || table instanceof FilterableTable || table instanceof ProjectableFilterableTable) { return clazz -> Schemas.tableExpression(schema, Object[].class, tableName, table.getClass()); } else if (table instanceof StreamableTable) { return getClassExpressionFunction(schema, tableName, ((StreamableTable) table).stream()); } else { return input -> { throw new UnsupportedOperationException(); }; } } public static RelOptTableImpl create(RelOptSchema schema, RelDataType rowType, Table table, ImmutableList<String> names) { assert table instanceof TranslatableTable || table instanceof ScannableTable || table instanceof ModifiableTable; return new RelOptTableImpl(schema, rowType, names, table, null, null); } public <T> T unwrap(Class<T> clazz) { if (clazz.isInstance(this)) { return clazz.cast(this); } if (clazz.isInstance(table)) { return clazz.cast(table); } if (table instanceof Wrapper) { final T t = ((Wrapper) table).unwrap(clazz); if (t != null) { return t; } } if (clazz == CalciteSchema.class) { return clazz.cast( Schemas.subSchema(((CalciteCatalogReader) schema).rootSchema, Util.skipLast(getQualifiedName()))); } return null; } public Expression getExpression(Class clazz) { if (expressionFunction == null) { return null; } return expressionFunction.apply(clazz); } @Override protected RelOptTable extend(Table extendedTable) { final RelDataType extendedRowType = extendedTable.getRowType(getRelOptSchema().getTypeFactory()); return new RelOptTableImpl(getRelOptSchema(), extendedRowType, getQualifiedName(), extendedTable, expressionFunction, getRowCount()); } @Override public boolean equals(Object obj) { return obj instanceof RelOptTableImpl && this.rowType.equals(((RelOptTableImpl) obj).getRowType()) && this.table == ((RelOptTableImpl) obj).table; } @Override public int hashCode() { return (this.table == null) ? super.hashCode() : this.table.hashCode(); } public double getRowCount() { if (rowCount != null) { return rowCount; } if (table != null) { final Double rowCount = table.getStatistic().getRowCount(); if (rowCount != null) { return rowCount; } } return 100d; } public RelOptSchema getRelOptSchema() { return schema; } public RelNode toRel(ToRelContext context) { // Make sure rowType's list is immutable. If rowType is DynamicRecordType, creates a new // RelOptTable by replacing with immutable RelRecordType using the same field list. 
if (this.getRowType().isDynamicStruct()) { final RelDataType staticRowType = new RelRecordType(getRowType().getFieldList()); final RelOptTable relOptTable = this.copy(staticRowType); return relOptTable.toRel(context); } // If there are any virtual columns, create a copy of this table without // those virtual columns. final List<ColumnStrategy> strategies = getColumnStrategies(); if (strategies.contains(ColumnStrategy.VIRTUAL)) { final RelDataTypeFactory.Builder b = context.getCluster().getTypeFactory().builder(); for (RelDataTypeField field : rowType.getFieldList()) { if (strategies.get(field.getIndex()) != ColumnStrategy.VIRTUAL) { b.add(field.getName(), field.getType()); } } final RelOptTable relOptTable = new RelOptTableImpl(this.schema, b.build(), this.names, this.table, this.expressionFunction, this.rowCount) { @Override public <T> T unwrap(Class<T> clazz) { if (clazz.isAssignableFrom(InitializerExpressionFactory.class)) { return clazz.cast(NullInitializerExpressionFactory.INSTANCE); } return super.unwrap(clazz); } }; return relOptTable.toRel(context); } if (table instanceof TranslatableTable) { return ((TranslatableTable) table).toRel(context, this); } return LogicalTableScan.create(context.getCluster(), this, context.getTableHints()); } public List<RelCollation> getCollationList() { if (table != null) { return table.getStatistic().getCollations(); } return ImmutableList.of(); } public RelDistribution getDistribution() { if (table != null) { return table.getStatistic().getDistribution(); } return RelDistributionTraitDef.INSTANCE.getDefault(); } public boolean isKey(ImmutableBitSet columns) { if (table != null) { return table.getStatistic().isKey(columns); } return false; } public List<ImmutableBitSet> getKeys() { return table.getStatistic().getKeys(); } public List<RelReferentialConstraint> getReferentialConstraints() { if (table != null) { return table.getStatistic().getReferentialConstraints(); } return ImmutableList.of(); } public RelDataType getRowType() { return rowType; } public boolean supportsModality(SqlModality modality) { switch (modality) { case STREAM: return table instanceof StreamableTable; default: return !(table instanceof StreamableTable); } } @Override public boolean isTemporal() { return table instanceof TemporalTable; } public List<String> getQualifiedName() { return names; } public SqlMonotonicity getMonotonicity(String columnName) { for (RelCollation collation : table.getStatistic().getCollations()) { final RelFieldCollation fieldCollation = collation.getFieldCollations().get(0); final int fieldIndex = fieldCollation.getFieldIndex(); if (fieldIndex < rowType.getFieldCount() && rowType.getFieldNames().get(fieldIndex).equals(columnName)) { return fieldCollation.direction.monotonicity(); } } return SqlMonotonicity.NOT_MONOTONIC; } public SqlAccessType getAllowedAccess() { return SqlAccessType.ALL; } /** Helper for {@link #getColumnStrategies()}. */ public static List<ColumnStrategy> columnStrategies(final RelOptTable table) { final int fieldCount = table.getRowType().getFieldCount(); final InitializerExpressionFactory ief = Util.first(table.unwrap(InitializerExpressionFactory.class), NullInitializerExpressionFactory.INSTANCE); return new AbstractList<ColumnStrategy>() { public int size() { return fieldCount; } public ColumnStrategy get(int index) { return ief.generationStrategy(table, index); } }; } /** Converts the ordinal of a field into the ordinal of a stored field. * That is, it subtracts the number of virtual fields that come before it. 
 */
  public static int realOrdinal(final RelOptTable table, int i) {
    List<ColumnStrategy> strategies = table.getColumnStrategies();
    int n = 0;
    for (int j = 0; j < i; j++) {
      switch (strategies.get(j)) {
      case VIRTUAL:
        ++n;
      }
    }
    return i - n;
  }

  /** Returns the row type of a table after any {@link ColumnStrategy#VIRTUAL}
   * columns have been removed. This is the type of the records that are
   * actually stored. */
  public static RelDataType realRowType(RelOptTable table) {
    final RelDataType rowType = table.getRowType();
    final List<ColumnStrategy> strategies = columnStrategies(table);
    if (!strategies.contains(ColumnStrategy.VIRTUAL)) {
      return rowType;
    }
    final RelDataTypeFactory.Builder builder =
        table.getRelOptSchema().getTypeFactory().builder();
    for (RelDataTypeField field : rowType.getFieldList()) {
      if (strategies.get(field.getIndex()) != ColumnStrategy.VIRTUAL) {
        builder.add(field);
      }
    }
    return builder.build();
  }

  /** Implementation of {@link SchemaPlus} that wraps a regular schema and knows
   * its name and parent.
   *
   * <p>It is read-only, and functionality is limited in other ways, but it
   * allows table expressions to be generated. */
  private static class MySchemaPlus implements SchemaPlus {
    private final SchemaPlus parent;
    private final String name;
    private final Schema schema;

    MySchemaPlus(SchemaPlus parent, String name, Schema schema) {
      this.parent = parent;
      this.name = name;
      this.schema = schema;
    }

    public static MySchemaPlus create(Path path) {
      final Pair<String, Schema> pair = Util.last(path);
      final SchemaPlus parent;
      if (path.size() == 1) {
        parent = null;
      } else {
        parent = create(path.parent());
      }
      return new MySchemaPlus(parent, pair.left, pair.right);
    }

    @Override public SchemaPlus getParentSchema() {
      return parent;
    }

    @Override public String getName() {
      return name;
    }

    @Override public SchemaPlus getSubSchema(String name) {
      final Schema subSchema = schema.getSubSchema(name);
      return subSchema == null ?
null : new MySchemaPlus(this, name, subSchema); } @Override public SchemaPlus add(String name, Schema schema) { throw new UnsupportedOperationException(); } @Override public void add(String name, Table table) { throw new UnsupportedOperationException(); } @Override public void add(String name, org.apache.calcite.schema.Function function) { throw new UnsupportedOperationException(); } @Override public void add(String name, RelProtoDataType type) { throw new UnsupportedOperationException(); } @Override public void add(String name, Lattice lattice) { throw new UnsupportedOperationException(); } @Override public boolean isMutable() { return schema.isMutable(); } @Override public <T> T unwrap(Class<T> clazz) { return null; } @Override public void setPath(ImmutableList<ImmutableList<String>> path) { throw new UnsupportedOperationException(); } @Override public void setCacheEnabled(boolean cache) { throw new UnsupportedOperationException(); } @Override public boolean isCacheEnabled() { return false; } @Override public Table getTable(String name) { return schema.getTable(name); } @Override public Set<String> getTableNames() { return schema.getTableNames(); } @Override public RelProtoDataType getType(String name) { return schema.getType(name); } @Override public Set<String> getTypeNames() { return schema.getTypeNames(); } @Override public Collection<org.apache.calcite.schema.Function> getFunctions(String name) { return schema.getFunctions(name); } @Override public Set<String> getFunctionNames() { return schema.getFunctionNames(); } @Override public Set<String> getSubSchemaNames() { return schema.getSubSchemaNames(); } @Override public Expression getExpression(SchemaPlus parentSchema, String name) { return schema.getExpression(parentSchema, name); } @Override public Schema snapshot(SchemaVersion version) { throw new UnsupportedOperationException(); } } }
vlsi/calcite
core/src/main/java/org/apache/calcite/prepare/RelOptTableImpl.java
Java
apache-2.0
17,920
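A quick illustration of RelOptTableImpl.realOrdinal from the file above: the stored ordinal of a field is its row-type ordinal minus the number of VIRTUAL columns in front of it. The sketch below is self-contained Java; the two-member Strategy enum is a stand-in for Calcite's richer ColumnStrategy, and the class name is made up.

import java.util.List;

// Standalone sketch of the ordinal remapping performed by RelOptTableImpl.realOrdinal.
public class RealOrdinalSketch {
    enum Strategy { STORED, VIRTUAL }   // stand-in for Calcite's ColumnStrategy

    // Subtract the number of VIRTUAL columns that precede field i.
    static int realOrdinal(List<Strategy> strategies, int i) {
        int virtualBefore = 0;
        for (int j = 0; j < i; j++) {
            if (strategies.get(j) == Strategy.VIRTUAL) {
                virtualBefore++;
            }
        }
        return i - virtualBefore;
    }

    public static void main(String[] args) {
        // Row type [a, b(virtual), c] has stored layout [a, c].
        List<Strategy> s = List.of(Strategy.STORED, Strategy.VIRTUAL, Strategy.STORED);
        System.out.println(realOrdinal(s, 2)); // prints 1: field c is stored at ordinal 1
    }
}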
/** * Copyright (c) 2013-2020 Contributors to the Eclipse Foundation * * <p> See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.memory; import java.io.IOException; import java.io.Serializable; import java.util.Collections; import java.util.HashMap; import java.util.Map; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.adapter.TransientAdapterStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; /** * This is a simple HashMap based in-memory implementation of the AdapterStore and can be useful if * it is undesirable to persist and query objects within another storage mechanism such as an * Accumulo table. */ public class MemoryAdapterStore implements TransientAdapterStore, Serializable { /** */ private static final long serialVersionUID = 1L; private Map<String, DataTypeAdapter<?>> adapterMap; public MemoryAdapterStore() { adapterMap = Collections.synchronizedMap(new HashMap<String, DataTypeAdapter<?>>()); } public MemoryAdapterStore(final DataTypeAdapter<?>[] adapters) { adapterMap = Collections.synchronizedMap(new HashMap<String, DataTypeAdapter<?>>()); for (final DataTypeAdapter<?> adapter : adapters) { adapterMap.put(adapter.getTypeName(), adapter); } } @Override public void addAdapter(final DataTypeAdapter<?> adapter) { adapterMap.put(adapter.getTypeName(), adapter); } @Override public DataTypeAdapter<?> getAdapter(final String typeName) { return adapterMap.get(typeName); } @Override public boolean adapterExists(final String typeName) { return adapterMap.containsKey(typeName); } @Override public DataTypeAdapter<?>[] getAdapters() { return adapterMap.values().toArray(new DataTypeAdapter[adapterMap.size()]); } @Override public void removeAll() { adapterMap.clear(); } private void writeObject(final java.io.ObjectOutputStream out) throws IOException { final int count = adapterMap.size(); out.writeInt(count); for (final Map.Entry<String, DataTypeAdapter<?>> entry : adapterMap.entrySet()) { out.writeUTF(entry.getKey()); final byte[] val = PersistenceUtils.toBinary(entry.getValue()); out.writeObject(val); } } private void readObject(final java.io.ObjectInputStream in) throws IOException, ClassNotFoundException { final int count = in.readInt(); adapterMap = Collections.synchronizedMap(new HashMap<String, DataTypeAdapter<?>>()); for (int i = 0; i < count; i++) { final String id = in.readUTF(); final byte[] data = (byte[]) in.readObject(); adapterMap.put(id, (DataTypeAdapter<?>) PersistenceUtils.fromBinary(data)); } } @Override public void removeAdapter(final String typeName) { adapterMap.remove(typeName); } }
locationtech/geowave
core/store/src/main/java/org/locationtech/geowave/core/store/memory/MemoryAdapterStore.java
Java
apache-2.0
3,177
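MemoryAdapterStore above is essentially a Collections.synchronizedMap keyed by type name, plus hand-rolled Java serialization. A JDK-only sketch of the map-backed store pattern follows; the Adapter interface is a stand-in for GeoWave's DataTypeAdapter, which is not assumed to be on the classpath.

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

// Sketch of the synchronized in-memory store pattern used by MemoryAdapterStore.
public class InMemoryStoreSketch {
    interface Adapter { String getTypeName(); }   // stand-in for DataTypeAdapter<?>

    private final Map<String, Adapter> adapterMap =
        Collections.synchronizedMap(new HashMap<>());

    void addAdapter(Adapter a) { adapterMap.put(a.getTypeName(), a); }
    boolean adapterExists(String typeName) { return adapterMap.containsKey(typeName); }
    Adapter getAdapter(String typeName) { return adapterMap.get(typeName); }

    public static void main(String[] args) {
        InMemoryStoreSketch store = new InMemoryStoreSketch();
        store.addAdapter(() -> "points");                   // lambda implements Adapter
        System.out.println(store.adapterExists("points"));  // true
    }
}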
/******************************************************************************* * * Pentaho Data Integration * * Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.trans.steps.metainject; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.pentaho.di.core.CheckResultInterface; import org.pentaho.di.core.Const; import org.pentaho.di.core.Counter; import org.pentaho.di.core.ObjectLocationSpecificationMethod; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleStepException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.di.repository.RepositoryDirectoryInterface; import org.pentaho.di.repository.StringObjectId; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.step.StepMetaInterface; import org.w3c.dom.Node; /** * @since 2007-07-05 * @author matt * @version 3.0 */ public class MetaInjectMeta extends BaseStepMeta implements StepMetaInterface { private static Class<?> PKG = MetaInjectMeta.class; // for i18n purposes, needed by Translator2!! $NON-NLS-1$ // description of the transformation to execute... // private String transName; private String fileName; private String directoryPath; private ObjectId transObjectId; private ObjectLocationSpecificationMethod specificationMethod; private String sourceStepName; private Map<TargetStepAttribute, SourceStepField> targetSourceMapping; public MetaInjectMeta() { super(); // allocate BaseStepMeta specificationMethod=ObjectLocationSpecificationMethod.FILENAME; targetSourceMapping = new HashMap<TargetStepAttribute, SourceStepField>(); } public Object clone() { Object retval = super.clone(); return retval; } public void setDefault() { } public String getXML() { StringBuffer retval = new StringBuffer(500); retval.append(" ").append(XMLHandler.addTagValue("specification_method", specificationMethod == null ? null : specificationMethod.getCode())); retval.append(" ").append(XMLHandler.addTagValue("trans_object_id", transObjectId == null ? 
null : transObjectId.toString())); retval.append(" ").append(XMLHandler.addTagValue("trans_name", transName)); //$NON-NLS-1$ retval.append(" ").append(XMLHandler.addTagValue("filename", fileName)); //$NON-NLS-1$ retval.append(" ").append(XMLHandler.addTagValue("directory_path", directoryPath)); //$NON-NLS-1$ retval.append(" ").append(XMLHandler.addTagValue("source_step", sourceStepName)); //$NON-NLS-1$ retval.append(" ").append(XMLHandler.openTag("mappings")); for (TargetStepAttribute target : targetSourceMapping.keySet()) { retval.append(" ").append(XMLHandler.openTag("mapping")); SourceStepField source = targetSourceMapping.get(target); retval.append(" ").append(XMLHandler.addTagValue("target_step_name", target.getStepname())); retval.append(" ").append(XMLHandler.addTagValue("target_attribute_key", target.getAttributeKey())); retval.append(" ").append(XMLHandler.addTagValue("target_detail", target.isDetail())); retval.append(" ").append(XMLHandler.addTagValue("source_step", source.getStepname())); retval.append(" ").append(XMLHandler.addTagValue("source_field", source.getField())); retval.append(" ").append(XMLHandler.closeTag("mapping")); } retval.append(" ").append(XMLHandler.closeTag("mappings")); return retval.toString(); } public void loadXML(Node stepnode, List<DatabaseMeta> databases, Map<String, Counter> counters) throws KettleXMLException { try { String method = XMLHandler.getTagValue(stepnode, "specification_method"); specificationMethod = ObjectLocationSpecificationMethod.getSpecificationMethodByCode(method); String transId = XMLHandler.getTagValue(stepnode, "trans_object_id"); transObjectId = Const.isEmpty(transId) ? null : new StringObjectId(transId); transName = XMLHandler.getTagValue(stepnode, "trans_name"); //$NON-NLS-1$ fileName = XMLHandler.getTagValue(stepnode, "filename"); //$NON-NLS-1$ directoryPath = XMLHandler.getTagValue(stepnode, "directory_path"); //$NON-NLS-1$ sourceStepName = XMLHandler.getTagValue(stepnode, "source_step"); //$NON-NLS-1$ Node mappingsNode = XMLHandler.getSubNode(stepnode, "mappings"); int nrMappings = XMLHandler.countNodes(mappingsNode, "mapping"); for (int i=0;i<nrMappings;i++) { Node mappingNode = XMLHandler.getSubNodeByNr(mappingsNode, "mapping", i); String targetStepname = XMLHandler.getTagValue(mappingNode, "target_step_name"); String targetAttributeKey = XMLHandler.getTagValue(mappingNode, "target_attribute_key"); boolean targetDetail = "Y".equalsIgnoreCase(XMLHandler.getTagValue(mappingNode, "target_detail")); String sourceStepname = XMLHandler.getTagValue(mappingNode, "source_step"); String sourceField = XMLHandler.getTagValue(mappingNode, "source_field"); TargetStepAttribute target = new TargetStepAttribute(targetStepname, targetAttributeKey, targetDetail); SourceStepField source = new SourceStepField(sourceStepname, sourceField); targetSourceMapping.put(target, source); } } catch (Exception e) { throw new KettleXMLException("Unable to load step info from XML", e); } } public void readRep(Repository rep, ObjectId id_step, List<DatabaseMeta> databases, Map<String, Counter> counters) throws KettleException { try { String method = rep.getStepAttributeString(id_step, "specification_method"); specificationMethod = ObjectLocationSpecificationMethod.getSpecificationMethodByCode(method); String transId = rep.getStepAttributeString(id_step, "trans_object_id"); transObjectId = Const.isEmpty(transId) ? 
null : new StringObjectId(transId); transName = rep.getStepAttributeString(id_step, "trans_name"); //$NON-NLS-1$ fileName = rep.getStepAttributeString(id_step, "filename"); //$NON-NLS-1$ directoryPath = rep.getStepAttributeString(id_step, "directory_path"); //$NON-NLS-1$ sourceStepName = rep.getStepAttributeString(id_step, "source_step"); //$NON-NLS-1$ int nrMappings = rep.countNrStepAttributes(id_step, "mapping_target_step_name"); for (int i=0;i<nrMappings;i++) { String targetStepname = rep.getStepAttributeString(id_step, i, "mapping_target_step_name"); String targetAttributeKey = rep.getStepAttributeString(id_step, i, "mapping_target_attribute_key"); boolean targetDetail = rep.getStepAttributeBoolean(id_step, i, "mapping_target_detail"); String sourceStepname = rep.getStepAttributeString(id_step, i, "mapping_source_step"); String sourceField = rep.getStepAttributeString(id_step, i, "mapping_source_field"); TargetStepAttribute target = new TargetStepAttribute(targetStepname, targetAttributeKey, targetDetail); SourceStepField source = new SourceStepField(sourceStepname, sourceField); targetSourceMapping.put(target, source); } } catch (Exception e) { throw new KettleException("Unexpected error reading step information from the repository", e); } } public void saveRep(Repository rep, ObjectId id_transformation, ObjectId id_step) throws KettleException { try { rep.saveStepAttribute(id_transformation, id_step, "specification_method", specificationMethod==null ? null : specificationMethod.getCode()); rep.saveStepAttribute(id_transformation, id_step, "trans_object_id", transObjectId==null ? null : transObjectId.toString()); rep.saveStepAttribute(id_transformation, id_step, "filename", fileName); //$NON-NLS-1$ rep.saveStepAttribute(id_transformation, id_step, "trans_name", transName); //$NON-NLS-1$ rep.saveStepAttribute(id_transformation, id_step, "directory_path", directoryPath); //$NON-NLS-1$ rep.saveStepAttribute(id_transformation, id_step, "source_step", sourceStepName); //$NON-NLS-1$ List<TargetStepAttribute> keySet = new ArrayList<TargetStepAttribute>(targetSourceMapping.keySet()); for (int i=0;i<keySet.size();i++) { TargetStepAttribute target = keySet.get(i); SourceStepField source = targetSourceMapping.get(target); rep.saveStepAttribute(id_transformation, id_step, i, "mapping_target_step_name", target.getStepname()); rep.saveStepAttribute(id_transformation, id_step, i, "mapping_target_attribute_key", target.getAttributeKey()); rep.saveStepAttribute(id_transformation, id_step, i, "mapping_target_detail", target.isDetail()); rep.saveStepAttribute(id_transformation, id_step, i, "mapping_source_step", source.getStepname()); rep.saveStepAttribute(id_transformation, id_step, i, "mapping_source_field", source.getField()); } } catch (Exception e) { throw new KettleException("Unable to save step information to the repository for id_step=" + id_step, e); } } public void getFields(RowMetaInterface rowMeta, String origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space) throws KettleStepException { rowMeta.clear(); // No defined output is expected from this step. 
} public StepInterface getStep(StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta tr, Trans trans) { return new MetaInject(stepMeta, stepDataInterface, cnr, tr, trans); } public StepDataInterface getStepData() { return new MetaInjectData(); } public Map<TargetStepAttribute, SourceStepField> getTargetSourceMapping() { return targetSourceMapping; } public void setTargetSourceMapping(Map<TargetStepAttribute, SourceStepField> targetSourceMapping) { this.targetSourceMapping = targetSourceMapping; } @Override public void check(List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info) { // TODO Auto-generated method stub } /** * @return the transName */ public String getTransName() { return transName; } /** * @param transName the transName to set */ public void setTransName(String transName) { this.transName = transName; } /** * @return the fileName */ public String getFileName() { return fileName; } /** * @param fileName the fileName to set */ public void setFileName(String fileName) { this.fileName = fileName; } /** * @return the directoryPath */ public String getDirectoryPath() { return directoryPath; } /** * @param directoryPath the directoryPath to set */ public void setDirectoryPath(String directoryPath) { this.directoryPath = directoryPath; } /** * @return the transObjectId */ public ObjectId getTransObjectId() { return transObjectId; } /** * @param transObjectId the transObjectId to set */ public void setTransObjectId(ObjectId transObjectId) { this.transObjectId = transObjectId; } /** * @return the specificationMethod */ public ObjectLocationSpecificationMethod getSpecificationMethod() { return specificationMethod; } /** * @param specificationMethod the specificationMethod to set */ public void setSpecificationMethod(ObjectLocationSpecificationMethod specificationMethod) { this.specificationMethod = specificationMethod; } public synchronized static final TransMeta loadTransformationMeta(MetaInjectMeta mappingMeta, Repository rep, VariableSpace space) throws KettleException { TransMeta mappingTransMeta = null; switch(mappingMeta.getSpecificationMethod()) { case FILENAME: String realFilename = space.environmentSubstitute(mappingMeta.getFileName()); try { // OK, load the meta-data from file... // // Don't set internal variables: they belong to the parent thread! // mappingTransMeta = new TransMeta(realFilename, false); mappingTransMeta.getLogChannel().logDetailed("Loading Mapping from repository", "Mapping transformation was loaded from XML file [" + realFilename + "]"); } catch (Exception e) { throw new KettleException(BaseMessages.getString(PKG, "MetaInjectMeta.Exception.UnableToLoadTransformationFromFile", realFilename), e); } break; case REPOSITORY_BY_NAME: String realTransname = space.environmentSubstitute(mappingMeta.getTransName()); String realDirectory = space.environmentSubstitute(mappingMeta.getDirectoryPath()); if (!Const.isEmpty(realTransname) && !Const.isEmpty(realDirectory) && rep != null) { RepositoryDirectoryInterface repdir = rep.findDirectory(realDirectory); if (repdir != null) { try { // reads the last revision in the repository... 
// mappingTransMeta = rep.loadTransformation(realTransname, repdir, null, true, null); mappingTransMeta.getLogChannel().logDetailed("Loading Mapping from repository", "Mapping transformation [" + realTransname + "] was loaded from the repository"); } catch (Exception e) { throw new KettleException("Unable to load transformation [" + realTransname + "]", e); } } else { throw new KettleException(BaseMessages.getString(PKG, "MetaInjectMeta.Exception.UnableToLoadTransformationFromRepository", realTransname, realDirectory)); //$NON-NLS-1$ //$NON-NLS-2$ } } break; case REPOSITORY_BY_REFERENCE: // Read the last revision by reference... mappingTransMeta = rep.loadTransformation(mappingMeta.getTransObjectId(), null); break; } // Pass some important information to the mapping transformation metadata: // mappingTransMeta.copyVariablesFrom(space); mappingTransMeta.setRepository(rep); mappingTransMeta.setFilename(mappingTransMeta.getFilename()); return mappingTransMeta; } @Override public boolean excludeFromCopyDistributeVerification() { return true; } @Override public boolean excludeFromRowLayoutVerification() { return true; } /** * @return the sourceStepName */ public String getSourceStepName() { return sourceStepName; } /** * @param sourceStepName the sourceStepName to set */ public void setSourceStepName(String sourceStepName) { this.sourceStepName = sourceStepName; } }
lihongqiang/kettle-4.4.0-stable
src/org/pentaho/di/trans/steps/metainject/MetaInjectMeta.java
Java
apache-2.0
16,453
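To see the <mappings> block that MetaInjectMeta.getXML emits, the sketch below builds one target/source pair and prints the step XML. It uses only constructors visible in loadXML above, TargetStepAttribute(stepname, attributeKey, detail) and SourceStepField(stepname, field), and assumes the Kettle 4.x jars are on the classpath; the step and field names are made up.

import java.util.HashMap;
import java.util.Map;

import org.pentaho.di.trans.steps.metainject.MetaInjectMeta;
import org.pentaho.di.trans.steps.metainject.SourceStepField;
import org.pentaho.di.trans.steps.metainject.TargetStepAttribute;

// Sketch: populate the target/source mapping and serialize the step metadata.
public class MetaInjectXmlSketch {
    public static void main(String[] args) {
        MetaInjectMeta meta = new MetaInjectMeta();
        Map<TargetStepAttribute, SourceStepField> mapping = new HashMap<>();
        // Inject the value of field "file_name" from step "Metadata rows"
        // into the FILENAME attribute of the target step "CSV input".
        mapping.put(
            new TargetStepAttribute("CSV input", "FILENAME", false),
            new SourceStepField("Metadata rows", "file_name"));
        meta.setTargetSourceMapping(mapping);
        // Prints the <mappings><mapping>...</mapping></mappings> block built by getXML().
        System.out.println(meta.getXML());
    }
}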
/* * Copyright (C) 2011 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "instruction_set.h" // Explicitly include our own elf.h to avoid Linux and other dependencies. #include "../elf.h" #include "globals.h" namespace art { const char* GetInstructionSetString(const InstructionSet isa) { switch (isa) { case kArm: case kThumb2: return "arm"; case kArm64: return "arm64"; case kX86: return "x86"; case kX86_64: return "x86_64"; case kMips: return "mips"; case kMips64: return "mips64"; case kNone: return "none"; default: LOG(FATAL) << "Unknown ISA " << isa; UNREACHABLE(); } } InstructionSet GetInstructionSetFromString(const char* isa_str) { CHECK(isa_str != nullptr); if (strcmp("arm", isa_str) == 0) { return kArm; } else if (strcmp("arm64", isa_str) == 0) { return kArm64; } else if (strcmp("x86", isa_str) == 0) { return kX86; } else if (strcmp("x86_64", isa_str) == 0) { return kX86_64; } else if (strcmp("mips", isa_str) == 0) { return kMips; } else if (strcmp("mips64", isa_str) == 0) { return kMips64; } return kNone; } InstructionSet GetInstructionSetFromELF(uint16_t e_machine, uint32_t e_flags) { switch (e_machine) { case EM_ARM: return kArm; case EM_AARCH64: return kArm64; case EM_386: return kX86; case EM_X86_64: return kX86_64; case EM_MIPS: { if ((e_flags & EF_MIPS_ARCH) == EF_MIPS_ARCH_32R2 || (e_flags & EF_MIPS_ARCH) == EF_MIPS_ARCH_32R6) { return kMips; } else if ((e_flags & EF_MIPS_ARCH) == EF_MIPS_ARCH_64R6) { return kMips64; } break; } } return kNone; } size_t GetInstructionSetAlignment(InstructionSet isa) { switch (isa) { case kArm: // Fall-through. case kThumb2: return kArmAlignment; case kArm64: return kArm64Alignment; case kX86: // Fall-through. case kX86_64: return kX86Alignment; case kMips: // Fall-through. case kMips64: return kMipsAlignment; case kNone: LOG(FATAL) << "ISA kNone does not have alignment."; UNREACHABLE(); default: LOG(FATAL) << "Unknown ISA " << isa; UNREACHABLE(); } } static constexpr size_t kDefaultStackOverflowReservedBytes = 16 * KB; static constexpr size_t kMipsStackOverflowReservedBytes = kDefaultStackOverflowReservedBytes; static constexpr size_t kMips64StackOverflowReservedBytes = kDefaultStackOverflowReservedBytes; static constexpr size_t kArmStackOverflowReservedBytes = 8 * KB; static constexpr size_t kArm64StackOverflowReservedBytes = 8 * KB; static constexpr size_t kX86StackOverflowReservedBytes = 8 * KB; static constexpr size_t kX86_64StackOverflowReservedBytes = 8 * KB; size_t GetStackOverflowReservedBytes(InstructionSet isa) { switch (isa) { case kArm: // Intentional fall-through. 
case kThumb2:
      return kArmStackOverflowReservedBytes;

    case kArm64:
      return kArm64StackOverflowReservedBytes;

    case kMips:
      return kMipsStackOverflowReservedBytes;

    case kMips64:
      return kMips64StackOverflowReservedBytes;

    case kX86:
      return kX86StackOverflowReservedBytes;

    case kX86_64:
      return kX86_64StackOverflowReservedBytes;

    case kNone:
      LOG(FATAL) << "kNone has no stack overflow size";
      UNREACHABLE();

    default:
      LOG(FATAL) << "Unknown instruction set " << isa;
      UNREACHABLE();
  }
}

}  // namespace art
android-art-intel/marshmallow
art-extension/runtime/arch/instruction_set.cc
C++
apache-2.0
4,097
/** * Copyright (c) 2013-2020 Contributors to the Eclipse Foundation * * <p> See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.format.sentinel2.theia; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.UnsupportedEncodingException; import java.net.URL; import java.net.URLEncoder; import java.security.GeneralSecurityException; import java.text.SimpleDateFormat; import java.util.Date; import java.util.Iterator; import java.util.Locale; import javax.net.ssl.HttpsURLConnection; import javax.ws.rs.core.HttpHeaders; import javax.ws.rs.core.MediaType; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.geotools.coverage.grid.GridCoverage2D; import org.geotools.feature.simple.SimpleFeatureTypeBuilder; import org.locationtech.geowave.adapter.raster.plugin.gdal.GDALGeoTiffReader; import org.locationtech.geowave.adapter.raster.util.ZipUtils; import org.locationtech.geowave.format.sentinel2.BandFeatureIterator; import org.locationtech.geowave.format.sentinel2.DownloadRunner; import org.locationtech.geowave.format.sentinel2.RasterBandData; import org.locationtech.geowave.format.sentinel2.SceneFeatureIterator; import org.locationtech.geowave.format.sentinel2.Sentinel2ImageryProvider; import org.locationtech.jts.geom.Envelope; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import org.opengis.referencing.FactoryException; import org.opengis.referencing.NoSuchAuthorityCodeException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.sun.jersey.api.client.Client; import com.sun.jersey.api.client.ClientResponse; import com.sun.jersey.api.client.config.ClientConfig; import com.sun.jersey.api.client.config.DefaultClientConfig; import net.sf.json.JSONArray; import net.sf.json.JSONObject; /** Sentinel2 imagery provider for the Theia repository. 
See: https://theia.cnes.fr */
public class TheiaImageryProvider extends Sentinel2ImageryProvider {
  private static final Logger LOGGER = LoggerFactory.getLogger(TheiaImageryProvider.class);

  private static final String SCENES_TYPE_NAME = "theia-sentinel2-scene";
  private static final String BANDS_TYPE_NAME = "theia-sentinel2-band";
  private static final double NO_DATA_VALUE = 0;
  private static final String SCENES_SEARCH_URL =
      "https://theia.cnes.fr/atdistrib/resto2/api/collections/%s/search.json?";
  private static final String AUTHENTICATION_URL =
      "https://theia.cnes.fr/atdistrib/services/authenticate/";
  private static final String DOWNLOAD_URL =
      "https://theia.cnes.fr/atdistrib/resto2/collections/%s/%s/download/?issuerId=theia";
  private static final int DOWNLOAD_RETRY = 5;

  @Override
  public String providerName() {
    return "THEIA";
  }

  @Override
  public String description() {
    return "Sentinel2 provider for the Theia repository (https://theia.cnes.fr)";
  }

  @Override
  public String[] collections() {
    return new String[] {"SENTINEL2"};
  }

  @Override
  public boolean isAvailable() {
    return true;
  }

  @Override
  public SimpleFeatureTypeBuilder sceneFeatureTypeBuilder()
      throws NoSuchAuthorityCodeException, FactoryException {
    return SceneFeatureIterator.defaultSceneFeatureTypeBuilder(SCENES_TYPE_NAME);
  }

  @Override
  public SimpleFeatureTypeBuilder bandFeatureTypeBuilder()
      throws NoSuchAuthorityCodeException, FactoryException {
    return BandFeatureIterator.defaultBandFeatureTypeBuilder(BANDS_TYPE_NAME);
  }

  @Override
  public Iterator<SimpleFeature> searchScenes(
      final File scenesDir,
      final String collection,
      final String platform,
      final String location,
      final Envelope envelope,
      final Date startDate,
      final Date endDate,
      final int orbitNumber,
      final int relativeOrbitNumber) throws IOException {
    final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");

    // Build the search URL to fetch products from Theia repository.
    String searchUrl = String.format(SCENES_SEARCH_URL, collection);
    if ((platform != null) && (platform.length() > 0)) {
      searchUrl += "platform=" + platform + "&";
    }
    if ((location != null) && (location.length() > 0)) {
      searchUrl += "location=" + location + "&";
    }
    if ((envelope != null) && (envelope.isNull() == false)) {
      searchUrl +=
          String.format(
              Locale.ENGLISH,
              "box=%.6f,%.6f,%.6f,%.6f&",
              envelope.getMinX(),
              envelope.getMinY(),
              envelope.getMaxX(),
              envelope.getMaxY());
    }
    if (startDate != null) {
      searchUrl += "startDate=" + dateFormat.format(startDate) + "&";
    }
    if (endDate != null) {
      searchUrl += "completionDate=" + dateFormat.format(endDate) + "&";
    }
    if (orbitNumber > 0) {
      searchUrl += "orbitNumber=" + orbitNumber + "&";
    }
    if (relativeOrbitNumber > 0) {
      searchUrl += "relativeOrbitNumber=" + relativeOrbitNumber + "&";
    }
    searchUrl = searchUrl.substring(0, searchUrl.length() - 1);

    // Fetch the JSON meta data with found Theia products.
    InputStream inputStream = null;
    ByteArrayOutputStream outputStream = null;
    try {
      final URL url = new URL(searchUrl);
      final HttpsURLConnection connection = (HttpsURLConnection) url.openConnection();
      // HP Fortify "Certificate Validation" False Positive
      // we allow for custom trust store to anchor acceptable certs
      // to reduce the level of trust if desired
      connection.setUseCaches(false);
      connection.setRequestProperty(HttpHeaders.USER_AGENT, "Mozilla/5.0");
      connection.setRequestMethod("GET");

      // allow for custom trust store to anchor acceptable certs, use an
      // expected file in the workspace directory
      final File customCertsFile = new File(scenesDir.getParentFile(), "theia-keystore.crt");
      applyCustomCertsFile(connection, customCertsFile);

      inputStream = connection.getInputStream();
      // HP Fortify "Resource Shutdown" false positive
      // The InputStream is being closed in the finally block
      IOUtils.copyLarge(inputStream, outputStream = new ByteArrayOutputStream());
      final String geoJson =
          new String(outputStream.toByteArray(), java.nio.charset.StandardCharsets.UTF_8);

      final JSONObject response = JSONObject.fromObject(geoJson);
      final JSONArray features = response.getJSONArray("features");

      final SimpleFeatureTypeBuilder typeBuilder = sceneFeatureTypeBuilder();
      final SimpleFeatureType type = typeBuilder.buildFeatureType();

      class TheiaJSONFeatureIterator extends JSONFeatureIterator {
        public TheiaJSONFeatureIterator(
            final Sentinel2ImageryProvider provider,
            final SimpleFeatureType featureType,
            final Iterator<?> iterator) {
          super(provider, featureType, iterator);
        }

        @Override
        public SimpleFeature next() {
          final SimpleFeature feature = super.next();
          JSONObject jsonObject = null;

          if ((feature != null) && ((jsonObject = super.currentObject()) != null)) {
            final JSONObject properties = (JSONObject) jsonObject.get("properties");
            final String entityId = jsonObject.getString("id");
            final String collection =
                properties.getString(SceneFeatureIterator.COLLECTION_ATTRIBUTE_NAME);
            final String downloadUrl = String.format(DOWNLOAD_URL, collection, entityId);
            feature.setAttribute(SceneFeatureIterator.SCENE_DOWNLOAD_ATTRIBUTE_NAME, downloadUrl);
          }
          return feature;
        }
      };
      return new TheiaJSONFeatureIterator(this, type, features.iterator());
    } catch (GeneralSecurityException | FactoryException e) {
      throw new IOException(e);
    } finally {
      if (outputStream != null) {
        IOUtils.closeQuietly(outputStream);
        outputStream = null;
      }
      if (inputStream != null) {
        IOUtils.closeQuietly(inputStream);
        inputStream = null;
      }
    }
  }

  @Override
  public boolean downloadScene(
      final SimpleFeature scene,
      final String workspaceDir,
      final String userIdent,
      final String password) throws IOException {
    final String tokenUrl = AUTHENTICATION_URL;
    String authentication;
    String tokenId;

    final String collection =
        (String) scene.getAttribute(SceneFeatureIterator.COLLECTION_ATTRIBUTE_NAME);
    final String productId =
        (String) scene.getAttribute(SceneFeatureIterator.PRODUCT_ID_ATTRIBUTE_NAME);
    final String entityId =
        (String) scene.getAttribute(SceneFeatureIterator.ENTITY_ID_ATTRIBUTE_NAME);

    // Check authentication parameters
    if ((userIdent == null)
        || (userIdent.length() == 0)
        || (password == null)
        || (password.length() == 0)) {
      LOGGER.error("Invalid or empty authentication parameters (email and password)");
      return false;
    }
    try {
      authentication =
          "ident="
              + URLEncoder.encode(userIdent, "UTF-8")
              + "&pass="
              + URLEncoder.encode(password, "UTF-8");
    } catch (final UnsupportedEncodingException e) {
      LOGGER.error(
          "Invalid or empty authentication parameters (email and password)"
+ e.getMessage()); return false; } // Get a valid tokenId to download data InputStream inputStream = null; try { final URL url = new URL(tokenUrl); final HttpsURLConnection connection = (HttpsURLConnection) url.openConnection(); // HP Fortify "Certificate Validation" False Positive // we allow for custom trust store to anchor acceptable certs // to reduce the level of trust if desired connection.setUseCaches(false); connection.setRequestProperty(HttpHeaders.USER_AGENT, "Mozilla/5.0"); connection.setRequestMethod("POST"); connection.setDoOutput(true); connection.setRequestProperty( HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_FORM_URLENCODED); connection.setRequestProperty( HttpHeaders.CONTENT_LENGTH, String.valueOf(authentication.length())); // allow for custom trust store to anchor acceptable certs, use an // expected file in the workspace directory final File customCertsFile = new File(workspaceDir, "theia-keystore.crt"); applyCustomCertsFile(connection, customCertsFile); final OutputStream os = connection.getOutputStream(); // HP Fortify "Resource Shutdown" false positive // The OutputStream is being closed os.write(authentication.getBytes("UTF-8")); // HP Fortify "Privacy Violation" false positive // In this case the password is being sent to an output // stream in order to authenticate the system and allow // us to perform the requested download. os.flush(); os.close(); inputStream = connection.getInputStream(); // HP Fortify "Resource Shutdown" false positive // The InputStream is being closed in the finally block final ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); IOUtils.copyLarge(inputStream, outputStream); tokenId = new String(outputStream.toByteArray(), java.nio.charset.StandardCharsets.UTF_8); IOUtils.closeQuietly(outputStream); } catch (final IOException | GeneralSecurityException e) { LOGGER.error("Unable to query a token to download '" + e.getMessage() + "'"); return false; } finally { if (inputStream != null) { IOUtils.closeQuietly(inputStream); inputStream = null; } } // Token is right? if (tokenId.length() == 0) { LOGGER.error("Unable to get a token to download. Check your ident and password"); return false; } // First steps to download the gzipped file final File compressedFile = new File( workspaceDir + File.separator + DOWNLOAD_DIRECTORY + File.separator + productId + ".zip"); final File productDir = DownloadRunner.getSceneDirectory(scene, workspaceDir); // Download the gzipped file final String downloadUrl = String.format(DOWNLOAD_URL, collection, entityId); int retry = 0; boolean success = false; while (!success && (retry < DOWNLOAD_RETRY)) { try { final ClientConfig clientConfig = new DefaultClientConfig(); final Client client = Client.create(clientConfig); final ClientResponse response = client.resource(downloadUrl).accept("application/zip").header( javax.ws.rs.core.HttpHeaders.USER_AGENT, "Mozilla/5.0").header( javax.ws.rs.core.HttpHeaders.AUTHORIZATION, "Bearer " + tokenId).get(ClientResponse.class); String displaySize = FileUtils.byteCountToDisplaySize(response.getLength()); System.out.println("\nDownloading file '" + productId + "' (" + displaySize + ")"); System.out.print("Wait please... 
"); inputStream = response.getEntityInputStream(); final FileOutputStream outputStream = new FileOutputStream(compressedFile); // HP Fortify "Resource Shutdown" false positive // The OutputStream is being closed copyLarge(inputStream, outputStream, response.getLength()); IOUtils.closeQuietly(outputStream); displaySize = FileUtils.byteCountToDisplaySize(compressedFile.length()); System.out.println("File successfully downloaded! (" + displaySize + ")"); ZipUtils.unZipFile(compressedFile, productDir.getAbsolutePath(), true); System.out.println("File successfully unzipped!"); if (!compressedFile.delete()) { LOGGER.warn("Unable to delete file '" + compressedFile.getAbsolutePath() + "'"); } success = true; } catch (final IOException e) { LOGGER.error( "Unable to read file from public '" + downloadUrl + "'; retry round " + ++retry, e); } finally { if (inputStream != null) { IOUtils.closeQuietly(inputStream); inputStream = null; } } } return success; } /** * Fetch the coverage of the specified band in the specified workspace directory */ @Override public RasterBandData getCoverage(final SimpleFeature band, final String workspaceDir) throws IOException { final File sceneDir = DownloadRunner.getSceneDirectory(band, workspaceDir); final String entityId = (String) band.getAttribute(SceneFeatureIterator.ENTITY_ID_ATTRIBUTE_NAME); final String productId = (String) band.getAttribute(SceneFeatureIterator.PRODUCT_ID_ATTRIBUTE_NAME); final String bandName = (String) band.getAttribute(BandFeatureIterator.BAND_ATTRIBUTE_NAME); final File file = sceneDir; final String[] fileList = sceneDir.list(); if (fileList != null) { for (final String name : fileList) { final File temp = new File(file.getAbsolutePath() + File.separatorChar + name); if (temp.isDirectory() && name.toUpperCase(Locale.ENGLISH).startsWith(productId.toUpperCase(Locale.ENGLISH))) { // We provide the coverage in ground reflectance with the // correction of slope effects. // The full description of the product format is here: // 'https://theia.cnes.fr/atdistrib/documents/PSC-NT-411-0362-CNES_01_00_SENTINEL-2A_L2A_Products_Description.pdf' // A more succinct one is also available here: // 'http://www.cesbio.ups-tlse.fr/multitemp/?page_id=8352' // final File geotiffFile = new File( file.getAbsolutePath() + File.separatorChar + name + File.separatorChar + name + "_FRE_" + bandName + ".tif"); if (geotiffFile.exists()) { final GDALGeoTiffReader reader = new GDALGeoTiffReader(geotiffFile); final GridCoverage2D coverage = reader.read(null); reader.dispose(); return new RasterBandData(entityId + "_" + bandName, coverage, reader, NO_DATA_VALUE); } } } } throw new IOException( "The file of the '" + productId + "_" + bandName + "' coverage does not exist"); } }
locationtech/geowave
extensions/cli/sentinel2/src/main/java/org/locationtech/geowave/format/sentinel2/theia/TheiaImageryProvider.java
Java
apache-2.0
17,063
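// --- Illustrative example (not part of the repository above) ---
// TheiaImageryProvider calls applyCustomCertsFile(connection, customCertsFile)
// to anchor TLS trust to a workspace-local certificate, but that helper is not
// shown in this excerpt. Below is a minimal, hedged sketch of what such a
// helper could look like, assuming it loads a single X.509 certificate into an
// in-memory keystore; the real GeoWave implementation may differ.
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.security.GeneralSecurityException;
import java.security.KeyStore;
import java.security.cert.CertificateFactory;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManagerFactory;

public final class CustomTrustSketch {
  static void applyCustomCertsFile(final HttpsURLConnection connection, final File certFile)
      throws IOException, GeneralSecurityException {
    if (!certFile.exists()) {
      return; // no custom cert provided: keep the JVM's default trust store
    }
    final CertificateFactory cf = CertificateFactory.getInstance("X.509");
    final KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType());
    keyStore.load(null, null); // start from an empty in-memory keystore
    try (FileInputStream in = new FileInputStream(certFile)) {
      keyStore.setCertificateEntry("theia", cf.generateCertificate(in));
    }
    final TrustManagerFactory tmf =
        TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
    tmf.init(keyStore);
    final SSLContext ctx = SSLContext.getInstance("TLS");
    ctx.init(null, tmf.getTrustManagers(), null);
    connection.setSSLSocketFactory(ctx.getSocketFactory());
  }
}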
//////////////////////////////////////////////////////////////////////////////// /// DISCLAIMER /// /// Copyright 2014-2021 ArangoDB GmbH, Cologne, Germany /// Copyright 2004-2014 triAGENS GmbH, Cologne, Germany /// /// Licensed under the Apache License, Version 2.0 (the "License"); /// you may not use this file except in compliance with the License. /// You may obtain a copy of the License at /// /// http://www.apache.org/licenses/LICENSE-2.0 /// /// Unless required by applicable law or agreed to in writing, software /// distributed under the License is distributed on an "AS IS" BASIS, /// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. /// See the License for the specific language governing permissions and /// limitations under the License. /// /// Copyright holder is ArangoDB GmbH, Cologne, Germany /// /// @author Simon Grätzer //////////////////////////////////////////////////////////////////////////////// #include "UpgradeTasks.h" #include "Agency/AgencyComm.h" #include "ApplicationFeatures/ApplicationServer.h" #include "Basics/Exceptions.h" #include "Basics/FileUtils.h" #include "Basics/VelocyPackHelper.h" #include "Basics/application-exit.h" #include "Basics/files.h" #include "ClusterEngine/ClusterEngine.h" #include "GeneralServer/AuthenticationFeature.h" #include "Logger/Logger.h" #include "RestServer/DatabaseFeature.h" #include "RestServer/SystemDatabaseFeature.h" #include "RocksDBEngine/RocksDBCommon.h" #include "RocksDBEngine/RocksDBEngine.h" #include "RocksDBEngine/RocksDBIndex.h" #include "StorageEngine/EngineSelectorFeature.h" #include "StorageEngine/PhysicalCollection.h" #include "Transaction/StandaloneContext.h" #include "Utils/OperationOptions.h" #include "VocBase/LogicalCollection.h" #include "VocBase/Methods/CollectionCreationInfo.h" #include "VocBase/Methods/Collections.h" #include "VocBase/Methods/Indexes.h" #include "VocBase/vocbase.h" #include <velocypack/Collection.h> #include <velocypack/velocypack-aliases.h> using namespace arangodb; using namespace arangodb::methods; using application_features::ApplicationServer; using basics::VelocyPackHelper; // Note: this entire file should run with superuser rights namespace { arangodb::Result recreateGeoIndex(TRI_vocbase_t& vocbase, arangodb::LogicalCollection& collection, arangodb::RocksDBIndex* oldIndex) { arangodb::Result res; IndexId iid = oldIndex->id(); VPackBuilder oldDesc; oldIndex->toVelocyPack(oldDesc, Index::makeFlags()); VPackBuilder overw; overw.openObject(); overw.add(arangodb::StaticStrings::IndexType, arangodb::velocypack::Value( arangodb::Index::oldtypeName(Index::TRI_IDX_TYPE_GEO_INDEX))); overw.close(); VPackBuilder newDesc = VPackCollection::merge(oldDesc.slice(), overw.slice(), false); bool dropped = collection.dropIndex(iid); if (!dropped) { res.reset(TRI_ERROR_INTERNAL); return res; } bool created = false; auto newIndex = collection.getPhysical()->createIndex(newDesc.slice(), /*restore*/ true, created); if (!created) { res.reset(TRI_ERROR_INTERNAL); } TRI_ASSERT(newIndex->id() == iid); // will break cluster otherwise TRI_ASSERT(newIndex->type() == Index::TRI_IDX_TYPE_GEO_INDEX); return res; } Result upgradeGeoIndexes(TRI_vocbase_t& vocbase) { if (!vocbase.server().getFeature<EngineSelectorFeature>().isRocksDB()) { LOG_TOPIC("2cb46", DEBUG, Logger::STARTUP) << "No need to upgrade geo indexes!"; return {}; } auto collections = vocbase.collections(false); for (auto collection : collections) { auto indexes = collection->getIndexes(); for (auto index : indexes) { RocksDBIndex* 
rIndex = static_cast<RocksDBIndex*>(index.get()); if (index->type() == Index::TRI_IDX_TYPE_GEO1_INDEX || index->type() == Index::TRI_IDX_TYPE_GEO2_INDEX) { LOG_TOPIC("5e53d", INFO, Logger::STARTUP) << "Upgrading legacy geo index '" << rIndex->id().id() << "'"; auto res = ::recreateGeoIndex(vocbase, *collection, rIndex); if (res.fail()) { LOG_TOPIC("5550a", ERR, Logger::STARTUP) << "Error upgrading geo indexes " << res.errorMessage(); return res; } } } } return {}; } Result createSystemCollections(TRI_vocbase_t& vocbase, std::vector<std::shared_ptr<LogicalCollection>>& createdCollections) { typedef std::function<void(std::shared_ptr<LogicalCollection> const&)> FuncCallback; FuncCallback const noop = [](std::shared_ptr<LogicalCollection> const&) -> void {}; OperationOptions options(ExecContext::current()); std::vector<CollectionCreationInfo> systemCollectionsToCreate; // the order of systemCollections is important. If we're in _system db, the // UsersCollection needs to be first, otherwise, the GraphsCollection must be first. std::vector<std::string> systemCollections; systemCollections.reserve(10); std::shared_ptr<LogicalCollection> colToDistributeShardsLike; Result res; if (vocbase.isSystem()) { // check for legacy sharding, could still be graphs. std::shared_ptr<LogicalCollection> coll; res = methods::Collections::lookup(vocbase, StaticStrings::GraphsCollection, coll); if (res.ok()) { TRI_ASSERT(coll); if (coll && coll.get()->distributeShardsLike().empty()) { // We have a graphs collection, and this is not sharded by something else. colToDistributeShardsLike = std::move(coll); } } if (colToDistributeShardsLike == nullptr) { // otherwise, we will use UsersCollection for distributeShardsLike res = methods::Collections::createSystem(vocbase, options, StaticStrings::UsersCollection, /*isNewDatabase*/ true, colToDistributeShardsLike); if (!res.ok()) { return res; } } else { systemCollections.push_back(StaticStrings::UsersCollection); } createdCollections.push_back(colToDistributeShardsLike); systemCollections.push_back(StaticStrings::GraphsCollection); systemCollections.push_back(StaticStrings::StatisticsCollection); systemCollections.push_back(StaticStrings::Statistics15Collection); systemCollections.push_back(StaticStrings::StatisticsRawCollection); } else { // we will use GraphsCollection for distributeShardsLike // this is equal to older versions res = methods::Collections::createSystem(vocbase, options, StaticStrings::GraphsCollection, /*isNewDatabase*/ true, colToDistributeShardsLike); if (!res.ok()) { return res; } createdCollections.push_back(colToDistributeShardsLike); } TRI_ASSERT(colToDistributeShardsLike != nullptr); systemCollections.push_back(StaticStrings::AnalyzersCollection); systemCollections.push_back(StaticStrings::AqlFunctionsCollection); systemCollections.push_back(StaticStrings::QueuesCollection); systemCollections.push_back(StaticStrings::JobsCollection); systemCollections.push_back(StaticStrings::AppsCollection); systemCollections.push_back(StaticStrings::AppBundlesCollection); systemCollections.push_back(StaticStrings::FrontendCollection); TRI_IF_FAILURE("UpgradeTasks::CreateCollectionsExistsGraphAqlFunctions") { VPackBuilder testOptions; std::vector<std::shared_ptr<VPackBuffer<uint8_t>>> testBuffers; std::vector<CollectionCreationInfo> testSystemCollectionsToCreate; std::vector<std::string> testSystemCollections = {StaticStrings::GraphsCollection, StaticStrings::AqlFunctionsCollection}; for (auto const& collection : testSystemCollections) { VPackBuilder options; 
methods::Collections::createSystemCollectionProperties(collection, options, vocbase); testSystemCollectionsToCreate.emplace_back( CollectionCreationInfo{collection, TRI_COL_TYPE_DOCUMENT, options.slice()}); testBuffers.emplace_back(options.steal()); } std::vector<std::shared_ptr<LogicalCollection>> cols; auto res = methods::Collections::create(vocbase, options, testSystemCollectionsToCreate, true, true, true, colToDistributeShardsLike, cols); // capture created collection vector createdCollections.insert(std::end(createdCollections), std::begin(cols), std::end(cols)); } std::vector<std::shared_ptr<VPackBuffer<uint8_t>>> buffers; for (auto const& cname : systemCollections) { std::shared_ptr<LogicalCollection> col; res = methods::Collections::lookup(vocbase, cname, col); if (col) { createdCollections.emplace_back(col); } if (res.is(TRI_ERROR_ARANGO_DATA_SOURCE_NOT_FOUND)) { // if not found, create it VPackBuilder options; methods::Collections::createSystemCollectionProperties(cname, options, vocbase); systemCollectionsToCreate.emplace_back( CollectionCreationInfo{cname, TRI_COL_TYPE_DOCUMENT, options.slice()}); buffers.emplace_back(options.steal()); } } // We capture the vector of created LogicalCollections here // to use it to create indices later. if (systemCollectionsToCreate.size() > 0) { std::vector<std::shared_ptr<LogicalCollection>> cols; res = methods::Collections::create(vocbase, options, systemCollectionsToCreate, true, true, true, colToDistributeShardsLike, cols); if (res.fail()) { return res; } createdCollections.insert(std::end(createdCollections), std::begin(cols), std::end(cols)); } return {TRI_ERROR_NO_ERROR}; } Result createSystemStatisticsCollections(TRI_vocbase_t& vocbase, std::vector<std::shared_ptr<LogicalCollection>>& createdCollections) { if (vocbase.isSystem()) { typedef std::function<void(std::shared_ptr<LogicalCollection> const&)> FuncCallback; FuncCallback const noop = [](std::shared_ptr<LogicalCollection> const&) -> void {}; std::vector<CollectionCreationInfo> systemCollectionsToCreate; // the order of systemCollections is important. If we're in _system db, the // UsersCollection needs to be first, otherwise, the GraphsCollection must be first. std::vector<std::string> systemCollections; Result res; systemCollections.push_back(StaticStrings::StatisticsCollection); systemCollections.push_back(StaticStrings::Statistics15Collection); systemCollections.push_back(StaticStrings::StatisticsRawCollection); std::vector<std::shared_ptr<VPackBuffer<uint8_t>>> buffers; for (auto const& collection : systemCollections) { std::shared_ptr<LogicalCollection> col; res = methods::Collections::lookup(vocbase, collection, col); if (col) { createdCollections.emplace_back(std::move(col)); } if (res.is(TRI_ERROR_ARANGO_DATA_SOURCE_NOT_FOUND)) { // if not found, create it VPackBuilder options; options.openObject(); options.add(StaticStrings::DataSourceSystem, VPackSlice::trueSlice()); options.add(StaticStrings::WaitForSyncString, VPackSlice::falseSlice()); options.close(); systemCollectionsToCreate.emplace_back( CollectionCreationInfo{collection, TRI_COL_TYPE_DOCUMENT, options.slice()}); buffers.emplace_back(options.steal()); } } // We capture the vector of created LogicalCollections here // to use it to create indices later. 
if (systemCollectionsToCreate.size() > 0) { std::vector<std::shared_ptr<LogicalCollection>> cols; OperationOptions options(ExecContext::current()); res = methods::Collections::create(vocbase, options, systemCollectionsToCreate, true, false, false, nullptr, cols); if (res.fail()) { return res; } // capture created collection vector createdCollections.insert(std::end(createdCollections), std::begin(cols), std::end(cols)); } } return {TRI_ERROR_NO_ERROR}; } static Result createIndex(std::string const& name, Index::IndexType type, std::vector<std::string> const& fields, bool unique, bool sparse, std::vector<std::shared_ptr<LogicalCollection>>& collections) { // Static helper function that wraps creating an index. If we fail to // create an index with some indices created, we clean up by removing all // collections later on. Find the collection by name auto colIt = std::find_if(collections.begin(), collections.end(), [&name](std::shared_ptr<LogicalCollection> const& col) { TRI_ASSERT(col != nullptr); return col->name() == name; }); if (colIt == collections.end()) { return Result(TRI_ERROR_ARANGO_DATA_SOURCE_NOT_FOUND, "Collection " + name + " not found"); } return methods::Indexes::createIndex(colIt->get(), type, fields, unique, sparse, false /*estimates*/); } Result createSystemStatisticsIndices(TRI_vocbase_t& vocbase, std::vector<std::shared_ptr<LogicalCollection>>& collections) { Result res; if (vocbase.isSystem()) { res = ::createIndex(StaticStrings::StatisticsCollection, arangodb::Index::TRI_IDX_TYPE_SKIPLIST_INDEX, {"time"}, false, false, collections); if (!res.ok() && !res.is(TRI_ERROR_ARANGO_DATA_SOURCE_NOT_FOUND)) { return res; } res = ::createIndex(StaticStrings::Statistics15Collection, arangodb::Index::TRI_IDX_TYPE_SKIPLIST_INDEX, {"time"}, false, false, collections); if (!res.ok() && !res.is(TRI_ERROR_ARANGO_DATA_SOURCE_NOT_FOUND)) { return res; } res = ::createIndex(StaticStrings::StatisticsRawCollection, arangodb::Index::TRI_IDX_TYPE_SKIPLIST_INDEX, {"time"}, false, false, collections); if (!res.ok() && !res.is(TRI_ERROR_ARANGO_DATA_SOURCE_NOT_FOUND)) { return res; } } return res; } Result createSystemCollectionsIndices(TRI_vocbase_t& vocbase, std::vector<std::shared_ptr<LogicalCollection>>& collections) { Result res; if (vocbase.isSystem()) { res = ::createIndex(StaticStrings::UsersCollection, arangodb::Index::TRI_IDX_TYPE_HASH_INDEX, {"user"}, true, true, collections); if (!res.ok()) { return res; } res = ::createSystemStatisticsIndices(vocbase, collections); if (!res.ok()) { return res; } } res = upgradeGeoIndexes(vocbase); if (!res.ok()) { return res; } res = ::createIndex(StaticStrings::AppsCollection, arangodb::Index::TRI_IDX_TYPE_HASH_INDEX, {"mount"}, true, true, collections); if (!res.ok()) { return res; } res = ::createIndex(StaticStrings::JobsCollection, arangodb::Index::TRI_IDX_TYPE_SKIPLIST_INDEX, {"queue", "status", "delayUntil"}, false, false, collections); if (!res.ok()) { return res; } res = ::createIndex(StaticStrings::JobsCollection, arangodb::Index::TRI_IDX_TYPE_SKIPLIST_INDEX, {"status", "queue", "delayUntil"}, false, false, collections); if (!res.ok()) { return res; } return res; } } // namespace bool UpgradeTasks::createSystemCollectionsAndIndices(TRI_vocbase_t& vocbase, arangodb::velocypack::Slice const& slice) { // after the call to ::createSystemCollections this vector should contain // a LogicalCollection for *every* (required) system collection. 
std::vector<std::shared_ptr<LogicalCollection>> presentSystemCollections; Result res = ::createSystemCollections(vocbase, presentSystemCollections); // TODO: Maybe check or assert that all collections are present (i.e. were // present or created), raise an error if not? if (res.fail()) { LOG_TOPIC("94824", ERR, Logger::STARTUP) << "could not create system collections" << ": error: " << res.errorMessage(); return false; } TRI_IF_FAILURE("UpgradeTasks::HideDatabaseUntilCreationIsFinished") { // just trigger a sleep here. The client test will create the db async // and directly fetch the state of creation. The DB is not allowed to be // visible to the outside world. std::this_thread::sleep_for(std::chrono::milliseconds(5000)); } TRI_IF_FAILURE("UpgradeTasks::FatalExitDuringDatabaseCreation") { FATAL_ERROR_EXIT(); } res = ::createSystemCollectionsIndices(vocbase, presentSystemCollections); if (res.fail()) { LOG_TOPIC("fedc0", ERR, Logger::STARTUP) << "could not create indices for system collections" << ": error: " << res.errorMessage(); return false; } return true; } bool UpgradeTasks::createStatisticsCollectionsAndIndices(TRI_vocbase_t& vocbase, arangodb::velocypack::Slice const& slice) { // This vector should after the call to ::createSystemCollections contain // a LogicalCollection for *every* (required) system collection. std::vector<std::shared_ptr<LogicalCollection>> presentSystemCollections; Result res; res = ::createSystemStatisticsCollections(vocbase, presentSystemCollections); if (res.fail()) { LOG_TOPIC("2824e", ERR, Logger::STARTUP) << "could not create system collections" << ": error: " << res.errorMessage(); return false; } res = ::createSystemStatisticsIndices(vocbase, presentSystemCollections); if (res.fail()) { LOG_TOPIC("dffbd", ERR, Logger::STARTUP) << "could not create indices for system collections" << ": error: " << res.errorMessage(); return false; } return true; } //////////////////////////////////////////////////////////////////////////////// /// @brief drops '_iresearch_analyzers' collection //////////////////////////////////////////////////////////////////////////////// bool UpgradeTasks::dropLegacyAnalyzersCollection(TRI_vocbase_t& vocbase, arangodb::velocypack::Slice const& /*upgradeParams*/) { // drop legacy collection if upgrading the system vocbase and collection found #ifdef ARANGODB_ENABLE_MAINTAINER_MODE if (!vocbase.server().hasFeature<arangodb::SystemDatabaseFeature>()) { LOG_TOPIC("8783e", WARN, Logger::STARTUP) << "failure to find '" << arangodb::SystemDatabaseFeature::name() << "' feature while registering legacy static analyzers with vocbase '" << vocbase.name() << "'"; TRI_set_errno(TRI_ERROR_INTERNAL); return false; // internal error } auto& sysDatabase = vocbase.server().getFeature<arangodb::SystemDatabaseFeature>(); auto sysVocbase = sysDatabase.use(); TRI_ASSERT(sysVocbase.get() == &vocbase || sysVocbase->name() == vocbase.name()); #endif // find legacy analyzer collection std::shared_ptr<arangodb::LogicalCollection> col; auto res = arangodb::methods::Collections::lookup(vocbase, StaticStrings::LegacyAnalyzersCollection, col); if (col) { res = arangodb::methods::Collections::drop(*col, true, -1.0); // -1.0 same as in RestCollectionHandler return res.ok(); } return res.is(TRI_ERROR_ARANGO_DATA_SOURCE_NOT_FOUND); } bool UpgradeTasks::addDefaultUserOther(TRI_vocbase_t& vocbase, arangodb::velocypack::Slice const& params) { TRI_ASSERT(!vocbase.isSystem()); TRI_ASSERT(params.isObject()); VPackSlice users = params.get("users"); if (users.isNone()) { 
return true; // exit, no users were specified } else if (!users.isArray()) { LOG_TOPIC("44623", ERR, Logger::STARTUP) << "addDefaultUserOther: users is invalid"; return false; } auth::UserManager* um = AuthenticationFeature::instance()->userManager(); if (um == nullptr) { return true; // server does not support users } for (VPackSlice slice : VPackArrayIterator(users)) { std::string user = VelocyPackHelper::getStringValue(slice, "username", StaticStrings::Empty); if (user.empty()) { continue; } std::string passwd = VelocyPackHelper::getStringValue(slice, "passwd", ""); bool active = VelocyPackHelper::getBooleanValue(slice, "active", true); VPackSlice extra = slice.get("extra"); Result res = um->storeUser(false, user, passwd, active, VPackSlice::noneSlice()); if (res.fail() && !res.is(TRI_ERROR_USER_DUPLICATE)) { LOG_TOPIC("b5b8a", WARN, Logger::STARTUP) << "could not add database user " << user << ": " << res.errorMessage(); } else if (extra.isObject() && !extra.isEmptyObject()) { um->updateUser(user, [&](auth::User& user) { user.setUserData(VPackBuilder(extra)); return TRI_ERROR_NO_ERROR; }); } res = um->updateUser(user, [&](auth::User& entry) { entry.grantDatabase(vocbase.name(), auth::Level::RW); entry.grantCollection(vocbase.name(), "*", auth::Level::RW); return TRI_ERROR_NO_ERROR; }); if (res.fail()) { LOG_TOPIC("60019", WARN, Logger::STARTUP) << "could not set permissions for new user " << user << ": " << res.errorMessage(); } } return true; } bool UpgradeTasks::renameReplicationApplierStateFiles(TRI_vocbase_t& vocbase, arangodb::velocypack::Slice const& slice) { TRI_ASSERT(vocbase.server().getFeature<EngineSelectorFeature>().isRocksDB()); StorageEngine& engine = vocbase.server().getFeature<EngineSelectorFeature>().engine(); std::string const path = engine.databasePath(&vocbase); std::string const source = arangodb::basics::FileUtils::buildFilename(path, "REPLICATION-APPLIER-STATE"); if (!basics::FileUtils::isRegularFile(source)) { // source file does not exist return true; } bool result = true; // copy file REPLICATION-APPLIER-STATE to REPLICATION-APPLIER-STATE-<id> Result res = basics::catchToResult([&vocbase, &path, &source, &result]() -> Result { std::string const dest = arangodb::basics::FileUtils::buildFilename( path, "REPLICATION-APPLIER-STATE-" + std::to_string(vocbase.id())); LOG_TOPIC("75337", TRACE, Logger::STARTUP) << "copying replication applier file '" << source << "' to '" << dest << "'"; std::string error; if (!TRI_CopyFile(source, dest, error)) { LOG_TOPIC("6c90c", WARN, Logger::STARTUP) << "could not copy replication applier file '" << source << "' to '" << dest << "'"; result = false; } return Result(); }); if (res.fail()) { return false; } return result; }
Simran-B/arangodb
arangod/VocBase/Methods/UpgradeTasks.cpp
C++
apache-2.0
23,269
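// --- Illustrative example (not part of the repository above) ---
// recreateGeoIndex upgrades a legacy geo index by merging the old index
// definition with an overlay that replaces only the "type" attribute
// (VPackCollection::merge with mergeObjects=false lets the overlay win),
// then dropping and recreating the index under the same id. A minimal Java
// sketch of that overlay-merge semantics, using plain Maps instead of
// VelocyPack (illustration only, not the ArangoDB API):
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

public final class OverlayMergeSketch {
  static Map<String, Object> merge(Map<String, Object> base, Map<String, Object> overlay) {
    final Map<String, Object> out = new HashMap<>(base);
    out.putAll(overlay); // overlay keys replace base keys
    return out;
  }

  public static void main(String[] args) {
    final Map<String, Object> oldDesc = new HashMap<>();
    oldDesc.put("type", "geo1");                      // legacy type
    oldDesc.put("fields", Arrays.asList("location")); // definition kept as-is
    final Map<String, Object> overw = new HashMap<>();
    overw.put("type", "geo");                         // only the type changes
    System.out.println(merge(oldDesc, overw));        // {fields=[location], type=geo}
  }
}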
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.parquet.hadoop; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.junit.Test; import org.apache.parquet.Log; import org.apache.parquet.bytes.BytesInput; import org.apache.parquet.column.ColumnDescriptor; import org.apache.parquet.column.Encoding; import org.apache.parquet.column.page.DataPage; import org.apache.parquet.column.page.DataPageV1; import org.apache.parquet.column.page.PageReadStore; import org.apache.parquet.column.page.PageReader; import org.apache.parquet.column.statistics.BinaryStatistics; import org.apache.parquet.column.statistics.LongStatistics; import org.apache.parquet.format.Statistics; import org.apache.parquet.hadoop.metadata.*; import org.apache.parquet.hadoop.util.HiddenFileFilter; import org.apache.parquet.io.api.Binary; import org.apache.parquet.schema.MessageType; import org.apache.parquet.schema.MessageTypeParser; import org.apache.parquet.schema.PrimitiveType; import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName; import java.io.File; import java.io.IOException; import java.util.*; import static org.junit.Assert.*; import static org.apache.parquet.column.Encoding.BIT_PACKED; import static org.apache.parquet.column.Encoding.PLAIN; import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.BINARY; import static org.apache.parquet.schema.Type.Repetition.*; import static org.apache.parquet.hadoop.TestUtils.enforceEmptyDir; import org.apache.parquet.example.data.Group; import org.apache.parquet.example.data.simple.SimpleGroup; import org.apache.parquet.hadoop.example.GroupWriteSupport; public class TestParquetFileWriter { private static final Log LOG = Log.getLog(TestParquetFileWriter.class); private String writeSchema; @Test public void testWriteMode() throws Exception { File testDir = new File("target/test/TestParquetFileWriter/"); testDir.mkdirs(); File testFile = new File(testDir, "testParquetFile"); testFile = testFile.getAbsoluteFile(); testFile.createNewFile(); MessageType schema = MessageTypeParser.parseMessageType( "message m { required group a {required binary b;} required group " + "c { required int64 d; }}"); Configuration conf = new Configuration(); ParquetFileWriter writer = null; boolean exceptionThrown = false; Path path = new Path(testFile.toURI()); try { writer = new ParquetFileWriter(conf, schema, path, ParquetFileWriter.Mode.CREATE); } catch(IOException ioe1) { exceptionThrown = true; } assertTrue(exceptionThrown); exceptionThrown = false; try { writer = new ParquetFileWriter(conf, schema, path, ParquetFileWriter.Mode.OVERWRITE); } catch(IOException ioe2) { exceptionThrown = true; } assertTrue(!exceptionThrown); 
testFile.delete(); } @Test public void testWriteRead() throws Exception { File testFile = new File("target/test/TestParquetFileWriter/testParquetFile").getAbsoluteFile(); testFile.delete(); Path path = new Path(testFile.toURI()); Configuration configuration = new Configuration(); MessageType schema = MessageTypeParser.parseMessageType("message m { required group a {required binary b;} required group c { required int64 d; }}"); String[] path1 = {"a", "b"}; ColumnDescriptor c1 = schema.getColumnDescription(path1); String[] path2 = {"c", "d"}; ColumnDescriptor c2 = schema.getColumnDescription(path2); byte[] bytes1 = { 0, 1, 2, 3}; byte[] bytes2 = { 1, 2, 3, 4}; byte[] bytes3 = { 2, 3, 4, 5}; byte[] bytes4 = { 3, 4, 5, 6}; CompressionCodecName codec = CompressionCodecName.UNCOMPRESSED; BinaryStatistics stats1 = new BinaryStatistics(); BinaryStatistics stats2 = new BinaryStatistics(); ParquetFileWriter w = new ParquetFileWriter(configuration, schema, path); w.start(); w.startBlock(3); w.startColumn(c1, 5, codec); long c1Starts = w.getPos(); w.writeDataPage(2, 4, BytesInput.from(bytes1), stats1, BIT_PACKED, BIT_PACKED, PLAIN); w.writeDataPage(3, 4, BytesInput.from(bytes1), stats1, BIT_PACKED, BIT_PACKED, PLAIN); w.endColumn(); long c1Ends = w.getPos(); w.startColumn(c2, 6, codec); long c2Starts = w.getPos(); w.writeDataPage(2, 4, BytesInput.from(bytes2), stats2, BIT_PACKED, BIT_PACKED, PLAIN); w.writeDataPage(3, 4, BytesInput.from(bytes2), stats2, BIT_PACKED, BIT_PACKED, PLAIN); w.writeDataPage(1, 4, BytesInput.from(bytes2), stats2, BIT_PACKED, BIT_PACKED, PLAIN); w.endColumn(); long c2Ends = w.getPos(); w.endBlock(); w.startBlock(4); w.startColumn(c1, 7, codec); w.writeDataPage(7, 4, BytesInput.from(bytes3), stats1, BIT_PACKED, BIT_PACKED, PLAIN); w.endColumn(); w.startColumn(c2, 8, codec); w.writeDataPage(8, 4, BytesInput.from(bytes4), stats2, BIT_PACKED, BIT_PACKED, PLAIN); w.endColumn(); w.endBlock(); w.end(new HashMap<String, String>()); ParquetMetadata readFooter = ParquetFileReader.readFooter(configuration, path); assertEquals("footer: "+ readFooter, 2, readFooter.getBlocks().size()); assertEquals(c1Ends - c1Starts, readFooter.getBlocks().get(0).getColumns().get(0).getTotalSize()); assertEquals(c2Ends - c2Starts, readFooter.getBlocks().get(0).getColumns().get(1).getTotalSize()); assertEquals(c2Ends - c1Starts, readFooter.getBlocks().get(0).getTotalByteSize()); HashSet<Encoding> expectedEncoding=new HashSet<Encoding>(); expectedEncoding.add(PLAIN); expectedEncoding.add(BIT_PACKED); assertEquals(expectedEncoding,readFooter.getBlocks().get(0).getColumns().get(0).getEncodings()); { // read first block of col #1 ParquetFileReader r = new ParquetFileReader(configuration, path, Arrays.asList(readFooter.getBlocks().get(0)), Arrays.asList(schema.getColumnDescription(path1))); PageReadStore pages = r.readNextRowGroup(); assertEquals(3, pages.getRowCount()); validateContains(schema, pages, path1, 2, BytesInput.from(bytes1)); validateContains(schema, pages, path1, 3, BytesInput.from(bytes1)); assertNull(r.readNextRowGroup()); } { // read all blocks of col #1 and #2 ParquetFileReader r = new ParquetFileReader(configuration, path, readFooter.getBlocks(), Arrays.asList(schema.getColumnDescription(path1), schema.getColumnDescription(path2))); PageReadStore pages = r.readNextRowGroup(); assertEquals(3, pages.getRowCount()); validateContains(schema, pages, path1, 2, BytesInput.from(bytes1)); validateContains(schema, pages, path1, 3, BytesInput.from(bytes1)); validateContains(schema, pages, path2, 2, 
BytesInput.from(bytes2)); validateContains(schema, pages, path2, 3, BytesInput.from(bytes2)); validateContains(schema, pages, path2, 1, BytesInput.from(bytes2)); pages = r.readNextRowGroup(); assertEquals(4, pages.getRowCount()); validateContains(schema, pages, path1, 7, BytesInput.from(bytes3)); validateContains(schema, pages, path2, 8, BytesInput.from(bytes4)); assertNull(r.readNextRowGroup()); } PrintFooter.main(new String[] {path.toString()}); } @Test public void testConvertToThriftStatistics() throws Exception { long[] longArray = new long[] {39L, 99L, 12L, 1000L, 65L, 542L, 2533461316L, -253346131996L, Long.MAX_VALUE, Long.MIN_VALUE}; LongStatistics parquetMRstats = new LongStatistics(); for (long l: longArray) { parquetMRstats.updateStats(l); } Statistics thriftStats = org.apache.parquet.format.converter.ParquetMetadataConverter.toParquetStatistics(parquetMRstats); LongStatistics convertedBackStats = (LongStatistics) org.apache.parquet.format.converter.ParquetMetadataConverter.fromParquetStatistics(thriftStats, PrimitiveTypeName.INT64); assertEquals(parquetMRstats.getMax(), convertedBackStats.getMax()); assertEquals(parquetMRstats.getMin(), convertedBackStats.getMin()); assertEquals(parquetMRstats.getNumNulls(), convertedBackStats.getNumNulls()); } @Test public void testWriteReadStatistics() throws Exception { File testFile = new File("target/test/TestParquetFileWriter/testParquetFile").getAbsoluteFile(); testFile.delete(); Path path = new Path(testFile.toURI()); Configuration configuration = new Configuration(); MessageType schema = MessageTypeParser.parseMessageType("message m { required group a {required binary b;} required group c { required int64 d; }}"); String[] path1 = {"a", "b"}; ColumnDescriptor c1 = schema.getColumnDescription(path1); String[] path2 = {"c", "d"}; ColumnDescriptor c2 = schema.getColumnDescription(path2); byte[] bytes1 = { 0, 1, 2, 3}; byte[] bytes2 = { 1, 2, 3, 4}; byte[] bytes3 = { 2, 3, 4, 5}; byte[] bytes4 = { 3, 4, 5, 6}; CompressionCodecName codec = CompressionCodecName.UNCOMPRESSED; BinaryStatistics statsB1C1P1 = new BinaryStatistics(); BinaryStatistics statsB1C1P2 = new BinaryStatistics(); LongStatistics statsB1C2P1 = new LongStatistics(); LongStatistics statsB1C2P2 = new LongStatistics(); BinaryStatistics statsB2C1P1 = new BinaryStatistics(); LongStatistics statsB2C2P1 = new LongStatistics(); statsB1C1P1.setMinMax(Binary.fromString("s"), Binary.fromString("z")); statsB1C1P2.setMinMax(Binary.fromString("a"), Binary.fromString("b")); statsB1C2P1.setMinMax(2l, 10l); statsB1C2P2.setMinMax(-6l, 4l); statsB2C1P1.setMinMax(Binary.fromString("d"), Binary.fromString("e")); statsB2C2P1.setMinMax(11l, 122l); ParquetFileWriter w = new ParquetFileWriter(configuration, schema, path); w.start(); w.startBlock(3); w.startColumn(c1, 5, codec); w.writeDataPage(2, 4, BytesInput.from(bytes1), statsB1C1P1, BIT_PACKED, BIT_PACKED, PLAIN); w.writeDataPage(3, 4, BytesInput.from(bytes1), statsB1C1P2, BIT_PACKED, BIT_PACKED, PLAIN); w.endColumn(); w.startColumn(c2, 6, codec); w.writeDataPage(3, 4, BytesInput.from(bytes2), statsB1C2P1, BIT_PACKED, BIT_PACKED, PLAIN); w.writeDataPage(1, 4, BytesInput.from(bytes2), statsB1C2P2, BIT_PACKED, BIT_PACKED, PLAIN); w.endColumn(); w.endBlock(); w.startBlock(4); w.startColumn(c1, 7, codec); w.writeDataPage(7, 4, BytesInput.from(bytes3), statsB2C1P1, BIT_PACKED, BIT_PACKED, PLAIN); w.endColumn(); w.startColumn(c2, 8, codec); w.writeDataPage(8, 4, BytesInput.from(bytes4), statsB2C2P1, BIT_PACKED, BIT_PACKED, PLAIN); w.endColumn(); 
w.endBlock(); w.end(new HashMap<String, String>()); ParquetMetadata readFooter = ParquetFileReader.readFooter(configuration, path); for (BlockMetaData block : readFooter.getBlocks()) { for (ColumnChunkMetaData col : block.getColumns()) { col.getPath(); } } // correct statistics BinaryStatistics bs1 = new BinaryStatistics(); bs1.setMinMax(Binary.fromString("a"), Binary.fromString("z")); LongStatistics ls1 = new LongStatistics(); ls1.setMinMax(-6l, 10l); BinaryStatistics bs2 = new BinaryStatistics(); bs2.setMinMax(Binary.fromString("d"), Binary.fromString("e")); LongStatistics ls2 = new LongStatistics(); ls2.setMinMax(11l, 122l); { // assert stats are correct for the first block BinaryStatistics bsout = (BinaryStatistics)readFooter.getBlocks().get(0).getColumns().get(0).getStatistics(); String str = new String(bsout.getMaxBytes()); String str2 = new String(bsout.getMinBytes()); assertTrue(((BinaryStatistics)readFooter.getBlocks().get(0).getColumns().get(0).getStatistics()).equals(bs1)); assertTrue(((LongStatistics)readFooter.getBlocks().get(0).getColumns().get(1).getStatistics()).equals(ls1)); } { // assert stats are correct for the second block assertTrue(((BinaryStatistics)readFooter.getBlocks().get(1).getColumns().get(0).getStatistics()).equals(bs2)); assertTrue(((LongStatistics)readFooter.getBlocks().get(1).getColumns().get(1).getStatistics()).equals(ls2)); } } @Test public void testMetaDataFile() throws Exception { File testDir = new File("target/test/TestParquetFileWriter/testMetaDataFileDir").getAbsoluteFile(); Path testDirPath = new Path(testDir.toURI()); Configuration configuration = new Configuration(); final FileSystem fs = testDirPath.getFileSystem(configuration); enforceEmptyDir(configuration, testDirPath); MessageType schema = MessageTypeParser.parseMessageType("message m { required group a {required binary b;} required group c { required int64 d; }}"); createFile(configuration, new Path(testDirPath, "part0"), schema); createFile(configuration, new Path(testDirPath, "part1"), schema); createFile(configuration, new Path(testDirPath, "part2"), schema); FileStatus outputStatus = fs.getFileStatus(testDirPath); List<Footer> footers = ParquetFileReader.readFooters(configuration, outputStatus, false); validateFooters(footers); ParquetFileWriter.writeMetadataFile(configuration, testDirPath, footers); footers = ParquetFileReader.readFooters(configuration, outputStatus, false); validateFooters(footers); footers = ParquetFileReader.readFooters(configuration, fs.getFileStatus(new Path(testDirPath, "part0")), false); assertEquals(1, footers.size()); final FileStatus metadataFile = fs.getFileStatus(new Path(testDirPath, ParquetFileWriter.PARQUET_METADATA_FILE)); final FileStatus metadataFileLight = fs.getFileStatus(new Path(testDirPath, ParquetFileWriter.PARQUET_COMMON_METADATA_FILE)); final List<Footer> metadata = ParquetFileReader.readSummaryFile(configuration, metadataFile); validateFooters(metadata); footers = ParquetFileReader.readAllFootersInParallelUsingSummaryFiles(configuration, Arrays.asList(fs.listStatus(testDirPath, HiddenFileFilter.INSTANCE)), false); validateFooters(footers); fs.delete(metadataFile.getPath(), false); fs.delete(metadataFileLight.getPath(), false); footers = ParquetFileReader.readAllFootersInParallelUsingSummaryFiles(configuration, Arrays.asList(fs.listStatus(testDirPath)), false); validateFooters(footers); } @Test public void testWriteReadStatisticsAllNulls() throws Exception { File testFile = new 
File("target/test/TestParquetFileWriter/testParquetFile").getAbsoluteFile(); testFile.delete(); writeSchema = "message example {\n" + "required binary content;\n" + "}"; Path path = new Path(testFile.toURI()); MessageType schema = MessageTypeParser.parseMessageType(writeSchema); Configuration configuration = new Configuration(); GroupWriteSupport.setSchema(schema, configuration); ParquetWriter<Group> writer = new ParquetWriter<Group>(path, configuration, new GroupWriteSupport()); Group r1 = new SimpleGroup(schema); writer.write(r1); writer.close(); ParquetMetadata readFooter = ParquetFileReader.readFooter(configuration, path); // assert the statistics object is not empty assertTrue((readFooter.getBlocks().get(0).getColumns().get(0).getStatistics().isEmpty()) == false); // assert the number of nulls are correct for the first block assertEquals(1, (readFooter.getBlocks().get(0).getColumns().get(0).getStatistics().getNumNulls())); } private void validateFooters(final List<Footer> metadata) { LOG.debug(metadata); assertEquals(String.valueOf(metadata), 3, metadata.size()); for (Footer footer : metadata) { final File file = new File(footer.getFile().toUri()); assertTrue(file.getName(), file.getName().startsWith("part")); assertTrue(file.getPath(), file.exists()); final ParquetMetadata parquetMetadata = footer.getParquetMetadata(); assertEquals(2, parquetMetadata.getBlocks().size()); final Map<String, String> keyValueMetaData = parquetMetadata.getFileMetaData().getKeyValueMetaData(); assertEquals("bar", keyValueMetaData.get("foo")); assertEquals(footer.getFile().getName(), keyValueMetaData.get(footer.getFile().getName())); } } private void createFile(Configuration configuration, Path path, MessageType schema) throws IOException { String[] path1 = {"a", "b"}; ColumnDescriptor c1 = schema.getColumnDescription(path1); String[] path2 = {"c", "d"}; ColumnDescriptor c2 = schema.getColumnDescription(path2); byte[] bytes1 = { 0, 1, 2, 3}; byte[] bytes2 = { 1, 2, 3, 4}; byte[] bytes3 = { 2, 3, 4, 5}; byte[] bytes4 = { 3, 4, 5, 6}; CompressionCodecName codec = CompressionCodecName.UNCOMPRESSED; BinaryStatistics stats1 = new BinaryStatistics(); BinaryStatistics stats2 = new BinaryStatistics(); ParquetFileWriter w = new ParquetFileWriter(configuration, schema, path); w.start(); w.startBlock(3); w.startColumn(c1, 5, codec); w.writeDataPage(2, 4, BytesInput.from(bytes1), stats1, BIT_PACKED, BIT_PACKED, PLAIN); w.writeDataPage(3, 4, BytesInput.from(bytes1), stats1, BIT_PACKED, BIT_PACKED, PLAIN); w.endColumn(); w.startColumn(c2, 6, codec); w.writeDataPage(2, 4, BytesInput.from(bytes2), stats2, BIT_PACKED, BIT_PACKED, PLAIN); w.writeDataPage(3, 4, BytesInput.from(bytes2), stats2, BIT_PACKED, BIT_PACKED, PLAIN); w.writeDataPage(1, 4, BytesInput.from(bytes2), stats2, BIT_PACKED, BIT_PACKED, PLAIN); w.endColumn(); w.endBlock(); w.startBlock(4); w.startColumn(c1, 7, codec); w.writeDataPage(7, 4, BytesInput.from(bytes3), stats1, BIT_PACKED, BIT_PACKED, PLAIN); w.endColumn(); w.startColumn(c2, 8, codec); w.writeDataPage(8, 4, BytesInput.from(bytes4), stats2, BIT_PACKED, BIT_PACKED, PLAIN); w.endColumn(); w.endBlock(); final HashMap<String, String> extraMetaData = new HashMap<String, String>(); extraMetaData.put("foo", "bar"); extraMetaData.put(path.getName(), path.getName()); w.end(extraMetaData); } private void validateContains(MessageType schema, PageReadStore pages, String[] path, int values, BytesInput bytes) throws IOException { PageReader pageReader = pages.getPageReader(schema.getColumnDescription(path)); 
DataPage page = pageReader.readPage(); assertEquals(values, page.getValueCount()); assertArrayEquals(bytes.toByteArray(), ((DataPageV1)page).getBytes().toByteArray()); } @Test public void testMergeMetadata() { FileMetaData md1 = new FileMetaData( new MessageType("root1", new PrimitiveType(REPEATED, BINARY, "a"), new PrimitiveType(OPTIONAL, BINARY, "b")), new HashMap<String, String>(), "test"); FileMetaData md2 = new FileMetaData( new MessageType("root2", new PrimitiveType(REQUIRED, BINARY, "c")), new HashMap<String, String>(), "test2"); GlobalMetaData merged = ParquetFileWriter.mergeInto(md2, ParquetFileWriter.mergeInto(md1, null)); assertEquals( merged.getSchema(), new MessageType("root1", new PrimitiveType(REPEATED, BINARY, "a"), new PrimitiveType(OPTIONAL, BINARY, "b"), new PrimitiveType(REQUIRED, BINARY, "c")) ); } @Test public void testMergeFooters() { List<BlockMetaData> oneBlocks = new ArrayList<BlockMetaData>(); oneBlocks.add(new BlockMetaData()); oneBlocks.add(new BlockMetaData()); List<BlockMetaData> twoBlocks = new ArrayList<BlockMetaData>(); twoBlocks.add(new BlockMetaData()); List<BlockMetaData> expected = new ArrayList<BlockMetaData>(); expected.addAll(oneBlocks); expected.addAll(twoBlocks); Footer one = new Footer(new Path("file:/tmp/output/one.parquet"), new ParquetMetadata(new FileMetaData( new MessageType("root1", new PrimitiveType(REPEATED, BINARY, "a"), new PrimitiveType(OPTIONAL, BINARY, "b")), new HashMap<String, String>(), "test"), oneBlocks)); Footer two = new Footer(new Path("/tmp/output/two.parquet"), new ParquetMetadata(new FileMetaData( new MessageType("root2", new PrimitiveType(REQUIRED, BINARY, "c")), new HashMap<String, String>(), "test2"), twoBlocks)); List<Footer> footers = new ArrayList<Footer>(); footers.add(one); footers.add(two); ParquetMetadata merged = ParquetFileWriter.mergeFooters( new Path("/tmp"), footers); assertEquals( new MessageType("root1", new PrimitiveType(REPEATED, BINARY, "a"), new PrimitiveType(OPTIONAL, BINARY, "b"), new PrimitiveType(REQUIRED, BINARY, "c")), merged.getFileMetaData().getSchema()); assertEquals("Should have all blocks", expected, merged.getBlocks()); } }
snorden/parquet-mr-apache-parquet-1.7.0
parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestParquetFileWriter.java
Java
apache-2.0
21,679
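// --- Illustrative example (not part of the repository above) ---
// testWriteReadStatistics relies on column-chunk statistics being the
// accumulation of per-page min/max values: pages with ranges ["s","z"] and
// ["a","b"] must yield chunk statistics ["a","z"] (bs1 above). A minimal
// sketch of that accumulation over strings:
public final class MinMaxAccumulationSketch {
  public static void main(String[] args) {
    final String[][] pageRanges = { { "s", "z" }, { "a", "b" } }; // block 1, column a.b
    String min = null;
    String max = null;
    for (String[] range : pageRanges) {
      if (min == null || range[0].compareTo(min) < 0) {
        min = range[0];
      }
      if (max == null || range[1].compareTo(max) > 0) {
        max = range[1];
      }
    }
    System.out.println("min=" + min + " max=" + max); // min=a max=z
  }
}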
using Lucene.Net.Queries; using Lucene.Net.Search; using Lucene.Net.Spatial.Prefix.Tree; using Lucene.Net.Spatial.Queries; using Lucene.Net.Util; using Spatial4n.Shapes; using System; using System.Collections.Generic; namespace Lucene.Net.Spatial.Prefix { /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /// <summary> /// A basic implementation of <see cref="PrefixTreeStrategy"/> using a large /// <see cref="TermsFilter"/> of all the cells from /// <see cref="SpatialPrefixTree.GetCells(IShape, int, bool, bool)"/>. /// It only supports the search of indexed Point shapes. /// <para/> /// The precision of query shapes (DistErrPct) is an important factor in using /// this Strategy. If the precision is too precise then it will result in many /// terms which will amount to a slower query. /// <para/> /// @lucene.experimental /// </summary> public class TermQueryPrefixTreeStrategy : PrefixTreeStrategy { public TermQueryPrefixTreeStrategy(SpatialPrefixTree grid, string fieldName) : base(grid, fieldName, false)//do not simplify indexed cells { } public override Filter MakeFilter(SpatialArgs args) { // LUCENENET specific - added guard clause if (args is null) throw new ArgumentNullException(nameof(args)); SpatialOperation op = args.Operation; if (op != SpatialOperation.Intersects) { throw new UnsupportedSpatialOperationException(op); } IShape shape = args.Shape; int detailLevel = m_grid.GetLevelForDistance(args.ResolveDistErr(m_ctx, m_distErrPct)); IList<Cell> cells = m_grid.GetCells(shape, detailLevel, false /*no parents*/, true /*simplify*/); var terms = new BytesRef[cells.Count]; int i = 0; foreach (Cell cell in cells) { terms[i++] = new BytesRef(cell.TokenString);//TODO use cell.getTokenBytes() } return new TermsFilter(FieldName, terms); } } }
apache/lucenenet
src/Lucene.Net.Spatial/Prefix/TermQueryPrefixTreeStrategy.cs
C#
apache-2.0
2,939
/*
 * Swift Parallel Scripting Language (http://swift-lang.org)
 * Code from Java CoG Kit Project (see notice below) with modifications.
 *
 * Copyright 2005-2014 University of Chicago
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//----------------------------------------------------------------------
//This code is developed as part of the Java CoG Kit project
//The terms of the license can be found at http://www.cogkit.org/license
//This message may not be removed or altered.
//----------------------------------------------------------------------

/*
 * Created on Aug 7, 2013
 */
package org.griphyn.vdl.karajan.monitor.processors.coasters;

import org.griphyn.vdl.karajan.monitor.SystemState;
import org.griphyn.vdl.karajan.monitor.items.StatefulItemClass;
import org.griphyn.vdl.karajan.monitor.processors.SimpleParser;

public class WorkerActiveProcessor extends AbstractRemoteLogProcessor {

    @Override
    public void initialize(SystemState state) {
        super.initialize(state);
    }

    @Override
    public String getMessageHeader() {
        return "WORKER_ACTIVE";
    }

    @Override
    public void processMessage(SystemState state, SimpleParser p, Object details) {
        try {
            // Messages look like: blockid=<id> id=<workerId> node=<host> cores=<n>
            p.skip("blockid=");
            String blockId = p.word();
            p.skip("id=");
            String workerId = p.word();
            p.skip("node=");
            String node = p.word();
            p.skip("cores=");
            int cores = Integer.parseInt(p.word());
            CoasterStatusItem item = (CoasterStatusItem)
                state.getItemByID(CoasterStatusItem.ID, StatefulItemClass.MISC);
            item.workerActive(blockId, workerId, node, cores, state.getCurrentTime());
        }
        catch (Exception e) {
            e.printStackTrace();
        }
    }
}
swift-lang/swift-k
src/org/griphyn/vdl/karajan/monitor/processors/coasters/WorkerActiveProcessor.java
Java
apache-2.0
2,365
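// --- Illustrative example (not part of the repository above) ---
// WorkerActiveProcessor consumes WORKER_ACTIVE log lines through the
// project's SimpleParser (skip()/word()). The payload layout implied by the
// parse calls is "blockid=<b> id=<w> node=<n> cores=<c>". A self-contained
// sketch of the same extraction with plain string handling; the sample
// message is hypothetical:
import java.util.HashMap;
import java.util.Map;

public final class WorkerActiveParseSketch {
  public static void main(String[] args) {
    final String msg = "blockid=block-0001 id=000000 node=compute-17 cores=8";
    final Map<String, String> fields = new HashMap<>();
    for (String token : msg.split("\\s+")) {
      final int eq = token.indexOf('=');
      fields.put(token.substring(0, eq), token.substring(eq + 1));
    }
    final String blockId = fields.get("blockid");
    final String workerId = fields.get("id");
    final String node = fields.get("node");
    final int cores = Integer.parseInt(fields.get("cores"));
    System.out.println(blockId + " / " + workerId + " / " + node + " / " + cores);
  }
}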
package org.mindinformatics.gwt.domeo.client.ui.annotation.forms;

/**
 * Marker interface for annotation forms that support attaching an
 * annotation to multiple targets. It declares no methods; implementing
 * it is the opt-in signal that consumers check for.
 *
 * @author Paolo Ciccarese <paolo.ciccarese@gmail.com>
 */
public interface IAllowsMultipleTargets {
}
rkboyce/DomeoClient
src/org/mindinformatics/gwt/domeo/client/ui/annotation/forms/IAllowsMultipleTargets.java
Java
apache-2.0
175
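// --- Illustrative example (not part of the repository above) ---
// IAllowsMultipleTargets is a marker interface, so consumers typically
// branch on instanceof. A minimal sketch with hypothetical form types
// (the Domeo code base's actual consumers are not shown here):
public final class MarkerInterfaceSketch {
  interface IAllowsMultipleTargets { } // stand-in for the Domeo marker

  static class SingleTargetForm { }
  static class MultiTargetForm implements IAllowsMultipleTargets { }

  static boolean allowsMultipleTargets(Object form) {
    return form instanceof IAllowsMultipleTargets; // marker check, no methods needed
  }

  public static void main(String[] args) {
    System.out.println(allowsMultipleTargets(new SingleTargetForm())); // false
    System.out.println(allowsMultipleTargets(new MultiTargetForm()));  // true
  }
}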
/*global require*/
'use strict';

// Require.js allows us to configure shortcut aliases
require.config({
    // The shim config allows us to configure dependencies for
    // scripts that do not call define() to register a module
    shim: {
        underscore: {
            exports: '_'
        },
        backbone: {
            deps: [
                'underscore',
                'jquery'
            ]
        }
    },
    paths: {
        jquery: '../lib/jquery/jquery',
        underscore: '../lib/underscore/underscore',
        backbone: '../lib/backbone/backbone',
        text: '../lib/requirejs-text/text',
        domReady: '../lib/requirejs-domReady/domReady',
        handlebars: '../lib/handlebars/handlebars'
    }
});

require([
    'backbone'
], function (backbone) {
    /*jshint nonew:false*/
    // Initialize routing and start Backbone.history()
    console.log("main invoked");
    //new Workspace();
    backbone.history.start();
    // Initialize the application view
    //new AppView();
});
jiangjianqing/flow-demo
activiti/activiti5.springmvc/src/main/webapp/app/main.js
JavaScript
apache-2.0
867
package org.apache.lucene.search.suggest.analyzing; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.IOException; import java.util.ArrayList; import java.util.Comparator; import java.util.List; import java.util.Set; import java.util.TreeSet; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.document.FieldType; import org.apache.lucene.document.TextField; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.DocsAndPositionsEnum; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.MultiDocValues; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.search.suggest.Lookup; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.Version; // TODO: // - allow to use the search score /** * Extension of the AnalyzingInfixSuggester which transforms the weight * after search to take into account the position of the searched term into * the indexed text. * Please note that it increases the number of elements searched and applies the * ponderation after. It might be costly for long suggestions. * * @lucene.experimental */ public class BlendedInfixSuggester extends AnalyzingInfixSuggester { /** * Coefficient used for linear blending */ protected static double LINEAR_COEF = 0.10; /** * Default factor */ public static int DEFAULT_NUM_FACTOR = 10; /** * Factor to multiply the number of searched elements */ private final int numFactor; /** * Type of blender used by the suggester */ private final BlenderType blenderType; /** * The different types of blender. */ public static enum BlenderType { /** Application dependent; override {@link * #calculateCoefficient} to compute it. */ CUSTOM, /** weight*(1 - 0.10*position) */ POSITION_LINEAR, /** weight/(1+position) */ POSITION_RECIPROCAL, // TODO: //SCORE } /** * Create a new instance, loading from a previously built * directory, if it exists. */ public BlendedInfixSuggester(Version matchVersion, Directory dir, Analyzer analyzer) throws IOException { super(matchVersion, dir, analyzer); this.blenderType = BlenderType.POSITION_LINEAR; this.numFactor = DEFAULT_NUM_FACTOR; } /** * Create a new instance, loading from a previously built * directory, if it exists. * * @param blenderType Type of blending strategy, see BlenderType for more precisions * @param numFactor Factor to multiply the number of searched elements before ponderate * @throws IOException If there are problems opening the underlying Lucene index. 
*/ public BlendedInfixSuggester(Version matchVersion, Directory dir, Analyzer indexAnalyzer, Analyzer queryAnalyzer, int minPrefixChars, BlenderType blenderType, int numFactor) throws IOException { super(matchVersion, dir, indexAnalyzer, queryAnalyzer, minPrefixChars); this.blenderType = blenderType; this.numFactor = numFactor; } @Override public List<Lookup.LookupResult> lookup(CharSequence key, Set<BytesRef> contexts, boolean onlyMorePopular, int num) throws IOException { // here we multiply the number of searched element by the defined factor return super.lookup(key, contexts, onlyMorePopular, num * numFactor); } @Override public List<Lookup.LookupResult> lookup(CharSequence key, Set<BytesRef> contexts, int num, boolean allTermsRequired, boolean doHighlight) throws IOException { // here we multiply the number of searched element by the defined factor return super.lookup(key, contexts, num * numFactor, allTermsRequired, doHighlight); } @Override protected FieldType getTextFieldType() { FieldType ft = new FieldType(TextField.TYPE_NOT_STORED); ft.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); ft.setStoreTermVectors(true); ft.setStoreTermVectorPositions(true); ft.setOmitNorms(true); return ft; } @Override protected List<Lookup.LookupResult> createResults(IndexSearcher searcher, TopFieldDocs hits, int num, CharSequence key, boolean doHighlight, Set<String> matchedTokens, String prefixToken) throws IOException { BinaryDocValues textDV = MultiDocValues.getBinaryValues(searcher.getIndexReader(), TEXT_FIELD_NAME); assert textDV != null; // This will just be null if app didn't pass payloads to build(): // TODO: maybe just stored fields? they compress... BinaryDocValues payloadsDV = MultiDocValues.getBinaryValues(searcher.getIndexReader(), "payloads"); TreeSet<Lookup.LookupResult> results = new TreeSet<>(LOOKUP_COMP); // we reduce the num to the one initially requested int actualNum = num / numFactor; for (int i = 0; i < hits.scoreDocs.length; i++) { FieldDoc fd = (FieldDoc) hits.scoreDocs[i]; final String text = textDV.get(fd.doc).utf8ToString(); long weight = (Long) fd.fields[0]; BytesRef payload; if (payloadsDV != null) { payload = BytesRef.deepCopyOf(payloadsDV.get(fd.doc)); } else { payload = null; } double coefficient; if (text.startsWith(key.toString())) { // if hit starts with the key, we don't change the score coefficient = 1; } else { coefficient = createCoefficient(searcher, fd.doc, matchedTokens, prefixToken); } long score = (long) (weight * coefficient); LookupResult result; if (doHighlight) { result = new LookupResult(text, highlight(text, matchedTokens, prefixToken), score, payload); } else { result = new LookupResult(text, score, payload); } boundedTreeAdd(results, result, actualNum); } return new ArrayList<>(results.descendingSet()); } /** * Add an element to the tree respecting a size limit * * @param results the tree to add in * @param result the result we try to add * @param num size limit */ private static void boundedTreeAdd(TreeSet<Lookup.LookupResult> results, Lookup.LookupResult result, int num) { if (results.size() >= num) { if (results.first().value < result.value) { results.pollFirst(); } else { return; } } results.add(result); } /** * Create the coefficient to transform the weight. * * @param doc id of the document * @param matchedTokens tokens found in the query * @param prefixToken unfinished token in the query * @return the coefficient * @throws IOException If there are problems reading term vectors from the underlying Lucene index. 
*/ private double createCoefficient(IndexSearcher searcher, int doc, Set<String> matchedTokens, String prefixToken) throws IOException { Terms tv = searcher.getIndexReader().getTermVector(doc, TEXT_FIELD_NAME); TermsEnum it = tv.iterator(TermsEnum.EMPTY); Integer position = Integer.MAX_VALUE; BytesRef term; // find the closest token position while ((term = it.next()) != null) { String docTerm = term.utf8ToString(); if (matchedTokens.contains(docTerm) || (prefixToken != null && docTerm.startsWith(prefixToken))) { DocsAndPositionsEnum docPosEnum = it.docsAndPositions(null, null, DocsAndPositionsEnum.FLAG_OFFSETS); docPosEnum.nextDoc(); // use the first occurrence of the term int p = docPosEnum.nextPosition(); if (p < position) { position = p; } } } // create corresponding coefficient based on position return calculateCoefficient(position); } /** * Calculate the weight coefficient based on the position of the first matching word. * Subclasses should override it to adapt it to particular needs. * @param position position of the first matching word in the text * @return the coefficient */ protected double calculateCoefficient(int position) { double coefficient; switch (blenderType) { case POSITION_LINEAR: coefficient = 1 - LINEAR_COEF * position; break; case POSITION_RECIPROCAL: coefficient = 1. / (position + 1); break; default: coefficient = 1; } return coefficient; } private static Comparator<Lookup.LookupResult> LOOKUP_COMP = new LookUpComparator(); private static class LookUpComparator implements Comparator<Lookup.LookupResult> { @Override public int compare(Lookup.LookupResult o1, Lookup.LookupResult o2) { // order by weight if (o1.value > o2.value) { return 1; } else if (o1.value < o2.value) { return -1; } // otherwise, order alphabetically return CHARSEQUENCE_COMPARATOR.compare(o1.key, o2.key); } } }
smartan/lucene
src/main/java/org/apache/lucene/search/suggest/analyzing/BlendedInfixSuggester.java
Java
apache-2.0
9,762
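A minimal standalone sketch (hypothetical class and method names, not Lucene API) of the two built-in blending formulas above, POSITION_LINEAR weight*(1 - 0.10*position) and POSITION_RECIPROCAL weight/(1+position), so the rescaling can be checked in isolation:

// Illustrative sketch of the blending math; BlendDemo is not part of Lucene.
public final class BlendDemo {
  // weight * (1 - 0.10 * position), as in BlenderType.POSITION_LINEAR
  static long blendLinear(long weight, int position) {
    return (long) (weight * (1 - 0.10 * position));
  }
  // weight / (1 + position), as in BlenderType.POSITION_RECIPROCAL
  static long blendReciprocal(long weight, int position) {
    return (long) (weight / (double) (position + 1));
  }
  public static void main(String[] args) {
    // a suggestion of weight 100 whose first matched token is the third word
    System.out.println(blendLinear(100, 2));     // 80
    System.out.println(blendReciprocal(100, 2)); // 33
  }
}

Both formulas can only keep a weight unchanged (position 0) or lower it, which is why the suggester over-fetches by numFactor and keeps only the top results after rescaling.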
'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); exports.fieldsConflictMessage = fieldsConflictMessage; exports.OverlappingFieldsCanBeMerged = OverlappingFieldsCanBeMerged; var _error = require('../../error'); var _find = require('../../jsutils/find'); var _find2 = _interopRequireDefault(_find); var _kinds = require('../../language/kinds'); var _printer = require('../../language/printer'); var _definition = require('../../type/definition'); var _typeFromAST = require('../../utilities/typeFromAST'); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } /** * Copyright (c) 2015-present, Facebook, Inc. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * *  strict */ function fieldsConflictMessage(responseName, reason) { return 'Fields "' + responseName + '" conflict because ' + reasonMessage(reason) + '. Use different aliases on the fields to fetch both if this was ' + 'intentional.'; } function reasonMessage(reason) { if (Array.isArray(reason)) { return reason.map(function (_ref) { var responseName = _ref[0], subreason = _ref[1]; return 'subfields "' + responseName + '" conflict because ' + reasonMessage(subreason); }).join(' and '); } return reason; } /** * Overlapping fields can be merged * * A selection set is only valid if all fields (including spreading any * fragments) either correspond to distinct response names or can be merged * without ambiguity. */ function OverlappingFieldsCanBeMerged(context) { // A memoization for when two fragments are compared "between" each other for // conflicts. Two fragments may be compared many times, so memoizing this can // dramatically improve the performance of this validator. var comparedFragmentPairs = new PairSet(); // A cache for the "field map" and list of fragment names found in any given // selection set. Selection sets may be asked for this information multiple // times, so this improves the performance of this validator. var cachedFieldsAndFragmentNames = new Map(); return { SelectionSet: function SelectionSet(selectionSet) { var conflicts = findConflictsWithinSelectionSet(context, cachedFieldsAndFragmentNames, comparedFragmentPairs, context.getParentType(), selectionSet); conflicts.forEach(function (_ref2) { var _ref2$ = _ref2[0], responseName = _ref2$[0], reason = _ref2$[1], fields1 = _ref2[1], fields2 = _ref2[2]; return context.reportError(new _error.GraphQLError(fieldsConflictMessage(responseName, reason), fields1.concat(fields2))); }); } }; } // Field name and reason. // Reason is a string, or a nested list of conflicts. // Tuple defining a field node in a context. // Map of array of those. /** * Algorithm: * * Conflicts occur when two fields exist in a query which will produce the same * response name, but represent differing values, thus creating a conflict. * The algorithm below finds all conflicts by making a series of comparisons * between fields. In order to compare as few fields as possible, this makes * a series of comparisons "within" sets of fields and "between" sets of fields. * * Given any selection set, collecting its fields produces both a set of fields * (including those of any inline fragments) and a list of fragments * referenced by fragment spreads.
* * A) Each selection set represented in the document first compares "within" its * collected set of fields, finding any conflicts between every pair of * overlapping fields. * Note: This is the *only time* that the fields "within" a set are compared * to each other. After this only fields "between" sets are compared. * * B) Also, if any fragment is referenced in a selection set, then a * comparison is made "between" the original set of fields and the * referenced fragment. * * C) Also, if multiple fragments are referenced, then comparisons * are made "between" each referenced fragment. * * D) When comparing "between" a set of fields and a referenced fragment, first * a comparison is made between each field in the original set of fields and * each field in the referenced set of fields. * * E) Also, if any fragment is referenced in the referenced selection set, * then a comparison is made "between" the original set of fields and the * referenced fragment (recursively referring to step D). * * F) When comparing "between" two fragments, first a comparison is made between * each field in the first referenced set of fields and each field in the * second referenced set of fields. * * G) Also, any fragments referenced by the first must be compared to the * second, and any fragments referenced by the second must be compared to the * first (recursively referring to step F). * * H) When comparing two fields, if both have selection sets, then a comparison * is made "between" both selection sets, first comparing the set of fields in * the first selection set with the set of fields in the second. * * I) Also, if any fragment is referenced in either selection set, then a * comparison is made "between" the other set of fields and the * referenced fragment. * * J) Also, if two fragments are referenced in both selection sets, then a * comparison is made "between" the two fragments. * */ // Find all conflicts found "within" a selection set, including those found // via spreading in fragments. Called when visiting each SelectionSet in the // GraphQL Document. function findConflictsWithinSelectionSet(context, cachedFieldsAndFragmentNames, comparedFragmentPairs, parentType, selectionSet) { var conflicts = []; var _getFieldsAndFragment = getFieldsAndFragmentNames(context, cachedFieldsAndFragmentNames, parentType, selectionSet), fieldMap = _getFieldsAndFragment[0], fragmentNames = _getFieldsAndFragment[1]; // (A) Find all conflicts "within" the fields of this selection set. // Note: this is the *only place* `collectConflictsWithin` is called. collectConflictsWithin(context, conflicts, cachedFieldsAndFragmentNames, comparedFragmentPairs, fieldMap); if (fragmentNames.length !== 0) { // (B) Then collect conflicts between these fields and those represented by // each spread fragment name found. var comparedFragments = Object.create(null); for (var i = 0; i < fragmentNames.length; i++) { collectConflictsBetweenFieldsAndFragment(context, conflicts, cachedFieldsAndFragmentNames, comparedFragments, comparedFragmentPairs, false, fieldMap, fragmentNames[i]); // (C) Then compare this fragment with all other fragments found in this // selection set to collect conflicts between fragments spread together. // This compares each item in the list of fragment names to every other // item in that same list (except for itself).
for (var j = i + 1; j < fragmentNames.length; j++) { collectConflictsBetweenFragments(context, conflicts, cachedFieldsAndFragmentNames, comparedFragmentPairs, false, fragmentNames[i], fragmentNames[j]); } } } return conflicts; } // Collect all conflicts found between a set of fields and a fragment reference // including via spreading in any nested fragments. function collectConflictsBetweenFieldsAndFragment(context, conflicts, cachedFieldsAndFragmentNames, comparedFragments, comparedFragmentPairs, areMutuallyExclusive, fieldMap, fragmentName) { // Memoize so a fragment is not compared for conflicts more than once. if (comparedFragments[fragmentName]) { return; } comparedFragments[fragmentName] = true; var fragment = context.getFragment(fragmentName); if (!fragment) { return; } var _getReferencedFieldsA = getReferencedFieldsAndFragmentNames(context, cachedFieldsAndFragmentNames, fragment), fieldMap2 = _getReferencedFieldsA[0], fragmentNames2 = _getReferencedFieldsA[1]; // Do not compare a fragment's fieldMap to itself. if (fieldMap === fieldMap2) { return; } // (D) First collect any conflicts between the provided collection of fields // and the collection of fields represented by the given fragment. collectConflictsBetween(context, conflicts, cachedFieldsAndFragmentNames, comparedFragmentPairs, areMutuallyExclusive, fieldMap, fieldMap2); // (E) Then collect any conflicts between the provided collection of fields // and any fragment names found in the given fragment. for (var i = 0; i < fragmentNames2.length; i++) { collectConflictsBetweenFieldsAndFragment(context, conflicts, cachedFieldsAndFragmentNames, comparedFragments, comparedFragmentPairs, areMutuallyExclusive, fieldMap, fragmentNames2[i]); } } // Collect all conflicts found between two fragments, including via spreading in // any nested fragments. function collectConflictsBetweenFragments(context, conflicts, cachedFieldsAndFragmentNames, comparedFragmentPairs, areMutuallyExclusive, fragmentName1, fragmentName2) { // No need to compare a fragment to itself. if (fragmentName1 === fragmentName2) { return; } // Memoize so two fragments are not compared for conflicts more than once. if (comparedFragmentPairs.has(fragmentName1, fragmentName2, areMutuallyExclusive)) { return; } comparedFragmentPairs.add(fragmentName1, fragmentName2, areMutuallyExclusive); var fragment1 = context.getFragment(fragmentName1); var fragment2 = context.getFragment(fragmentName2); if (!fragment1 || !fragment2) { return; } var _getReferencedFieldsA2 = getReferencedFieldsAndFragmentNames(context, cachedFieldsAndFragmentNames, fragment1), fieldMap1 = _getReferencedFieldsA2[0], fragmentNames1 = _getReferencedFieldsA2[1]; var _getReferencedFieldsA3 = getReferencedFieldsAndFragmentNames(context, cachedFieldsAndFragmentNames, fragment2), fieldMap2 = _getReferencedFieldsA3[0], fragmentNames2 = _getReferencedFieldsA3[1]; // (F) First, collect all conflicts between these two collections of fields // (not including any nested fragments). collectConflictsBetween(context, conflicts, cachedFieldsAndFragmentNames, comparedFragmentPairs, areMutuallyExclusive, fieldMap1, fieldMap2); // (G) Then collect conflicts between the first fragment and any nested // fragments spread in the second fragment. 
for (var j = 0; j < fragmentNames2.length; j++) { collectConflictsBetweenFragments(context, conflicts, cachedFieldsAndFragmentNames, comparedFragmentPairs, areMutuallyExclusive, fragmentName1, fragmentNames2[j]); } // (G) Then collect conflicts between the second fragment and any nested // fragments spread in the first fragment. for (var i = 0; i < fragmentNames1.length; i++) { collectConflictsBetweenFragments(context, conflicts, cachedFieldsAndFragmentNames, comparedFragmentPairs, areMutuallyExclusive, fragmentNames1[i], fragmentName2); } } // Find all conflicts found between two selection sets, including those found // via spreading in fragments. Called when determining if conflicts exist // between the sub-fields of two overlapping fields. function findConflictsBetweenSubSelectionSets(context, cachedFieldsAndFragmentNames, comparedFragmentPairs, areMutuallyExclusive, parentType1, selectionSet1, parentType2, selectionSet2) { var conflicts = []; var _getFieldsAndFragment2 = getFieldsAndFragmentNames(context, cachedFieldsAndFragmentNames, parentType1, selectionSet1), fieldMap1 = _getFieldsAndFragment2[0], fragmentNames1 = _getFieldsAndFragment2[1]; var _getFieldsAndFragment3 = getFieldsAndFragmentNames(context, cachedFieldsAndFragmentNames, parentType2, selectionSet2), fieldMap2 = _getFieldsAndFragment3[0], fragmentNames2 = _getFieldsAndFragment3[1]; // (H) First, collect all conflicts between these two collections of fields. collectConflictsBetween(context, conflicts, cachedFieldsAndFragmentNames, comparedFragmentPairs, areMutuallyExclusive, fieldMap1, fieldMap2); // (I) Then collect conflicts between the first collection of fields and // those referenced by each fragment name associated with the second. if (fragmentNames2.length !== 0) { var comparedFragments = Object.create(null); for (var j = 0; j < fragmentNames2.length; j++) { collectConflictsBetweenFieldsAndFragment(context, conflicts, cachedFieldsAndFragmentNames, comparedFragments, comparedFragmentPairs, areMutuallyExclusive, fieldMap1, fragmentNames2[j]); } } // (I) Then collect conflicts between the second collection of fields and // those referenced by each fragment name associated with the first. if (fragmentNames1.length !== 0) { var _comparedFragments = Object.create(null); for (var i = 0; i < fragmentNames1.length; i++) { collectConflictsBetweenFieldsAndFragment(context, conflicts, cachedFieldsAndFragmentNames, _comparedFragments, comparedFragmentPairs, areMutuallyExclusive, fieldMap2, fragmentNames1[i]); } } // (J) Also collect conflicts between any fragment names referenced by the // first and fragment names referenced by the second. This compares each item // in the first set of names to each item in the second set of names. for (var _i = 0; _i < fragmentNames1.length; _i++) { for (var _j = 0; _j < fragmentNames2.length; _j++) { collectConflictsBetweenFragments(context, conflicts, cachedFieldsAndFragmentNames, comparedFragmentPairs, areMutuallyExclusive, fragmentNames1[_i], fragmentNames2[_j]); } } return conflicts; } // Collect all Conflicts "within" one collection of fields. function collectConflictsWithin(context, conflicts, cachedFieldsAndFragmentNames, comparedFragmentPairs, fieldMap) { // A field map is a keyed collection, where each key represents a response // name and the value at that key is a list of all fields which provide that // response name. For every response name, if there are multiple fields, they // must be compared to find a potential conflict.
Object.keys(fieldMap).forEach(function (responseName) { var fields = fieldMap[responseName]; // This compares every field in the list to every other field in this list // (except to itself). If the list only has one item, nothing needs to // be compared. if (fields.length > 1) { for (var i = 0; i < fields.length; i++) { for (var j = i + 1; j < fields.length; j++) { var conflict = findConflict(context, cachedFieldsAndFragmentNames, comparedFragmentPairs, false, // within one collection is never mutually exclusive responseName, fields[i], fields[j]); if (conflict) { conflicts.push(conflict); } } } } }); } // Collect all Conflicts between two collections of fields. This is similar to, // but different from the `collectConflictsWithin` function above. This check // assumes that `collectConflictsWithin` has already been called on each // provided collection of fields. This is true because this validator traverses // each individual selection set. function collectConflictsBetween(context, conflicts, cachedFieldsAndFragmentNames, comparedFragmentPairs, parentFieldsAreMutuallyExclusive, fieldMap1, fieldMap2) { // A field map is a keyed collection, where each key represents a response // name and the value at that key is a list of all fields which provide that // response name. For any response name which appears in both provided field // maps, each field from the first field map must be compared to every field // in the second field map to find potential conflicts. Object.keys(fieldMap1).forEach(function (responseName) { var fields2 = fieldMap2[responseName]; if (fields2) { var fields1 = fieldMap1[responseName]; for (var i = 0; i < fields1.length; i++) { for (var j = 0; j < fields2.length; j++) { var conflict = findConflict(context, cachedFieldsAndFragmentNames, comparedFragmentPairs, parentFieldsAreMutuallyExclusive, responseName, fields1[i], fields2[j]); if (conflict) { conflicts.push(conflict); } } } } }); } // Determines if there is a conflict between two particular fields, including // comparing their sub-fields. function findConflict(context, cachedFieldsAndFragmentNames, comparedFragmentPairs, parentFieldsAreMutuallyExclusive, responseName, field1, field2) { var parentType1 = field1[0], node1 = field1[1], def1 = field1[2]; var parentType2 = field2[0], node2 = field2[1], def2 = field2[2]; // If it is known that two fields could not possibly apply at the same // time, due to the parent types, then it is safe to permit them to diverge // in aliased fields or arguments used, as they will not present any ambiguity // by differing. // It is known that two parent types could never overlap if they are // different Object types. Interface or Union types might overlap - if not // in the current state of the schema, then perhaps in some future version, // thus may not safely diverge. var areMutuallyExclusive = parentFieldsAreMutuallyExclusive || parentType1 !== parentType2 && (0, _definition.isObjectType)(parentType1) && (0, _definition.isObjectType)(parentType2); // The return type for each field. var type1 = def1 && def1.type; var type2 = def2 && def2.type; if (!areMutuallyExclusive) { // Two aliases must refer to the same field. var name1 = node1.name.value; var name2 = node2.name.value; if (name1 !== name2) { return [[responseName, name1 + ' and ' + name2 + ' are different fields'], [node1], [node2]]; } // Two field calls must have the same arguments.
if (!sameArguments(node1.arguments || [], node2.arguments || [])) { return [[responseName, 'they have differing arguments'], [node1], [node2]]; } } if (type1 && type2 && doTypesConflict(type1, type2)) { return [[responseName, 'they return conflicting types ' + String(type1) + ' and ' + String(type2)], [node1], [node2]]; } // Collect and compare sub-fields. Use the same "visited fragment names" list // for both collections so fields in a fragment reference are never // compared to themselves. var selectionSet1 = node1.selectionSet; var selectionSet2 = node2.selectionSet; if (selectionSet1 && selectionSet2) { var conflicts = findConflictsBetweenSubSelectionSets(context, cachedFieldsAndFragmentNames, comparedFragmentPairs, areMutuallyExclusive, (0, _definition.getNamedType)(type1), selectionSet1, (0, _definition.getNamedType)(type2), selectionSet2); return subfieldConflicts(conflicts, responseName, node1, node2); } } function sameArguments(arguments1, arguments2) { if (arguments1.length !== arguments2.length) { return false; } return arguments1.every(function (argument1) { var argument2 = (0, _find2.default)(arguments2, function (argument) { return argument.name.value === argument1.name.value; }); if (!argument2) { return false; } return sameValue(argument1.value, argument2.value); }); } function sameValue(value1, value2) { return !value1 && !value2 || (0, _printer.print)(value1) === (0, _printer.print)(value2); } // Two types conflict if both types could not apply to a value simultaneously. // Composite types are ignored as their individual field types will be compared // later recursively. However List and Non-Null types must match. function doTypesConflict(type1, type2) { if ((0, _definition.isListType)(type1)) { return (0, _definition.isListType)(type2) ? doTypesConflict(type1.ofType, type2.ofType) : true; } if ((0, _definition.isListType)(type2)) { return true; } if ((0, _definition.isNonNullType)(type1)) { return (0, _definition.isNonNullType)(type2) ? doTypesConflict(type1.ofType, type2.ofType) : true; } if ((0, _definition.isNonNullType)(type2)) { return true; } if ((0, _definition.isLeafType)(type1) || (0, _definition.isLeafType)(type2)) { return type1 !== type2; } return false; } // Given a selection set, return the collection of fields (a mapping of response // name to field nodes and definitions) as well as a list of fragment names // referenced via fragment spreads. function getFieldsAndFragmentNames(context, cachedFieldsAndFragmentNames, parentType, selectionSet) { var cached = cachedFieldsAndFragmentNames.get(selectionSet); if (!cached) { var nodeAndDefs = Object.create(null); var fragmentNames = Object.create(null); _collectFieldsAndFragmentNames(context, parentType, selectionSet, nodeAndDefs, fragmentNames); cached = [nodeAndDefs, Object.keys(fragmentNames)]; cachedFieldsAndFragmentNames.set(selectionSet, cached); } return cached; } // Given a reference to a fragment, return the represented collection of fields // as well as a list of nested fragment names referenced via fragment spreads. function getReferencedFieldsAndFragmentNames(context, cachedFieldsAndFragmentNames, fragment) { // Short-circuit building a type from the node if possible. 
var cached = cachedFieldsAndFragmentNames.get(fragment.selectionSet); if (cached) { return cached; } var fragmentType = (0, _typeFromAST.typeFromAST)(context.getSchema(), fragment.typeCondition); return getFieldsAndFragmentNames(context, cachedFieldsAndFragmentNames, fragmentType, fragment.selectionSet); } function _collectFieldsAndFragmentNames(context, parentType, selectionSet, nodeAndDefs, fragmentNames) { for (var i = 0; i < selectionSet.selections.length; i++) { var selection = selectionSet.selections[i]; switch (selection.kind) { case _kinds.Kind.FIELD: var fieldName = selection.name.value; var fieldDef = void 0; if ((0, _definition.isObjectType)(parentType) || (0, _definition.isInterfaceType)(parentType)) { fieldDef = parentType.getFields()[fieldName]; } var responseName = selection.alias ? selection.alias.value : fieldName; if (!nodeAndDefs[responseName]) { nodeAndDefs[responseName] = []; } nodeAndDefs[responseName].push([parentType, selection, fieldDef]); break; case _kinds.Kind.FRAGMENT_SPREAD: fragmentNames[selection.name.value] = true; break; case _kinds.Kind.INLINE_FRAGMENT: var typeCondition = selection.typeCondition; var inlineFragmentType = typeCondition ? (0, _typeFromAST.typeFromAST)(context.getSchema(), typeCondition) : parentType; _collectFieldsAndFragmentNames(context, inlineFragmentType, selection.selectionSet, nodeAndDefs, fragmentNames); break; } } } // Given a series of Conflicts which occurred between two sub-fields, generate // a single Conflict. function subfieldConflicts(conflicts, responseName, node1, node2) { if (conflicts.length > 0) { return [[responseName, conflicts.map(function (_ref3) { var reason = _ref3[0]; return reason; })], conflicts.reduce(function (allFields, _ref4) { var fields1 = _ref4[1]; return allFields.concat(fields1); }, [node1]), conflicts.reduce(function (allFields, _ref5) { var fields2 = _ref5[2]; return allFields.concat(fields2); }, [node2])]; } } /** * A way to keep track of pairs of things when the ordering of the pair does * not matter. We do this by maintaining a sort of doubled adjacency map. */ var PairSet = function () { function PairSet() { _classCallCheck(this, PairSet); this._data = Object.create(null); } PairSet.prototype.has = function has(a, b, areMutuallyExclusive) { var first = this._data[a]; var result = first && first[b]; if (result === undefined) { return false; } // areMutuallyExclusive being false is a superset of being true, // hence if we want to know if this PairSet "has" these two with no // exclusivity, we have to ensure it was added as such. if (areMutuallyExclusive === false) { return result === false; } return true; }; PairSet.prototype.add = function add(a, b, areMutuallyExclusive) { _pairSetAdd(this._data, a, b, areMutuallyExclusive); _pairSetAdd(this._data, b, a, areMutuallyExclusive); }; return PairSet; }(); function _pairSetAdd(data, a, b, areMutuallyExclusive) { var map = data[a]; if (!map) { map = Object.create(null); data[a] = map; } map[b] = areMutuallyExclusive; }
Khan/khan-linter
node_modules/graphql/validation/rules/OverlappingFieldsCanBeMerged.js
JavaScript
apache-2.0
25,753
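The PairSet at the bottom of the validator above is the key memoization. A minimal Java sketch of the same idea (hypothetical class, not the graphql-js API): pairs are unordered, and a pair recorded as non-mutually-exclusive also satisfies a later mutually-exclusive lookup, but not the other way around.

import java.util.HashMap;
import java.util.Map;

// Illustrative Java sketch of graphql-js's PairSet: remembers unordered
// pairs together with whether they were compared as mutually exclusive.
final class PairSetSketch {
  private final Map<String, Map<String, Boolean>> data = new HashMap<>();

  boolean has(String a, String b, boolean areMutuallyExclusive) {
    Boolean result = data.getOrDefault(a, Map.of()).get(b);
    if (result == null) {
      return false;
    }
    // A non-exclusive comparison covers the exclusive case, not vice versa.
    return areMutuallyExclusive || !result;
  }

  void add(String a, String b, boolean areMutuallyExclusive) {
    // Store both orientations so lookups are order-independent.
    data.computeIfAbsent(a, k -> new HashMap<>()).put(b, areMutuallyExclusive);
    data.computeIfAbsent(b, k -> new HashMap<>()).put(a, areMutuallyExclusive);
  }
}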
/* * Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ #include <aws/datapipeline/model/DescribeObjectsRequest.h> #include <aws/core/utils/json/JsonSerializer.h> #include <utility> using namespace Aws::DataPipeline::Model; using namespace Aws::Utils::Json; using namespace Aws::Utils; DescribeObjectsRequest::DescribeObjectsRequest() : m_pipelineIdHasBeenSet(false), m_objectIdsHasBeenSet(false), m_evaluateExpressions(false), m_evaluateExpressionsHasBeenSet(false), m_markerHasBeenSet(false) { } Aws::String DescribeObjectsRequest::SerializePayload() const { JsonValue payload; if(m_pipelineIdHasBeenSet) { payload.WithString("pipelineId", m_pipelineId); } if(m_objectIdsHasBeenSet) { Array<JsonValue> objectIdsJsonList(m_objectIds.size()); for(unsigned objectIdsIndex = 0; objectIdsIndex < objectIdsJsonList.GetLength(); ++objectIdsIndex) { objectIdsJsonList[objectIdsIndex].AsString(m_objectIds[objectIdsIndex]); } payload.WithArray("objectIds", std::move(objectIdsJsonList)); } if(m_evaluateExpressionsHasBeenSet) { payload.WithBool("evaluateExpressions", m_evaluateExpressions); } if(m_markerHasBeenSet) { payload.WithString("marker", m_marker); } return payload.WriteReadable(); } Aws::Http::HeaderValueCollection DescribeObjectsRequest::GetRequestSpecificHeaders() const { Aws::Http::HeaderValueCollection headers; headers.insert(Aws::Http::HeaderValuePair("X-Amz-Target", "DataPipeline.DescribeObjects")); return headers; }
chiaming0914/awe-cpp-sdk
aws-cpp-sdk-datapipeline/source/model/DescribeObjectsRequest.cpp
C++
apache-2.0
2,056
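SerializePayload above emits a single JSON document and GetRequestSpecificHeaders adds the X-Amz-Target routing header. A request with all four members set would look roughly like this (the identifier and marker values below are made up for illustration):

X-Amz-Target: DataPipeline.DescribeObjects

{
  "pipelineId": "df-EXAMPLE",
  "objectIds": ["o-1", "o-2"],
  "evaluateExpressions": true,
  "marker": "EXAMPLE-MARKER"
}

Each field is guarded by its HasBeenSet flag, so unset members are simply absent from the payload.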
// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "content/common/resource_messages.h" #include "net/base/load_timing_info.h" #include "net/http/http_response_headers.h" #include "webkit/glue/resource_loader_bridge.h" namespace IPC { void ParamTraits<scoped_refptr<net::HttpResponseHeaders> >::Write( Message* m, const param_type& p) { WriteParam(m, p.get() != NULL); if (p) { // Do not disclose Set-Cookie headers over IPC. p->Persist(m, net::HttpResponseHeaders::PERSIST_SANS_COOKIES); } } bool ParamTraits<scoped_refptr<net::HttpResponseHeaders> >::Read( const Message* m, PickleIterator* iter, param_type* r) { bool has_object; if (!ReadParam(m, iter, &has_object)) return false; if (has_object) *r = new net::HttpResponseHeaders(*m, iter); return true; } void ParamTraits<scoped_refptr<net::HttpResponseHeaders> >::Log( const param_type& p, std::string* l) { l->append("<HttpResponseHeaders>"); } void ParamTraits<webkit_base::DataElement>::Write( Message* m, const param_type& p) { WriteParam(m, static_cast<int>(p.type())); switch (p.type()) { case webkit_base::DataElement::TYPE_BYTES: { m->WriteData(p.bytes(), static_cast<int>(p.length())); break; } case webkit_base::DataElement::TYPE_FILE: { WriteParam(m, p.path()); WriteParam(m, p.offset()); WriteParam(m, p.length()); WriteParam(m, p.expected_modification_time()); break; } case webkit_base::DataElement::TYPE_FILE_FILESYSTEM: { WriteParam(m, p.url()); WriteParam(m, p.offset()); WriteParam(m, p.length()); WriteParam(m, p.expected_modification_time()); break; } default: { DCHECK(p.type() == webkit_base::DataElement::TYPE_BLOB); WriteParam(m, p.url()); WriteParam(m, p.offset()); WriteParam(m, p.length()); break; } } } bool ParamTraits<webkit_base::DataElement>::Read( const Message* m, PickleIterator* iter, param_type* r) { int type; if (!ReadParam(m, iter, &type)) return false; switch (type) { case webkit_base::DataElement::TYPE_BYTES: { const char* data; int len; if (!m->ReadData(iter, &data, &len)) return false; r->SetToBytes(data, len); break; } case webkit_base::DataElement::TYPE_FILE: { base::FilePath file_path; uint64 offset, length; base::Time expected_modification_time; if (!ReadParam(m, iter, &file_path)) return false; if (!ReadParam(m, iter, &offset)) return false; if (!ReadParam(m, iter, &length)) return false; if (!ReadParam(m, iter, &expected_modification_time)) return false; r->SetToFilePathRange(file_path, offset, length, expected_modification_time); break; } case webkit_base::DataElement::TYPE_FILE_FILESYSTEM: { GURL file_system_url; uint64 offset, length; base::Time expected_modification_time; if (!ReadParam(m, iter, &file_system_url)) return false; if (!ReadParam(m, iter, &offset)) return false; if (!ReadParam(m, iter, &length)) return false; if (!ReadParam(m, iter, &expected_modification_time)) return false; r->SetToFileSystemUrlRange(file_system_url, offset, length, expected_modification_time); break; } default: { DCHECK(type == webkit_base::DataElement::TYPE_BLOB); GURL blob_url; uint64 offset, length; if (!ReadParam(m, iter, &blob_url)) return false; if (!ReadParam(m, iter, &offset)) return false; if (!ReadParam(m, iter, &length)) return false; r->SetToBlobUrlRange(blob_url, offset, length); break; } } return true; } void ParamTraits<webkit_base::DataElement>::Log( const param_type& p, std::string* l) { l->append("<webkit_base::DataElement>"); } void 
ParamTraits<scoped_refptr<webkit_glue::ResourceDevToolsInfo> >::Write( Message* m, const param_type& p) { WriteParam(m, p.get() != NULL); if (p) { WriteParam(m, p->http_status_code); WriteParam(m, p->http_status_text); WriteParam(m, p->request_headers); WriteParam(m, p->response_headers); WriteParam(m, p->request_headers_text); WriteParam(m, p->response_headers_text); } } bool ParamTraits<scoped_refptr<webkit_glue::ResourceDevToolsInfo> >::Read( const Message* m, PickleIterator* iter, param_type* r) { bool has_object; if (!ReadParam(m, iter, &has_object)) return false; if (!has_object) return true; *r = new webkit_glue::ResourceDevToolsInfo(); return ReadParam(m, iter, &(*r)->http_status_code) && ReadParam(m, iter, &(*r)->http_status_text) && ReadParam(m, iter, &(*r)->request_headers) && ReadParam(m, iter, &(*r)->response_headers) && ReadParam(m, iter, &(*r)->request_headers_text) && ReadParam(m, iter, &(*r)->response_headers_text); } void ParamTraits<scoped_refptr<webkit_glue::ResourceDevToolsInfo> >::Log( const param_type& p, std::string* l) { l->append("("); if (p) { LogParam(p->request_headers, l); l->append(", "); LogParam(p->response_headers, l); } l->append(")"); } void ParamTraits<net::LoadTimingInfo>::Write( Message* m, const param_type& p) { WriteParam(m, p.socket_log_id); WriteParam(m, p.socket_reused); WriteParam(m, p.request_start_time.is_null()); if (p.request_start_time.is_null()) return; WriteParam(m, p.request_start_time); WriteParam(m, p.request_start); WriteParam(m, p.proxy_resolve_start); WriteParam(m, p.proxy_resolve_end); WriteParam(m, p.connect_timing.dns_start); WriteParam(m, p.connect_timing.dns_end); WriteParam(m, p.connect_timing.connect_start); WriteParam(m, p.connect_timing.connect_end); WriteParam(m, p.connect_timing.ssl_start); WriteParam(m, p.connect_timing.ssl_end); WriteParam(m, p.send_start); WriteParam(m, p.send_end); WriteParam(m, p.receive_headers_end); } bool ParamTraits<net::LoadTimingInfo>::Read( const Message* m, PickleIterator* iter, param_type* r) { bool has_no_times; if (!ReadParam(m, iter, &r->socket_log_id) || !ReadParam(m, iter, &r->socket_reused) || !ReadParam(m, iter, &has_no_times)) { return false; } if (has_no_times) return true; return ReadParam(m, iter, &r->request_start_time) && ReadParam(m, iter, &r->request_start) && ReadParam(m, iter, &r->proxy_resolve_start) && ReadParam(m, iter, &r->proxy_resolve_end) && ReadParam(m, iter, &r->connect_timing.dns_start) && ReadParam(m, iter, &r->connect_timing.dns_end) && ReadParam(m, iter, &r->connect_timing.connect_start) && ReadParam(m, iter, &r->connect_timing.connect_end) && ReadParam(m, iter, &r->connect_timing.ssl_start) && ReadParam(m, iter, &r->connect_timing.ssl_end) && ReadParam(m, iter, &r->send_start) && ReadParam(m, iter, &r->send_end) && ReadParam(m, iter, &r->receive_headers_end); } void ParamTraits<net::LoadTimingInfo>::Log(const param_type& p, std::string* l) { l->append("("); LogParam(p.socket_log_id, l); l->append(","); LogParam(p.socket_reused, l); l->append(","); LogParam(p.request_start_time, l); l->append(", "); LogParam(p.request_start, l); l->append(", "); LogParam(p.proxy_resolve_start, l); l->append(", "); LogParam(p.proxy_resolve_end, l); l->append(", "); LogParam(p.connect_timing.dns_start, l); l->append(", "); LogParam(p.connect_timing.dns_end, l); l->append(", "); LogParam(p.connect_timing.connect_start, l); l->append(", "); LogParam(p.connect_timing.connect_end, l); l->append(", "); LogParam(p.connect_timing.ssl_start, l); l->append(", "); 
LogParam(p.connect_timing.ssl_end, l); l->append(", "); LogParam(p.send_start, l); l->append(", "); LogParam(p.send_end, l); l->append(", "); LogParam(p.receive_headers_end, l); l->append(")"); } void ParamTraits<scoped_refptr<webkit_glue::ResourceRequestBody> >::Write( Message* m, const param_type& p) { WriteParam(m, p.get() != NULL); if (p) { WriteParam(m, *p->elements()); WriteParam(m, p->identifier()); } } bool ParamTraits<scoped_refptr<webkit_glue::ResourceRequestBody> >::Read( const Message* m, PickleIterator* iter, param_type* r) { bool has_object; if (!ReadParam(m, iter, &has_object)) return false; if (!has_object) return true; std::vector<webkit_base::DataElement> elements; if (!ReadParam(m, iter, &elements)) return false; int64 identifier; if (!ReadParam(m, iter, &identifier)) return false; *r = new webkit_glue::ResourceRequestBody; (*r)->swap_elements(&elements); (*r)->set_identifier(identifier); return true; } void ParamTraits<scoped_refptr<webkit_glue::ResourceRequestBody> >::Log( const param_type& p, std::string* l) { l->append("<webkit_glue::ResourceRequestBody>"); } } // namespace IPC
plxaye/chromium
src/content/common/resource_messages.cc
C++
apache-2.0
9,210
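Each ParamTraits specialization above follows the same presence-flag discipline: Write() emits a bool saying whether the object exists and then its fields in a fixed order, and Read() must consume exactly the same sequence. A minimal Java sketch of that contract (hypothetical names, using java.io streams rather than Chromium's Pickle):

import java.io.*;

// Illustrative sketch of the presence-flag pattern used by the
// scoped_refptr ParamTraits above; not Chromium code.
final class PresenceFlagCodec {
  static void write(DataOutputStream out, Integer statusCode) throws IOException {
    out.writeBoolean(statusCode != null); // has_object flag first
    if (statusCode != null) {
      out.writeInt(statusCode);           // then fields, in a fixed order
    }
  }

  static Integer read(DataInputStream in) throws IOException {
    if (!in.readBoolean()) {
      return null;                        // no object was written
    }
    return in.readInt();                  // read fields in the same order
  }
}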
/* * #%L * SparkCommerce Framework * %% * Copyright (C) 2009 - 2013 Spark Commerce * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.sparkcommerce.core.util.domain; import java.io.Serializable; @Deprecated public interface CodeType extends Serializable { public void setId(Long id); public Long getId(); public void setCodeType(String type); public String getCodeType(); public void setKey(String key); public String getKey(); public void setDescription(String description); public String getDescription(); public void setModifiable(Boolean modifiable); public Boolean getModifiable(); public Boolean isModifiable(); }
akdasari/SparkCore
spark-framework/src/main/java/org/sparkcommerce/core/util/domain/CodeType.java
Java
apache-2.0
1,219
#include "search/geometry_utils.hpp" #include "indexer/scales.hpp" #include "geometry/mercator.hpp" namespace search { double PointDistance(m2::PointD const & a, m2::PointD const & b) { return MercatorBounds::DistanceOnEarth(a, b); } bool IsEqualMercator(m2::RectD const & r1, m2::RectD const & r2, double eps) { return m2::IsEqual(r1, r2, eps, eps); } } // namespace search
VladiMihaylenko/omim
search/geometry_utils.cpp
C++
apache-2.0
384
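PointDistance above delegates to MercatorBounds::DistanceOnEarth once the points are in mercator coordinates. Purely as an illustration of the underlying great-circle computation, here is a spherical-earth haversine sketch in Java over latitude/longitude inputs (hypothetical names; the real omim code converts from mercator first and may use a different earth model):

// Standalone haversine sketch; not the omim MercatorBounds implementation.
final class Haversine {
  static final double EARTH_RADIUS_M = 6_378_000.0; // assumption: spherical earth

  static double distanceM(double lat1, double lon1, double lat2, double lon2) {
    double dLat = Math.toRadians(lat2 - lat1);
    double dLon = Math.toRadians(lon2 - lon1);
    double a = Math.sin(dLat / 2) * Math.sin(dLat / 2)
        + Math.cos(Math.toRadians(lat1)) * Math.cos(Math.toRadians(lat2))
          * Math.sin(dLon / 2) * Math.sin(dLon / 2);
    return 2 * EARTH_RADIUS_M * Math.asin(Math.sqrt(a)); // d = 2R * asin(sqrt(a))
  }
}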
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.apache.kudu.client; import static org.junit.Assert.assertEquals; import java.util.ArrayList; import java.util.List; import com.google.common.collect.ImmutableList; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.apache.kudu.ColumnSchema; import org.apache.kudu.Schema; import org.apache.kudu.Type; import org.apache.kudu.client.KuduPredicate.ComparisonOp; import org.apache.kudu.test.KuduTestHarness; public class TestPartitionPruner { private KuduClient client; @Rule public KuduTestHarness harness = new KuduTestHarness(); @Before public void setUp() { client = harness.getClient(); } /** * Counts the partitions touched by a scan with optional primary key bounds. * The table is assumed to have three INT8 columns as the primary key. * * @param expectedTablets the expected number of tablets to satisfy the scan * @param table the table to scan * @param partitions the partitions of the table * @param lowerBoundPrimaryKey the optional lower bound primary key * @param upperBoundPrimaryKey the optional upper bound primary key */ private void checkPartitionsPrimaryKey(int expectedTablets, KuduTable table, List<Partition> partitions, byte[] lowerBoundPrimaryKey, byte[] upperBoundPrimaryKey) throws Exception { KuduScanToken.KuduScanTokenBuilder scanBuilder = client.newScanTokenBuilder(table); if (lowerBoundPrimaryKey != null) { PartialRow lower = table.getSchema().newPartialRow(); for (int i = 0; i < 3; i++) { lower.addByte(i, lowerBoundPrimaryKey[i]); } scanBuilder.lowerBound(lower); } if (upperBoundPrimaryKey != null) { PartialRow upper = table.getSchema().newPartialRow(); for (int i = 0; i < 3; i++) { upper.addByte(i, upperBoundPrimaryKey[i]); } scanBuilder.exclusiveUpperBound(upper); } PartitionPruner pruner = PartitionPruner.create(scanBuilder); int scannedPartitions = 0; for (Partition partition : partitions) { if (!pruner.shouldPruneForTests(partition)) { scannedPartitions++; } } // Check that the number of ScanTokens built for the scan matches. assertEquals(expectedTablets, scannedPartitions); assertEquals(scannedPartitions, scanBuilder.build().size()); assertEquals(expectedTablets == 0 ? 0 : 1, pruner.numRangesRemainingForTests()); } /** * Checks the number of tablets and pruner ranges generated for a scan. * * @param expectedTablets the expected number of tablets to satisfy the scan * @param expectedPrunerRanges the expected number of generated partition pruner ranges * @param table the table to scan * @param partitions the partitions of the table * @param predicates the predicates to apply to the scan */ private void checkPartitions(int expectedTablets, int expectedPrunerRanges, KuduTable table, List<Partition> partitions, KuduPredicate... 
predicates) { checkPartitions(expectedTablets, expectedPrunerRanges, table, partitions, null, null, predicates); } /** * Checks the number of tablets and pruner ranges generated for a scan with * predicates and optional partition key bounds. * * @param expectedTablets the expected number of tablets to satisfy the scan * @param expectedPrunerRanges the expected number of generated partition pruner ranges * @param table the table to scan * @param partitions the partitions of the table * @param lowerBoundPartitionKey an optional lower bound partition key * @param upperBoundPartitionKey an optional upper bound partition key * @param predicates the predicates to apply to the scan */ private void checkPartitions(int expectedTablets, int expectedPrunerRanges, KuduTable table, List<Partition> partitions, byte[] lowerBoundPartitionKey, byte[] upperBoundPartitionKey, KuduPredicate... predicates) { // Partition key bounds can't be applied to the ScanTokenBuilder. KuduScanner.KuduScannerBuilder scanBuilder = client.newScannerBuilder(table); for (KuduPredicate predicate : predicates) { scanBuilder.addPredicate(predicate); } if (lowerBoundPartitionKey != null) { scanBuilder.lowerBoundPartitionKeyRaw(lowerBoundPartitionKey); } if (upperBoundPartitionKey != null) { scanBuilder.exclusiveUpperBoundPartitionKeyRaw(upperBoundPartitionKey); } PartitionPruner pruner = PartitionPruner.create(scanBuilder); int scannedPartitions = 0; for (Partition partition : partitions) { if (!pruner.shouldPruneForTests(partition)) { scannedPartitions++; } } assertEquals(expectedTablets, scannedPartitions); assertEquals(expectedPrunerRanges, pruner.numRangesRemainingForTests()); // Check that the scan token builder comes up with the same amount. // The scan token builder does not allow for upper/lower partition keys. if (lowerBoundPartitionKey == null && upperBoundPartitionKey == null) { KuduScanToken.KuduScanTokenBuilder tokenBuilder = client.newScanTokenBuilder(table); for (KuduPredicate predicate : predicates) { tokenBuilder.addPredicate(predicate); } // Check that the number of ScanTokens built for the scan matches. assertEquals(expectedTablets, tokenBuilder.build().size()); } } /** * Retrieves the partitions of a table. 
* * @param table the table * @return the partitions of the table */ private List<Partition> getTablePartitions(KuduTable table) { List<Partition> partitions = new ArrayList<>(); for (KuduScanToken token : client.newScanTokenBuilder(table).build()) { partitions.add(token.getTablet().getPartition()); } return partitions; } @Test public void testPrimaryKeyRangePruning() throws Exception { // CREATE TABLE t // (a INT8, b INT8, c INT8) // PRIMARY KEY (a, b, c)) // PARTITION BY RANGE (a, b, c) // (PARTITION VALUES < (0, 0, 0), // PARTITION (0, 0, 0) <= VALUES < (10, 10, 10) // PARTITION (10, 10, 10) <= VALUES); ArrayList<ColumnSchema> columns = new ArrayList<>(3); columns.add(new ColumnSchema.ColumnSchemaBuilder("a", Type.INT8).key(true).build()); columns.add(new ColumnSchema.ColumnSchemaBuilder("b", Type.INT8).key(true).build()); columns.add(new ColumnSchema.ColumnSchemaBuilder("c", Type.INT8).key(true).build()); Schema schema = new Schema(columns); CreateTableOptions tableBuilder = new CreateTableOptions(); tableBuilder.setRangePartitionColumns(ImmutableList.of("a", "b", "c")); PartialRow split = schema.newPartialRow(); split.addByte("a", (byte) 0); split.addByte("b", (byte) 0); split.addByte("c", (byte) 0); tableBuilder.addSplitRow(split); split.addByte("a", (byte) 10); split.addByte("b", (byte) 10); split.addByte("c", (byte) 10); tableBuilder.addSplitRow(split); String tableName = "testPrimaryKeyRangePruning-" + System.currentTimeMillis(); client.createTable(tableName, schema, tableBuilder); KuduTable table = client.openTable(tableName); List<Partition> partitions = getTablePartitions(table); byte min = Byte.MIN_VALUE; // No bounds checkPartitionsPrimaryKey(3, table, partitions, null, null); // PK < (-1, min, min) checkPartitionsPrimaryKey(1, table, partitions, null, new byte[] { -1, min, min }); // PK < (0, 0, 0) checkPartitionsPrimaryKey(1, table, partitions, null, new byte[] { 0, 0, 0 }); // PK < (0, 0, min) checkPartitionsPrimaryKey(1, table, partitions, null, new byte[] { 0, 0, min }); // PK < (10, 10, 10) checkPartitionsPrimaryKey(2, table, partitions, null, new byte[] { 10, 10, 10 }); // PK < (100, min, min) checkPartitionsPrimaryKey(3, table, partitions, null, new byte[] { 100, min, min }); // PK >= (-10, -10, -10) checkPartitionsPrimaryKey(3, table, partitions, new byte[] { -10, -10, -10 }, null); // PK >= (0, 0, 0) checkPartitionsPrimaryKey(2, table, partitions, new byte[] { 0, 0, 0 }, null); // PK >= (100, 0, 0) checkPartitionsPrimaryKey(1, table, partitions, new byte[] { 100, 0, 0 }, null); // PK >= (-10, 0, 0) // PK < (100, 0, 0) checkPartitionsPrimaryKey(3, table, partitions, new byte[] { -10, 0, 0 }, new byte[] { 100, 0, 0 }); // PK >= (0, 0, 0) // PK < (10, 10, 10) checkPartitionsPrimaryKey(1, table, partitions, new byte[] { 0, 0, 0 }, new byte[] { 10, 0, 0 }); // PK >= (0, 0, 0) // PK < (10, 10, 11) checkPartitionsPrimaryKey(1, table, partitions, new byte[] { 0, 0, 0 }, new byte[] { 10, 0, 0 }); // PK < (0, 0, 0) // PK >= (10, 10, 11) checkPartitionsPrimaryKey(0, table, partitions, new byte[] { 10, 0, 0 }, new byte[] { 0, 0, 0 }); } @Test public void testPrimaryKeyPrefixRangePruning() throws Exception { // CREATE TABLE t // (a INT8, b INT8, c INT8) // PRIMARY KEY (a, b, c)) // PARTITION BY RANGE (a, b) // (PARTITION VALUES < (0, 0, 0)); ArrayList<ColumnSchema> columns = new ArrayList<>(3); columns.add(new ColumnSchema.ColumnSchemaBuilder("a", Type.INT8).key(true).build()); columns.add(new ColumnSchema.ColumnSchemaBuilder("b", Type.INT8).key(true).build()); columns.add(new 
ColumnSchema.ColumnSchemaBuilder("c", Type.INT8).key(true).build()); Schema schema = new Schema(columns); CreateTableOptions tableBuilder = new CreateTableOptions(); tableBuilder.setRangePartitionColumns(ImmutableList.of("a", "b")); PartialRow split = schema.newPartialRow(); split.addByte("a", (byte) 0); split.addByte("b", (byte) 0); tableBuilder.addSplitRow(split); String tableName = "testPrimaryKeyPrefixRangePruning-" + System.currentTimeMillis(); client.createTable(tableName, schema, tableBuilder); KuduTable table = client.openTable(tableName); List<Partition> partitions = getTablePartitions(table); final byte min = Byte.MIN_VALUE; final byte max = Byte.MAX_VALUE; // No bounds checkPartitionsPrimaryKey(2, table, partitions, null, null); // PK < (-1, min, min) // TODO(KUDU-2178): prune the upper partition. checkPartitionsPrimaryKey(2, table, partitions, null, new byte[] { -1, min, min }); // PK < (0, 0, min) // TODO(KUDU-2178): prune the upper partition. checkPartitionsPrimaryKey(2, table, partitions, null, new byte[] { 0, 0, min }); // PK < (0, 0, 0) checkPartitionsPrimaryKey(2, table, partitions, null, new byte[] { 0, 0, 0 }); // PK < (0, 1, min) checkPartitionsPrimaryKey(2, table, partitions, null, new byte[] { 0, 1, min }); // PK < (0, 1, 0) checkPartitionsPrimaryKey(2, table, partitions, null, new byte[] { 0, 1, 0 }); // PK < (max, max, min) checkPartitionsPrimaryKey(2, table, partitions, null, new byte[] { max, max, min }); // PK < (max, max, 0) checkPartitionsPrimaryKey(2, table, partitions, null, new byte[] { max, max, 0 }); // PK >= (0, 0, min) // TODO(KUDU-2178): prune the lower partition. checkPartitionsPrimaryKey(2, table, partitions, new byte[] { 0, 0, min }, null); // PK >= (0, 0, 0) // TODO(KUDU-2178): prune the lower partition. checkPartitionsPrimaryKey(2, table, partitions, new byte[] { 0, 0, 0 }, null); // PK >= (0, -1, 0) checkPartitionsPrimaryKey(2, table, partitions, new byte[] { 0, -1, 0 }, null); } @Test public void testRangePartitionPruning() throws Exception { // CREATE TABLE t // (a INT8, b STRING, c INT8) // PRIMARY KEY (a, b, c)) // PARTITION BY RANGE (c, b) // (PARTITION VALUES < (0, "m"), // PARTITION (0, "m") <= VALUES < (10, "r") // PARTITION (10, "r") <= VALUES); ColumnSchema a = new ColumnSchema.ColumnSchemaBuilder("a", Type.INT8).key(true).build(); ColumnSchema b = new ColumnSchema.ColumnSchemaBuilder("b", Type.STRING).key(true).build(); ColumnSchema c = new ColumnSchema.ColumnSchemaBuilder("c", Type.INT8).key(true).build(); Schema schema = new Schema(ImmutableList.of(a, b, c)); CreateTableOptions tableBuilder = new CreateTableOptions(); tableBuilder.setRangePartitionColumns(ImmutableList.of("c", "b")); PartialRow split = schema.newPartialRow(); split.addByte("c", (byte) 0); split.addString("b", "m"); tableBuilder.addSplitRow(split); split.addByte("c", (byte) 10); split.addString("b", "r"); tableBuilder.addSplitRow(split); String tableName = "testRangePartitionPruning-" + System.currentTimeMillis(); client.createTable(tableName, schema, tableBuilder); KuduTable table = client.openTable(tableName); List<Partition> partitions = getTablePartitions(table); // No Predicates checkPartitions(3, 1, table, partitions); // c < -10 checkPartitions(1, 1, table, partitions, KuduPredicate.newComparisonPredicate(c, ComparisonOp.LESS, -10)); // c = -10 checkPartitions(1, 1, table, partitions, KuduPredicate.newComparisonPredicate(c, ComparisonOp.EQUAL, -10)); // c < 10 checkPartitions(2, 1, table, partitions, KuduPredicate.newComparisonPredicate(c, ComparisonOp.LESS, 
10)); // c < 100 checkPartitions(3, 1, table, partitions, KuduPredicate.newComparisonPredicate(c, ComparisonOp.LESS, 100)); // c < MIN checkPartitions(0, 0, table, partitions, KuduPredicate.newComparisonPredicate(c, ComparisonOp.LESS, Byte.MIN_VALUE)); // c < MAX checkPartitions(3, 1, table, partitions, KuduPredicate.newComparisonPredicate(c, ComparisonOp.LESS, Byte.MAX_VALUE)); // c >= -10 checkPartitions(3, 1, table, partitions, KuduPredicate.newComparisonPredicate(c, ComparisonOp.GREATER_EQUAL, -10)); // c >= 0 checkPartitions(3, 1, table, partitions, KuduPredicate.newComparisonPredicate(c, ComparisonOp.GREATER_EQUAL, -10)); // c >= 5 checkPartitions(2, 1, table, partitions, KuduPredicate.newComparisonPredicate(c, ComparisonOp.GREATER_EQUAL, 5)); // c >= 10 checkPartitions(2, 1, table, partitions, KuduPredicate.newComparisonPredicate(c, ComparisonOp.GREATER_EQUAL, 10)); // c >= 100 checkPartitions(1, 1, table, partitions, KuduPredicate.newComparisonPredicate(c, ComparisonOp.GREATER_EQUAL, 100)); // c >= MIN checkPartitions(3, 1, table, partitions, KuduPredicate.newComparisonPredicate(c, ComparisonOp.GREATER_EQUAL, Byte.MIN_VALUE)); // c >= MAX checkPartitions(1, 1, table, partitions, KuduPredicate.newComparisonPredicate(c, ComparisonOp.GREATER_EQUAL, Byte.MAX_VALUE)); // c >= -10 // c < 0 checkPartitions(1, 1, table, partitions, KuduPredicate.newComparisonPredicate(c, ComparisonOp.GREATER_EQUAL, -10), KuduPredicate.newComparisonPredicate(c, ComparisonOp.LESS, 0)); // c >= 5 // c < 100 checkPartitions(2, 1, table, partitions, KuduPredicate.newComparisonPredicate(c, ComparisonOp.GREATER_EQUAL, 5), KuduPredicate.newComparisonPredicate(c, ComparisonOp.LESS, 100)); // b = "" checkPartitions(3, 1, table, partitions, KuduPredicate.newComparisonPredicate(b, ComparisonOp.EQUAL, "")); // b >= "z" checkPartitions(3, 1, table, partitions, KuduPredicate.newComparisonPredicate(b, ComparisonOp.GREATER_EQUAL, "z")); // b < "a" checkPartitions(3, 1, table, partitions, KuduPredicate.newComparisonPredicate(b, ComparisonOp.LESS, "a")); // b >= "m" // b < "z" checkPartitions(3, 1, table, partitions, KuduPredicate.newComparisonPredicate(b, ComparisonOp.GREATER_EQUAL, "m"), KuduPredicate.newComparisonPredicate(b, ComparisonOp.LESS, "z")); // c >= 10 // b >= "r" checkPartitions(1, 1, table, partitions, KuduPredicate.newComparisonPredicate(c, ComparisonOp.GREATER_EQUAL, 10), KuduPredicate.newComparisonPredicate(b, ComparisonOp.GREATER_EQUAL, "r")); // c >= 10 // b < "r" checkPartitions(2, 1, table, partitions, KuduPredicate.newComparisonPredicate(c, ComparisonOp.GREATER_EQUAL, 10), KuduPredicate.newComparisonPredicate(b, ComparisonOp.LESS, "r")); // c = 10 // b < "r" checkPartitions(1, 1, table, partitions, KuduPredicate.newComparisonPredicate(c, ComparisonOp.EQUAL, 10), KuduPredicate.newComparisonPredicate(b, ComparisonOp.LESS, "r")); // c < 0 // b < "m" checkPartitions(1, 1, table, partitions, KuduPredicate.newComparisonPredicate(c, ComparisonOp.EQUAL, 0), KuduPredicate.newComparisonPredicate(b, ComparisonOp.LESS, "m")); // c < 0 // b < "z" checkPartitions(1, 1, table, partitions, KuduPredicate.newComparisonPredicate(c, ComparisonOp.LESS, 0), KuduPredicate.newComparisonPredicate(b, ComparisonOp.LESS, "z")); // c = 0 // b = "m\0" checkPartitions(1, 1, table, partitions, KuduPredicate.newComparisonPredicate(c, ComparisonOp.EQUAL, 0), KuduPredicate.newComparisonPredicate(b, ComparisonOp.EQUAL, "m\0")); // c = 0 // b < "m" checkPartitions(1, 1, table, partitions, KuduPredicate.newComparisonPredicate(c, 
ComparisonOp.EQUAL, 0), KuduPredicate.newComparisonPredicate(b, ComparisonOp.LESS, "m")); // c = 0 // b < "m\0" checkPartitions(2, 1, table, partitions, KuduPredicate.newComparisonPredicate(c, ComparisonOp.EQUAL, 0), KuduPredicate.newComparisonPredicate(b, ComparisonOp.LESS, "m\0")); // c = 0 // c = 2 checkPartitions(0, 0, table, partitions, KuduPredicate.newComparisonPredicate(c, ComparisonOp.EQUAL, 0), KuduPredicate.newComparisonPredicate(c, ComparisonOp.EQUAL, 2)); // c = MIN checkPartitions(1, 1, table, partitions, KuduPredicate.newComparisonPredicate(c, ComparisonOp.EQUAL, Byte.MIN_VALUE)); // c = MAX checkPartitions(1, 1, table, partitions, KuduPredicate.newComparisonPredicate(c, ComparisonOp.EQUAL, Byte.MAX_VALUE)); // c IN (1, 2) checkPartitions(1, 1, table, partitions, KuduPredicate.newInListPredicate(c, ImmutableList.of((byte) 1, (byte) 2))); // c IN (0, 1, 2) checkPartitions(2, 1, table, partitions, KuduPredicate.newInListPredicate(c, ImmutableList.of((byte) 0, (byte) 1, (byte) 2))); // c IN (-10, 0) // b < "m" checkPartitions(1, 1, table, partitions, KuduPredicate.newInListPredicate(c, ImmutableList.of((byte) -10, (byte) 0)), KuduPredicate.newComparisonPredicate(b, ComparisonOp.LESS, "m")); // c IN (-10, 0) // b < "m\0" checkPartitions(2, 1, table, partitions, KuduPredicate.newInListPredicate(c, ImmutableList.of((byte) -10, (byte) 0)), KuduPredicate.newComparisonPredicate(b, ComparisonOp.LESS, "m\0")); } @Test public void testHashPartitionPruning() throws Exception { // CREATE TABLE t // (a INT8, b INT8, c INT8) // PRIMARY KEY (a, b, c) // PARTITION BY HASH (a) PARTITIONS 2, // HASH (b, c) PARTITIONS 2; ColumnSchema a = new ColumnSchema.ColumnSchemaBuilder("a", Type.INT8).key(true).build(); ColumnSchema b = new ColumnSchema.ColumnSchemaBuilder("b", Type.INT8).key(true).build(); ColumnSchema c = new ColumnSchema.ColumnSchemaBuilder("c", Type.INT8).key(true).build(); final Schema schema = new Schema(ImmutableList.of(a, b, c)); CreateTableOptions tableBuilder = new CreateTableOptions(); tableBuilder.setRangePartitionColumns(new ArrayList<>()); tableBuilder.addHashPartitions(ImmutableList.of("a"), 2); tableBuilder.addHashPartitions(ImmutableList.of("b", "c"), 2); String tableName = "testHashPartitionPruning-" + System.currentTimeMillis(); client.createTable(tableName, schema, tableBuilder); KuduTable table = client.openTable(tableName); List<Partition> partitions = getTablePartitions(table); // No Predicates checkPartitions(4, 1, table, partitions); // a = 0; checkPartitions(2, 1, table, partitions, KuduPredicate.newComparisonPredicate(a, ComparisonOp.EQUAL, 0)); // a >= 0; checkPartitions(4, 1, table, partitions, KuduPredicate.newComparisonPredicate(a, ComparisonOp.GREATER_EQUAL, 0)); // a >= 0; // a < 1; checkPartitions(2, 1, table, partitions, KuduPredicate.newComparisonPredicate(a, ComparisonOp.GREATER_EQUAL, 0), KuduPredicate.newComparisonPredicate(a, ComparisonOp.LESS, 1)); // a >= 0; // a < 2; checkPartitions(4, 1, table, partitions, KuduPredicate.newComparisonPredicate(a, ComparisonOp.GREATER_EQUAL, 0), KuduPredicate.newComparisonPredicate(a, ComparisonOp.LESS, 2)); // b = 1; checkPartitions(4, 1, table, partitions, KuduPredicate.newComparisonPredicate(b, ComparisonOp.EQUAL, 1)); // b = 1; // c = 2; checkPartitions(2, 2, table, partitions, KuduPredicate.newComparisonPredicate(b, ComparisonOp.EQUAL, 1), KuduPredicate.newComparisonPredicate(c, ComparisonOp.EQUAL, 2)); // a = 0; // b = 1; // c = 2; checkPartitions(1, 1, table, partitions, 
KuduPredicate.newComparisonPredicate(a, ComparisonOp.EQUAL, 0), KuduPredicate.newComparisonPredicate(b, ComparisonOp.EQUAL, 1), KuduPredicate.newComparisonPredicate(c, ComparisonOp.EQUAL, 2)); // a IN (0, 10) checkPartitions(4, 1, table, partitions, KuduPredicate.newInListPredicate(a, ImmutableList.of((byte) 0, (byte) 10))); } @Test public void testInListHashPartitionPruning() throws Exception { // CREATE TABLE t // (a INT8, b INT8, c INT8) // PRIMARY KEY (a, b, c) // PARTITION BY HASH (a) PARTITIONS 3, // HASH (b) PARTITIONS 3, // HASH (c) PARTITIONS 3; ColumnSchema a = new ColumnSchema.ColumnSchemaBuilder("a", Type.INT8).key(true).build(); ColumnSchema b = new ColumnSchema.ColumnSchemaBuilder("b", Type.INT8).key(true).build(); ColumnSchema c = new ColumnSchema.ColumnSchemaBuilder("c", Type.INT8).key(true).build(); final Schema schema = new Schema(ImmutableList.of(a, b, c)); CreateTableOptions tableBuilder = new CreateTableOptions(); tableBuilder.setRangePartitionColumns(new ArrayList<>()); tableBuilder.addHashPartitions(ImmutableList.of("a"), 3); tableBuilder.addHashPartitions(ImmutableList.of("b"), 3); tableBuilder.addHashPartitions(ImmutableList.of("c"), 3); String tableName = "testInListHashPartitionPruning-" + System.currentTimeMillis(); client.createTable(tableName, schema, tableBuilder); KuduTable table = client.openTable(tableName); List<Partition> partitions = getTablePartitions(table); // a in [0, 1]; checkPartitions(18, 2, table, partitions, KuduPredicate.newInListPredicate(a, ImmutableList.of((byte) 0, (byte) 1))); // a in [0, 1, 8]; checkPartitions(27, 1, table, partitions, KuduPredicate.newInListPredicate(a, ImmutableList.of((byte) 0, (byte) 1, (byte) 8))); // b in [0, 1]; checkPartitions(18, 6, table, partitions, KuduPredicate.newInListPredicate(b, ImmutableList.of((byte) 0, (byte) 1))); // c in [0, 1]; checkPartitions(18, 18, table, partitions, KuduPredicate.newInListPredicate(c, ImmutableList.of((byte) 0, (byte) 1))); // b in [0, 1], c in [0, 1]; checkPartitions(12, 12, table, partitions, KuduPredicate.newInListPredicate(b, ImmutableList.of((byte) 0, (byte) 1)), KuduPredicate.newInListPredicate(c, ImmutableList.of((byte) 0, (byte) 1))); // a in [0, 1], b in [0, 1], c in [0, 1]; checkPartitions(8, 8, table, partitions, KuduPredicate.newInListPredicate(a, ImmutableList.of((byte) 0, (byte) 1)), KuduPredicate.newInListPredicate(b, ImmutableList.of((byte) 0, (byte) 1)), KuduPredicate.newInListPredicate(c, ImmutableList.of((byte) 0, (byte) 1))); } @Test public void testMultiColumnInListHashPruning() throws Exception { // CREATE TABLE t // (a INT8, b INT8, c INT8) // PRIMARY KEY (a, b, c) // PARTITION BY HASH (a) PARTITIONS 3, // HASH (b, c) PARTITIONS 3; ColumnSchema a = new ColumnSchema.ColumnSchemaBuilder("a", Type.INT8).key(true).build(); ColumnSchema b = new ColumnSchema.ColumnSchemaBuilder("b", Type.INT8).key(true).build(); ColumnSchema c = new ColumnSchema.ColumnSchemaBuilder("c", Type.INT8).key(true).build(); final Schema schema = new Schema(ImmutableList.of(a, b, c)); CreateTableOptions tableBuilder = new CreateTableOptions(); tableBuilder.setRangePartitionColumns(new ArrayList<>()); tableBuilder.addHashPartitions(ImmutableList.of("a"), 3); tableBuilder.addHashPartitions(ImmutableList.of("b", "c"), 3); String tableName = "testMultiColumnInListHashPartitionPruning-" + System.currentTimeMillis(); client.createTable(tableName, schema, tableBuilder); KuduTable table = client.openTable(tableName); List<Partition> partitions = getTablePartitions(table); // a in [0, 1]; 
checkPartitions(6, 2, table, partitions, KuduPredicate.newInListPredicate(a, ImmutableList.of((byte) 0, (byte) 1))); // a in [0, 1, 8]; checkPartitions(9, 1, table, partitions, KuduPredicate.newInListPredicate(a, ImmutableList.of((byte) 0, (byte) 1, (byte) 8))); // b in [0, 1]; checkPartitions(9, 1, table, partitions, KuduPredicate.newInListPredicate(b, ImmutableList.of((byte) 0, (byte) 1))); // c in [0, 1]; checkPartitions(9, 1, table, partitions, KuduPredicate.newInListPredicate(c, ImmutableList.of((byte) 0, (byte) 1))); // b in [0, 1], c in [0, 1] // (0, 0) in bucket 2 // (0, 1) in bucket 2 // (1, 0) in bucket 1 // (1, 1) in bucket 0 checkPartitions(9, 1, table, partitions, KuduPredicate.newInListPredicate(b, ImmutableList.of((byte) 0, (byte) 1)), KuduPredicate.newInListPredicate(c, ImmutableList.of((byte) 0, (byte) 1))); // b = 0, c in [0, 1] checkPartitions(3, 3, table, partitions, KuduPredicate.newComparisonPredicate(b, ComparisonOp.EQUAL, 0), KuduPredicate.newInListPredicate(c, ImmutableList.of((byte) 0, (byte) 1))); // b = 1, c in [0, 1] checkPartitions(6, 6, table, partitions, KuduPredicate.newComparisonPredicate(b, ComparisonOp.EQUAL, 1), KuduPredicate.newInListPredicate(c, ImmutableList.of((byte) 0, (byte) 1))); // a in [0, 1], b in [0, 1], c in [0, 1]; checkPartitions(6, 2, table, partitions, KuduPredicate.newInListPredicate(a, ImmutableList.of((byte) 0, (byte) 1)), KuduPredicate.newInListPredicate(b, ImmutableList.of((byte) 0, (byte) 1)), KuduPredicate.newInListPredicate(c, ImmutableList.of((byte) 0, (byte) 1))); } @Test public void testPruning() throws Exception { // CREATE TABLE timeseries // (host STRING, metric STRING, timestamp UNIXTIME_MICROS, value DOUBLE) // PRIMARY KEY (host, metric, time) // DISTRIBUTE BY // RANGE(time) // (PARTITION VALUES < 10, // PARTITION VALUES >= 10); // HASH (host, metric) 2 PARTITIONS; ColumnSchema host = new ColumnSchema.ColumnSchemaBuilder("host", Type.STRING).key(true).build(); ColumnSchema metric = new ColumnSchema.ColumnSchemaBuilder("metric", Type.STRING).key(true).build(); ColumnSchema timestamp = new ColumnSchema.ColumnSchemaBuilder("timestamp", Type.UNIXTIME_MICROS) .key(true).build(); ColumnSchema value = new ColumnSchema.ColumnSchemaBuilder("value", Type.DOUBLE).build(); Schema schema = new Schema(ImmutableList.of(host, metric, timestamp, value)); CreateTableOptions tableBuilder = new CreateTableOptions(); tableBuilder.setRangePartitionColumns(ImmutableList.of("timestamp")); PartialRow split = schema.newPartialRow(); split.addLong("timestamp", 10); tableBuilder.addSplitRow(split); tableBuilder.addHashPartitions(ImmutableList.of("host", "metric"), 2); String tableName = "testPruning-" + System.currentTimeMillis(); client.createTable(tableName, schema, tableBuilder); KuduTable table = client.openTable(tableName); List<Partition> partitions = getTablePartitions(table); // No Predicates checkPartitions(4, 1, table, partitions); // host = "a" checkPartitions(4, 1, table, partitions, KuduPredicate.newComparisonPredicate(host, ComparisonOp.EQUAL, "a")); // host = "a" // metric = "a" checkPartitions(2, 1, table, partitions, KuduPredicate.newComparisonPredicate(host, ComparisonOp.EQUAL, "a"), KuduPredicate.newComparisonPredicate(metric, ComparisonOp.EQUAL, "a")); // host = "a" // metric = "a" // timestamp >= 9; checkPartitions(2, 1, table, partitions, KuduPredicate.newComparisonPredicate(host, ComparisonOp.EQUAL, "a"), KuduPredicate.newComparisonPredicate(metric, ComparisonOp.EQUAL, "a"), KuduPredicate.newComparisonPredicate(timestamp, 
ComparisonOp.GREATER_EQUAL, 9)); // host = "a" // metric = "a" // timestamp >= 10; // timestamp < 20; checkPartitions(1, 1, table, partitions, KuduPredicate.newComparisonPredicate(host, ComparisonOp.EQUAL, "a"), KuduPredicate.newComparisonPredicate(metric, ComparisonOp.EQUAL, "a"), KuduPredicate.newComparisonPredicate(timestamp, ComparisonOp.GREATER_EQUAL, 10), KuduPredicate.newComparisonPredicate(timestamp, ComparisonOp.LESS, 20)); // host = "a" // metric = "a" // timestamp < 10; checkPartitions(1, 1, table, partitions, KuduPredicate.newComparisonPredicate(host, ComparisonOp.EQUAL, "a"), KuduPredicate.newComparisonPredicate(metric, ComparisonOp.EQUAL, "a"), KuduPredicate.newComparisonPredicate(timestamp, ComparisonOp.LESS, 10)); // host = "a" // metric = "a" // timestamp >= 10; checkPartitions(1, 1, table, partitions, KuduPredicate.newComparisonPredicate(host, ComparisonOp.EQUAL, "a"), KuduPredicate.newComparisonPredicate(metric, ComparisonOp.EQUAL, "a"), KuduPredicate.newComparisonPredicate(timestamp, ComparisonOp.GREATER_EQUAL, 10)); // host = "a" // metric = "a" // timestamp = 10; checkPartitions(1, 1, table, partitions, KuduPredicate.newComparisonPredicate(host, ComparisonOp.EQUAL, "a"), KuduPredicate.newComparisonPredicate(metric, ComparisonOp.EQUAL, "a"), KuduPredicate.newComparisonPredicate(timestamp, ComparisonOp.EQUAL, 10)); byte[] hash1 = new byte[] { 0, 0, 0, 1 }; // partition key < (hash=1) checkPartitions(2, 1, table, partitions, null, hash1); // partition key >= (hash=1) checkPartitions(2, 1, table, partitions, hash1, null); // timestamp = 10 // partition key < (hash=1) checkPartitions(1, 1, table, partitions, null, hash1, KuduPredicate.newComparisonPredicate(timestamp, ComparisonOp.EQUAL, 10)); // timestamp = 10 // partition key >= (hash=1) checkPartitions(1, 1, table, partitions, hash1,null, KuduPredicate.newComparisonPredicate(timestamp, ComparisonOp.EQUAL, 10)); // timestamp IN (0, 9) // host = "a" // metric IN ("foo", "baz") checkPartitions(1, 1, table, partitions, KuduPredicate.newInListPredicate(timestamp, ImmutableList.of(0L, 9L)), KuduPredicate.newComparisonPredicate(host, ComparisonOp.EQUAL, "a"), KuduPredicate.newInListPredicate(metric, ImmutableList.of("foo", "baz"))); // timestamp IN (10, 100) checkPartitions(2, 2, table, partitions, KuduPredicate.newInListPredicate(timestamp, ImmutableList.of(10L, 100L))); // timestamp IN (9, 10) checkPartitions(4, 2, table, partitions, KuduPredicate.newInListPredicate(timestamp, ImmutableList.of(9L, 10L))); // timestamp IS NOT NULL checkPartitions(4, 1, table, partitions, KuduPredicate.newIsNotNullPredicate(timestamp)); // timestamp IS NULL checkPartitions(0, 0, table, partitions, KuduPredicate.newIsNullPredicate(timestamp)); } }
helifu/kudu
java/kudu-client/src/test/java/org/apache/kudu/client/TestPartitionPruner.java
Java
apache-2.0
35,532
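Every assertion in the TestPartitionPruner file above follows one recipe: build KuduPredicate objects against the table's column schemas, then verify how many tablets survive pruning. The sketch below shows only the construction side of that recipe, using the same public kudu-client classes the test already imports; checkPartitions is a private test helper and is not reproduced, and the class name PruningSketch is ours.

import java.util.ArrayList;
import com.google.common.collect.ImmutableList;
import org.apache.kudu.ColumnSchema;
import org.apache.kudu.Schema;
import org.apache.kudu.Type;
import org.apache.kudu.client.CreateTableOptions;
import org.apache.kudu.client.KuduPredicate;
import org.apache.kudu.client.KuduPredicate.ComparisonOp;

public class PruningSketch {
    public static void main(String[] args) {
        // Same shape as testHashPartitionPruning: INT8 key columns, hash partitioning only.
        ColumnSchema a = new ColumnSchema.ColumnSchemaBuilder("a", Type.INT8).key(true).build();
        ColumnSchema b = new ColumnSchema.ColumnSchemaBuilder("b", Type.INT8).key(true).build();
        Schema schema = new Schema(ImmutableList.of(a, b));

        CreateTableOptions options = new CreateTableOptions();
        options.setRangePartitionColumns(new ArrayList<>()); // no range component
        options.addHashPartitions(ImmutableList.of("a"), 2);
        options.addHashPartitions(ImmutableList.of("b"), 2);

        // Predicates are plain value objects, so they can be built and inspected
        // without a running cluster; pruning only happens once a scan is opened.
        KuduPredicate eq = KuduPredicate.newComparisonPredicate(a, ComparisonOp.EQUAL, 0);
        KuduPredicate in = KuduPredicate.newInListPredicate(b, ImmutableList.of((byte) 0, (byte) 1));
        System.out.println(eq + " AND " + in);
    }
}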
/*
 * Copyright 2015 jmrozanec
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.cronutils.model.time.generator;

import com.cronutils.model.field.expression.FieldExpression;
import org.apache.commons.lang3.Validate;

import java.util.List;

/**
 * Provides a strategy to generate values.
 * The strategy is valid for non-negative numbers.
 */
public abstract class FieldValueGenerator {
    protected static final int NO_VALUE = Integer.MIN_VALUE;
    protected FieldExpression expression;

    public FieldValueGenerator(FieldExpression expression) {
        Validate.notNull(expression, "FieldExpression must not be null");
        Validate.isTrue(matchesFieldExpressionClass(expression), "FieldExpression does not match required class");
        this.expression = expression;
    }

    /**
     * Generates the next valid value after the given reference.
     * @param reference - reference value
     * @return generated value - Integer
     * @throws NoSuchValueException - if there is no next value
     */
    public abstract int generateNextValue(int reference) throws NoSuchValueException;

    /**
     * Generates the previous valid value before the given reference.
     * @param reference - reference value
     * @return generated value - Integer
     * @throws NoSuchValueException - if there is no previous value
     */
    public abstract int generatePreviousValue(int reference) throws NoSuchValueException;

    protected abstract List<Integer> generateCandidatesNotIncludingIntervalExtremes(int start, int end);

    public abstract boolean isMatch(int value);

    public final List<Integer> generateCandidates(int start, int end) {
        List<Integer> candidates = generateCandidatesNotIncludingIntervalExtremes(start, end);
        if (isMatch(start)) {
            candidates.add(start);
        }
        if (isMatch(end)) {
            candidates.add(end);
        }
        return candidates;
    }

    protected abstract boolean matchesFieldExpressionClass(FieldExpression fieldExpression);
}
ucpwang/cron-utils
src/main/java/com/cronutils/model/time/generator/FieldValueGenerator.java
Java
apache-2.0
2,439
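generateCandidates in the class above is a template method: subclasses supply the interior candidates and the isMatch test, and the base class closes the interval by appending matching endpoints. The subclass below is a hypothetical sketch of ours, not part of cron-utils (the real implementations are OnFieldValueGenerator, EveryFieldValueGenerator and friends); it assumes the same-package NoSuchValueException declared by the abstract methods has a no-arg constructor, it reads "next" and "previous" as strictly greater/smaller, and it sticks to non-negative ranges in keeping with the class javadoc.

package com.cronutils.model.time.generator;

import java.util.ArrayList;
import java.util.List;

import com.cronutils.model.field.expression.FieldExpression;

public class EveryFifthValueGenerator extends FieldValueGenerator {

    public EveryFifthValueGenerator(FieldExpression expression) {
        super(expression);
    }

    @Override
    public int generateNextValue(int reference) throws NoSuchValueException {
        return (reference / 5 + 1) * 5; // next multiple of 5 strictly greater than reference
    }

    @Override
    public int generatePreviousValue(int reference) throws NoSuchValueException {
        if (reference <= 0) {
            throw new NoSuchValueException(); // nothing below zero in this domain
        }
        return ((reference - 1) / 5) * 5; // previous multiple of 5 strictly smaller
    }

    @Override
    protected List<Integer> generateCandidatesNotIncludingIntervalExtremes(int start, int end) {
        List<Integer> values = new ArrayList<>();
        // Start at the first multiple of 5 strictly above 'start'; the base class
        // adds 'start' and 'end' themselves when isMatch accepts them.
        for (int v = (start / 5 + 1) * 5; v < end; v += 5) {
            values.add(v);
        }
        return values;
    }

    @Override
    public boolean isMatch(int value) {
        return value % 5 == 0;
    }

    @Override
    protected boolean matchesFieldExpressionClass(FieldExpression fieldExpression) {
        return fieldExpression != null; // illustration only: accept any non-null expression
    }
}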
/* * Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.jet.impl; import com.hazelcast.cluster.ClusterState; import com.hazelcast.cluster.Member; import com.hazelcast.cluster.impl.MemberImpl; import com.hazelcast.core.HazelcastInstanceNotActiveException; import com.hazelcast.function.FunctionEx; import com.hazelcast.instance.impl.Node; import com.hazelcast.internal.cluster.ClusterService; import com.hazelcast.internal.metrics.MetricDescriptor; import com.hazelcast.internal.metrics.MetricsRegistry; import com.hazelcast.internal.metrics.Probe; import com.hazelcast.internal.partition.impl.InternalPartitionServiceImpl; import com.hazelcast.internal.partition.impl.PartitionServiceState; import com.hazelcast.internal.serialization.Data; import com.hazelcast.internal.util.Clock; import com.hazelcast.internal.util.counters.Counter; import com.hazelcast.internal.util.counters.MwCounter; import com.hazelcast.jet.JetException; import com.hazelcast.jet.JobAlreadyExistsException; import com.hazelcast.jet.config.JetConfig; import com.hazelcast.jet.config.JobConfig; import com.hazelcast.jet.core.DAG; import com.hazelcast.jet.core.JobNotFoundException; import com.hazelcast.jet.core.JobStatus; import com.hazelcast.jet.core.JobSuspensionCause; import com.hazelcast.jet.core.TopologyChangedException; import com.hazelcast.jet.core.Vertex; import com.hazelcast.jet.core.metrics.MetricNames; import com.hazelcast.jet.core.metrics.MetricTags; import com.hazelcast.jet.datamodel.Tuple2; import com.hazelcast.jet.impl.exception.EnteringPassiveClusterStateException; import com.hazelcast.jet.impl.execution.DoneItem; import com.hazelcast.jet.impl.metrics.RawJobMetrics; import com.hazelcast.jet.impl.observer.ObservableImpl; import com.hazelcast.jet.impl.observer.WrappedThrowable; import com.hazelcast.jet.impl.operation.GetJobIdsOperation.GetJobIdsResult; import com.hazelcast.jet.impl.operation.NotifyMemberShutdownOperation; import com.hazelcast.jet.impl.pipeline.PipelineImpl; import com.hazelcast.jet.impl.pipeline.PipelineImpl.Context; import com.hazelcast.jet.impl.util.LoggingUtil; import com.hazelcast.jet.impl.util.Util; import com.hazelcast.logging.ILogger; import com.hazelcast.ringbuffer.OverflowPolicy; import com.hazelcast.ringbuffer.Ringbuffer; import com.hazelcast.security.SecurityContext; import com.hazelcast.spi.exception.RetryableHazelcastException; import com.hazelcast.spi.impl.NodeEngineImpl; import com.hazelcast.spi.impl.executionservice.ExecutionService; import com.hazelcast.spi.properties.HazelcastProperties; import com.hazelcast.version.Version; import javax.annotation.CheckReturnValue; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.security.auth.Subject; import java.security.Permission; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Objects; import 
java.util.Set; import java.util.Spliterators; import java.util.UUID; import java.util.concurrent.Callable; import java.util.concurrent.CancellationException; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Consumer; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.StreamSupport; import static com.hazelcast.cluster.ClusterState.IN_TRANSITION; import static com.hazelcast.cluster.ClusterState.PASSIVE; import static com.hazelcast.cluster.memberselector.MemberSelectors.DATA_MEMBER_SELECTOR; import static com.hazelcast.internal.util.executor.ExecutorType.CACHED; import static com.hazelcast.jet.Util.idToString; import static com.hazelcast.jet.core.JobStatus.COMPLETING; import static com.hazelcast.jet.core.JobStatus.NOT_RUNNING; import static com.hazelcast.jet.core.JobStatus.RUNNING; import static com.hazelcast.jet.core.JobStatus.SUSPENDED; import static com.hazelcast.jet.datamodel.Tuple2.tuple2; import static com.hazelcast.jet.impl.JobClassLoaderService.JobPhase.COORDINATOR; import static com.hazelcast.jet.impl.TerminationMode.CANCEL_FORCEFUL; import static com.hazelcast.jet.impl.execution.init.CustomClassLoadedObject.deserializeWithCustomClassLoader; import static com.hazelcast.jet.impl.operation.GetJobIdsOperation.ALL_JOBS; import static com.hazelcast.jet.impl.util.ExceptionUtil.sneakyThrow; import static com.hazelcast.jet.impl.util.ExceptionUtil.withTryCatch; import static com.hazelcast.jet.impl.util.LoggingUtil.logFine; import static com.hazelcast.jet.impl.util.LoggingUtil.logFinest; import static com.hazelcast.spi.properties.ClusterProperty.JOB_SCAN_PERIOD; import static java.util.Collections.emptyList; import static java.util.Comparator.comparing; import static java.util.concurrent.CompletableFuture.completedFuture; import static java.util.concurrent.TimeUnit.HOURS; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.SECONDS; import static java.util.function.Function.identity; import static java.util.stream.Collectors.toList; /** * A service that handles MasterContexts on the coordinator member. * Job-control operations from client are handled here. */ public class JobCoordinationService { private static final String COORDINATOR_EXECUTOR_NAME = "jet:coordinator"; /** * The delay before retrying to start/scale up a job. */ private static final long RETRY_DELAY_IN_MILLIS = SECONDS.toMillis(2); private static final ThreadLocal<Boolean> IS_JOB_COORDINATOR_THREAD = ThreadLocal.withInitial(() -> false); private static final int COORDINATOR_THREADS_POOL_SIZE = 4; private static final int MIN_JOB_SCAN_PERIOD_MILLIS = 100; /** * Inserted temporarily to {@link #lightMasterContexts} to safely check for double job submission. * When reading, it's treated as if the job doesn't exist. 
*/ private static final Object UNINITIALIZED_LIGHT_JOB_MARKER = new Object(); private final NodeEngineImpl nodeEngine; private final JetServiceBackend jetServiceBackend; private final JetConfig config; private final ILogger logger; private final JobRepository jobRepository; private final ConcurrentMap<Long, MasterContext> masterContexts = new ConcurrentHashMap<>(); private final ConcurrentMap<Long, Object> lightMasterContexts = new ConcurrentHashMap<>(); private final ConcurrentMap<UUID, CompletableFuture<Void>> membersShuttingDown = new ConcurrentHashMap<>(); private final ConcurrentMap<Long, ScheduledFuture<?>> scheduledJobTimeouts = new ConcurrentHashMap<>(); /** * Map of {memberUuid; removeTime}. * * A collection of UUIDs of members which left the cluster and for which we * didn't receive {@link NotifyMemberShutdownOperation}. */ private final Map<UUID, Long> removedMembers = new ConcurrentHashMap<>(); private final Object lock = new Object(); private volatile boolean isClusterEnteringPassiveState; private volatile boolean jobsScanned; private final AtomicInteger scaleUpScheduledCount = new AtomicInteger(); @Probe(name = MetricNames.JOBS_SUBMITTED) private final Counter jobSubmitted = MwCounter.newMwCounter(); @Probe(name = MetricNames.JOBS_COMPLETED_SUCCESSFULLY) private final Counter jobCompletedSuccessfully = MwCounter.newMwCounter(); @Probe(name = MetricNames.JOBS_COMPLETED_WITH_FAILURE) private final Counter jobCompletedWithFailure = MwCounter.newMwCounter(); private long maxJobScanPeriodInMillis; JobCoordinationService( NodeEngineImpl nodeEngine, JetServiceBackend jetServiceBackend, JetConfig config, JobRepository jobRepository ) { this.nodeEngine = nodeEngine; this.jetServiceBackend = jetServiceBackend; this.config = config; this.logger = nodeEngine.getLogger(getClass()); this.jobRepository = jobRepository; ExecutionService executionService = nodeEngine.getExecutionService(); executionService.register(COORDINATOR_EXECUTOR_NAME, COORDINATOR_THREADS_POOL_SIZE, Integer.MAX_VALUE, CACHED); // register metrics MetricsRegistry registry = nodeEngine.getMetricsRegistry(); MetricDescriptor descriptor = registry.newMetricDescriptor() .withTag(MetricTags.MODULE, "jet"); registry.registerStaticMetrics(descriptor, this); } public JobRepository jobRepository() { return jobRepository; } public void startScanningForJobs() { ExecutionService executionService = nodeEngine.getExecutionService(); HazelcastProperties properties = nodeEngine.getProperties(); maxJobScanPeriodInMillis = properties.getMillis(JOB_SCAN_PERIOD); executionService.schedule(COORDINATOR_EXECUTOR_NAME, this::scanJobs, 0, MILLISECONDS); } public CompletableFuture<Void> submitJob( long jobId, Data serializedJobDefinition, JobConfig jobConfig, Subject subject ) { CompletableFuture<Void> res = new CompletableFuture<>(); submitToCoordinatorThread(() -> { MasterContext masterContext; try { assertIsMaster("Cannot submit job " + idToString(jobId) + " to non-master node"); checkOperationalState(); // the order of operations is important. // first, check if the job is already completed JobResult jobResult = jobRepository.getJobResult(jobId); if (jobResult != null) { logger.fine("Not starting job " + idToString(jobId) + " since already completed with result: " + jobResult); return; } if (!config.isResourceUploadEnabled() && !jobConfig.getResourceConfigs().isEmpty()) { throw new JetException(Util.JET_RESOURCE_UPLOAD_DISABLED_MESSAGE); } int quorumSize = jobConfig.isSplitBrainProtectionEnabled() ? 
getQuorumSize() : 0; Object jobDefinition = deserializeJobDefinition(jobId, jobConfig, serializedJobDefinition); DAG dag; Data serializedDag; if (jobDefinition instanceof PipelineImpl) { int coopThreadCount = config.getCooperativeThreadCount(); dag = ((PipelineImpl) jobDefinition).toDag(new Context() { @Override public int defaultLocalParallelism() { return coopThreadCount; } }); serializedDag = nodeEngine().getSerializationService().toData(dag); } else { dag = (DAG) jobDefinition; serializedDag = serializedJobDefinition; } checkPermissions(subject, dag); Set<String> ownedObservables = ownedObservables(dag); JobRecord jobRecord = new JobRecord(nodeEngine.getClusterService().getClusterVersion(), jobId, serializedDag, dagToJson(dag), jobConfig, ownedObservables, subject); JobExecutionRecord jobExecutionRecord = new JobExecutionRecord(jobId, quorumSize); masterContext = createMasterContext(jobRecord, jobExecutionRecord); boolean hasDuplicateJobName; synchronized (lock) { assertIsMaster("Cannot submit job " + idToString(jobId) + " to non-master node"); checkOperationalState(); hasDuplicateJobName = jobConfig.getName() != null && hasActiveJobWithName(jobConfig.getName()); if (!hasDuplicateJobName) { // just try to initiate the coordination MasterContext prev = masterContexts.putIfAbsent(jobId, masterContext); if (prev != null) { logger.fine("Joining to already existing masterContext " + prev.jobIdString()); return; } } } if (hasDuplicateJobName) { jobRepository.deleteJob(jobId); throw new JobAlreadyExistsException("Another active job with equal name (" + jobConfig.getName() + ") exists: " + idToString(jobId)); } // If job is not currently running, it might be that it is just completed if (completeMasterContextIfJobAlreadyCompleted(masterContext)) { return; } // If there is no master context and job result at the same time, it means this is the first submission jobSubmitted.inc(); jobRepository.putNewJobRecord(jobRecord); logger.info("Starting job " + idToString(masterContext.jobId()) + " based on submit request"); } catch (Throwable e) { jetServiceBackend.getJobClassLoaderService() .tryRemoveClassloadersForJob(jobId, COORDINATOR); res.completeExceptionally(e); throw e; } finally { res.complete(null); } tryStartJob(masterContext); }); return res; } public CompletableFuture<Void> submitLightJob( long jobId, Data serializedJobDefinition, JobConfig jobConfig, Subject subject ) { Object jobDefinition = nodeEngine().getSerializationService().toObject(serializedJobDefinition); DAG dag; if (jobDefinition instanceof DAG) { dag = (DAG) jobDefinition; } else { int coopThreadCount = config.getCooperativeThreadCount(); dag = ((PipelineImpl) jobDefinition).toDag(new Context() { @Override public int defaultLocalParallelism() { return coopThreadCount; } }); } // First insert just a marker into the map. This is to prevent initializing the light job if the jobId // was submitted twice. This can happen e.g. if the client retries. Object oldContext = lightMasterContexts.putIfAbsent(jobId, UNINITIALIZED_LIGHT_JOB_MARKER); if (oldContext != null) { throw new JetException("duplicate jobId " + idToString(jobId)); } checkPermissions(subject, dag); // Initialize and start the job (happens in the constructor). We do this before adding the actual // LightMasterContext to the map to avoid possible races of the job initialization and cancellation. 
LightMasterContext mc = new LightMasterContext(nodeEngine, this, dag, jobId, jobConfig, subject); oldContext = lightMasterContexts.put(jobId, mc); assert oldContext == UNINITIALIZED_LIGHT_JOB_MARKER; scheduleJobTimeout(jobId, jobConfig.getTimeoutMillis()); return mc.getCompletionFuture() .whenComplete((r, t) -> { Object removed = lightMasterContexts.remove(jobId); assert removed instanceof LightMasterContext : "LMC not found: " + removed; unscheduleJobTimeout(jobId); }); } public long getJobSubmittedCount() { return jobSubmitted.get(); } private void checkPermissions(Subject subject, DAG dag) { SecurityContext securityContext = nodeEngine.getNode().securityContext; if (securityContext == null || subject == null) { return; } for (Vertex vertex : dag) { Permission requiredPermission = vertex.getMetaSupplier().getRequiredPermission(); if (requiredPermission != null) { securityContext.checkPermission(subject, requiredPermission); } } } private static Set<String> ownedObservables(DAG dag) { return StreamSupport.stream(Spliterators.spliteratorUnknownSize(dag.iterator(), 0), false) .map(vertex -> vertex.getMetaSupplier().getTags().get(ObservableImpl.OWNED_OBSERVABLE)) .filter(Objects::nonNull) .collect(Collectors.toSet()); } @SuppressWarnings("WeakerAccess") // used by jet-enterprise MasterContext createMasterContext(JobRecord jobRecord, JobExecutionRecord jobExecutionRecord) { return new MasterContext(nodeEngine, this, jobRecord, jobExecutionRecord); } private boolean hasActiveJobWithName(@Nonnull String jobName) { // if scanJob() has not run yet, master context objects may not be initialized. // in this case, we cannot check if the new job submission has a duplicate job name. // therefore, we will retry until scanJob() task runs at least once. if (!jobsScanned) { throw new RetryableHazelcastException("Cannot submit job with name '" + jobName + "' before the master node initializes job coordination service's state"); } return masterContexts.values() .stream() .anyMatch(ctx -> jobName.equals(ctx.jobConfig().getName())); } public CompletableFuture<Void> prepareForPassiveClusterState() { assertIsMaster("Cannot prepare for passive cluster state on a non-master node"); synchronized (lock) { isClusterEnteringPassiveState = true; } return submitToCoordinatorThread(() -> { CompletableFuture[] futures = masterContexts .values().stream() .map(mc -> mc.jobContext().gracefullyTerminate()) .toArray(CompletableFuture[]::new); return CompletableFuture.allOf(futures); }).thenCompose(identity()); } public void clusterChangeDone() { synchronized (lock) { isClusterEnteringPassiveState = false; } } public void reset() { assert !isMaster() : "this member is a master"; List<MasterContext> contexts; synchronized (lock) { contexts = new ArrayList<>(masterContexts.values()); masterContexts.clear(); jobsScanned = false; } contexts.forEach(ctx -> ctx.jobContext().setFinalResult(new CancellationException())); } public CompletableFuture<Void> joinSubmittedJob(long jobId) { checkOperationalState(); CompletableFuture<CompletableFuture<Void>> future = callWithJob(jobId, mc -> mc.jobContext().jobCompletionFuture() .handle((r, t) -> { if (t == null) { return null; } if (t instanceof CancellationException || t instanceof JetException) { throw sneakyThrow(t); } throw new JetException(t.toString(), t); }), JobResult::asCompletableFuture, jobRecord -> { JobExecutionRecord jobExecutionRecord = ensureExecutionRecord(jobId, jobRepository.getJobExecutionRecord(jobId)); return startJobIfNotStartedOrCompleted(jobRecord, 
jobExecutionRecord, "join request from client"); }, null ); return future .thenCompose(identity()); // unwrap the inner future } public CompletableFuture<Void> joinLightJob(long jobId) { Object mc = lightMasterContexts.get(jobId); if (mc == null || mc == UNINITIALIZED_LIGHT_JOB_MARKER) { throw new JobNotFoundException(jobId); } return ((LightMasterContext) mc).getCompletionFuture(); } public CompletableFuture<Void> terminateJob(long jobId, TerminationMode terminationMode) { return runWithJob(jobId, masterContext -> { // User can cancel in any state, other terminations are allowed only when running. // This is not technically required (we can request termination in any state), // but this method is only called by the user. It would be weird for the client to // request a restart if the job didn't start yet etc. // Also, it would be weird to restart the job during STARTING: as soon as it will start, // it will restart. // In any case, it doesn't make sense to restart a suspended job. JobStatus jobStatus = masterContext.jobStatus(); if (jobStatus != RUNNING && terminationMode != CANCEL_FORCEFUL) { throw new IllegalStateException("Cannot " + terminationMode + ", job status is " + jobStatus + ", should be " + RUNNING); } String terminationResult = masterContext.jobContext().requestTermination(terminationMode, false).f1(); if (terminationResult != null) { throw new IllegalStateException("Cannot " + terminationMode + ": " + terminationResult); } }, jobResult -> { if (terminationMode != CANCEL_FORCEFUL) { throw new IllegalStateException("Cannot " + terminationMode + " job " + idToString(jobId) + " because it already has a result: " + jobResult); } logger.fine("Ignoring cancellation of a completed job " + idToString(jobId)); }, jobRecord -> { // we'll eventually learn of the job through scanning of records or from a join operation throw new RetryableHazelcastException("No MasterContext found for job " + idToString(jobId) + " for " + terminationMode); } ); } public void terminateLightJob(long jobId) { Object mc = lightMasterContexts.get(jobId); if (mc == null || mc == UNINITIALIZED_LIGHT_JOB_MARKER) { throw new JobNotFoundException(jobId); } ((LightMasterContext) mc).requestTermination(); } /** * Return the job IDs of jobs with the given name, sorted by * {active/completed, creation time}, active & newest first. 
*/ public CompletableFuture<GetJobIdsResult> getJobIds(@Nullable String onlyName, long onlyJobId) { if (onlyName != null) { assertIsMaster("Cannot query list of job IDs by name on non-master node"); } return submitToCoordinatorThread(() -> { if (onlyJobId != ALL_JOBS) { Object lmc = lightMasterContexts.get(onlyJobId); if (lmc != null && lmc != UNINITIALIZED_LIGHT_JOB_MARKER) { return new GetJobIdsResult(onlyJobId, true); } if (isMaster()) { try { callWithJob(onlyJobId, mc -> null, jobResult -> null, jobRecord -> null, null) .get(); } catch (ExecutionException e) { if (e.getCause() instanceof JobNotFoundException) { return GetJobIdsResult.EMPTY; } throw e; } return new GetJobIdsResult(onlyJobId, false); } return GetJobIdsResult.EMPTY; } List<Tuple2<Long, Boolean>> result = new ArrayList<>(); // add light jobs - only if no name is requested, light jobs can't have a name if (onlyName == null) { for (Object ctx : lightMasterContexts.values()) { if (ctx != UNINITIALIZED_LIGHT_JOB_MARKER) { result.add(tuple2(((LightMasterContext) ctx).getJobId(), true)); } } } // add normal jobs - only on master if (isMaster()) { if (onlyName != null) { // we first need to collect to a map where the jobId is the key to eliminate possible duplicates // in JobResult and also to be able to sort from newest to oldest Map<Long, Long> jobs = new HashMap<>(); for (MasterContext ctx : masterContexts.values()) { if (onlyName.equals(ctx.jobConfig().getName())) { jobs.put(ctx.jobId(), Long.MAX_VALUE); } } for (JobResult jobResult : jobRepository.getJobResults(onlyName)) { jobs.put(jobResult.getJobId(), jobResult.getCreationTime()); } jobs.entrySet().stream() .sorted(comparing(Entry<Long, Long>::getValue).reversed()) .forEach(entry -> result.add(tuple2(entry.getKey(), false))); } else { for (Long jobId : jobRepository.getAllJobIds()) { result.add(tuple2(jobId, false)); } } } return new GetJobIdsResult(result); }); } /** * Returns the job status or fails with {@link JobNotFoundException} * if the requested job is not found. */ public CompletableFuture<JobStatus> getJobStatus(long jobId) { return callWithJob(jobId, mc -> { // When the job finishes running, we write NOT_RUNNING to jobStatus first and then // write null to requestedTerminationMode (see MasterJobContext.finalizeJob()). We // have to read them in the opposite order. TerminationMode terminationMode = mc.jobContext().requestedTerminationMode(); JobStatus jobStatus = mc.jobStatus(); return jobStatus == RUNNING && terminationMode != null ? COMPLETING : jobStatus; }, JobResult::getJobStatus, jobRecord -> NOT_RUNNING, jobExecutionRecord -> jobExecutionRecord.isSuspended() ? SUSPENDED : NOT_RUNNING ); } /** * Returns the reason why this job has been suspended in a human-readable * form. * <p> * Fails with {@link JobNotFoundException} if the requested job is not found. * <p> * Fails with {@link IllegalStateException} if the requested job is not * currently in a suspended state. 
*/ public CompletableFuture<JobSuspensionCause> getJobSuspensionCause(long jobId) { FunctionEx<JobExecutionRecord, JobSuspensionCause> jobExecutionRecordHandler = jobExecutionRecord -> { JobSuspensionCause cause = jobExecutionRecord.getSuspensionCause(); if (cause == null) { throw new IllegalStateException("Job not suspended"); } return cause; }; return callWithJob(jobId, mc -> { JobExecutionRecord jobExecutionRecord = mc.jobExecutionRecord(); return jobExecutionRecordHandler.apply(jobExecutionRecord); }, jobResult -> { throw new IllegalStateException("Job not suspended"); }, jobRecord -> { throw new IllegalStateException("Job not suspended"); }, jobExecutionRecordHandler ); } /** * Returns the latest metrics for a job or fails with {@link JobNotFoundException} * if the requested job is not found. */ public CompletableFuture<List<RawJobMetrics>> getJobMetrics(long jobId) { CompletableFuture<List<RawJobMetrics>> cf = new CompletableFuture<>(); runWithJob(jobId, mc -> mc.jobContext().collectMetrics(cf), jobResult -> { List<RawJobMetrics> metrics = jobRepository.getJobMetrics(jobId); cf.complete(metrics != null ? metrics : emptyList()); }, jobRecord -> cf.complete(emptyList()) ); return cf; } /** * Returns the job submission time or fails with {@link JobNotFoundException} * if the requested job is not found. */ public CompletableFuture<Long> getJobSubmissionTime(long jobId, boolean isLightJob) { if (isLightJob) { Object mc = lightMasterContexts.get(jobId); if (mc == null || mc == UNINITIALIZED_LIGHT_JOB_MARKER) { throw new JobNotFoundException(jobId); } return completedFuture(((LightMasterContext) mc).getStartTime()); } return callWithJob(jobId, mc -> mc.jobRecord().getCreationTime(), JobResult::getCreationTime, JobRecord::getCreationTime, null ); } public CompletableFuture<Void> resumeJob(long jobId) { return runWithJob(jobId, masterContext -> masterContext.jobContext().resumeJob(jobRepository::newExecutionId), jobResult -> { throw new IllegalStateException("Job already completed"); }, jobRecord -> { throw new RetryableHazelcastException("Job " + idToString(jobId) + " not yet discovered"); } ); } /** * Return a summary of all jobs */ public CompletableFuture<List<JobSummary>> getJobSummaryList() { return submitToCoordinatorThread(() -> { Map<Long, JobSummary> jobs = new HashMap<>(); if (isMaster()) { // running jobs jobRepository.getJobRecords().stream().map(this::getJobSummary).forEach(s -> jobs.put(s.getJobId(), s)); // completed jobs jobRepository.getJobResults().stream() .map(r -> new JobSummary( r.getJobId(), r.getJobNameOrId(), r.getJobStatus(), r.getCreationTime(), r.getCompletionTime(), r.getFailureText())) .forEach(s -> jobs.put(s.getJobId(), s)); } // light jobs lightMasterContexts.values().stream() .filter(lmc -> lmc != UNINITIALIZED_LIGHT_JOB_MARKER) .map(LightMasterContext.class::cast) .map(lmc -> new JobSummary( true, lmc.getJobId(), lmc.getJobId(), idToString(lmc.getJobId()), RUNNING, lmc.getStartTime())) .forEach(s -> jobs.put(s.getJobId(), s)); return jobs.values().stream().sorted(comparing(JobSummary::getSubmissionTime).reversed()).collect(toList()); }); } /** * Add the given member to shutting down members. This will prevent * submission of more executions until the member actually leaves the * cluster. The returned future will complete when all executions of which * the member is a participant terminate. * <p> * The method is idempotent, the {@link NotifyMemberShutdownOperation} * which calls it can be retried. 
*/ @Nonnull public CompletableFuture<Void> addShuttingDownMember(UUID uuid) { CompletableFuture<Void> future = new CompletableFuture<>(); CompletableFuture<Void> oldFuture = membersShuttingDown.putIfAbsent(uuid, future); if (oldFuture != null) { return oldFuture; } if (removedMembers.containsKey(uuid)) { logFine(logger, "NotifyMemberShutdownOperation received for a member that was already " + "removed from the cluster: %s", uuid); return completedFuture(null); } logFine(logger, "Added a shutting-down member: %s", uuid); CompletableFuture[] futures = masterContexts.values().stream() .map(mc -> mc.jobContext().onParticipantGracefulShutdown(uuid)) .toArray(CompletableFuture[]::new); // Need to do this even if futures.length == 0, we need to perform the action in whenComplete CompletableFuture.allOf(futures) .whenComplete(withTryCatch(logger, (r, e) -> future.complete(null))); return future; } // only for testing public Map<Long, MasterContext> getMasterContexts() { return new HashMap<>(masterContexts); } // only for testing public Map<Long, Object> getLightMasterContexts() { return new HashMap<>(lightMasterContexts); } // only for testing public MasterContext getMasterContext(long jobId) { return masterContexts.get(jobId); } JetServiceBackend getJetServiceBackend() { return jetServiceBackend; } boolean shouldStartJobs() { if (!isMaster() || !nodeEngine.isRunning()) { return false; } ClusterState clusterState = nodeEngine.getClusterService().getClusterState(); if (isClusterEnteringPassiveState || clusterState == PASSIVE || clusterState == IN_TRANSITION) { logger.fine("Not starting jobs because cluster is in passive state or in transition."); return false; } // if there are any members in a shutdown process, don't start jobs if (!membersShuttingDown.isEmpty()) { LoggingUtil.logFine(logger, "Not starting jobs because members are shutting down: %s", membersShuttingDown.keySet()); return false; } Version clusterVersion = nodeEngine.getClusterService().getClusterVersion(); for (Member m : nodeEngine.getClusterService().getMembers()) { if (!clusterVersion.equals(m.getVersion().asVersion())) { logger.fine("Not starting non-light jobs because rolling upgrade is in progress"); return false; } } PartitionServiceState state = getInternalPartitionService().getPartitionReplicaStateChecker().getPartitionServiceState(); if (state != PartitionServiceState.SAFE) { logger.fine("Not starting jobs because partition replication is not in safe state, but in " + state); return false; } if (!getInternalPartitionService().getPartitionStateManager().isInitialized()) { logger.fine("Not starting jobs because partitions are not yet initialized."); return false; } return true; } private CompletableFuture<Void> runWithJob( long jobId, @Nonnull Consumer<MasterContext> masterContextHandler, @Nonnull Consumer<JobResult> jobResultHandler, @Nonnull Consumer<JobRecord> jobRecordHandler ) { return callWithJob(jobId, toNullFunction(masterContextHandler), toNullFunction(jobResultHandler), toNullFunction(jobRecordHandler), null ); } /** * Returns a function that passes its argument to the given {@code * consumer} and returns {@code null}. 
*/ @Nonnull private <T, R> Function<T, R> toNullFunction(@Nonnull Consumer<T> consumer) { return val -> { consumer.accept(val); return null; }; } private <T> CompletableFuture<T> callWithJob( long jobId, @Nonnull Function<MasterContext, T> masterContextHandler, @Nonnull Function<JobResult, T> jobResultHandler, @Nonnull Function<JobRecord, T> jobRecordHandler, @Nullable Function<JobExecutionRecord, T> jobExecutionRecordHandler ) { assertIsMaster("Cannot do this task on non-master. jobId=" + idToString(jobId)); return submitToCoordinatorThread(() -> { // when job is finalized, actions happen in this order: // - JobResult and JobMetrics are created // - JobRecord and JobExecutionRecord are deleted // - masterContext is removed from the map // We check them in reverse order so that no race is possible. // // We check the JobResult after MasterContext for optimization because in most cases // there will either be MasterContext or JobResult. Neither of them is present only after // master failed and the new master didn't yet scan jobs. We check the JobResult // again at the end for correctness. // check masterContext first MasterContext mc = masterContexts.get(jobId); if (mc != null) { return masterContextHandler.apply(mc); } // early check of JobResult. JobResult jobResult = jobRepository.getJobResult(jobId); if (jobResult != null) { return jobResultHandler.apply(jobResult); } // the job might not be yet discovered by job record scanning JobExecutionRecord jobExRecord; if (jobExecutionRecordHandler != null && (jobExRecord = jobRepository.getJobExecutionRecord(jobId)) != null) { return jobExecutionRecordHandler.apply(jobExRecord); } JobRecord jobRecord; if ((jobRecord = jobRepository.getJobRecord(jobId)) != null) { return jobRecordHandler.apply(jobRecord); } // second check for JobResult, see comment at the top of the method jobResult = jobRepository.getJobResult(jobId); if (jobResult != null) { return jobResultHandler.apply(jobResult); } // job doesn't exist throw new JobNotFoundException(jobId); }); } void onMemberAdded(MemberImpl addedMember) { // the member can re-join with the same UUID in certain scenarios removedMembers.remove(addedMember.getUuid()); if (addedMember.isLiteMember()) { return; } updateQuorumValues(); scheduleScaleUp(config.getScaleUpDelayMillis()); } void onMemberRemoved(UUID uuid) { if (membersShuttingDown.remove(uuid) != null) { logFine(logger, "Removed a shutting-down member: %s, now shuttingDownMembers=%s", uuid, membersShuttingDown.keySet()); } else { removedMembers.put(uuid, System.nanoTime()); } // clean up old entries from removedMembers (the value is time when the member was removed) long removeThreshold = System.nanoTime() - HOURS.toNanos(1); removedMembers.entrySet().removeIf(en -> en.getValue() < removeThreshold); } boolean isQuorumPresent(int quorumSize) { return getDataMemberCount() >= quorumSize; } /** * Completes the job which is coordinated with the given master context object. */ @CheckReturnValue CompletableFuture<Void> completeJob(MasterContext masterContext, Throwable error, long completionTime) { return submitToCoordinatorThread(() -> { // the order of operations is important. List<RawJobMetrics> jobMetrics = masterContext.jobConfig().isStoreMetricsAfterJobCompletion() ? 
masterContext.jobContext().jobMetrics() : null; jobRepository.completeJob(masterContext, jobMetrics, error, completionTime); if (masterContexts.remove(masterContext.jobId(), masterContext)) { completeObservables(masterContext.jobRecord().getOwnedObservables(), error); logger.fine(masterContext.jobIdString() + " is completed"); (error == null ? jobCompletedSuccessfully : jobCompletedWithFailure).inc(); } else { MasterContext existing = masterContexts.get(masterContext.jobId()); if (existing != null) { logger.severe("Different master context found to complete " + masterContext.jobIdString() + ", master context execution " + idToString(existing.executionId())); } else { logger.severe("No master context found to complete " + masterContext.jobIdString()); } } unscheduleJobTimeout(masterContext.jobId()); }); } /** * Schedules a restart task that will be run in future for the given job */ void scheduleRestart(long jobId) { MasterContext masterContext = masterContexts.get(jobId); if (masterContext == null) { logger.severe("Master context for job " + idToString(jobId) + " not found to schedule restart"); return; } logger.fine("Scheduling restart on master for job " + masterContext.jobName()); nodeEngine.getExecutionService().schedule(COORDINATOR_EXECUTOR_NAME, () -> restartJob(jobId), RETRY_DELAY_IN_MILLIS, MILLISECONDS); } void scheduleSnapshot(MasterContext mc, long executionId) { long snapshotInterval = mc.jobConfig().getSnapshotIntervalMillis(); ExecutionService executionService = nodeEngine.getExecutionService(); if (logger.isFineEnabled()) { logger.fine(mc.jobIdString() + " snapshot is scheduled in " + snapshotInterval + "ms"); } executionService.schedule(COORDINATOR_EXECUTOR_NAME, () -> mc.snapshotContext().startScheduledSnapshot(executionId), snapshotInterval, MILLISECONDS); } /** * Restarts a job for a new execution if the cluster is stable. * Otherwise, it reschedules the restart task. */ void restartJob(long jobId) { MasterContext masterContext = masterContexts.get(jobId); if (masterContext == null) { logger.severe("Master context for job " + idToString(jobId) + " not found to restart"); return; } tryStartJob(masterContext); } private void checkOperationalState() { if (isClusterEnteringPassiveState) { throw new EnteringPassiveClusterStateException(); } } private void scheduleScaleUp(long delay) { int counter = scaleUpScheduledCount.incrementAndGet(); nodeEngine.getExecutionService().schedule(() -> scaleJobsUpNow(counter), delay, MILLISECONDS); } private void scaleJobsUpNow(int counter) { // if another scale-up was scheduled after this one, ignore this one if (scaleUpScheduledCount.get() != counter) { return; } // if we can't start jobs yet, we also won't tear them down if (!shouldStartJobs()) { scheduleScaleUp(RETRY_DELAY_IN_MILLIS); return; } submitToCoordinatorThread(() -> { boolean allSucceeded = true; int dataMembersCount = nodeEngine.getClusterService().getMembers(DATA_MEMBER_SELECTOR).size(); int partitionCount = nodeEngine.getPartitionService().getPartitionCount(); // If the number of partitions is lower than the data member count, some members won't have // any partitions assigned. Jet doesn't use such members. 
int dataMembersWithPartitionsCount = Math.min(dataMembersCount, partitionCount); for (MasterContext mc : masterContexts.values()) { allSucceeded &= mc.jobContext().maybeScaleUp(dataMembersWithPartitionsCount); } if (!allSucceeded) { scheduleScaleUp(RETRY_DELAY_IN_MILLIS); } }); } /** * Scans all job records and updates quorum size of a split-brain protection enabled * job with current cluster quorum size if the current cluster quorum size is larger */ private void updateQuorumValues() { if (!shouldCheckQuorumValues()) { return; } submitToCoordinatorThread(() -> { try { int currentQuorumSize = getQuorumSize(); for (JobRecord jobRecord : jobRepository.getJobRecords()) { try { if (!jobRecord.getConfig().isSplitBrainProtectionEnabled()) { continue; } MasterContext masterContext = masterContexts.get(jobRecord.getJobId()); // if MasterContext doesn't exist, update in the IMap directly, using a sync method if (masterContext == null) { jobRepository.updateJobQuorumSizeIfSmaller(jobRecord.getJobId(), currentQuorumSize); // check the master context again, it might have been just created and have picked // up the JobRecord before being updated masterContext = masterContexts.get(jobRecord.getJobId()); } if (masterContext != null) { masterContext.updateQuorumSize(currentQuorumSize); } } catch (Exception e) { logger.severe("Quorum of job " + idToString(jobRecord.getJobId()) + " could not be updated to " + currentQuorumSize, e); } } } catch (Exception e) { logger.severe("update quorum values task failed", e); } }); } private boolean shouldCheckQuorumValues() { return isMaster() && nodeEngine.isRunning() && getInternalPartitionService().getPartitionStateManager().isInitialized(); } private Object deserializeJobDefinition(long jobId, JobConfig jobConfig, Data jobDefinitionData) { JobClassLoaderService jobClassLoaderService = jetServiceBackend.getJobClassLoaderService(); ClassLoader classLoader = jobClassLoaderService.getOrCreateClassLoader(jobConfig, jobId, COORDINATOR); try { jobClassLoaderService.prepareProcessorClassLoaders(jobId); return deserializeWithCustomClassLoader(nodeEngine().getSerializationService(), classLoader, jobDefinitionData); } finally { jobClassLoaderService.clearProcessorClassLoaders(); } } private String dagToJson(DAG dag) { int coopThreadCount = config.getCooperativeThreadCount(); return dag.toJson(coopThreadCount).toString(); } private CompletableFuture<Void> startJobIfNotStartedOrCompleted( @Nonnull JobRecord jobRecord, @Nonnull JobExecutionRecord jobExecutionRecord, String reason ) { // the order of operations is important. long jobId = jobRecord.getJobId(); JobResult jobResult = jobRepository.getJobResult(jobId); if (jobResult != null) { logger.fine("Not starting job " + idToString(jobId) + ", already has result: " + jobResult); return jobResult.asCompletableFuture(); } MasterContext masterContext; MasterContext oldMasterContext; synchronized (lock) { checkOperationalState(); masterContext = createMasterContext(jobRecord, jobExecutionRecord); oldMasterContext = masterContexts.putIfAbsent(jobId, masterContext); } if (oldMasterContext != null) { return oldMasterContext.jobContext().jobCompletionFuture(); } // If job is not currently running, it might be that it just completed. // Since we've put the MasterContext into the masterContexts map, someone else could // have joined to the job in the meantime so we should notify its future. 
if (completeMasterContextIfJobAlreadyCompleted(masterContext)) { return masterContext.jobContext().jobCompletionFuture(); } if (jobExecutionRecord.isSuspended()) { logFinest(logger, "MasterContext for suspended %s is created", masterContext.jobIdString()); } else { logger.info("Starting job " + idToString(jobId) + ": " + reason); tryStartJob(masterContext); } return masterContext.jobContext().jobCompletionFuture(); } // If a job result is present, it completes the master context using the job result private boolean completeMasterContextIfJobAlreadyCompleted(MasterContext masterContext) { long jobId = masterContext.jobId(); JobResult jobResult = jobRepository.getJobResult(jobId); if (jobResult != null) { logger.fine("Completing master context for " + masterContext.jobIdString() + " since already completed with result: " + jobResult); masterContext.jobContext().setFinalResult(jobResult.getFailureAsThrowable()); return masterContexts.remove(jobId, masterContext); } if (!masterContext.jobConfig().isAutoScaling() && masterContext.jobExecutionRecord().executed()) { logger.info("Suspending or failing " + masterContext.jobIdString() + " since auto-restart is disabled and the job has been executed before"); masterContext.jobContext().finalizeJob(new TopologyChangedException()); return true; } return false; } private void tryStartJob(MasterContext masterContext) { masterContext.jobContext().tryStartJob(jobRepository::newExecutionId); if (masterContext.hasTimeout()) { long remainingTime = masterContext.remainingTime(Clock.currentTimeMillis()); scheduleJobTimeout(masterContext.jobId(), Math.max(1, remainingTime)); } } private int getQuorumSize() { return (getDataMemberCount() / 2) + 1; } private int getDataMemberCount() { ClusterService clusterService = nodeEngine.getClusterService(); return clusterService.getMembers(DATA_MEMBER_SELECTOR).size(); } private JobSummary getJobSummary(JobRecord record) { MasterContext ctx = masterContexts.get(record.getJobId()); long execId = ctx == null ? 0 : ctx.executionId(); JobStatus status; if (ctx == null) { JobExecutionRecord executionRecord = jobRepository.getJobExecutionRecord(record.getJobId()); status = executionRecord != null && executionRecord.isSuspended() ? 
JobStatus.SUSPENDED : JobStatus.NOT_RUNNING; } else { status = ctx.jobStatus(); } return new JobSummary(false, record.getJobId(), execId, record.getJobNameOrId(), status, record.getCreationTime()); } private InternalPartitionServiceImpl getInternalPartitionService() { Node node = nodeEngine.getNode(); return (InternalPartitionServiceImpl) node.getPartitionService(); } // runs periodically to restart jobs on coordinator failure and perform GC private void scanJobs() { long nextScanDelay = maxJobScanPeriodInMillis; try { // explicit check for master because we don't want to use shorter delay on non-master nodes // it will be checked again in shouldStartJobs() if (isMaster()) { if (shouldStartJobs()) { doScanJobs(); } else { // use a smaller delay when cluster is not in ready state nextScanDelay = MIN_JOB_SCAN_PERIOD_MILLIS; } } } catch (HazelcastInstanceNotActiveException ignored) { // ignore this exception } catch (Throwable e) { logger.severe("Scanning jobs failed", e); } ExecutionService executionService = nodeEngine.getExecutionService(); executionService.schedule(this::scanJobs, nextScanDelay, MILLISECONDS); } private void doScanJobs() { Collection<JobRecord> jobs = jobRepository.getJobRecords(); for (JobRecord jobRecord : jobs) { JobExecutionRecord jobExecutionRecord = ensureExecutionRecord(jobRecord.getJobId(), jobRepository.getJobExecutionRecord(jobRecord.getJobId())); startJobIfNotStartedOrCompleted(jobRecord, jobExecutionRecord, "discovered by scanning of JobRecords"); } jobRepository.cleanup(nodeEngine); if (!jobsScanned) { synchronized (lock) { jobsScanned = true; } } } private JobExecutionRecord ensureExecutionRecord(long jobId, JobExecutionRecord record) { return record != null ? record : new JobExecutionRecord(jobId, getQuorumSize()); } @SuppressWarnings("WeakerAccess") // used by jet-enterprise void assertIsMaster(String error) { if (!isMaster()) { throw new JetException(error + ". 
Master address: " + nodeEngine.getClusterService().getMasterAddress()); } } private boolean isMaster() { return nodeEngine.getClusterService().isMaster(); } @SuppressWarnings("unused") // used in jet-enterprise NodeEngineImpl nodeEngine() { return nodeEngine; } CompletableFuture<Void> submitToCoordinatorThread(Runnable action) { return submitToCoordinatorThread(() -> { action.run(); return null; }); } <T> CompletableFuture<T> submitToCoordinatorThread(Callable<T> action) { // if we are on our thread already, execute directly in a blocking way if (IS_JOB_COORDINATOR_THREAD.get()) { try { return completedFuture(action.call()); } catch (Throwable e) { // most callers ignore the failure on the returned future, let's log it at least logger.warning(null, e); return com.hazelcast.jet.impl.util.Util.exceptionallyCompletedFuture(e); } } Future<T> future = nodeEngine.getExecutionService().submit(COORDINATOR_EXECUTOR_NAME, () -> { assert !IS_JOB_COORDINATOR_THREAD.get() : "flag already raised"; IS_JOB_COORDINATOR_THREAD.set(true); try { return action.call(); } catch (Throwable e) { // most callers ignore the failure on the returned future, let's log it at least logger.warning(null, e); throw e; } finally { IS_JOB_COORDINATOR_THREAD.set(false); } }); return nodeEngine.getExecutionService().asCompletableFuture(future); } void assertOnCoordinatorThread() { assert IS_JOB_COORDINATOR_THREAD.get() : "not on coordinator thread"; } private void completeObservables(Set<String> observables, Throwable error) { for (String observable : observables) { try { String ringbufferName = ObservableImpl.ringbufferName(observable); Ringbuffer<Object> ringbuffer = nodeEngine.getHazelcastInstance().getRingbuffer(ringbufferName); Object completion = error == null ? DoneItem.DONE_ITEM : WrappedThrowable.of(error); ringbuffer.addAsync(completion, OverflowPolicy.OVERWRITE); } catch (Exception e) { logger.severe("Failed to complete observable '" + observable + "': " + e, e); } } } /** * From the given list of execution IDs returns those which are unknown to * this coordinator. */ public long[] findUnknownExecutions(long[] executionIds) { return Arrays.stream(executionIds).filter(key -> { Object lmc = lightMasterContexts.get(key); return lmc == null || lmc instanceof LightMasterContext && ((LightMasterContext) lmc).isCancelled(); }).toArray(); } private void scheduleJobTimeout(final long jobId, final long timeout) { if (timeout <= 0) { return; } scheduledJobTimeouts.computeIfAbsent(jobId, id -> scheduleJobTimeoutTask(id, timeout)); } private void unscheduleJobTimeout(final long jobId) { final ScheduledFuture<?> timeoutFuture = scheduledJobTimeouts.remove(jobId); if (timeoutFuture != null) { timeoutFuture.cancel(true); } } private ScheduledFuture<?> scheduleJobTimeoutTask(final long jobId, final long timeout) { return this.nodeEngine().getExecutionService().schedule(() -> { final MasterContext mc = masterContexts.get(jobId); final LightMasterContext lightMc = (LightMasterContext) lightMasterContexts.get(jobId); try { if (mc != null && isMaster() && !mc.jobStatus().isTerminal()) { terminateJob(jobId, CANCEL_FORCEFUL); } else if (lightMc != null && !lightMc.isCancelled()) { lightMc.requestTermination(); } } finally { scheduledJobTimeouts.remove(jobId); } }, timeout, MILLISECONDS); } boolean isMemberShuttingDown(UUID uuid) { return membersShuttingDown.containsKey(uuid); } }
jerrinot/hazelcast
hazelcast/src/main/java/com/hazelcast/jet/impl/JobCoordinationService.java
Java
apache-2.0
58,765
package zktopo

import (
	"encoding/json"
	"path"
	"strings"

	"github.com/golang/protobuf/proto"

	topodatapb "github.com/youtube/vitess/go/vt/proto/topodata"
	vschemapb "github.com/youtube/vitess/go/vt/proto/vschema"
)

// This file contains utility functions to maintain backward compatibility
// with old-style non-Backend Zookeeper topologies. The old
// implementations (before 2016-08-17) used to deal with explicit data
// types. We converted them to a generic []byte and path
// interface, but the zookeeper implementation was not compatible with
// this.

// dataType is an enum for possible data types, used for backward
// compatibility.
type dataType int

// Constants for type conversion
const (
	// newType is used to indicate a topology object type of
	// anything that is added after the topo.Backend refactor,
	// i.e. anything that doesn't require conversion between old
	// style topologies and the new style ones. The list of enum
	// values after this contains all types that exist at the
	// moment (2016-08-17) and doesn't need to be expanded when
	// something new is saved in the topology, because it will be
	// saved in the new style, not in the old one.
	newType dataType = iota
	srvKeyspaceType
	srvVSchemaType
)

// rawDataFromNodeValue converts the data of the given type into a []byte.
// It is mindful of backward compatibility, i.e. for newer objects
// it doesn't do anything, but old object types that were stored in JSON
// format are converted to proto3 binary encoding.
func rawDataFromNodeValue(what dataType, data []byte) ([]byte, error) {
	var p proto.Message
	switch what {
	case srvKeyspaceType:
		p = &topodatapb.SrvKeyspace{}
	case srvVSchemaType:
		p = &vschemapb.SrvVSchema{}
	default:
		return data, nil
	}

	if err := json.Unmarshal(data, p); err != nil {
		return nil, err
	}
	return proto.Marshal(p)
}

// oldTypeAndFilePath returns the data type and old file path for a given path.
func oldTypeAndFilePath(cell, filePath string) (dataType, string) {
	parts := strings.Split(filePath, "/")

	// SrvKeyspace: local cell, keyspaces/<keyspace>/SrvKeyspace
	if len(parts) == 3 && parts[0] == "keyspaces" && parts[2] == "SrvKeyspace" {
		return srvKeyspaceType, zkPathForSrvKeyspace(cell, parts[1])
	}

	// SrvVSchema: local cell, SrvVSchema
	if len(parts) == 1 && parts[0] == "SrvVSchema" {
		return srvVSchemaType, zkPathForSrvVSchema(cell)
	}

	// General case.
	return newType, path.Join("/zk", cell, "vt", filePath)
}
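// --- Hypothetical illustration, not part of the original file ---
// A sketch of how the two helpers above fit together when reading an
// old-style node: the path tells us the legacy type, and the JSON payload is
// re-encoded as proto3 binary. The function name and path values are
// illustrative only.
func exampleConvertSrvKeyspace(cell string, oldJSON []byte) ([]byte, error) {
	// "keyspaces/ks1/SrvKeyspace" matches the SrvKeyspace pattern above,
	// so `what` is srvKeyspaceType and the JSON-to-proto3 branch is taken.
	what, _ := oldTypeAndFilePath(cell, "keyspaces/ks1/SrvKeyspace")
	return rawDataFromNodeValue(what, oldJSON)
}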
theskyinflames/bpulse-go-client
vendor/github.com/youtube/vitess/go/vt/zktopo/convert.go
Go
apache-2.0
2,481
/* * Copyright (c) 2013 Juniper Networks, Inc. All rights reserved. */ #include "base/os.h" #include <boost/assign/list_of.hpp> #include <cfg/cfg_init.h> #include <cfg/cfg_interface.h> #include <oper/operdb_init.h> #include <controller/controller_init.h> #include <controller/controller_ifmap.h> #include <pkt/pkt_init.h> #include <services/services_init.h> #include <vrouter/ksync/ksync_init.h> #include <cmn/agent_cmn.h> #include <base/task.h> #include <io/event_manager.h> #include <base/util.h> #include <oper/vn.h> #include <oper/vm.h> #include <oper/vm_interface.h> #include <oper/agent_sandesh.h> #include <oper/interface_common.h> #include <oper/vxlan.h> #include "vr_types.h" #include "testing/gunit.h" #include "test_cmn_util.h" #include "xmpp/test/xmpp_test_util.h" using namespace std; using namespace boost::assign; void RouterIdDepInit(Agent *agent) { } static void ValidateSandeshResponse(Sandesh *sandesh, vector<int> &result) { //TBD //Validate the response by the expectation } void DoInterfaceSandesh(std::string name) { ItfReq *itf_req = new ItfReq(); std::vector<int> result = list_of(1); Sandesh::set_response_callback(boost::bind(ValidateSandeshResponse, _1, result)); if (name != "") { itf_req->set_name(name); } itf_req->HandleRequest(); client->WaitForIdle(); itf_req->Release(); client->WaitForIdle(); } AgentIntfSandesh *CreateAgentIntfSandesh(const char *name) { return new AgentIntfSandesh("", "", "vnet1", "", "", "", "", "", "", "", "", ""); } class CfgTest : public ::testing::Test { public: virtual void SetUp() { agent_ = Agent::GetInstance(); } virtual void TearDown() { EXPECT_EQ(0U, Agent::GetInstance()->acl_table()->Size()); } Agent *agent_; }; TEST_F(CfgTest, AddDelVmPortNoVn_1) { struct PortInfo input[] = { {"vnet1", 1, "1.1.1.1", "00:00:00:01:01:01", 1, 1}, }; client->Reset(); IntfCfgAdd(input, 0); EXPECT_TRUE(client->PortNotifyWait(1)); EXPECT_TRUE(VmPortInactive(input, 0)); EXPECT_EQ(4U, Agent::GetInstance()->interface_table()->Size()); EXPECT_EQ(1U, Agent::GetInstance()->interface_config_table()->Size()); client->Reset(); IntfCfgDel(input, 0); client->WaitForIdle(); EXPECT_TRUE(client->PortNotifyWait(1)); EXPECT_FALSE(VmPortFind(input, 0)); EXPECT_EQ(3U, Agent::GetInstance()->interface_table()->Size()); EXPECT_EQ(0U, Agent::GetInstance()->interface_config_table()->Size()); } TEST_F(CfgTest, AddDelExport) { client->Reset(); CfgIntKey *key = new CfgIntKey(MakeUuid(1)); CfgIntData *data = new CfgIntData(); boost::system::error_code ec; Ip4Address ip = Ip4Address::from_string("1.1.1.1", ec); data->Init(MakeUuid(1), MakeUuid(1), MakeUuid(kProjectUuid), "vnet1", ip, Ip6Address(), "00:00:00:01:01:01", "", VmInterface::kInvalidVlanId, VmInterface::kInvalidVlanId, CfgIntEntry::CfgIntVMPort, 0); DBRequest req; req.oper = DBRequest::DB_ENTRY_ADD_CHANGE; req.key.reset(key); req.data.reset(data); Agent::GetInstance()->interface_config_table()->Enqueue(&req); CfgIntKey *key1 = new CfgIntKey(MakeUuid(1)); CfgIntData *data1 = new CfgIntData(); ip = Ip4Address::from_string("1.1.1.1", ec); data1->Init(MakeUuid(1), MakeUuid(1), MakeUuid(kProjectUuid), "vnet1", ip, Ip6Address(), "00:00:00:01:01:01", "", VmInterface::kInvalidVlanId, VmInterface::kInvalidVlanId, CfgIntEntry::CfgIntVMPort, 0); req.key.reset(key1); req.data.reset(data1); req.oper = DBRequest::DB_ENTRY_DELETE; Agent::GetInstance()->interface_config_table()->Enqueue(&req); usleep(1000); EXPECT_EQ(0U, Agent::GetInstance()->interface_config_table()->Size()); } TEST_F(CfgTest, AddDelVmPortDepOnVmVn_1) { struct PortInfo input[] = { 
{"vnet1", 1, "1.1.1.1", "00:00:00:01:01:01", 1, 1}, }; // Nova Port add message - Should be inactive since VM and VN not present client->Reset(); IntfCfgAdd(input, 0); EXPECT_TRUE(client->PortNotifyWait(1)); EXPECT_TRUE(VmPortInactive(input, 0)); EXPECT_EQ(4U, Agent::GetInstance()->interface_table()->Size()); EXPECT_EQ(1U, Agent::GetInstance()->interface_config_table()->Size()); // Config VM Add - Port inactive since VN not present AddVm("vm1", 1); EXPECT_TRUE(client->VmNotifyWait(1)); EXPECT_TRUE(VmFind(1)); EXPECT_TRUE(VmPortInactive(input, 0)); EXPECT_EQ(1U, Agent::GetInstance()->vm_table()->Size()); AddVrf("vrf1"); client->WaitForIdle(); EXPECT_TRUE(client->VrfNotifyWait(1)); EXPECT_TRUE(VrfFind("vrf1")); // Config VN Add - Port inactive since interface oper-db not aware of // VM and VN added AddVn("vn1", 1); EXPECT_TRUE(client->VnNotifyWait(1)); EXPECT_TRUE(VnFind(1)); EXPECT_TRUE(VmPortInactive(input, 0)); EXPECT_EQ(1U, Agent::GetInstance()->vn_table()->Size()); // Config Port add - Interface oper-db still inactive since no link between // VN and VRF client->Reset(); AddPort(input[0].name, input[0].intf_id); client->WaitForIdle(); EXPECT_TRUE(VmPortInactive(input, 0)); // Add VN and VRF link. Port in-active since not linked to VM and VN client->Reset(); AddLink("virtual-network", "vn1", "routing-instance", "vrf1"); client->WaitForIdle(); EXPECT_TRUE(VmPortInactive(input, 0)); // Add VM and Port link. Port in-active since port not linked to VN client->Reset(); AddLink("virtual-machine", "vm1", "virtual-machine-interface", "vnet1"); client->WaitForIdle(); //EXPECT_TRUE(client->PortNotifyWait(1)); EXPECT_TRUE(VmPortInactive(input, 0)); // Add Port to VN link - Port is active client->Reset(); AddLink("virtual-network", "vn1", "virtual-machine-interface", "vnet1"); client->WaitForIdle(); EXPECT_TRUE(VnFind(1)); EXPECT_TRUE(VmPortInactive(input, 0)); AddVmPortVrf("vnet1", "", 0); client->WaitForIdle(); EXPECT_TRUE(VmPortInactive(input, 0)); client->Reset(); AddInstanceIp("instance0", input[0].vm_id, input[0].addr); AddLink("virtual-machine-interface", input[0].name, "instance-ip", "instance0"); client->WaitForIdle(); EXPECT_TRUE(VmPortInactive(input, 0)); AddLink("virtual-machine-interface-routing-instance", "vnet1", "routing-instance", "vrf1"); client->WaitForIdle(); EXPECT_TRUE(VmPortInactive(input, 0)); AddLink("virtual-machine-interface-routing-instance", "vnet1", "virtual-machine-interface", "vnet1"); client->WaitForIdle(); EXPECT_TRUE(VmPortActive(input, 0)); // Delete Port to VN link. Port is inactive client->Reset(); DelLink("virtual-network", "vn1", "virtual-machine-interface", "vnet1"); client->WaitForIdle(); EXPECT_TRUE(VmFind(1)); EXPECT_TRUE(VmPortInactive(input, 0)); // Delete virtual-machine-interface to vrf link attribute DelLink("virtual-machine-interface-routing-instance", "vnet1", "routing-instance", "vrf1"); DelLink("virtual-machine-interface-routing-instance", "vnet1", "virtual-machine-interface", "vnet1"); DelLink("instance-ip", "instance0", "virtual-machine-interface", "vnet1"); DelNode("virtual-machine-interface-routing-instance", "vnet1"); client->WaitForIdle(); // Delete config port entry. 
Port still present but inactive client->Reset(); DelLink("virtual-network", "vn1", "routing-instance", "vrf1"); DelLink("virtual-machine", "vm1", "virtual-machine-interface", "vnet1"); DelNode("virtual-machine-interface", "vnet1"); DelNode("virtual-machine", "vm1"); DelNode("virtual-network", "vn1"); client->WaitForIdle(); EXPECT_TRUE(VmPortFind(input, 0)); EXPECT_TRUE(VmPortInactive(input, 0)); EXPECT_FALSE(VmFind(1)); DelNode("routing-instance", "vrf1"); DelInstanceIp("instance0"); client->WaitForIdle(); EXPECT_FALSE(VrfFind("vrf1")); // Delete Nova Port entry. client->Reset(); IntfCfgDel(input, 0); client->WaitForIdle(); EXPECT_TRUE(client->PortNotifyWait(1)); EXPECT_FALSE(VmPortFind(input, 0)); EXPECT_EQ(3U, Agent::GetInstance()->interface_table()->Size()); EXPECT_EQ(0U, Agent::GetInstance()->vm_table()->Size()); EXPECT_EQ(0U, Agent::GetInstance()->interface_config_table()->Size()); EXPECT_EQ(0U, Agent::GetInstance()->vn_table()->Size()); EXPECT_FALSE(VnFind(1)); } TEST_F(CfgTest, AddDelVmPortDepOnVmVn_2) { struct PortInfo input[] = { {"vnet1", 1, "1.1.1.1", "00:00:00:01:01:01", 1, 1}, }; // Config VM Add - Port inactive since VN not present client->Reset(); AddVm("vm1", 1); EXPECT_TRUE(client->VmNotifyWait(1)); EXPECT_TRUE(VmFind(1)); EXPECT_EQ(1U, Agent::GetInstance()->vm_table()->Size()); // Nova Port add message - Should be inactive since VN not present client->Reset(); IntfCfgAdd(input, 0); EXPECT_TRUE(client->PortNotifyWait(1)); EXPECT_TRUE(VmPortInactive(input, 0)); EXPECT_EQ(4U, Agent::GetInstance()->interface_table()->Size()); EXPECT_EQ(1U, Agent::GetInstance()->interface_config_table()->Size()); // Config VN Add - Port inactive since interface oper-db not aware of // VM and VN added AddVn("vn1", 1); EXPECT_TRUE(client->VnNotifyWait(1)); EXPECT_TRUE(VmPortInactive(input, 0)); EXPECT_TRUE(VnFind(1)); EXPECT_EQ(1U, Agent::GetInstance()->vn_table()->Size()); // Add link between VN and VRF. 
Interface still inactive client->Reset(); AddVrf("vrf2"); AddLink("virtual-network", "vn1", "routing-instance", "vrf2"); client->WaitForIdle(); EXPECT_TRUE(VrfFind("vrf2")); // Config Port add - Interface still inactive client->Reset(); AddPort(input[0].name, input[0].intf_id); client->WaitForIdle(); EXPECT_TRUE(VmPortInactive(input, 0)); // Add Port to VM link - Port is inactive client->Reset(); AddLink("virtual-machine", "vm1", "virtual-machine-interface", "vnet1"); client->WaitForIdle(); EXPECT_TRUE(VmPortInactive(input, 0)); // Add vm-port interface to vrf link AddVmPortVrf("vnet1", "", 0); AddLink("virtual-machine-interface-routing-instance", "vnet1", "routing-instance", "vrf2"); AddLink("virtual-machine-interface-routing-instance", "vnet1", "virtual-machine-interface", "vnet1"); client->WaitForIdle(); EXPECT_TRUE(VmPortInactive(input, 0)); AddInstanceIp("instance0", input[0].vm_id, input[0].addr); AddLink("virtual-machine-interface", input[0].name, "instance-ip", "instance0"); client->WaitForIdle(); EXPECT_TRUE(VmPortInactive(input, 0)); // Add Port to VN link - Port is active client->Reset(); AddLink("virtual-network", "vn1", "virtual-machine-interface", "vnet1"); client->WaitForIdle(); EXPECT_TRUE(VnFind(1)); EXPECT_TRUE(VmPortActive(input, 0)); // Delete virtual-machine-interface to vrf link attribute DelLink("virtual-machine-interface-routing-instance", "vnet1", "routing-instance", "vrf2"); DelLink("virtual-machine-interface-routing-instance", "vnet1", "virtual-machine-interface", "vnet1"); DelNode("virtual-machine-interface-routing-instance", "vnet1"); DelLink("instance-ip", "instance0", "virtual-machine-interface", "vnet1"); client->WaitForIdle(); // Delete Nova Port entry. client->Reset(); IntfCfgDel(input, 0); EXPECT_TRUE(client->PortNotifyWait(1)); EXPECT_FALSE(VmPortFind(input, 0)); EXPECT_EQ(3U, Agent::GetInstance()->interface_table()->Size()); EXPECT_EQ(0U, Agent::GetInstance()->interface_config_table()->Size()); client->Reset(); DelLink("virtual-machine", "vm1", "virtual-machine-interface", "vnet1"); client->WaitForIdle(); DelLink("virtual-network", "vn1", "virtual-machine-interface", "vnet1"); client->WaitForIdle(); DelNode("virtual-machine-interface", "vnet1"); client->WaitForIdle(); DelNode("virtual-machine", "vm1"); client->WaitForIdle(); DelLink("virtual-network", "vn1", "routing-instance", "vrf2"); client->WaitForIdle(); DelNode("virtual-network", "vn1"); client->WaitForIdle(); EXPECT_FALSE(VnFind(1)); EXPECT_FALSE(VmFind(1)); EXPECT_FALSE(VmPortFind(input, 0)); DelNode("routing-instance", "vrf2"); DelInstanceIp("instance0"); client->WaitForIdle(); EXPECT_FALSE(VrfFind("vrf2")); } TEST_F(CfgTest, AddDelVmPortDepOnVmVn_3) { struct PortInfo input[] = { {"vnet1", 1, "1.1.1.1", "00:00:00:01:01:01", 1, 1}, }; // Nova Port add message - Should be inactive since VM and VN not present client->Reset(); IntfCfgAdd(input, 0); EXPECT_TRUE(client->PortNotifyWait(1)); EXPECT_TRUE(VmPortInactive(input, 0)); EXPECT_EQ(4U, Agent::GetInstance()->interface_table()->Size()); EXPECT_EQ(1U, Agent::GetInstance()->interface_config_table()->Size()); // Config VM Add - Port inactive since VN not present AddVm("vm1", 1); EXPECT_TRUE(client->VmNotifyWait(1)); EXPECT_TRUE(VmFind(1)); EXPECT_TRUE(VmPortInactive(input, 0)); EXPECT_EQ(1U, Agent::GetInstance()->vm_table()->Size()); AddVrf("vrf1"); client->WaitForIdle(); EXPECT_TRUE(client->VrfNotifyWait(1)); EXPECT_TRUE(VrfFind("vrf1")); // Config VN Add - Port inactive since interface oper-db not aware of // VM and VN added AddVn("vn1", 1); 
EXPECT_TRUE(client->VnNotifyWait(1)); EXPECT_TRUE(VnFind(1)); EXPECT_TRUE(VmPortInactive(input, 0)); EXPECT_EQ(1U, Agent::GetInstance()->vn_table()->Size()); // Config Port add - Interface oper-db still inactive since no link between // VN and VRF client->Reset(); AddInstanceIp("instance0", input[0].vm_id, input[0].addr); AddLink("virtual-machine-interface", input[0].name, "instance-ip", "instance0"); client->WaitForIdle(); EXPECT_TRUE(VmPortInactive(input, 0)); client->Reset(); AddPort(input[0].name, input[0].intf_id); client->WaitForIdle(); EXPECT_TRUE(VmPortInactive(input, 0)); // Add vm-port interface to vrf link AddVmPortVrf("vnet1", "", 0); AddLink("virtual-machine-interface-routing-instance", "vnet1", "routing-instance", "vrf1"); AddLink("virtual-machine-interface-routing-instance", "vnet1", "virtual-machine-interface", "vnet1"); client->WaitForIdle(); EXPECT_TRUE(VmPortInactive(input, 0)); // Add VN and VRF link. Port in-active since not linked to VM and VN client->Reset(); AddLink("virtual-network", "vn1", "routing-instance", "vrf1"); client->WaitForIdle(); EXPECT_TRUE(VmPortInactive(input, 0)); // Add VM and Port link. Port in-active since port not linked to VN client->Reset(); AddLink("virtual-machine", "vm1", "virtual-machine-interface", "vnet1"); client->WaitForIdle(); //EXPECT_TRUE(client->PortNotifyWait(1)); EXPECT_TRUE(VmPortInactive(input, 0)); //Add instance ip configuration AddInstanceIp("instance0", input[0].vm_id, input[0].addr); AddLink("virtual-machine-interface", input[0].name, "instance-ip", "instance0"); client->WaitForIdle(); //EXPECT_TRUE(client->PortNotifyWait(1)); EXPECT_TRUE(VmPortInactive(input, 0)); // Add Port to VN link - Port is active client->Reset(); AddLink("virtual-network", "vn1", "virtual-machine-interface", "vnet1"); client->WaitForIdle(); EXPECT_TRUE(VnFind(1)); EXPECT_TRUE(VmPortActive(input, 0)); // Delete virtual-machine-interface to vrf link attribute DelLink("virtual-machine-interface-routing-instance", "vnet1", "routing-instance", "vrf1"); DelLink("virtual-machine-interface-routing-instance", "vnet1", "virtual-machine-interface", "vnet1"); DelLink( "virtual-machine-interface", "vnet1", "instance-ip", "instance0"); DelNode("virtual-machine-interface-routing-instance", "vnet1"); client->WaitForIdle(); EXPECT_TRUE(VmPortInactive(input, 0)); client->WaitForIdle(); // Delete VM and its associated links. 
INSTANCE_MSG is still not deleted // Vmport should be inactive DelLink("virtual-machine", "vm1", "virtual-machine-interface", "vnet1"); DelLink("virtual-network", "vn1", "virtual-machine-interface", "vnet1"); DelNode("virtual-machine", "vm1"); client->WaitForIdle(); EXPECT_TRUE(VmPortInactive(input, 0)); DelLink("virtual-network", "vn1", "routing-instance", "vrf1"); DelNode("routing-instance", "vrf1"); DelNode("virtual-network", "vn1"); DelNode("virtual-machine-interface", "vnet1"); DelInstanceIp("instance0"); client->WaitForIdle(); IntfCfgDel(input, 0); client->WaitForIdle(); } // VN has ACL set before VM Port is created TEST_F(CfgTest, VmPortPolicy_1) { struct PortInfo input[] = { {"vnet1", 1, "1.1.1.1", "00:00:00:01:01:01", 1, 1}, {"vnet2", 2, "1.1.1.2", "00:00:00:02:02:02", 1, 1}, }; client->Reset(); AddVm("vm1", 1); client->WaitForIdle(); AddAcl("acl1", 1); client->WaitForIdle(); AddVrf("vrf3"); client->WaitForIdle(); AddVn("vn1", 1); client->WaitForIdle(); EXPECT_TRUE(client->AclNotifyWait(1)); EXPECT_TRUE(client->VmNotifyWait(1)); EXPECT_TRUE(client->VrfNotifyWait(1)); EXPECT_TRUE(client->VnNotifyWait(1)); EXPECT_EQ(1U, Agent::GetInstance()->vn_table()->Size()); EXPECT_EQ(1U, Agent::GetInstance()->vm_table()->Size()); EXPECT_EQ(1U, Agent::GetInstance()->acl_table()->Size()); EXPECT_TRUE(VrfFind("vrf3")); // Add vm-port interface to vrf link AddVmPortVrf("vmvrf1", "", 0); AddVmPortVrf("vmvrf2", "", 0); client->WaitForIdle(); AddPort(input[0].name, input[0].intf_id); AddPort(input[1].name, input[1].intf_id); AddLink("virtual-network", "vn1", "routing-instance", "vrf3"); AddLink("virtual-network", "vn1", "virtual-machine-interface", "vnet1"); AddLink("virtual-machine", "vm1", "virtual-machine-interface", "vnet1"); AddLink("virtual-network", "vn1", "virtual-machine-interface", "vnet2"); AddLink("virtual-machine", "vm1", "virtual-machine-interface", "vnet2"); AddLink("virtual-machine-interface-routing-instance", "vmvrf1", "routing-instance", "vrf3"); AddLink("virtual-machine-interface-routing-instance", "vmvrf1", "virtual-machine-interface", "vnet1"); AddLink("virtual-machine-interface-routing-instance", "vmvrf2", "routing-instance", "vrf3"); AddLink("virtual-machine-interface-routing-instance", "vmvrf2", "virtual-machine-interface", "vnet2"); AddInstanceIp("instance0", input[0].vm_id, input[0].addr); AddInstanceIp("instance1", input[0].vm_id, input[1].addr); AddLink("virtual-machine-interface", input[0].name, "instance-ip", "instance0"); AddLink("virtual-machine-interface", input[1].name, "instance-ip", "instance1"); client->WaitForIdle(); client->Reset(); IntfCfgAdd(input, 0); IntfCfgAdd(input, 1); client->WaitForIdle(); EXPECT_TRUE(VmPortActive(input, 0)); EXPECT_TRUE(VmPortActive(input, 1)); EXPECT_FALSE(VmPortPolicyEnable(input, 0)); EXPECT_FALSE(VmPortPolicyEnable(input, 1)); AddLink("virtual-network", "vn1", "access-control-list", "acl1"); client->WaitForIdle(); EXPECT_TRUE(VmPortPolicyEnable(input, 0)); EXPECT_TRUE(VmPortPolicyEnable(input, 1)); client->Reset(); DelLink("virtual-network", "vn1", "access-control-list", "acl1"); client->WaitForIdle(); EXPECT_TRUE(client->AclNotifyWait(0)); EXPECT_TRUE(client->VnNotifyWait(1)); EXPECT_TRUE(VmPortPolicyDisable(input, 0)); EXPECT_TRUE(VmPortPolicyDisable(input, 1)); client->Reset(); DelNode("access-control-list", "acl1"); client->WaitForIdle(); EXPECT_TRUE(client->AclNotifyWait(1)); EXPECT_EQ(0U, Agent::GetInstance()->acl_table()->Size()); // Del VN to VRF link. 
Port should become inactive client->Reset(); DelLink("virtual-network", "vn1", "routing-instance", "vrf3"); client->WaitForIdle(); EXPECT_TRUE(VmPortActive(input, 0)); EXPECT_TRUE(VmPortActive(input, 1)); // Delete virtual-machine-interface to vrf link attribute DelLink("virtual-machine-interface-routing-instance", "vmvrf1", "routing-instance", "vrf3"); DelLink("virtual-machine-interface-routing-instance", "vmvrf1", "virtual-machine-interface", "vnet1"); DelNode("virtual-machine-interface-routing-instance", "vmvrf1"); DelLink("virtual-machine-interface-routing-instance", "vmvrf2", "routing-instance", "vrf3"); DelLink("virtual-machine-interface-routing-instance", "vmvrf2", "virtual-machine-interface", "vnet2"); DelNode("virtual-machine-interface-routing-instance", "vmvrf2"); client->WaitForIdle(); EXPECT_TRUE(VmPortInactive(input, 0)); EXPECT_TRUE(VmPortInactive(input, 1)); DelLink("virtual-network", "vn1", "virtual-machine-interface", "vnet1"); DelLink("virtual-machine", "vm1", "virtual-machine-interface", "vnet1"); DelLink("virtual-network", "vn1", "virtual-machine-interface", "vnet2"); DelLink("virtual-machine", "vm1", "virtual-machine-interface", "vnet2"); DelLink("virtual-machine-interface", input[0].name, "instance-ip", "instance0"); DelLink("virtual-machine-interface", input[1].name, "instance-ip", "instance1"); // Delete config vm entry - no-op for oper-db. Port is active client->Reset(); DelNode("virtual-machine", "vm1"); client->WaitForIdle(); // VM not deleted. Interface still refers to it EXPECT_FALSE(VmFind(1)); client->Reset(); DelNode("virtual-machine-interface", "vnet1"); DelNode("virtual-machine-interface", "vnet2"); EXPECT_TRUE(client->PortNotifyWait(2)); //After deleting vmport interface config, verify config name is set to "" const Interface *intf = VmPortGet(1); const VmInterface *vm_intf = static_cast<const VmInterface *>(intf); EXPECT_TRUE((vm_intf->cfg_name() == "")); intf = VmPortGet(2); vm_intf = static_cast<const VmInterface *>(intf); EXPECT_TRUE((vm_intf->cfg_name() == "")); // Delete Nova Port entry. 
client->Reset(); IntfCfgDel(input, 0); IntfCfgDel(input, 1); EXPECT_TRUE(client->PortDelNotifyWait(2)); EXPECT_FALSE(VmFind(1)); EXPECT_FALSE(VmPortFind(input, 0)); EXPECT_EQ(3U, Agent::GetInstance()->interface_table()->Size()); EXPECT_EQ(0U, Agent::GetInstance()->vm_table()->Size()); EXPECT_EQ(1U, Agent::GetInstance()->vn_table()->Size()); EXPECT_EQ(0U, Agent::GetInstance()->vm_table()->Size()); DelNode("virtual-network", "vn1"); client->WaitForIdle(); EXPECT_FALSE(VnFind(1)); DelNode("routing-instance", "vrf3"); DelInstanceIp("instance0"); DelInstanceIp("instance1"); client->WaitForIdle(); EXPECT_FALSE(VrfFind("vrf3")); } // ACL added after VM Port is created TEST_F(CfgTest, VmPortPolicy_2) { struct PortInfo input[] = { {"vnet1", 1, "1.1.1.1", "00:00:00:01:01:01", 1, 1}, {"vnet2", 2, "1.1.1.2", "00:00:00:02:02:02", 1, 1}, }; client->Reset(); AddVm("vm1", 1); EXPECT_TRUE(client->VmNotifyWait(1)); EXPECT_TRUE(VmFind(1)); AddVn("vn1", 1); EXPECT_TRUE(client->VnNotifyWait(1)); EXPECT_EQ(1U, Agent::GetInstance()->vn_table()->Size()); client->Reset(); AddAcl("acl1", 1); EXPECT_TRUE(client->AclNotifyWait(1)); client->Reset(); AddPort(input[0].name, input[0].intf_id); AddPort(input[1].name, input[1].intf_id); client->Reset(); IntfCfgAdd(input, 0); IntfCfgAdd(input, 1); EXPECT_TRUE(client->PortNotifyWait(2)); // Port inactive since VRF is not yet present EXPECT_TRUE(VmPortInactive(input, 0)); EXPECT_TRUE(VmPortInactive(input, 1)); EXPECT_TRUE(VmPortPolicyDisable(input, 0)); EXPECT_TRUE(VmPortPolicyDisable(input, 1)); EXPECT_EQ(5U, Agent::GetInstance()->interface_table()->Size()); EXPECT_EQ(1U, Agent::GetInstance()->vm_table()->Size()); EXPECT_EQ(1U, Agent::GetInstance()->vn_table()->Size()); EXPECT_EQ(2U, Agent::GetInstance()->interface_config_table()->Size()); AddVrf("vrf4"); client->WaitForIdle(); EXPECT_TRUE(VrfFind("vrf4")); // Add vm-port interface to vrf link AddVmPortVrf("vmvrf1", "", 0); AddVmPortVrf("vmvrf2", "", 0); AddLink("virtual-machine-interface-routing-instance", "vmvrf1", "routing-instance", "vrf4"); AddLink("virtual-machine-interface-routing-instance", "vmvrf1", "virtual-machine-interface", "vnet1"); AddLink("virtual-machine-interface-routing-instance", "vmvrf2", "routing-instance", "vrf4"); AddLink("virtual-machine-interface-routing-instance", "vmvrf2", "virtual-machine-interface", "vnet2"); client->WaitForIdle(); EXPECT_TRUE(VmPortInactive(input, 0)); EXPECT_TRUE(VmPortInactive(input, 1)); AddLink("virtual-network", "vn1", "routing-instance", "vrf4"); AddLink("virtual-network", "vn1", "virtual-machine-interface", "vnet1"); AddLink("virtual-network", "vn1", "virtual-machine-interface", "vnet2"); AddLink("virtual-machine", "vm1", "virtual-machine-interface", "vnet1"); AddLink("virtual-machine", "vm1", "virtual-machine-interface", "vnet2"); AddInstanceIp("instance0", input[0].vm_id, input[0].addr); AddInstanceIp("instance1", input[0].vm_id, input[1].addr); AddLink("virtual-machine-interface", input[0].name, "instance-ip", "instance0"); AddLink("virtual-machine-interface", input[1].name, "instance-ip", "instance1"); client->WaitForIdle(); EXPECT_TRUE(VmPortActive(input, 0)); EXPECT_TRUE(VmPortActive(input, 1)); client->Reset(); AddLink("virtual-network", "vn1", "access-control-list", "acl1"); client->WaitForIdle(); EXPECT_TRUE(VmPortPolicyEnable(input, 0)); EXPECT_TRUE(VmPortPolicyEnable(input, 1)); client->Reset(); DelLink("virtual-network", "vn1", "access-control-list", "acl1"); client->WaitForIdle(); EXPECT_TRUE(client->AclNotifyWait(0)); EXPECT_TRUE(client->VnNotifyWait(1)); 
EXPECT_TRUE(VmPortPolicyDisable(input, 0)); EXPECT_TRUE(VmPortPolicyDisable(input, 1)); client->Reset(); DelNode("access-control-list", "acl1"); client->WaitForIdle(); EXPECT_TRUE(client->AclNotifyWait(1)); EXPECT_EQ(0U, Agent::GetInstance()->acl_table()->Size()); // Delete virtual-machine-interface to vrf link attribute DelLink("virtual-machine-interface-routing-instance", "vmvrf1", "routing-instance", "vrf4"); DelLink("virtual-machine-interface-routing-instance", "vmvrf1", "virtual-machine-interface", "vnet1"); DelNode("virtual-machine-interface-routing-instance", "vmvrf1"); DelLink("virtual-machine-interface-routing-instance", "vmvrf2", "routing-instance", "vrf4"); DelLink("virtual-machine-interface-routing-instance", "vmvrf2", "virtual-machine-interface", "vnet2"); DelNode("virtual-machine-interface-routing-instance", "vmvrf2"); client->WaitForIdle(); DelLink("virtual-network", "vn1", "routing-instance", "vrf4"); DelLink("virtual-network", "vn1", "virtual-machine-interface", "vnet1"); DelLink("virtual-network", "vn1", "virtual-machine-interface", "vnet2"); DelLink("virtual-machine", "vm1", "virtual-machine-interface", "vnet1"); DelLink("virtual-machine", "vm1", "virtual-machine-interface", "vnet2"); DelLink("virtual-machine-interface", input[0].name, "instance-ip", "instance0"); DelLink("virtual-machine-interface", input[1].name, "instance-ip", "instance1"); // Delete config vm entry - no-op for oper-db. Port is active client->Reset(); DelNode("virtual-machine", "vm1"); client->WaitForIdle(); EXPECT_TRUE(VnFind(1)); EXPECT_FALSE(VmFind(1)); EXPECT_TRUE(VmPortFind(input, 0)); EXPECT_EQ(5U, Agent::GetInstance()->interface_table()->Size()); EXPECT_EQ(0U, Agent::GetInstance()->vm_table()->Size()); EXPECT_EQ(1U, Agent::GetInstance()->vn_table()->Size()); EXPECT_EQ(2U, Agent::GetInstance()->interface_config_table()->Size()); DelPort(input[0].name); DelPort(input[1].name); client->Reset(); // Delete Nova Port entry. client->Reset(); IntfCfgDel(input, 0); IntfCfgDel(input, 1); EXPECT_TRUE(client->PortDelNotifyWait(2)); EXPECT_FALSE(VmFind(1)); EXPECT_FALSE(VmPortFind(input, 0)); EXPECT_EQ(3U, Agent::GetInstance()->interface_table()->Size()); EXPECT_EQ(0U, Agent::GetInstance()->vm_table()->Size()); EXPECT_EQ(1U, Agent::GetInstance()->vn_table()->Size()); EXPECT_EQ(0U, Agent::GetInstance()->vm_table()->Size()); // Del VN to VRF link. 
Port should become inactive client->Reset(); DelNode("virtual-network", "vn1"); DelInstanceIp("instance0"); DelInstanceIp("instance1"); client->WaitForIdle(); EXPECT_FALSE(VnFind(1)); DelNode("routing-instance", "vrf4"); client->WaitForIdle(); EXPECT_FALSE(VrfFind("vrf4")); } TEST_F(CfgTest, VnDepOnVrfAcl_1) { struct PortInfo input[] = { {"vnet1", 1, "1.1.1.1", "00:00:00:01:01:01", 1, 1}, {"vnet2", 2, "1.1.1.2", "00:00:00:02:02:02", 1, 1}, }; client->Reset(); AddVm("vm1", 1); EXPECT_TRUE(client->VmNotifyWait(1)); EXPECT_TRUE(VmFind(1)); client->Reset(); AddVrf("vrf5"); EXPECT_TRUE(client->VrfNotifyWait(1)); EXPECT_TRUE(VrfFind("vrf5")); AddVn("vn1", 1); EXPECT_TRUE(client->VnNotifyWait(1)); EXPECT_EQ(1U, Agent::GetInstance()->vn_table()->Size()); client->Reset(); AddAcl("acl1", 1); EXPECT_TRUE(client->AclNotifyWait(1)); AddLink("virtual-network", "vn1", "routing-instance", "vrf5"); client->WaitForIdle(); VnEntry *vn = VnGet(1); EXPECT_TRUE(vn->GetVrf() != NULL); EXPECT_TRUE(vn->GetAcl() == NULL); AddLink("virtual-network", "vn1", "access-control-list", "acl1"); client->WaitForIdle(); EXPECT_TRUE(vn->GetVrf() != NULL); EXPECT_TRUE(vn->GetAcl() != NULL); AddPort(input[0].name, input[0].intf_id); AddPort(input[1].name, input[1].intf_id); client->Reset(); client->Reset(); IntfCfgAdd(input, 0); IntfCfgAdd(input, 1); EXPECT_TRUE(client->PortNotifyWait(2)); // Add vm-port interface to vrf link AddVmPortVrf("vnet1", "", 0); AddLink("virtual-machine-interface-routing-instance", "vnet1", "routing-instance", "vrf5"); AddLink("virtual-machine-interface-routing-instance", "vnet1", "virtual-machine-interface", "vnet1"); client->WaitForIdle(); EXPECT_TRUE(VmPortInactive(input, 0)); // Add vm-port interface to vrf link AddVmPortVrf("vnet2", "", 0); AddLink("virtual-machine-interface-routing-instance", "vnet2", "routing-instance", "vrf5"); AddLink("virtual-machine-interface-routing-instance", "vnet2", "virtual-machine-interface", "vnet2"); client->WaitForIdle(); EXPECT_TRUE(VmPortInactive(input, 1)); // Port Active since VRF and VM already added AddLink("virtual-network", "vn1", "virtual-machine-interface", "vnet1"); AddLink("virtual-network", "vn1", "virtual-machine-interface", "vnet2"); AddLink("virtual-machine", "vm1", "virtual-machine-interface", "vnet1"); AddLink("virtual-machine", "vm1", "virtual-machine-interface", "vnet2"); AddInstanceIp("instance0", input[0].vm_id, input[0].addr); AddInstanceIp("instance1", input[0].vm_id, input[1].addr); AddLink("virtual-machine-interface", input[0].name, "instance-ip", "instance0"); AddLink("virtual-machine-interface", input[1].name, "instance-ip", "instance1"); client->WaitForIdle(); EXPECT_TRUE(VmPortActive(input, 0)); EXPECT_TRUE(VmPortActive(input, 1)); EXPECT_TRUE(VmPortPolicyEnable(input, 0)); EXPECT_TRUE(VmPortPolicyEnable(input, 1)); EXPECT_EQ(5U, Agent::GetInstance()->interface_table()->Size()); EXPECT_EQ(1U, Agent::GetInstance()->vm_table()->Size()); EXPECT_EQ(1U, Agent::GetInstance()->vn_table()->Size()); EXPECT_EQ(2U, Agent::GetInstance()->interface_config_table()->Size()); client->Reset(); AddLink("virtual-network", "vn1", "access-control-list", "acl1"); client->WaitForIdle(); EXPECT_TRUE(VmPortActive(input, 0)); EXPECT_TRUE(VmPortActive(input, 1)); EXPECT_TRUE(VmPortPolicyEnable(input, 0)); EXPECT_TRUE(VmPortPolicyEnable(input, 1)); // Delete virtual-machine-interface to vrf link attribute DelLink("virtual-machine-interface-routing-instance", "vnet1", "routing-instance", "vrf5"); DelLink("virtual-machine-interface-routing-instance", "vnet1", 
"virtual-machine-interface", "vnet1"); DelNode("virtual-machine-interface-routing-instance", "vnet1"); client->WaitForIdle(); EXPECT_TRUE(VmPortInactive(input, 0)); // Delete virtual-machine-interface to vrf link attribute DelLink("virtual-machine-interface-routing-instance", "vnet2", "routing-instance", "vrf5"); DelLink("virtual-machine-interface-routing-instance", "vnet2", "virtual-machine-interface", "vnet2"); DelNode("virtual-machine-interface-routing-instance", "vnet2"); client->WaitForIdle(); EXPECT_TRUE(VmPortInactive(input, 1)); client->Reset(); DelLink("virtual-network", "vn1", "access-control-list", "acl1"); client->WaitForIdle(); EXPECT_TRUE(client->VnNotifyWait(1)); EXPECT_TRUE(VmPortPolicyDisable(input, 0)); EXPECT_TRUE(VmPortPolicyDisable(input, 1)); DelLink("virtual-network", "vn1", "virtual-machine-interface", "vnet1"); DelLink("virtual-network", "vn1", "virtual-machine-interface", "vnet2"); DelLink("virtual-machine", "vm1", "virtual-machine-interface", "vnet1"); DelLink("virtual-machine", "vm1", "virtual-machine-interface", "vnet2"); DelLink("virtual-machine-interface", input[0].name, "instance-ip", "instance0"); DelLink("virtual-machine-interface", input[1].name, "instance-ip", "instance1"); client->WaitForIdle(); DelPort(input[0].name); DelPort(input[1].name); client->Reset(); client->Reset(); DelNode("access-control-list", "acl1"); client->WaitForIdle(); EXPECT_TRUE(client->AclNotifyWait(1)); EXPECT_EQ(0U, Agent::GetInstance()->acl_table()->Size()); // Delete config vm entry - no-op for oper-db. Port is active client->Reset(); DelNode("virtual-machine", "vm1"); client->WaitForIdle(); EXPECT_TRUE(VnFind(1)); EXPECT_FALSE(VmFind(1)); EXPECT_TRUE(VmPortFind(input, 0)); EXPECT_EQ(5U, Agent::GetInstance()->interface_table()->Size()); EXPECT_EQ(0U, Agent::GetInstance()->vm_table()->Size()); EXPECT_EQ(1U, Agent::GetInstance()->vn_table()->Size()); EXPECT_EQ(2U, Agent::GetInstance()->interface_config_table()->Size()); // Delete Nova Port entry. client->Reset(); IntfCfgDel(input, 0); IntfCfgDel(input, 1); EXPECT_TRUE(client->PortNotifyWait(2)); EXPECT_FALSE(VmFind(1)); EXPECT_FALSE(VmPortFind(input, 0)); WAIT_FOR(100, 1000, (3U == Agent::GetInstance()->interface_table()->Size())); EXPECT_EQ(0U, Agent::GetInstance()->vm_table()->Size()); EXPECT_EQ(1U, Agent::GetInstance()->vn_table()->Size()); EXPECT_EQ(0U, Agent::GetInstance()->vm_table()->Size()); // Del VN to VRF link. 
Port should become inactive client->Reset(); DelLink("virtual-network", "vn1", "routing-instance", "vrf5"); DelNode("virtual-network", "vn1"); DelInstanceIp("instance0"); DelInstanceIp("instance1"); client->WaitForIdle(); EXPECT_FALSE(VnFind(1)); DelNode("routing-instance", "vrf5"); client->WaitForIdle(); EXPECT_FALSE(VrfFind("vrf5")); } //TBD //Reduce the waitforidle to improve on timing of UT TEST_F(CfgTest, FloatingIp_1) { struct PortInfo input[] = { {"vnet1", 1, "1.1.1.1", "00:00:00:01:01:01", 1, 1}, }; client->WaitForIdle(); client->Reset(); AddVm("vm1", 1); client->WaitForIdle(); EXPECT_TRUE(client->VmNotifyWait(1)); EXPECT_TRUE(VmFind(1)); client->Reset(); AddVrf("vrf6"); client->WaitForIdle(); EXPECT_TRUE(client->VrfNotifyWait(1)); EXPECT_TRUE(VrfFind("vrf6")); AddVn("vn1", 1); client->WaitForIdle(); EXPECT_TRUE(client->VnNotifyWait(1)); EXPECT_EQ(1U, Agent::GetInstance()->vn_table()->Size()); AddLink("virtual-network", "vn1", "routing-instance", "vrf6"); client->WaitForIdle(); client->Reset(); IntfCfgAdd(input, 0); client->WaitForIdle(); EXPECT_TRUE(client->PortNotifyWait(1)); AddPort(input[0].name, input[0].intf_id); client->WaitForIdle(); // Create floating-ip on default-project:vn2 client->Reset(); AddVn("default-project:vn2", 2); client->WaitForIdle(); EXPECT_TRUE(client->VnNotifyWait(1)); AddVrf("default-project:vn2:vn2"); AddVrf("vrf8"); client->WaitForIdle(); EXPECT_TRUE(client->VrfNotifyWait(2)); EXPECT_TRUE(VrfFind("default-project:vn2:vn2")); EXPECT_TRUE(VrfFind("vrf8")); AddFloatingIpPool("fip-pool1", 1); AddFloatingIp("fip1", 1, "1.1.1.1"); AddFloatingIp("fip3", 3, "2.2.2.5"); AddFloatingIp("fip4", 4, "2.2.2.1"); client->WaitForIdle(); AddLink("virtual-network", "default-project:vn2", "routing-instance", "default-project:vn2:vn2"); AddLink("floating-ip-pool", "fip-pool1", "virtual-network", "default-project:vn2"); AddLink("floating-ip", "fip1", "floating-ip-pool", "fip-pool1"); AddLink("floating-ip", "fip3", "floating-ip-pool", "fip-pool1"); client->WaitForIdle(); AddLink("floating-ip", "fip4", "floating-ip-pool", "fip-pool1"); AddLink("virtual-machine-interface", "vnet1", "floating-ip", "fip1"); AddLink("virtual-machine-interface", "vnet1", "floating-ip", "fip3"); AddLink("virtual-machine-interface", "vnet1", "floating-ip", "fip4"); client->WaitForIdle(); LOG(DEBUG, "Adding Floating-ip fip2"); AddFloatingIp("fip2", 2, "2.2.2.2"); client->WaitForIdle(); // Port Active since VRF and VM already added client->Reset(); AddLink("virtual-network", "vn1", "virtual-machine-interface", "vnet1"); client->WaitForIdle(); AddLink("virtual-machine", "vm1", "virtual-machine-interface", "vnet1"); client->WaitForIdle(); AddInstanceIp("instance0", input[0].vm_id, input[0].addr); AddLink("virtual-machine-interface", input[0].name, "instance-ip", "instance0"); client->WaitForIdle(); // Add vm-port interface to vrf link AddVmPortVrf("vnvrf1", "", 0); AddLink("virtual-machine-interface-routing-instance", "vnvrf1", "routing-instance", "vrf6"); AddLink("virtual-machine-interface-routing-instance", "vnvrf1", "virtual-machine-interface", "vnet1"); client->WaitForIdle(); EXPECT_TRUE(VmPortActive(input, 0)); EXPECT_TRUE(VmPortFloatingIpCount(1, 3)); LOG(DEBUG, "Link fip2 to fip-pool1"); AddLink("floating-ip", "fip2", "floating-ip-pool", "fip-pool1"); client->WaitForIdle(); AddLink("virtual-machine-interface", "vnet1", "floating-ip", "fip2"); DelLink("virtual-machine-interface", "vnet1", "floating-ip", "fip3"); DelLink("floating-ip", "fip3", "floating-ip-pool", "fip-pool1"); client->WaitForIdle(); 
DelNode("floating-ip", "fip3"); client->WaitForIdle(); DelLink("virtual-machine-interface", "vnet1", "floating-ip", "fip4"); DelLink("floating-ip", "fip4", "floating-ip-pool", "fip-pool1"); client->WaitForIdle(); DelNode("floating-ip", "fip4"); client->WaitForIdle(); EXPECT_TRUE(VmPortFloatingIpCount(1, 2)); AddLink("virtual-network", "default-project:vn2", "routing-instance", "vrf6"); client->WaitForIdle(); AddLink("virtual-network", "default-project:vn2", "routing-instance", "vrf8"); client->WaitForIdle(); DelLink("virtual-network", "vn1", "routing-instance", "vrf6"); client->WaitForIdle(); DelLink("virtual-network", "default-project:vn2", "routing-instance", "vrf6"); client->WaitForIdle(); DelLink("virtual-network", "default-project:vn2", "routing-instance", "vrf8"); client->WaitForIdle(); DelLink("virtual-network", "default-project:vn2", "routing-instance", "default-project:vn2:vn2"); client->WaitForIdle(); DelLink("virtual-machine-interface", "vnet1", "floating-ip", "fip1"); client->WaitForIdle(); DelLink("virtual-machine-interface", "vnet1", "floating-ip", "fip2"); client->WaitForIdle(); DelLink("floating-ip", "fip1", "floating-ip-pool", "fip-pool1"); client->WaitForIdle(); DelLink("floating-ip", "fip2", "floating-ip-pool", "fip-pool1"); client->WaitForIdle(); DelLink("virtual-network", "default-project:vn2", "floating-ip-pool", "fip-pool1"); client->WaitForIdle(); DelLink("virtual-network", "vn1", "virtual-machine-interface", "vnet1"); client->WaitForIdle(); DelLink("virtual-machine", "vm1", "virtual-machine-interface", "vnet1"); client->WaitForIdle(); DelLink("virtual-machine-interface", input[0].name, "instance-ip", "instance1"); client->WaitForIdle(); // Delete virtual-machine-interface to vrf link attribute DelLink("virtual-machine-interface-routing-instance", "vnvrf1", "routing-instance", "vrf6"); DelLink("virtual-machine-interface-routing-instance", "vnvrf1", "virtual-machine-interface", "vnet1"); DelNode("virtual-machine-interface-routing-instance", "vnvrf1"); client->WaitForIdle(); DelNode("floating-ip", "fip1"); client->WaitForIdle(); DelNode("floating-ip", "fip2"); client->WaitForIdle(); EXPECT_TRUE(VmPortFloatingIpCount(1, 0)); DelNode("floating-ip-pool", "fip-pool1"); client->WaitForIdle(); DelNode("routing-instance", "vrf6"); client->WaitForIdle(); EXPECT_FALSE(VrfFind("vrf6")); DelNode("routing-instance", "default-project:vn2:vn2"); client->WaitForIdle(); EXPECT_FALSE(VrfFind("default-project:vn2:vn2")); DelNode("routing-instance", "vrf8"); client->WaitForIdle(); EXPECT_FALSE(VrfFind("vrf8")); DelNode("virtual-network", "vn1"); client->WaitForIdle(); EXPECT_FALSE(VnFind(1)); DelNode("virtual-network", "default-project:vn2"); client->WaitForIdle(); EXPECT_FALSE(VnFind(2)); DelNode("virtual-machine", "vm1"); DelLink("virtual-machine-interface", input[0].name, "instance-ip", "instance0"); DelInstanceIp("instance0"); client->WaitForIdle(); EXPECT_FALSE(VmFind(1)); IntfCfgDel(input, 0); client->WaitForIdle(); EXPECT_FALSE(VmPortFind(input, 0)); #if 0 DelLink("virtual-network", "vn1", "virtual-machine-interface", "vnet1"); DelLink("virtual-machine", "vm1", "virtual-machine-interface", "vnet1"); client->WaitForIdle(); LOG(DEBUG, "Cleanup implementation pending..."); // Delete config vm entry - no-op for oper-db. 
Port is active client->Reset(); DelNode("virtual-machine", "vm1"); client->WaitForIdle(); EXPECT_TRUE(VnFind(1)); EXPECT_FALSE(VmFind(1)); EXPECT_TRUE(VmPortFind(input, 0)); EXPECT_EQ(4U, Agent::GetInstance()->interface_table()->Size()); EXPECT_EQ(0U, Agent::GetInstance()->vm_table()->Size()); EXPECT_EQ(1U, Agent::GetInstance()->vn_table()->Size()); EXPECT_EQ(2U, Agent::GetInstance()->interface_config_table()->Size()); // Delete Nova Port entry. client->Reset(); IntfCfgDel(input, 0); IntfCfgDel(input, 1); EXPECT_TRUE(client->PortNotifyWait(2)); EXPECT_FALSE(VmFind(1)); EXPECT_FALSE(VmPortFind(input, 0)); EXPECT_EQ(2U, Agent::GetInstance()->interface_table()->Size()); EXPECT_EQ(0U, Agent::GetInstance()->vm_table()->Size()); EXPECT_EQ(1U, Agent::GetInstance()->vn_table()->Size()); EXPECT_EQ(0U, Agent::GetInstance()->vm_table()->Size()); // Del VN to VRF link. Port should become inactive client->Reset(); DelLink("virtual-network", "vn1", "routing-instance", "vrf5"); DelNode("virtual-network", "vn1"); client->WaitForIdle(); EXPECT_FALSE(VnFind(1)); #endif } TEST_F(CfgTest, Basic_1) { string eth_intf = "eth10"; string vrf_name = "__non_existent_vrf__"; //char buff[4096]; //int len = 0; struct PortInfo input[] = { {"vnet1", 1, "1.1.1.1", "00:00:00:01:01:01", 5, 5}, }; PhysicalInterfaceKey key(eth_intf); PhysicalInterface *phy_intf = NULL; client->Reset(); PhysicalInterface::CreateReq(Agent::GetInstance()->interface_table(), eth_intf, Agent::GetInstance()->fabric_vrf_name(), PhysicalInterface::FABRIC, PhysicalInterface::ETHERNET, false, nil_uuid(), Ip4Address(0), Interface::TRANSPORT_ETHERNET); client->WaitForIdle(); phy_intf = static_cast<PhysicalInterface *> (agent_->interface_table()->FindActiveEntry(&key)); EXPECT_TRUE(phy_intf->persistent() == false); EXPECT_TRUE(phy_intf->subtype() == PhysicalInterface::FABRIC); InetInterface::CreateReq(Agent::GetInstance()->interface_table(), "vhost10", InetInterface::VHOST, Agent::GetInstance()->fabric_vrf_name(), Ip4Address(0), 0, Ip4Address(0), eth_intf, "", Interface::TRANSPORT_ETHERNET); client->WaitForIdle(); AddVn("default-project:vn5", 5); client->WaitForIdle(); EXPECT_TRUE(client->VnNotifyWait(1)); AddVm("vm5", 5); client->WaitForIdle(); EXPECT_TRUE(client->VmNotifyWait(1)); AddVrf("default-project:vn5:vn5"); client->WaitForIdle(); EXPECT_TRUE(client->VrfNotifyWait(1)); EXPECT_TRUE(VrfFind("default-project:vn5:vn5")); AddFloatingIpPool("fip-pool1", 1); AddFloatingIp("fip1", 1, "10.10.10.1"); AddFloatingIp("fip2", 2, "2.2.2.2"); AddFloatingIp("fip3", 3, "30.30.30.1"); client->WaitForIdle(); IntfCfgAdd(input, 0); client->WaitForIdle(); AddPort(input[0].name, input[0].intf_id); client->WaitForIdle(); AddLink("virtual-network", "default-project:vn5", "routing-instance", "default-project:vn5:vn5"); client->WaitForIdle(); AddLink("floating-ip-pool", "fip-pool1", "virtual-network", "default-project:vn5"); client->WaitForIdle(); AddLink("floating-ip", "fip1", "floating-ip-pool", "fip-pool1"); client->WaitForIdle(); AddLink("floating-ip", "fip2", "floating-ip-pool", "fip-pool1"); client->WaitForIdle(); AddLink("floating-ip", "fip3", "floating-ip-pool", "fip-pool1"); client->WaitForIdle(); AddLink("virtual-machine-interface", "vnet1", "floating-ip", "fip1"); client->WaitForIdle(); AddLink("virtual-machine-interface", "vnet1", "floating-ip", "fip2"); client->WaitForIdle(); AddLink("virtual-machine-interface", "vnet1", "floating-ip", "fip3"); client->WaitForIdle(); AddLink("virtual-network", "default-project:vn5", "virtual-machine-interface", "vnet1"); 
client->WaitForIdle(); AddLink("virtual-machine", "vm5", "virtual-machine-interface", "vnet1"); client->WaitForIdle(); AddInstanceIp("instance0", input[0].vm_id, input[0].addr); AddLink("virtual-machine-interface", input[0].name, "instance-ip", "instance0"); client->WaitForIdle(); // Add vm-port interface to vrf link AddVmPortVrf("vmvrf1", "", 0); AddLink("virtual-machine-interface-routing-instance", "vmvrf1", "routing-instance", "default-project:vn5:vn5"); AddLink("virtual-machine-interface-routing-instance", "vmvrf1", "virtual-machine-interface", "vnet1"); client->WaitForIdle(); EXPECT_TRUE(VmPortActive(input, 0)); client->WaitForIdle(); std::vector<int> result = list_of(1); Sandesh::set_response_callback(boost::bind(ValidateSandeshResponse, _1, result)); AgentSandeshPtr sand_1(CreateAgentIntfSandesh("vnet1")); sand_1->DoSandesh(sand_1); client->WaitForIdle(); AgentSandeshPtr sand_2(CreateAgentIntfSandesh("eth10")); sand_2->DoSandesh(sand_2); client->WaitForIdle(); AgentSandeshPtr sand_3(CreateAgentIntfSandesh("pkt0")); sand_3->DoSandesh(sand_3); client->WaitForIdle(); AgentSandeshPtr sand_4(CreateAgentIntfSandesh("vhost10")); sand_4->DoSandesh(sand_4); client->WaitForIdle(); AgentSandeshPtr sand_5(CreateAgentIntfSandesh("vhost10")); sand_5->DoSandesh(sand_5, 0, 1); client->WaitForIdle(); InetInterface::DeleteReq(Agent::GetInstance()->interface_table(), "vhost10"); client->WaitForIdle(); PhysicalInterface::DeleteReq(Agent::GetInstance()->interface_table(), eth_intf); client->WaitForIdle(); client->Reset(); DelLink("virtual-network", "default-project:vn5", "routing-instance", "default-project:vn5:vn5"); client->WaitForIdle(); DelLink("floating-ip-pool", "fip-pool1", "virtual-network", "default-project:vn5"); client->WaitForIdle(); DelLink("floating-ip", "fip1", "floating-ip-pool", "fip-pool1"); client->WaitForIdle(); DelLink("floating-ip", "fip2", "floating-ip-pool", "fip-pool1"); client->WaitForIdle(); DelLink("floating-ip", "fip3", "floating-ip-pool", "fip-pool1"); client->WaitForIdle(); DelNode("floating-ip-pool", "fip-pool1"); DelLink("virtual-machine-interface", "vnet1", "floating-ip", "fip1"); client->WaitForIdle(); DelLink("virtual-machine-interface", "vnet1", "floating-ip", "fip2"); client->WaitForIdle(); DelLink("virtual-machine-interface", "vnet1", "floating-ip", "fip3"); client->WaitForIdle(); DelLink("virtual-network", "default-project:vn5", "virtual-machine-interface", "vnet1"); client->WaitForIdle(); DelLink("virtual-machine", "vm5", "virtual-machine-interface", "vnet1"); client->WaitForIdle(); DelLink("instance-ip", "instance0", "virtual-machine-interface", "vnet1"); client->WaitForIdle(); // Delete virtual-machine-interface to vrf link attribute DelLink("virtual-machine-interface-routing-instance", "vmvrf1", "routing-instance", "default-project:vn5:vn5"); DelLink("virtual-machine-interface-routing-instance", "vmvrf1", "virtual-machine-interface", "vnet1"); DelNode("virtual-machine-interface-routing-instance", "vmvrf1"); client->WaitForIdle(); client->Reset(); IntfCfgDel(input, 0); DelPort(input[0].name); client->WaitForIdle(); client->Reset(); DelNode("floating-ip", "fip1"); DelNode("floating-ip", "fip2"); DelNode("floating-ip", "fip3"); client->WaitForIdle(); DelNode("virtual-machine", "vm5"); client->WaitForIdle(); DelNode("routing-instance", "default-project:vn5:vn5"); DelInstanceIp("instance0"); client->WaitForIdle(); DelNode("virtual-network", "default-project:vn5"); client->WaitForIdle(); WAIT_FOR(1000, 1000, (0 == Agent::GetInstance()->vm_table()->Size())); 
WAIT_FOR(1000, 1000, (VnFind(5) == false)); WAIT_FOR(1000, 1000, (VmFind(5) == false)); } TEST_F(CfgTest, Basic_2) { struct PortInfo input[] = { {"vnet1", 1, "1.1.1.1", "00:00:00:01:01:01", 1, 1} }; CreateVmportEnv(input, 1); client->WaitForIdle(); EXPECT_TRUE(VmPortActive(input, 0)); VmInterfaceKey key(AgentKey::ADD_DEL_CHANGE, MakeUuid(1), ""); VmInterface *intf = static_cast<VmInterface *> (Agent::GetInstance()->interface_table()->FindActiveEntry(&key)); EXPECT_TRUE(intf != NULL); if (intf == NULL) { return; } InetUnicastAgentRouteTable *table = Agent::GetInstance()->fabric_inet4_unicast_table(); InetUnicastRouteEntry *rt = static_cast<InetUnicastRouteEntry *> (table->FindRoute(intf->mdata_ip_addr())); EXPECT_TRUE(rt != NULL); if (rt == NULL) { return; } const NextHop *nh = rt->GetActiveNextHop(); EXPECT_TRUE(nh != NULL); if (nh == NULL) { return; } EXPECT_TRUE(nh->PolicyEnabled()); Ip4Address addr = Ip4Address::from_string("1.1.1.1"); table = static_cast<InetUnicastAgentRouteTable *> (Agent::GetInstance()->vrf_table()->GetInet4UnicastRouteTable("vrf1")); rt = table->FindRoute(addr); EXPECT_TRUE(rt != NULL); if (rt == NULL) { return; } nh = rt->GetActiveNextHop(); EXPECT_TRUE(nh != NULL); if (nh == NULL) { return; } EXPECT_FALSE(nh->PolicyEnabled()); DeleteVmportEnv(input, 1, true); client->WaitForIdle(); EXPECT_FALSE(VmPortFind(1)); } TEST_F(CfgTest, SecurityGroup_1) { struct PortInfo input[] = { {"vnet1", 1, "1.1.1.1", "00:00:00:01:01:01", 1, 1} }; CreateVmportEnv(input, 1); client->WaitForIdle(); EXPECT_TRUE(VmPortActive(input, 0)); AddSg("sg1", 1); AddAcl("acl1", 1); AddLink("security-group", "sg1", "access-control-list", "acl1"); client->WaitForIdle(); AddLink("virtual-machine-interface", "vnet1", "security-group", "sg1"); client->WaitForIdle(); VmInterfaceKey key(AgentKey::ADD_DEL_CHANGE, MakeUuid(1), ""); VmInterface *intf = static_cast<VmInterface *> (Agent::GetInstance()->interface_table()->FindActiveEntry(&key)); EXPECT_TRUE(intf != NULL); if (intf == NULL) { return; } EXPECT_TRUE(intf->sg_list().list_.size() == 1); DoInterfaceSandesh("vnet1"); Ip4Address addr(Ip4Address::from_string("1.1.1.1")); InetUnicastAgentRouteTable *table = static_cast<InetUnicastAgentRouteTable *> (Agent::GetInstance()->vrf_table()->GetInet4UnicastRouteTable("vrf1")); InetUnicastRouteEntry *rt = table->FindRoute(addr); EXPECT_TRUE(rt != NULL); if (rt == NULL) { return; } const AgentPath *path = rt->GetActivePath(); EXPECT_EQ(path->sg_list().size(), 1); EXPECT_TRUE(path->vxlan_id() == VxLanTable::kInvalidvxlan_id); EXPECT_TRUE(path->tunnel_bmap() == TunnelType::MplsType()); DoInterfaceSandesh("vnet1"); DelLink("virtual-network", "vn1", "access-control-list", "acl1"); DelLink("virtual-machine-interface", "vnet1", "access-control-list", "acl1"); DelLink("virtual-machine-interface", "vnet1", "security-group", "sg1"); DelLink("security-group", "sg1", "access-control-list", "acl1"); client->WaitForIdle(); DelNode("access-control-list", "acl1"); client->WaitForIdle(); DeleteVmportEnv(input, 1, true); DelNode("security-group", "sg1"); client->WaitForIdle(); EXPECT_FALSE(VmPortFind(1)); } TEST_F(CfgTest, SecurityGroup_ignore_invalid_sgid_1) { struct PortInfo input[] = { {"vnet1", 1, "1.1.1.1", "00:00:00:01:01:01", 1, 1} }; CreateVmportEnv(input, 1); client->WaitForIdle(); EXPECT_TRUE(VmPortActive(input, 0)); AddSg("sg1", 1, 0); AddAcl("acl1", 1); AddLink("security-group", "sg1", "access-control-list", "acl1"); client->WaitForIdle(); AddLink("virtual-machine-interface", "vnet1", "security-group", "sg1"); 
client->WaitForIdle(); //Query for SG SgKey *key = new SgKey(MakeUuid(1)); const SgEntry *sg_entry = static_cast<const SgEntry *>(Agent::GetInstance()->sg_table()-> FindActiveEntry(key)); EXPECT_TRUE(sg_entry == NULL); //Modify SGID AddSg("sg1", 1, 2); client->WaitForIdle(); sg_entry = static_cast<const SgEntry *>(Agent::GetInstance()->sg_table()-> FindActiveEntry(key)); EXPECT_TRUE(sg_entry != NULL); EXPECT_TRUE(sg_entry->GetSgId() == 2); AddSg("sg1", 1, 3); client->WaitForIdle(); sg_entry = static_cast<const SgEntry *>(Agent::GetInstance()->sg_table()-> FindActiveEntry(key)); EXPECT_TRUE(sg_entry != NULL); EXPECT_TRUE(sg_entry->GetSgId() == 3); DelLink("virtual-network", "vn1", "access-control-list", "acl1"); DelLink("virtual-machine-interface", "vnet1", "access-control-list", "acl1"); DelLink("virtual-machine-interface", "vnet1", "security-group", "sg1"); DelLink("security-group", "sg1", "access-control-list", "acl1"); client->WaitForIdle(); DelNode("access-control-list", "acl1"); client->WaitForIdle(); DeleteVmportEnv(input, 1, true); DelNode("security-group", "sg1"); client->WaitForIdle(); delete key; EXPECT_FALSE(VmPortFind(1)); } // Test invalid sgid with interface update TEST_F(CfgTest, SecurityGroup_ignore_invalid_sgid_2) { struct PortInfo input[] = { {"vnet1", 1, "1.1.1.1", "00:00:00:01:01:01", 1, 1} }; CreateVmportEnv(input, 1); client->WaitForIdle(); EXPECT_TRUE(VmPortActive(input, 0)); AddSg("sg1", 1, 0); AddAcl("acl1", 1); AddLink("security-group", "sg1", "access-control-list", "acl1"); client->WaitForIdle(); AddLink("virtual-machine-interface", "vnet1", "security-group", "sg1"); client->WaitForIdle(); VmInterfaceKey key(AgentKey::ADD_DEL_CHANGE, MakeUuid(1), ""); VmInterface *intf = static_cast<VmInterface *> (Agent::GetInstance()->interface_table()->FindActiveEntry(&key)); EXPECT_TRUE(intf != NULL); if (intf == NULL) { return; } EXPECT_TRUE(intf->sg_list().list_.size() == 0); // Add with proper sg id AddSg("sg1", 1, 1); client->WaitForIdle(); EXPECT_TRUE(intf->sg_list().list_.size() == 1); VmInterface::SecurityGroupEntrySet::const_iterator it = intf->sg_list().list_.begin(); EXPECT_TRUE(it->sg_.get() != NULL); EXPECT_TRUE(it->sg_->GetSgId() == 1); DelLink("virtual-network", "vn1", "access-control-list", "acl1"); DelLink("virtual-machine-interface", "vnet1", "access-control-list", "acl1"); DelLink("virtual-machine-interface", "vnet1", "security-group", "sg1"); DelLink("security-group", "sg1", "access-control-list", "acl1"); client->WaitForIdle(); DelNode("access-control-list", "acl1"); client->WaitForIdle(); DeleteVmportEnv(input, 1, true); DelNode("security-group", "sg1"); client->WaitForIdle(); EXPECT_FALSE(VmPortFind(1)); } int main(int argc, char **argv) { GETUSERARGS(); client = TestInit(init_file, ksync_init); int ret = RUN_ALL_TESTS(); TestShutdown(); delete client; return ret; }
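// --- Hypothetical illustration, not part of the original file ---
// The tests above all exercise one activation rule: a VM port stays inactive
// until every piece of config it depends on exists, regardless of the order
// in which the pieces arrive. A condensed sketch of the canonical sequence
// (helper names as used above, values illustrative):
//
//   IntfCfgAdd(input, 0);                                  // Nova port: inactive
//   AddVm("vm1", 1); AddVn("vn1", 1); AddVrf("vrf1");      // still inactive
//   AddPort(input[0].name, input[0].intf_id);
//   AddLink("virtual-network", "vn1", "routing-instance", "vrf1");
//   AddLink("virtual-machine", "vm1", "virtual-machine-interface", "vnet1");
//   AddLink("virtual-network", "vn1", "virtual-machine-interface", "vnet1");
//   AddInstanceIp("instance0", input[0].vm_id, input[0].addr);
//   AddLink("virtual-machine-interface", input[0].name, "instance-ip", "instance0");
//   AddVmPortVrf("vnet1", "", 0);                          // plus the two
//   AddLink("virtual-machine-interface-routing-instance", "vnet1",
//           "routing-instance", "vrf1");                   //   vmi<->vrf link
//   AddLink("virtual-machine-interface-routing-instance", "vnet1",
//           "virtual-machine-interface", "vnet1");         //   attributes
//   client->WaitForIdle();
//   EXPECT_TRUE(VmPortActive(input, 0));                   // only now active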
tcpcloud/contrail-controller
src/vnsw/agent/test/test_vmport_cfg.cc
C++
apache-2.0
59,044
/** * @preserve Copyright 2012 Martijn van de Rijdt & Modilabs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ define( [ 'enketo-js/Widget', 'jquery', 'enketo-js/plugins' ], function( Widget, $ ) { 'use strict'; var pluginName = 'notewidget'; /** * Enhances notes * * @constructor * @param {Element} element [description] * @param {(boolean|{touch: boolean, repeat: boolean})} options options * @param {*=} e event */ function Notewidget( element, options ) { this.namespace = pluginName; Widget.call( this, element, options ); this._init(); } //copy the prototype functions from the Widget super class Notewidget.prototype = Object.create( Widget.prototype ); //ensure the constructor is the new one Notewidget.prototype.constructor = Notewidget; Notewidget.prototype._init = function() { var $el = $( this.element ); $el.find( '.question-label' ).markdownToHtml() .end().find( '[readonly]' ).addClass( 'ignore' ); if ( $el.is( '.note' ) && !$el.next().is( '.note' ) ) { $el.addClass( 'last-of-class' ); } }; Notewidget.prototype.destroy = function( element ) {}; $.fn[ pluginName ] = function( options, event ) { return this.each( function() { var $this = $( this ), data = $this.data( pluginName ); options = options || {}; if ( !data && typeof options === 'object' ) { $this.data( pluginName, ( data = new Notewidget( this, options, event ) ) ); } else if ( data && typeof options === 'string' ) { data[ options ]( this ); } } ); }; return pluginName; } );
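// --- Hypothetical illustration, not part of the original file ---
// The module returns the plugin name, so a widget loader can apply it like
// any jQuery plugin. Passing an options object instantiates the widget once
// per element; passing a string dispatches to a method on the stored
// instance. Selector and option values below are illustrative:
//
//   $( 'form .question' ).notewidget( { touch: false, repeat: false } );
//   $( 'form .question' ).notewidget( 'destroy' );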
alxndrsn/enketo-core
src/widget/note/notewidget.js
JavaScript
apache-2.0
2,293
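A hedged usage sketch for the jQuery plugin registered above; the '.note' selector and the options literal are illustrative, and in enketo-core the widget is normally instantiated by the form's widget loader rather than by hand:

define( [ 'jquery', 'enketo-js/widget/note/notewidget' ], function( $ ) {
    'use strict';
    // First call with an options object constructs a Notewidget per matched
    // element and caches it via $.data; a later string argument would
    // dispatch to an instance method instead.
    $( '.note' ).notewidget( { touch: false, repeat: false } );
} );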
/* * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights * Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.lambda.model.transform; import static com.amazonaws.util.StringUtils.UTF8; import static com.amazonaws.util.StringUtils.COMMA_SEPARATOR; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.OutputStreamWriter; import java.io.StringWriter; import java.io.Writer; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.List; import java.util.regex.Pattern; import com.amazonaws.AmazonClientException; import com.amazonaws.Request; import com.amazonaws.DefaultRequest; import com.amazonaws.http.HttpMethodName; import com.amazonaws.services.lambda.model.*; import com.amazonaws.transform.Marshaller; import com.amazonaws.util.BinaryUtils; import com.amazonaws.util.StringUtils; import com.amazonaws.util.IdempotentUtils; import com.amazonaws.util.StringInputStream; import com.amazonaws.util.SdkHttpUtils; import com.amazonaws.protocol.json.*; /** * GetFunctionConfigurationRequest Marshaller */ public class GetFunctionConfigurationRequestMarshaller implements Marshaller<Request<GetFunctionConfigurationRequest>, GetFunctionConfigurationRequest> { private static final String DEFAULT_CONTENT_TYPE = "application/x-amz-json-1.1"; private final SdkJsonProtocolFactory protocolFactory; public GetFunctionConfigurationRequestMarshaller( SdkJsonProtocolFactory protocolFactory) { this.protocolFactory = protocolFactory; } public Request<GetFunctionConfigurationRequest> marshall( GetFunctionConfigurationRequest getFunctionConfigurationRequest) { if (getFunctionConfigurationRequest == null) { throw new AmazonClientException( "Invalid argument passed to marshall(...)"); } Request<GetFunctionConfigurationRequest> request = new DefaultRequest<GetFunctionConfigurationRequest>( getFunctionConfigurationRequest, "AWSLambda"); request.setHttpMethod(HttpMethodName.GET); String uriResourcePath = "/2015-03-31/functions/{FunctionName}/configuration"; uriResourcePath = uriResourcePath .replace( "{FunctionName}", (getFunctionConfigurationRequest.getFunctionName() != null) ? SdkHttpUtils.urlEncode( StringUtils .fromString(getFunctionConfigurationRequest .getFunctionName()), false) : ""); request.setResourcePath(uriResourcePath); if (getFunctionConfigurationRequest.getQualifier() != null) { request.addParameter("Qualifier", StringUtils .fromString(getFunctionConfigurationRequest.getQualifier())); } request.setContent(new ByteArrayInputStream(new byte[0])); if (!request.getHeaders().containsKey("Content-Type")) { request.addHeader("Content-Type", DEFAULT_CONTENT_TYPE); } return request; } }
flofreud/aws-sdk-java
aws-java-sdk-lambda/src/main/java/com/amazonaws/services/lambda/model/transform/GetFunctionConfigurationRequestMarshaller.java
Java
apache-2.0
3,688
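A sketch of driving the marshaller above directly, assuming the SDK's standard fluent setters on GetFunctionConfigurationRequest; the marshall() path shown never touches the protocol factory, so null is passed purely for illustration:

GetFunctionConfigurationRequest req = new GetFunctionConfigurationRequest()
        .withFunctionName("my-function")   // spliced into /2015-03-31/functions/{FunctionName}/configuration
        .withQualifier("PROD");            // emitted as the Qualifier query parameter

Request<GetFunctionConfigurationRequest> marshalled =
        new GetFunctionConfigurationRequestMarshaller(null /* unused on this path */).marshall(req);
// marshalled.getHttpMethod() == HttpMethodName.GET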
/* * * Copyright 2012-2014 Eurocommercial Properties NV * * * Licensed under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.estatio.fixture.party; import javax.inject.Inject; import org.isisaddons.module.security.dom.tenancy.ApplicationTenancies; import org.isisaddons.module.security.dom.tenancy.ApplicationTenancy; import org.estatio.dom.communicationchannel.CommunicationChannelContributions; import org.estatio.dom.communicationchannel.CommunicationChannelType; import org.estatio.dom.geography.Countries; import org.estatio.dom.geography.States; import org.estatio.dom.party.Parties; import org.estatio.dom.party.Party; import org.estatio.dom.party.PersonGenderType; import org.estatio.dom.party.Persons; import org.estatio.dom.party.relationship.PartyRelationships; import org.estatio.fixture.EstatioFixtureScript; public abstract class PersonAbstract extends EstatioFixtureScript { @Override protected abstract void execute(ExecutionContext executionContext); protected Party createPerson( final String atPath, final String reference, final String initials, final String firstName, final String lastName, final PersonGenderType gender, final ExecutionContext executionContext) { ApplicationTenancy applicationTenancy = applicationTenancies.findTenancyByPath(atPath); Party party = persons.newPerson(reference, initials, firstName, lastName, gender, applicationTenancy); return executionContext.addResult(this, party.getReference(), party); } protected Party createPerson( final String atPath, final String reference, final String initials, final String firstName, final String lastName, final PersonGenderType gender, final String phoneNumber, final String emailAddress, final String fromPartyStr, final String relationshipType, final ExecutionContext executionContext) { ApplicationTenancy applicationTenancy = applicationTenancies.findTenancyByPath(atPath); // new person Party party = persons.newPerson(reference, initials, firstName, lastName, gender, applicationTenancy); communicationChannelContributedActions.newEmail(party, CommunicationChannelType.EMAIL_ADDRESS, emailAddress); communicationChannelContributedActions.newPhoneOrFax(party, CommunicationChannelType.PHONE_NUMBER, phoneNumber); // associate person Party from = parties.findPartyByReference(fromPartyStr); partyRelationships.newRelationship(from, party, relationshipType, null); return executionContext.addResult(this, party.getReference(), party); } // ////////////////////////////////////// @Inject protected Countries countries; @Inject protected States states; @Inject protected Parties parties; @Inject protected Persons persons; @Inject protected CommunicationChannelContributions communicationChannelContributedActions; @Inject protected PartyRelationships partyRelationships; @Inject protected ApplicationTenancies applicationTenancies; }
kigsmtua/estatio
estatioapp/fixture/src/main/java/org/estatio/fixture/party/PersonAbstract.java
Java
apache-2.0
3,768
package wtf.pants.stamp.annotations; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** * @author Pants * * Currently only works for methods */ @Retention(RetentionPolicy.CLASS) @Target({ElementType.METHOD}) public @interface StampPreserve { }
Pants/stamp-java-obfuscator
src/main/java/wtf/pants/stamp/annotations/StampPreserve.java
Java
apache-2.0
364
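A hypothetical consumer of the marker above; per its javadoc the annotation currently only applies to methods, and with CLASS retention it survives into the bytecode the obfuscator reads but is not visible to runtime reflection:

package wtf.pants.stamp.example; // hypothetical package

import wtf.pants.stamp.annotations.StampPreserve;

public class Example {

    // The obfuscator is expected to leave this method's name untouched,
    // e.g. so it can still be located by name via reflection or JNI.
    @StampPreserve
    public void keepMyName() {
        System.out.println("name preserved through obfuscation");
    }
}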
/* * Copyright 2019, EnMasse authors. * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html). */ // Code generated by go generate; DO NOT EDIT. package watchers import ( "fmt" tp "github.com/enmasseproject/enmasse/pkg/apis/enmasse/v1beta1" cp "github.com/enmasseproject/enmasse/pkg/client/clientset/versioned/typed/enmasse/v1beta1" "github.com/enmasseproject/enmasse/pkg/consolegraphql/cache" "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/watch" "k8s.io/client-go/rest" "log" "math/rand" "reflect" "time" ) type AddressSpaceSchemaWatcher struct { Namespace string cache.Cache ClientInterface cp.EnmasseV1beta1Interface watching chan struct{} watchingStarted bool stopchan chan struct{} stoppedchan chan struct{} create func(*tp.AddressSpaceSchema) interface{} update func(*tp.AddressSpaceSchema, interface{}) bool restartCounter int32 resyncInterval *time.Duration } func NewAddressSpaceSchemaWatcher(c cache.Cache, resyncInterval *time.Duration, options ...WatcherOption) (ResourceWatcher, error) { kw := &AddressSpaceSchemaWatcher{ Namespace: v1.NamespaceAll, Cache: c, watching: make(chan struct{}), stopchan: make(chan struct{}), stoppedchan: make(chan struct{}), resyncInterval: resyncInterval, create: func(v *tp.AddressSpaceSchema) interface{} { return v }, update: func(v *tp.AddressSpaceSchema, e interface{}) bool { if !reflect.DeepEqual(v, e) { *e.(*tp.AddressSpaceSchema) = *v return true } else { return false } }, } for _, option := range options { option(kw) } if kw.ClientInterface == nil { return nil, fmt.Errorf("Client must be configured using the AddressSpaceSchemaWatcherConfig or AddressSpaceSchemaWatcherClient") } return kw, nil } func AddressSpaceSchemaWatcherFactory(create func(*tp.AddressSpaceSchema) interface{}, update func(*tp.AddressSpaceSchema, interface{}) bool) WatcherOption { return func(watcher ResourceWatcher) error { w := watcher.(*AddressSpaceSchemaWatcher) w.create = create w.update = update return nil } } func AddressSpaceSchemaWatcherConfig(config *rest.Config) WatcherOption { return func(watcher ResourceWatcher) error { w := watcher.(*AddressSpaceSchemaWatcher) var cl interface{} cl, _ = cp.NewForConfig(config) client, ok := cl.(cp.EnmasseV1beta1Interface) if !ok { return fmt.Errorf("unexpected type %T", cl) } w.ClientInterface = client return nil } } // Used to inject the fake client set for testing purposes func AddressSpaceSchemaWatcherClient(client cp.EnmasseV1beta1Interface) WatcherOption { return func(watcher ResourceWatcher) error { w := watcher.(*AddressSpaceSchemaWatcher) w.ClientInterface = client return nil } } func (kw *AddressSpaceSchemaWatcher) Watch() error { go func() { defer close(kw.stoppedchan) defer func() { if !kw.watchingStarted { close(kw.watching) } }() resource := kw.ClientInterface.AddressSpaceSchemas() log.Printf("AddressSpaceSchema - Watching") running := true for running { err := kw.doWatch(resource) if err != nil { log.Printf("AddressSpaceSchema - Restarting watch - %v", err) atomicInc(&kw.restartCounter) } else { running = false } } log.Printf("AddressSpaceSchema - Watching stopped") }() return nil } func (kw *AddressSpaceSchemaWatcher) AwaitWatching() { <-kw.watching } func (kw *AddressSpaceSchemaWatcher) Shutdown() { close(kw.stopchan) <-kw.stoppedchan } func (kw *AddressSpaceSchemaWatcher) GetRestartCount() int32 { return atomicGet(&kw.restartCounter) } func (kw *AddressSpaceSchemaWatcher) doWatch(resource cp.AddressSpaceSchemaInterface) error { resourceList, err := 
resource.List(v1.ListOptions{}) if err != nil { return err } keyCreator, err := kw.Cache.GetKeyCreator(cache.PrimaryObjectIndex) if err != nil { return err } curr := make(map[string]interface{}, 0) _, err = kw.Cache.Get(cache.PrimaryObjectIndex, "AddressSpaceSchema/", func(obj interface{}) (bool, bool, error) { gen, key, err := keyCreator(obj) if err != nil { return false, false, err } else if !gen { return false, false, fmt.Errorf("failed to generate key for existing object %+v", obj) } curr[key] = obj return false, true, nil }) var added = 0 var updated = 0 var unchanged = 0 for _, res := range resourceList.Items { copy := res.DeepCopy() kw.updateGroupVersionKind(copy) candidate := kw.create(copy) gen, key, err := keyCreator(candidate) if err != nil { return err } else if !gen { return fmt.Errorf("failed to generate key for new object %+v", copy) } if existing, ok := curr[key]; ok { err = kw.Cache.Update(func(target interface{}) (interface{}, error) { if kw.update(copy, target) { updated++ return target, nil } else { unchanged++ return nil, nil } }, existing) if err != nil { return err } delete(curr, key) } else { err = kw.Cache.Add(candidate) if err != nil { return err } added++ } } // Now remove any stale for _, stale := range curr { err = kw.Cache.Delete(stale) if err != nil { return err } } var stale = len(curr) log.Printf("AddressSpaceSchema - Cache initialised population added %d, updated %d, unchanged %d, stale %d", added, updated, unchanged, stale) watchOptions := v1.ListOptions{ ResourceVersion: resourceList.ResourceVersion, } if kw.resyncInterval != nil { ts := int64(kw.resyncInterval.Seconds() * (rand.Float64() + 1.0)) watchOptions.TimeoutSeconds = &ts } resourceWatch, err := resource.Watch(watchOptions) if err != nil { return err } defer resourceWatch.Stop() if !kw.watchingStarted { close(kw.watching) kw.watchingStarted = true } ch := resourceWatch.ResultChan() for { select { case event, chok := <-ch: if !chok { return fmt.Errorf("watch ended due to channel error") } else if event.Type == watch.Error { return fmt.Errorf("watch ended in error") } var err error log.Printf("AddressSpaceSchema - Received event type %s", event.Type) res, ok := event.Object.(*tp.AddressSpaceSchema) if !ok { err = fmt.Errorf("Watch error - object of unexpected type, %T, received", event.Object) } else { copy := res.DeepCopy() kw.updateGroupVersionKind(copy) switch event.Type { case watch.Added: err = kw.Cache.Add(kw.create(copy)) case watch.Modified: updatingKey := kw.create(copy) err = kw.Cache.Update(func(target interface{}) (interface{}, error) { if kw.update(copy, target) { return target, nil } else { return nil, nil } }, updatingKey) case watch.Deleted: err = kw.Cache.Delete(kw.create(copy)) } } if err != nil { return err } case <-kw.stopchan: log.Printf("AddressSpaceSchema - Shutdown received") return nil } } } // KubernetesRBACAccessController relies on the GVK information to be set on objects. // List provides GVK (https://github.com/kubernetes/kubernetes/pull/63972) but Watch does not, so we set it ourselves. func (kw *AddressSpaceSchemaWatcher) updateGroupVersionKind(o *tp.AddressSpaceSchema) { if o.TypeMeta.Kind == "" || o.TypeMeta.APIVersion == "" { o.TypeMeta.SetGroupVersionKind(tp.SchemeGroupVersion.WithKind("AddressSpaceSchema")) } }
EnMasseProject/enmasse
pkg/consolegraphql/watchers/resource_watcher_addressspaceschema.go
Go
apache-2.0
7,437
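A sketch of wiring the generated watcher into a cache from outside the package; it assumes the ResourceWatcher interface exposes the Watch/AwaitWatching/Shutdown methods implemented above, and that c and cfg come from the surrounding console-server setup:

// Sketch: construct, start and stop an AddressSpaceSchema watcher.
func runWatcher(c cache.Cache, cfg *rest.Config) error {
	w, err := watchers.NewAddressSpaceSchemaWatcher(c, nil, // nil = no resync interval
		watchers.AddressSpaceSchemaWatcherConfig(cfg))
	if err != nil {
		return err
	}
	if err := w.Watch(); err != nil { // spawns the list/watch goroutine
		return err
	}
	w.AwaitWatching() // blocks until the first watch is established
	// ... serve queries from the cache ...
	w.Shutdown() // closes stopchan and waits for the goroutine to exit
	return nil
}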
/* * SessionFitFunctionsPresentationView.java * * Copyright 2006-2015 James F. Bowring and www.Earth-Time.org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.earthtime.Tripoli.dataViews.fitFunctionPresentationViews; import java.awt.Cursor; import java.awt.Graphics2D; import java.awt.Rectangle; import javax.swing.JLayeredPane; import org.earthtime.Tripoli.dataModels.DataModelFitFunctionInterface; import org.earthtime.Tripoli.dataModels.sessionModels.AbstractSessionForStandardDataModel; import org.earthtime.Tripoli.dataViews.simpleViews.FitFunctionDataInterface; import org.earthtime.dataDictionaries.DataPresentationModeEnum; /** * * @author James F. Bowring */ public class SessionFitFunctionsPresentationView extends AbstractFitFunctionPresentationView { private final DataModelFitFunctionInterface sessionForStandardDataModel; /** * * * @param sampleSessionDataView * @param sessionForStandardDataModel * @param targetDataModelView * @param dataPresentationMode * @param bounds */ public SessionFitFunctionsPresentationView( // JLayeredPane sampleSessionDataView, // DataModelFitFunctionInterface sessionForStandardDataModel,// FitFunctionDataInterface targetDataModelView, // DataPresentationModeEnum dataPresentationMode, // Rectangle bounds) { super(targetDataModelView, bounds); setCursor(Cursor.getDefaultCursor()); this.sampleSessionDataView = sampleSessionDataView; this.sessionForStandardDataModel = sessionForStandardDataModel; this.dataPresentationMode = dataPresentationMode; this.standardValue = ((AbstractSessionForStandardDataModel) sessionForStandardDataModel).getStandardValue(); } /** * * @param g2d */ @Override public void paint(Graphics2D g2d) { paintInit(g2d); } /** * */ @Override public void preparePanel() { removeAll(); createFitFunctionPanes(sessionForStandardDataModel, true); } }
clementparizot/ET_Redux
src/main/java/org/earthtime/Tripoli/dataViews/fitFunctionPresentationViews/SessionFitFunctionsPresentationView.java
Java
apache-2.0
2,640
// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"). You may // not use this file except in compliance with the License. A copy of the // License is located at // // http://aws.amazon.com/apache2.0/ // // or in the "license" file accompanying this file. This file is distributed // on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either // express or implied. See the License for the specific language governing // permissions and limitations under the License. package fsxwindowsfileserver import ( "errors" "strings" resourcestatus "github.com/aws/amazon-ecs-agent/agent/taskresource/status" ) // FSxWindowsFileServerVolumeStatus defines resource statuses for fsxwindowsfileserver resource type FSxWindowsFileServerVolumeStatus resourcestatus.ResourceStatus const ( // FSxWindowsFileServerVolumeStatusNone is the zero state of a task resource FSxWindowsFileServerVolumeStatusNone FSxWindowsFileServerVolumeStatus = iota // FSxWindowsFileServerVolumeCreated represents a task resource which has been created FSxWindowsFileServerVolumeCreated // FSxWindowsFileServerVolumeRemoved represents a task resource which has been cleaned up FSxWindowsFileServerVolumeRemoved ) var FSxWindowsFileServerVolumeStatusMap = map[string]FSxWindowsFileServerVolumeStatus{ "NONE": FSxWindowsFileServerVolumeStatusNone, "CREATED": FSxWindowsFileServerVolumeCreated, "REMOVED": FSxWindowsFileServerVolumeRemoved, } // String returns a human readable string representation of this object func (fs FSxWindowsFileServerVolumeStatus) String() string { for k, v := range FSxWindowsFileServerVolumeStatusMap { if v == fs { return k } } return "NONE" } // MarshalJSON overrides the logic for JSON-encoding the ResourceStatus type func (fs *FSxWindowsFileServerVolumeStatus) MarshalJSON() ([]byte, error) { if fs == nil { return nil, nil } return []byte(`"` + fs.String() + `"`), nil } // UnmarshalJSON overrides the logic for parsing the JSON-encoded ResourceStatus data func (fs *FSxWindowsFileServerVolumeStatus) UnmarshalJSON(b []byte) error { if strings.ToLower(string(b)) == "null" { *fs = FSxWindowsFileServerVolumeStatusNone return nil } if b[0] != '"' || b[len(b)-1] != '"' { *fs = FSxWindowsFileServerVolumeStatusNone return errors.New("resource status unmarshal: status must be a string or null; Got " + string(b)) } strStatus := b[1 : len(b)-1] stat, ok := FSxWindowsFileServerVolumeStatusMap[string(strStatus)] if !ok { *fs = FSxWindowsFileServerVolumeStatusNone return errors.New("resource status unmarshal: unrecognized status") } *fs = stat return nil }
swipely/amazon-ecs-agent
agent/taskresource/fsxwindowsfileserver/fsxwindowsfileserverstatus.go
Go
apache-2.0
2,717
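Since both JSON hooks above are defined on the pointer receiver, a status value round-trips as a quoted string; a small illustration (not part of the file):

// Illustration: JSON round-trip of a volume status.
status := FSxWindowsFileServerVolumeCreated
b, _ := status.MarshalJSON() // b == []byte(`"CREATED"`)

var parsed FSxWindowsFileServerVolumeStatus
_ = parsed.UnmarshalJSON(b) // parsed == FSxWindowsFileServerVolumeCreated

// Unknown labels return an error and leave the zero status behind.
if err := parsed.UnmarshalJSON([]byte(`"BOGUS"`)); err != nil {
	// parsed == FSxWindowsFileServerVolumeStatusNone
}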
package com.couchbase.lite; import com.couchbase.lite.internal.RevisionInternal; import com.couchbase.lite.mockserver.MockDispatcher; import com.couchbase.lite.mockserver.MockHelper; import com.couchbase.lite.replicator.Replication; import com.couchbase.lite.support.FileDirUtils; import com.couchbase.lite.support.RevisionUtils; import com.couchbase.lite.util.Log; import com.squareup.okhttp.mockwebserver.MockWebServer; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; public class DatabaseTest extends LiteTestCase { public void testPruneRevsToMaxDepthViaCompact() throws Exception { Map<String, Object> properties = new HashMap<String, Object>(); properties.put("testName", "testDatabaseCompaction"); properties.put("tag", 1337); Document doc = createDocumentWithProperties(database, properties); SavedRevision rev = doc.getCurrentRevision(); database.setMaxRevTreeDepth(1); for (int i = 0; i < 10; i++) { Map<String, Object> properties2 = new HashMap<String, Object>(properties); properties2.put("tag", i); rev = rev.createRevision(properties2); } database.compact(); Document fetchedDoc = database.getDocument(doc.getId()); List<SavedRevision> revisions = fetchedDoc.getRevisionHistory(); assertEquals(1, revisions.size()); } /** * When making inserts in a transaction, the change notifications should * be batched into a single change notification (rather than a change notification * for each insert) */ public void testChangeListenerNotificationBatching() throws Exception { final int numDocs = 50; final AtomicInteger atomicInteger = new AtomicInteger(0); final CountDownLatch countDownLatch = new CountDownLatch(1); database.addChangeListener(new Database.ChangeListener() { @Override public void changed(Database.ChangeEvent event) { atomicInteger.incrementAndGet(); } }); database.runInTransaction(new TransactionalTask() { @Override public boolean run() { createDocuments(database, numDocs); countDownLatch.countDown(); return true; } }); boolean success = countDownLatch.await(30, TimeUnit.SECONDS); assertTrue(success); assertEquals(1, atomicInteger.get()); } /** * When making inserts outside of a transaction, there should be a change notification * for each insert (no batching) */ public void testChangeListenerNotification() throws Exception { final int numDocs = 50; final AtomicInteger atomicInteger = new AtomicInteger(0); database.addChangeListener(new Database.ChangeListener() { @Override public void changed(Database.ChangeEvent event) { atomicInteger.incrementAndGet(); } }); createDocuments(database, numDocs); assertEquals(numDocs, atomicInteger.get()); } /** * Change listeners should only be called once no matter how many times they're added. 
*/ public void testAddChangeListenerIsIdempotent() throws Exception { final AtomicInteger count = new AtomicInteger(0); Database.ChangeListener listener = new Database.ChangeListener() { @Override public void changed(Database.ChangeEvent event) { count.incrementAndGet(); } }; database.addChangeListener(listener); database.addChangeListener(listener); createDocuments(database, 1); assertEquals(1, count.intValue()); } public void testGetActiveReplications() throws Exception { // create mock sync gateway that will serve as a pull target and return random docs int numMockDocsToServe = 0; MockDispatcher dispatcher = new MockDispatcher(); MockWebServer server = MockHelper.getPreloadedPullTargetMockCouchDB(dispatcher, numMockDocsToServe, 1); dispatcher.setServerType(MockDispatcher.ServerType.COUCHDB); server.setDispatcher(dispatcher); server.play(); final Replication replication = database.createPullReplication(server.getUrl("/db")); assertEquals(0, database.getAllReplications().size()); assertEquals(0, database.getActiveReplications().size()); final CountDownLatch replicationRunning = new CountDownLatch(1); replication.addChangeListener(new ReplicationActiveObserver(replicationRunning)); replication.start(); boolean success = replicationRunning.await(30, TimeUnit.SECONDS); assertTrue(success); assertEquals(1, database.getAllReplications().size()); assertEquals(1, database.getActiveReplications().size()); final CountDownLatch replicationDoneSignal = new CountDownLatch(1); replication.addChangeListener(new ReplicationFinishedObserver(replicationDoneSignal)); success = replicationDoneSignal.await(60, TimeUnit.SECONDS); assertTrue(success); // workaround race condition. Since our replication change listener will get triggered // _before_ the internal change listener that updates the activeReplications map, we // need to pause briefly to let the internal change listener to update activeReplications. Thread.sleep(500); assertEquals(1, database.getAllReplications().size()); assertEquals(0, database.getActiveReplications().size()); server.shutdown(); } public void testGetDatabaseNameFromPath() throws Exception { assertEquals("baz", FileDirUtils.getDatabaseNameFromPath("foo/bar/baz.cblite")); } public void testEncodeDocumentJSON() throws Exception { Map<String, Object> props = new HashMap<String, Object>(); props.put("_local_seq", ""); RevisionInternal revisionInternal = new RevisionInternal(props); byte[] encoded = RevisionUtils.asCanonicalJSON(revisionInternal); assertNotNull(encoded); } /** * in Database_Tests.m * - (void) test075_UpdateDocInTransaction */ public void testUpdateDocInTransaction() throws InterruptedException { // Test for #256, "Conflict error when updating a document multiple times in transaction block" // https://github.com/couchbase/couchbase-lite-ios/issues/256 Map<String, Object> properties = new HashMap<String, Object>(); properties.put("testName", "testUpdateDocInTransaction"); properties.put("count", 1); final Document doc = createDocumentWithProperties(database, properties); final CountDownLatch latch = new CountDownLatch(1); database.addChangeListener(new Database.ChangeListener() { @Override public void changed(Database.ChangeEvent event) { Log.i(TAG, "-- changed() --"); latch.countDown(); } }); assertTrue(database.runInTransaction(new TransactionalTask() { @Override public boolean run() { // Update doc. The currentRevision should update, but no notification be posted (yet). 
Map<String, Object> props1 = new HashMap<String, Object>(); props1.putAll(doc.getProperties()); props1.put("count", 2); SavedRevision rev1 = null; try { rev1 = doc.putProperties(props1); } catch (CouchbaseLiteException e) { Log.e(Log.TAG_DATABASE, e.toString()); return false; } assertNotNull(rev1); assertEquals(doc.getCurrentRevision(), rev1); assertEquals(1, latch.getCount()); // Update doc again; this should succeed, in the same manner. Map<String, Object> props2 = new HashMap<String, Object>(); props2.putAll(doc.getProperties()); props2.put("count", 3); SavedRevision rev2 = null; try { rev2 = doc.putProperties(props2); } catch (CouchbaseLiteException e) { Log.e(Log.TAG_DATABASE, e.toString()); return false; } assertNotNull(rev2); assertEquals(doc.getCurrentRevision(), rev2); assertEquals(1, latch.getCount()); return true; } })); assertTrue(latch.await(0, TimeUnit.SECONDS)); } }
vladoatanasov/couchbase-lite-android
src/androidTest/java/com/couchbase/lite/DatabaseTest.java
Java
apache-2.0
8,892
package uk.ac.ebi.pride.psmindex.search.model; import org.apache.solr.client.solrj.beans.Field; import uk.ac.ebi.pride.archive.dataprovider.identification.ModificationProvider; import uk.ac.ebi.pride.archive.dataprovider.identification.PeptideSequenceProvider; import uk.ac.ebi.pride.archive.dataprovider.param.CvParamProvider; import uk.ac.ebi.pride.indexutils.helpers.CvParamHelper; import uk.ac.ebi.pride.indexutils.helpers.ModificationHelper; import java.util.ArrayList; import java.util.List; public class Psm implements PeptideSequenceProvider { @Field(PsmFields.ID) private String id; @Field(PsmFields.REPORTED_ID) private String reportedId; @Field(PsmFields.PEPTIDE_SEQUENCE) private String peptideSequence; @Field(PsmFields.PROTEIN_ACCESSION) private String proteinAccession; @Field(PsmFields.PROJECT_ACCESSION) private String projectAccession; @Field(PsmFields.ASSAY_ACCESSION) private String assayAccession; @Field(PsmFields.MOD_NAMES) private List<String> modificationNames; public String getId() { return id; } public void setId(String id) { this.id = id; } public String getReportedId() { return reportedId; } public void setReportedId(String reportedId) { this.reportedId = reportedId; } public String getPeptideSequence() { return peptideSequence; } public void setPeptideSequence(String peptideSequence) { this.peptideSequence = peptideSequence; } public String getProteinAccession() { return proteinAccession; } public void setProteinAccession(String proteinAccession) { this.proteinAccession = proteinAccession; } public String getProjectAccession() { return projectAccession; } public void setProjectAccession(String projectAccession) { this.projectAccession = projectAccession; } public String getAssayAccession() { return assayAccession; } public void setAssayAccession(String assayAccession) { this.assayAccession = assayAccession; } public Iterable<String> getModificationNames() { return modificationNames; } public void setModificationNames(List<ModificationProvider> modifications) { this.modificationNames = new ArrayList<>(); if (modifications!=null && modifications.size()>0) { for (ModificationProvider modification : modifications) { addModificationNames(modification); } } } public void addModificationNames(ModificationProvider modification) { if (modificationNames==null) { modificationNames = new ArrayList<>(); } modificationNames.add(modification.getName()); } }
PRIDE-Archive/psm-index-service
src/main/java/uk/ac/ebi/pride/psmindex/search/model/Psm.java
Java
apache-2.0
2,617
/* * Copyright (C) 2015 Intel Corporation. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #ifdef HAVE_CONFIG_H #include "config.h" #endif #include "surfacepool.h" #include "common/log.h" namespace YamiMediaCodec{ SharedPtr<SurfacePool> SurfacePool::create(const SharedPtr<SurfaceAllocator>& alloc, uint32_t fourcc, uint32_t width, uint32_t height, uint32_t size) { SharedPtr<SurfacePool> pool(new SurfacePool); if (YAMI_SUCCESS != pool->init(alloc, fourcc, width, height, size)) pool.reset(); return pool; } SurfacePool::SurfacePool() { memset(&m_params, 0, sizeof(m_params)); } YamiStatus SurfacePool::init(const SharedPtr<SurfaceAllocator>& alloc, uint32_t fourcc, uint32_t width, uint32_t height, uint32_t size) { m_params.fourcc = fourcc; m_params.width = width; m_params.height = height; m_params.size = size; YamiStatus status = alloc->alloc(alloc.get(), &m_params); if (status != YAMI_SUCCESS) return status; //prepare surfaces for pool std::deque<SurfacePtr> surfaces; for (uint32_t i = 0; i < m_params.size; i++) { SurfacePtr s(new VaapiSurface(m_params.surfaces[i], width, height, fourcc)); surfaces.push_back(s); } m_pool = VideoPool<VaapiSurface>::create(surfaces); if (!m_pool) { ERROR("failed to create Surface Pool"); return YAMI_OUT_MEMORY; } m_alloc = alloc; return YAMI_SUCCESS; } SurfacePool::~SurfacePool() { if (m_alloc) { m_alloc->free(m_alloc.get(), &m_params); } } SurfacePtr SurfacePool::alloc() { return m_pool->alloc(); } } //YamiMediaCodec
zhaobob/libyami
common/surfacepool.cpp
C++
apache-2.0
2,169
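A usage sketch for the pool above, assuming the yami headers are included and a VAAPI-backed SurfaceAllocator is available; the fourcc literal is just the little-endian encoding of "NV12" and is illustrative:

// Sketch: create a pool of 8 NV12 surfaces and borrow one.
using namespace YamiMediaCodec;

void demo(const SharedPtr<SurfaceAllocator>& alloc)
{
    SharedPtr<SurfacePool> pool =
        SurfacePool::create(alloc, 0x3231564E /* "NV12" */, 1920, 1080, 8);
    if (!pool)
        return; // allocator failed

    SurfacePtr surface = pool->alloc(); // empty when all 8 are in use
    // ... render or decode into the surface ...
    surface.reset(); // dropping the last reference recycles it into the pool
}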
package cn.itcast_05; public class SetThread implements Runnable { private Student s; private int x = 0; public SetThread(Student s) { this.s = s; } @Override public void run() { while (true) { synchronized (s) { //check whether data is already there if(s.flag){ try { s.wait(); //t1 waits and releases the lock } catch (InterruptedException e) { e.printStackTrace(); } } if (x % 2 == 0) { s.name = "林青霞"; s.age = 27; } else { s.name = "刘意"; s.age = 30; } x++; //x=1 //update the flag s.flag = true; //wake up the waiting thread s.notify(); //wakes t2; being woken up does not mean it runs immediately, it still has to compete for the CPU } //the lock is held by either t1 or t2 } } }
aircjm/JavaSeStudy
day24/code/day24_Thread/src/cn/itcast_05/SetThread.java
Java
apache-2.0
748
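SetThread is the producer half of a wait/notify pair on the shared Student monitor; the matching consumer (a hypothetical GetThread, mirroring this tutorial's style) would look like the sketch below. Both sides guard the wait with if, as the original does; production code would use while to survive spurious wakeups.

package cn.itcast_05;

// Hypothetical consumer counterpart (t2): prints the data, flips the flag
// back, and wakes the producer. Assumes Student exposes name, age and flag.
public class GetThread implements Runnable {
    private Student s;

    public GetThread(Student s) {
        this.s = s;
    }

    @Override
    public void run() {
        while (true) {
            synchronized (s) {
                if (!s.flag) { // nothing to consume yet
                    try {
                        s.wait(); // t2 waits and releases the lock
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                }
                System.out.println(s.name + "---" + s.age);
                s.flag = false; // mark the slot as empty
                s.notify();     // wake the producer (t1)
            }
        }
    }
}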
/** * @license * Copyright 2019 Google LLC * SPDX-License-Identifier: Apache-2.0 */ /** * @fileoverview Blockly module for Node. It includes Blockly core, * built-in blocks, all the generators and the English locale. */ /* eslint-disable */ 'use strict'; // Include the EN Locale by default. Blockly.setLocale(En);
google/blockly
scripts/package/node/index.js
JavaScript
apache-2.0
339
/* * Copyright 2018 ImpactDevelopment * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package clientapi.gui.widget.data; /** * Default implementations of {@link WidgetAlignment} * * @see WidgetAlignment * * @author Brady * @since 5/28/2017 */ public enum DefaultWidgetAlignment implements WidgetAlignment { LEFT(-1.0F), CENTERED(-0.5F), RIGHT(0.0F); private float value; DefaultWidgetAlignment(float value) { this.value = value; } @Override public final float getValue() { return this.value; } }
ImpactDevelopment/ClientAPI
src/main/java/clientapi/gui/widget/data/DefaultWidgetAlignment.java
Java
apache-2.0
1,076
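The three constants (-1.0, -0.5, 0.0) read like width multipliers applied relative to an anchor x-coordinate; one plausible, hedged interpretation (alignedX is a hypothetical helper, not ClientAPI code):

// Hypothetical: LEFT (-1.0) ends at the anchor, CENTERED (-0.5) straddles it,
// RIGHT (0.0) begins at it.
static float alignedX(float anchorX, float widgetWidth, WidgetAlignment alignment) {
    return anchorX + widgetWidth * alignment.getValue();
}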
import { Component, Input } from '@angular/core'; import { User } from 'ngx-login-client'; @Component({ selector: 'f8-assignee', templateUrl: './assignee.component.html', styleUrls: ['./assignee.component.less'], }) export class AssigneesComponent { private assignees: User[] = []; @Input() truncateAfter: number; @Input() showFullName: boolean; @Input('assignees') set assigneeInput(val) { this.assignees = val; } @Input() overlapAvatar: boolean = false; constructor() {} }
fabric8-ui/fabric8-ui
packages/planner/src/app/components_ngrx/assignee/assignee.component.ts
TypeScript
apache-2.0
502
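The component's @Input names map directly onto host-template bindings; a usage sketch (users is an assumed User[] field on the host component):

<!-- Sketch: up to three overlapping avatars with full names shown -->
<f8-assignee
  [assignees]="users"
  [truncateAfter]="3"
  [showFullName]="true"
  [overlapAvatar]="true">
</f8-assignee>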
package gwt.material.design.client.ui.html; /* * #%L * GwtBootstrap3 * %% * Copyright (C) 2013 GwtBootstrap3 * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import com.google.gwt.dom.client.Document; import gwt.material.design.client.base.AbstractTextWidget; import gwt.material.design.client.base.helper.CodeHelper; /** * @author Ben Dol */ public class Code extends AbstractTextWidget { public Code() { super(Document.get().createElement("code")); } public Code(final String text) { this(); setHTML(text); } @Override public void setHTML(String html) { this.getElement().setInnerHTML(html); } }
GwtMaterialDesign/gwt-material
gwt-material/src/main/java/gwt/material/design/client/ui/html/Code.java
Java
apache-2.0
1,196
// Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Copyright 2008 Google Inc. All Rights Reserved. /** * @fileoverview A class for managing the editor toolbar. * * @see ../../demos/editor/editor.html */ goog.provide('goog.ui.editor.ToolbarController'); goog.require('goog.editor.Field.EventType'); goog.require('goog.events.EventHandler'); goog.require('goog.events.EventTarget'); goog.require('goog.ui.Component.EventType'); /** * A class for managing the editor toolbar. Acts as a bridge between * a {@link goog.editor.Field} and a {@link goog.ui.Toolbar}. * * The {@code toolbar} argument must be an instance of {@link goog.ui.Toolbar} * or a subclass. This class doesn't care how the toolbar was created. As * long as one or more controls hosted in the toolbar have IDs that match * built-in {@link goog.editor.Command}s, they will function as expected. It is * the caller's responsibility to ensure that the toolbar is already rendered * or that it decorates an existing element. * * * @param {!goog.editor.Field} field Editable field to be controlled by the * toolbar. * @param {!goog.ui.Toolbar} toolbar Toolbar to control the editable field. * @constructor * @extends {goog.events.EventTarget} */ goog.ui.editor.ToolbarController = function(field, toolbar) { goog.events.EventTarget.call(this); /** * Event handler to listen for field events and user actions. * @type {!goog.events.EventHandler} * @private */ this.handler_ = new goog.events.EventHandler(this); /** * The field instance controlled by the toolbar. * @type {!goog.editor.Field} * @private */ this.field_ = field; /** * The toolbar that controls the field. * @type {!goog.ui.Toolbar} * @private */ this.toolbar_ = toolbar; /** * Editing commands whose state is to be queried when updating the toolbar. * @type {!Array.<string>} * @private */ this.queryCommands_ = []; // Iterate over all buttons, and find those which correspond to // queryable commands. Add them to the list of commands to query on // each COMMAND_VALUE_CHANGE event. this.toolbar_.forEachChild(function(button) { if (button.queryable) { this.queryCommands_.push(this.getComponentId(button.getId())); } }, this); // Make sure the toolbar doesn't steal keyboard focus. this.toolbar_.setFocusable(false); // Hook up handlers that update the toolbar in response to field events, // and to execute editor commands in response to toolbar events. this.handler_. listen(this.field_, goog.editor.Field.EventType.COMMAND_VALUE_CHANGE, this.updateToolbar). listen(this.toolbar_, goog.ui.Component.EventType.ACTION, this.handleAction); }; goog.inherits(goog.ui.editor.ToolbarController, goog.events.EventTarget); /** * Returns the Closure component ID of the control that corresponds to the * given {@link goog.editor.Command} constant. * Subclasses may override this method if they want to use a custom mapping * scheme from commands to controls. * @param {string} command Editor command. * @return {string} Closure component ID of the corresponding toolbar * control, if any. 
* @protected */ goog.ui.editor.ToolbarController.prototype.getComponentId = function(command) { // The default implementation assumes that the component ID is the same as // the command constant. return command; }; /** * Returns the {@link goog.editor.Command} constant * that corresponds to the given Closure component ID. Subclasses may override * this method if they want to use a custom mapping scheme from controls to * commands. * @param {string} id Closure component ID of a toolbar control. * @return {string} Editor command or dialog constant corresponding to the * toolbar control, if any. * @protected */ goog.ui.editor.ToolbarController.prototype.getCommand = function(id) { // The default implementation assumes that the component ID is the same as // the command constant. return id; }; /** * Returns the event handler object for the editor toolbar. Useful for classes * that extend {@code goog.ui.editor.ToolbarController}. * @return {!goog.events.EventHandler} The event handler object. * @protected */ goog.ui.editor.ToolbarController.prototype.getHandler = function() { return this.handler_; }; /** * Returns the field instance managed by the toolbar. Useful for * classes that extend {@code goog.ui.editor.ToolbarController}. * @return {!goog.editor.Field} The field managed by the toolbar. * @protected */ goog.ui.editor.ToolbarController.prototype.getField = function() { return this.field_; }; /** * Returns the toolbar UI component that manages the editor. Useful for * classes that extend {@code goog.ui.editor.ToolbarController}. * @return {!goog.ui.Toolbar} The toolbar UI component. */ goog.ui.editor.ToolbarController.prototype.getToolbar = function() { return this.toolbar_; }; /** * @return {boolean} Whether the toolbar is visible. */ goog.ui.editor.ToolbarController.prototype.isVisible = function() { return this.toolbar_.isVisible(); }; /** * Shows or hides the toolbar. * @param {boolean} visible Whether to show or hide the toolbar. */ goog.ui.editor.ToolbarController.prototype.setVisible = function(visible) { this.toolbar_.setVisible(visible); }; /** * @return {boolean} Whether the toolbar is enabled. */ goog.ui.editor.ToolbarController.prototype.isEnabled = function() { return this.toolbar_.isEnabled(); }; /** * Enables or disables the toolbar. * @param {boolean} enabled Whether to enable or disable the toolbar. */ goog.ui.editor.ToolbarController.prototype.setEnabled = function(enabled) { this.toolbar_.setEnabled(enabled); }; /** * Programmatically blurs the editor toolbar, un-highlighting the currently * highlighted item, and closing the currently open menu (if any). */ goog.ui.editor.ToolbarController.prototype.blur = function() { // We can't just call this.toolbar_.getElement().blur(), because the toolbar // element itself isn't focusable, so goog.ui.Container#handleBlur isn't // registered to handle blur events. this.toolbar_.handleBlur(null); }; /** @inheritDoc */ goog.ui.editor.ToolbarController.prototype.disposeInternal = function() { goog.ui.editor.ToolbarController.superClass_.disposeInternal.call(this); if (this.handler_) { this.handler_.dispose(); delete this.handler_; } if (this.toolbar_) { this.toolbar_.dispose(); delete this.toolbar_; } delete this.field_; delete this.queryCommands_; }; /** * Updates the toolbar in response to editor events. Specifically, updates * button states based on {@code COMMAND_VALUE_CHANGE} events, reflecting the * effective formatting of the selection. * @param {goog.events.Event} e Editor event to handle. 
* @protected */ goog.ui.editor.ToolbarController.prototype.updateToolbar = function(e) { if (!this.toolbar_.isEnabled() || !this.dispatchEvent(goog.ui.Component.EventType.CHANGE)) { return; } var state; /** @preserveTry */ try { /** @type {Array.<string>} */ e.commands; // Added by dispatchEvent. // If the COMMAND_VALUE_CHANGE event specifies which commands changed // state, then we only need to update those ones, otherwise update all // commands. state = /** @type {Object} */ ( this.field_.queryCommandValue(e.commands || this.queryCommands_)); } catch (ex) { // TODO: Find out when/why this happens. state = {}; } this.updateToolbarFromState(state); }; /** * Updates the toolbar to reflect a given state. * @param {Object} state Object mapping editor commands to values. */ goog.ui.editor.ToolbarController.prototype.updateToolbarFromState = function(state) { for (var command in state) { var button = this.toolbar_.getChild(this.getComponentId(command)); if (button) { var value = state[command]; if (button.updateFromValue) { button.updateFromValue(value); } else { button.setChecked(!!value); } } } }; /** * Handles {@code ACTION} events dispatched by toolbar buttons in response to * user actions by executing the corresponding field command. * @param {goog.events.Event} e Action event to handle. * @protected */ goog.ui.editor.ToolbarController.prototype.handleAction = function(e) { var command = this.getCommand(e.target.getId()); this.field_.execCommand(command, e.target.getValue()); };
jay-hodgson/SynapseWebClient
src/main/webapp/js/goog/ui/editor/toolbarcontroller.js
JavaScript
apache-2.0
9,007
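Per the class comment, the controller only bridges an already-built field and toolbar; a wiring sketch (the element ids are assumptions, and goog.ui.editor.DefaultToolbar is one stock way to produce a toolbar whose button IDs match goog.editor.Command constants):

// Sketch: connect a field to a toolbar.
var field = new goog.editor.Field('editable-div');
var toolbar = goog.ui.editor.DefaultToolbar.makeDefaultToolbar(
    document.getElementById('toolbar-div'));
var controller = new goog.ui.editor.ToolbarController(field, toolbar);

field.makeEditable();
// ... later, tearing the editor down:
controller.dispose(); // disposeInternal also disposes the toolbar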
/* * Copyright (c) 2017. HSJ * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hsj.common.rxbus; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Inherited; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** * @Author:HSJ * @E-mail:shengjunhu@foxmail.com * @Date:2018/2/25/14:18 * @Version:V1.0 * @Class:BusThead * @Description:Thread annotation */ @Documented @Inherited @Target(ElementType.PARAMETER) @Retention(RetentionPolicy.SOURCE) public @interface BusThead { String CURRENT_THREAD = "current_thread"; String UI_THEAD = "ui_thread"; String MAIN_THEAD = "main_thread"; String NEW_THEAD = "new_thread"; String IO_THEAD = "io_thread"; }
ShengJunHu/FastAndroid
library/common/src/main/java/com/hsj/common/rxbus/BusThead.java
Java
apache-2.0
1,429
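BusThead declares string constants but no annotation elements, so it can only mark a String parameter whose value is expected to be one of those constants (an @StringDef-style convention); a hypothetical consumer:

// Hypothetical: an RxBus subscribe method documenting, via the annotation,
// which thread names are legal for the 'thread' argument.
public void subscribe(Object subscriber, @BusThead String thread) {
    // e.g. thread == BusThead.MAIN_THEAD ("main_thread")
    // ... dispatch registration onto the requested thread ...
}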
/* * Copyright (C) 2009 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.collect; import static com.google.common.collect.CollectPreconditions.checkEntryNotNull; import com.google.common.annotations.Beta; import java.util.Comparator; import java.util.Map; import java.util.Map.Entry; import java.util.function.Function; import java.util.stream.Collector; /** * GWT emulation of {@link com.google.common.collect.ImmutableBiMap}. * * @author Hayward Chan */ public abstract class ImmutableBiMap<K, V> extends ForwardingImmutableMap<K, V> implements BiMap<K, V> { @Beta public static <T, K, V> Collector<T, ?, ImmutableBiMap<K, V>> toImmutableBiMap( Function<? super T, ? extends K> keyFunction, Function<? super T, ? extends V> valueFunction) { return CollectCollectors.toImmutableBiMap(keyFunction, valueFunction); } // Casting to any type is safe because the set will never hold any elements. @SuppressWarnings("unchecked") public static <K, V> ImmutableBiMap<K, V> of() { return (ImmutableBiMap<K, V>) RegularImmutableBiMap.EMPTY; } public static <K, V> ImmutableBiMap<K, V> of(K k1, V v1) { checkEntryNotNull(k1, v1); return new SingletonImmutableBiMap<K, V>(k1, v1); } public static <K, V> ImmutableBiMap<K, V> of(K k1, V v1, K k2, V v2) { return new RegularImmutableBiMap<K, V>(ImmutableMap.of(k1, v1, k2, v2)); } public static <K, V> ImmutableBiMap<K, V> of( K k1, V v1, K k2, V v2, K k3, V v3) { return new RegularImmutableBiMap<K, V>(ImmutableMap.of( k1, v1, k2, v2, k3, v3)); } public static <K, V> ImmutableBiMap<K, V> of( K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4) { return new RegularImmutableBiMap<K, V>(ImmutableMap.of( k1, v1, k2, v2, k3, v3, k4, v4)); } public static <K, V> ImmutableBiMap<K, V> of( K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5) { return new RegularImmutableBiMap<K, V>(ImmutableMap.of( k1, v1, k2, v2, k3, v3, k4, v4, k5, v5)); } public static <K, V> Builder<K, V> builder() { return new Builder<K, V>(); } public static final class Builder<K, V> extends ImmutableMap.Builder<K, V> { public Builder() {} Builder(int initCapacity) { super(initCapacity); } @Override public Builder<K, V> put(K key, V value) { super.put(key, value); return this; } @Override public Builder<K, V> put(Map.Entry<? extends K, ? extends V> entry) { super.put(entry); return this; } @Override public Builder<K, V> putAll(Map<? extends K, ? extends V> map) { super.putAll(map); return this; } @Override public Builder<K, V> putAll( Iterable<? extends Entry<? extends K, ? extends V>> entries) { super.putAll(entries); return this; } public Builder<K, V> orderEntriesByValue(Comparator<? 
super V> valueComparator) { super.orderEntriesByValue(valueComparator); return this; } Builder<K, V> combine(Builder<K, V> other) { super.combine(other); return this; } @Override public ImmutableBiMap<K, V> build() { ImmutableMap<K, V> map = super.build(); if (map.isEmpty()) { return of(); } return new RegularImmutableBiMap<K, V>(super.build()); } } public static <K, V> ImmutableBiMap<K, V> copyOf( Map<? extends K, ? extends V> map) { if (map instanceof ImmutableBiMap) { @SuppressWarnings("unchecked") // safe since map is not writable ImmutableBiMap<K, V> bimap = (ImmutableBiMap<K, V>) map; return bimap; } if (map.isEmpty()) { return of(); } ImmutableMap<K, V> immutableMap = ImmutableMap.copyOf(map); return new RegularImmutableBiMap<K, V>(immutableMap); } public static <K, V> ImmutableBiMap<K, V> copyOf( Iterable<? extends Entry<? extends K, ? extends V>> entries) { return new Builder<K, V>().putAll(entries).build(); } ImmutableBiMap(Map<K, V> delegate) { super(delegate); } public abstract ImmutableBiMap<V, K> inverse(); @Override public ImmutableSet<V> values() { return inverse().keySet(); } public final V forcePut(K key, V value) { throw new UnsupportedOperationException(); } }
DavesMan/guava
guava-gwt/src-super/com/google/common/collect/super/com/google/common/collect/ImmutableBiMap.java
Java
apache-2.0
4,820
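A quick illustration of the API surface shown above; note that values() is literally inverse().keySet(), so keys and values are each unique:

ImmutableBiMap<String, Integer> ranks = ImmutableBiMap.of("gold", 1, "silver", 2);

ranks.get("gold");      // 1
ranks.inverse().get(2); // "silver" -- reverse lookup by value
ranks.values();         // ImmutableSet of [1, 2]

// The bimap is immutable; mutators fail at runtime:
// ranks.forcePut("bronze", 3); // throws UnsupportedOperationException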
# -*- coding: utf-8 -*- """ CSS Selectors based on XPath ============================ This module supports selecting XML/HTML elements based on CSS selectors. See the `CSSSelector` class for details. :copyright: (c) 2007-2012 Ian Bicking and contributors. See AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from cssselect.parser import (parse, Selector, FunctionalPseudoElement, SelectorError, SelectorSyntaxError) from cssselect.xpath import GenericTranslator, HTMLTranslator, ExpressionError VERSION = '1.0.1' __version__ = VERSION
frvannes16/Cops-Robbers-Coding-Challenge
src/competition_code/libs/cssselect/__init__.py
Python
apache-2.0
639
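A small usage example for the re-exported entry points (output shown in comments is approximate):

from cssselect import GenericTranslator, parse

# Compile a CSS selector to an XPath expression.
expr = GenericTranslator().css_to_xpath('div.content > a')
# e.g. "descendant-or-self::div[...]/a"

# Or inspect the parsed selector objects directly.
selectors = parse('div.content > a, p#intro')
print([s.specificity() for s in selectors])  # [(0, 1, 2), (1, 0, 1)]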
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections.Generic; using System.Diagnostics; using System.IO; using System.Linq; using System.Reflection.PortableExecutable; using System.Text; using System.Threading; using Microsoft.CodeAnalysis; using Roslyn.Utilities; using EmitContext = Microsoft.CodeAnalysis.Emit.EmitContext; namespace Microsoft.Cci { internal sealed class PeWriter { /// <summary> /// True if we should attempt to generate a deterministic output (no timestamps or random data). /// </summary> private readonly bool _deterministic; private readonly IModule _module; private readonly string _pdbPathOpt; private readonly bool _emitRuntimeStartupStub; private readonly int _sizeOfImportAddressTable; private MemoryStream _headerStream = new MemoryStream(1024); private readonly MemoryStream _emptyStream = new MemoryStream(0); private readonly NtHeader _ntHeader = new NtHeader(); private readonly BinaryWriter _rdataWriter = new BinaryWriter(new MemoryStream()); private readonly BinaryWriter _sdataWriter = new BinaryWriter(new MemoryStream()); private readonly BinaryWriter _tlsDataWriter = new BinaryWriter(new MemoryStream()); private readonly BinaryWriter _win32ResourceWriter = new BinaryWriter(new MemoryStream(1024)); private readonly BinaryWriter _coverageDataWriter = new BinaryWriter(new MemoryStream()); private SectionHeader _coverSection; private SectionHeader _relocSection; private SectionHeader _resourceSection; private SectionHeader _rdataSection; private SectionHeader _sdataSection; private SectionHeader _textSection; private SectionHeader _tlsSection; private PeWriter(IModule module, string pdbPathOpt, bool deterministic) { _module = module; _emitRuntimeStartupStub = module.RequiresStartupStub; _pdbPathOpt = pdbPathOpt; _deterministic = deterministic; _sizeOfImportAddressTable = _emitRuntimeStartupStub ? (!_module.Requires64bits ? 8 : 16) : 0; } private bool EmitPdb => _pdbPathOpt != null; public static bool WritePeToStream( EmitContext context, CommonMessageProvider messageProvider, Func<Stream> getPeStream, PdbWriter nativePdbWriterOpt, string pdbPathOpt, bool allowMissingMethodBodies, bool deterministic, CancellationToken cancellationToken) { // If PDB writer is given, we have to have PDB path. Debug.Assert(nativePdbWriterOpt == null || pdbPathOpt != null); var peWriter = new PeWriter(context.Module, pdbPathOpt, deterministic); var mdWriter = FullMetadataWriter.Create(context, messageProvider, allowMissingMethodBodies, deterministic, cancellationToken); return peWriter.WritePeToStream(mdWriter, getPeStream, nativePdbWriterOpt); } private bool WritePeToStream(MetadataWriter mdWriter, Func<Stream> getPeStream, PdbWriter nativePdbWriterOpt) { // TODO: we can precalculate the exact size of IL stream var ilBuffer = new MemoryStream(32 * 1024); var ilWriter = new BinaryWriter(ilBuffer); var metadataBuffer = new MemoryStream(16 * 1024); var metadataWriter = new BinaryWriter(metadataBuffer); var mappedFieldDataBuffer = new MemoryStream(); var mappedFieldDataWriter = new BinaryWriter(mappedFieldDataBuffer); var managedResourceBuffer = new MemoryStream(1024); var managedResourceWriter = new BinaryWriter(managedResourceBuffer); nativePdbWriterOpt?.SetMetadataEmitter(mdWriter); // Since we are producing a full assembly, we should not have a module version ID // imposed ahead-of time. 
Instead we will compute a deterministic module version ID // based on the contents of the generated stream. Debug.Assert(_module.PersistentIdentifier == default(Guid)); uint moduleVersionIdOffsetInMetadataStream; var calculateMethodBodyStreamRva = new Func<MetadataSizes, int>(mdSizes => { FillInTextSectionHeader(mdSizes); return (int)_textSection.RelativeVirtualAddress + _sizeOfImportAddressTable + 72; }); MetadataSizes metadataSizes; uint entryPointToken; mdWriter.SerializeMetadataAndIL( nativePdbWriterOpt, metadataWriter, ilWriter, mappedFieldDataWriter, managedResourceWriter, calculateMethodBodyStreamRva, CalculateMappedFieldDataStreamRva, out moduleVersionIdOffsetInMetadataStream, out entryPointToken, out metadataSizes); ContentId pdbContentId; if (nativePdbWriterOpt != null) { if (entryPointToken != 0) { nativePdbWriterOpt.SetEntryPoint(entryPointToken); } var assembly = _module.AsAssembly; if (assembly != null && assembly.Kind == ModuleKind.WindowsRuntimeMetadata) { // Dev12: If compiling to winmdobj, we need to add to PDB source spans of // all types and members for better error reporting by WinMDExp. nativePdbWriterOpt.WriteDefinitionLocations(_module.GetSymbolToLocationMap()); } else { #if DEBUG // validate that all definitions are writeable // if same scenario would happen in an winmdobj project nativePdbWriterOpt.AssertAllDefinitionsHaveTokens(_module.GetSymbolToLocationMap()); #endif } pdbContentId = nativePdbWriterOpt.GetContentId(); // the writer shall not be used after this point for writing: nativePdbWriterOpt = null; } else { pdbContentId = default(ContentId); } FillInSectionHeaders(); // fill in header fields. FillInNtHeader(metadataSizes, CalculateMappedFieldDataStreamRva(metadataSizes)); var corHeader = CreateCorHeader(metadataSizes, entryPointToken); // write to pe stream. Stream peStream = getPeStream(); if (peStream == null) { return false; } long ntHeaderTimestampPosition; long metadataPosition; WriteHeaders(peStream, out ntHeaderTimestampPosition); WriteTextSection( peStream, corHeader, metadataBuffer, ilBuffer, mappedFieldDataBuffer, managedResourceBuffer, metadataSizes, pdbContentId, out metadataPosition); WriteRdataSection(peStream); WriteSdataSection(peStream); WriteCoverSection(peStream); WriteTlsSection(peStream); WriteResourceSection(peStream); WriteRelocSection(peStream); if (_deterministic) { var mvidPosition = metadataPosition + moduleVersionIdOffsetInMetadataStream; WriteDeterministicGuidAndTimestamps(peStream, mvidPosition, ntHeaderTimestampPosition); } return true; } private int CalculateMappedFieldDataStreamRva(MetadataSizes metadataSizes) { FillInTextSectionHeader(metadataSizes); Debug.Assert(metadataSizes.MappedFieldDataSize % MetadataWriter.MappedFieldDataAlignment == 0); return (int)(_textSection.RelativeVirtualAddress + _textSection.VirtualSize - metadataSizes.MappedFieldDataSize); } /// <summary> /// Compute a deterministic Guid and timestamp based on the contents of the stream, and replace /// the 16 zero bytes at the given position and one or two 4-byte values with that computed Guid and timestamp. 
/// </summary> /// <param name="peStream">PE stream.</param> /// <param name="mvidPosition">Position in the stream of 16 zero bytes to be replaced by a Guid</param> /// <param name="ntHeaderTimestampPosition">Position in the stream of four zero bytes to be replaced by a timestamp</param> private static void WriteDeterministicGuidAndTimestamps( Stream peStream, long mvidPosition, long ntHeaderTimestampPosition) { Debug.Assert(mvidPosition != 0); Debug.Assert(ntHeaderTimestampPosition != 0); var previousPosition = peStream.Position; // Compute and write deterministic guid data over the relevant portion of the stream peStream.Position = 0; var contentId = ContentId.FromHash(CryptographicHashProvider.ComputeSha1(peStream)); // The existing Guid should be zero. CheckZeroDataInStream(peStream, mvidPosition, contentId.Guid.Length); peStream.Position = mvidPosition; peStream.Write(contentId.Guid, 0, contentId.Guid.Length); // The existing timestamp should be zero. CheckZeroDataInStream(peStream, ntHeaderTimestampPosition, contentId.Stamp.Length); peStream.Position = ntHeaderTimestampPosition; peStream.Write(contentId.Stamp, 0, contentId.Stamp.Length); peStream.Position = previousPosition; } [Conditional("DEBUG")] private static void CheckZeroDataInStream(Stream stream, long position, int bytes) { stream.Position = position; for (int i = 0; i < bytes; i++) { int value = stream.ReadByte(); Debug.Assert(value == 0); } } private int ComputeStrongNameSignatureSize() { IAssembly assembly = _module.AsAssembly; if (assembly == null) { return 0; } // EDMAURER the count of characters divided by two because the each pair of characters will turn in to one byte. int keySize = (assembly.SignatureKey == null) ? 0 : assembly.SignatureKey.Length / 2; if (keySize == 0) { keySize = assembly.PublicKey.Length; } if (keySize == 0) { return 0; } return (keySize < 128 + 32) ? 128 : keySize - 32; } private int ComputeOffsetToDebugTable(MetadataSizes metadataSizes) { return ComputeOffsetToMetadata(metadataSizes.ILStreamSize) + metadataSizes.MetadataSize + metadataSizes.ResourceDataSize + ComputeStrongNameSignatureSize(); // size of strong name hash } private int ComputeOffsetToImportTable(MetadataSizes metadataSizes) { // TODO: add size of unmanaged export stubs (when and if these are ever supported). return ComputeOffsetToDebugTable(metadataSizes) + ComputeSizeOfDebugDirectory(); } private int ComputeOffsetToMetadata(int ilStreamLength) { return _sizeOfImportAddressTable + 72 + // size of CLR header BitArithmeticUtilities.Align(ilStreamLength, 4); } private const int ImageDebugDirectoryBaseSize = sizeof(uint) + // Characteristics sizeof(uint) + // TimeDataStamp sizeof(uint) + // Version sizeof(uint) + // Type sizeof(uint) + // SizeOfData sizeof(uint) + // AddressOfRawData sizeof(uint); // PointerToRawData private int ComputeSizeOfDebugDirectoryData() { return 4 + // 4B signature "RSDS" 16 + // GUID sizeof(uint) + // Age Encoding.UTF8.GetByteCount(_pdbPathOpt) + 1; // Null terminator } private int ComputeSizeOfDebugDirectory() { return EmitPdb ? 
ImageDebugDirectoryBaseSize + ComputeSizeOfDebugDirectoryData() : 0; } private uint ComputeSizeOfPeHeaders() { ushort numberOfSections = 1; // .text if (_emitRuntimeStartupStub) numberOfSections++; //.reloc if (_tlsDataWriter.BaseStream.Length > 0) numberOfSections++; //.tls if (_rdataWriter.BaseStream.Length > 0) numberOfSections++; //.rdata if (_sdataWriter.BaseStream.Length > 0) numberOfSections++; //.sdata if (_coverageDataWriter.BaseStream.Length > 0) numberOfSections++; //.cover if (!IteratorHelper.EnumerableIsEmpty(_module.Win32Resources) || _module.Win32ResourceSection != null) numberOfSections++; //.rsrc; _ntHeader.NumberOfSections = numberOfSections; uint sizeOfPeHeaders = 128 + 4 + 20 + 224 + 40u * numberOfSections; if (_module.Requires64bits) { sizeOfPeHeaders += 16; } return sizeOfPeHeaders; } private int ComputeSizeOfTextSection(MetadataSizes metadataSizes) { int textSectionLength = this.ComputeOffsetToImportTable(metadataSizes); if (_emitRuntimeStartupStub) { textSectionLength += !_module.Requires64bits ? 66 : 70; //size of import table textSectionLength += 14; //size of name table textSectionLength = BitArithmeticUtilities.Align(textSectionLength, !_module.Requires64bits ? 4 : 8); //optional padding to make startup stub's target address align on word or double word boundary textSectionLength += !_module.Requires64bits ? 8 : 16; //fixed size of runtime startup stub } Debug.Assert(metadataSizes.MappedFieldDataSize % MetadataWriter.MappedFieldDataAlignment == 0); textSectionLength += metadataSizes.MappedFieldDataSize; return textSectionLength; } private uint ComputeSizeOfWin32Resources(uint resourcesRva) { this.SerializeWin32Resources(resourcesRva); uint result = 0; if (_win32ResourceWriter.BaseStream.Length > 0) { result += BitArithmeticUtilities.Align(_win32ResourceWriter.BaseStream.Length, 4); } // result += Align(this.win32ResourceWriter.BaseStream.Length+1, 8); return result; } private CorHeader CreateCorHeader(MetadataSizes metadataSizes, uint entryPointToken) { CorHeader corHeader = new CorHeader(); corHeader.CodeManagerTable.RelativeVirtualAddress = 0; corHeader.CodeManagerTable.Size = 0; corHeader.EntryPointToken = entryPointToken; corHeader.ExportAddressTableJumps.RelativeVirtualAddress = 0; corHeader.ExportAddressTableJumps.Size = 0; corHeader.Flags = this.GetCorHeaderFlags(); corHeader.MajorRuntimeVersion = 2; corHeader.MetadataDirectory.RelativeVirtualAddress = _textSection.RelativeVirtualAddress + (uint)ComputeOffsetToMetadata(metadataSizes.ILStreamSize); corHeader.MetadataDirectory.Size = (uint)metadataSizes.MetadataSize; corHeader.MinorRuntimeVersion = 5; corHeader.Resources.RelativeVirtualAddress = corHeader.MetadataDirectory.RelativeVirtualAddress + corHeader.MetadataDirectory.Size; corHeader.Resources.Size = (uint)metadataSizes.ResourceDataSize; corHeader.StrongNameSignature.RelativeVirtualAddress = corHeader.Resources.RelativeVirtualAddress + corHeader.Resources.Size; corHeader.StrongNameSignature.Size = (uint)ComputeStrongNameSignatureSize(); corHeader.VTableFixups.RelativeVirtualAddress = 0; corHeader.VTableFixups.Size = 0; return corHeader; } private void FillInNtHeader(MetadataSizes metadataSizes, int mappedFieldDataStreamRva) { bool use32bitAddresses = !_module.Requires64bits; NtHeader ntHeader = _ntHeader; ntHeader.AddressOfEntryPoint = _emitRuntimeStartupStub ? (uint)mappedFieldDataStreamRva - (use32bitAddresses ? 
6u : 10u) : 0; ntHeader.BaseOfCode = _textSection.RelativeVirtualAddress; ntHeader.BaseOfData = _rdataSection.RelativeVirtualAddress; ntHeader.PointerToSymbolTable = 0; ntHeader.SizeOfCode = _textSection.SizeOfRawData; ntHeader.SizeOfInitializedData = _rdataSection.SizeOfRawData + _coverSection.SizeOfRawData + _sdataSection.SizeOfRawData + _tlsSection.SizeOfRawData + _resourceSection.SizeOfRawData + _relocSection.SizeOfRawData; ntHeader.SizeOfHeaders = BitArithmeticUtilities.Align(this.ComputeSizeOfPeHeaders(), _module.FileAlignment); ntHeader.SizeOfImage = BitArithmeticUtilities.Align(_relocSection.RelativeVirtualAddress + _relocSection.VirtualSize, 0x2000); ntHeader.SizeOfUninitializedData = 0; // In the PE File Header this is a "Time/Date Stamp" whose description is "Time and date // the file was created in seconds since January 1st 1970 00:00:00 or 0" // However, when we want to make it deterministic we fill it in (later) with bits from the hash of the full PE file. ntHeader.TimeDateStamp = _deterministic ? 0 : (uint)(DateTime.UtcNow - new DateTime(1970, 1, 1)).TotalSeconds; ntHeader.ImportAddressTable.RelativeVirtualAddress = (_emitRuntimeStartupStub) ? _textSection.RelativeVirtualAddress : 0; ntHeader.ImportAddressTable.Size = (uint)_sizeOfImportAddressTable; ntHeader.CliHeaderTable.RelativeVirtualAddress = _textSection.RelativeVirtualAddress + ntHeader.ImportAddressTable.Size; ntHeader.CliHeaderTable.Size = 72; ntHeader.ImportTable.RelativeVirtualAddress = _textSection.RelativeVirtualAddress + (uint)ComputeOffsetToImportTable(metadataSizes); if (!_emitRuntimeStartupStub) { ntHeader.ImportTable.Size = 0; ntHeader.ImportTable.RelativeVirtualAddress = 0; } else { ntHeader.ImportTable.Size = use32bitAddresses ? 66u : 70u; ntHeader.ImportTable.Size += 13; //size of nametable } ntHeader.BaseRelocationTable.RelativeVirtualAddress = (_emitRuntimeStartupStub) ? _relocSection.RelativeVirtualAddress : 0; ntHeader.BaseRelocationTable.Size = _relocSection.VirtualSize; ntHeader.BoundImportTable.RelativeVirtualAddress = 0; ntHeader.BoundImportTable.Size = 0; ntHeader.CertificateTable.RelativeVirtualAddress = 0; ntHeader.CertificateTable.Size = 0; ntHeader.CopyrightTable.RelativeVirtualAddress = 0; ntHeader.CopyrightTable.Size = 0; ntHeader.DebugTable.RelativeVirtualAddress = EmitPdb ? _textSection.RelativeVirtualAddress + (uint)ComputeOffsetToDebugTable(metadataSizes) : 0u; ntHeader.DebugTable.Size = EmitPdb ? ImageDebugDirectoryBaseSize : 0u; // Only the size of the fixed part of the debug table goes here. ntHeader.DelayImportTable.RelativeVirtualAddress = 0; ntHeader.DelayImportTable.Size = 0; ntHeader.ExceptionTable.RelativeVirtualAddress = 0; ntHeader.ExceptionTable.Size = 0; ntHeader.ExportTable.RelativeVirtualAddress = 0; ntHeader.ExportTable.Size = 0; ntHeader.GlobalPointerTable.RelativeVirtualAddress = 0; ntHeader.GlobalPointerTable.Size = 0; ntHeader.LoadConfigTable.RelativeVirtualAddress = 0; ntHeader.LoadConfigTable.Size = 0; ntHeader.Reserved.RelativeVirtualAddress = 0; ntHeader.Reserved.Size = 0; ntHeader.ResourceTable.RelativeVirtualAddress = _resourceSection.SizeOfRawData == 0 ? 0u : _resourceSection.RelativeVirtualAddress; ntHeader.ResourceTable.Size = _resourceSection.VirtualSize; ntHeader.ThreadLocalStorageTable.RelativeVirtualAddress = _tlsSection.SizeOfRawData == 0 ? 
0u : _tlsSection.RelativeVirtualAddress; ntHeader.ThreadLocalStorageTable.Size = _tlsSection.SizeOfRawData; } private void FillInTextSectionHeader(MetadataSizes metadataSizes) { if (_textSection == null) { uint sizeOfPeHeaders = (uint)ComputeSizeOfPeHeaders(); uint sizeOfTextSection = (uint)ComputeSizeOfTextSection(metadataSizes); _textSection = new SectionHeader { Characteristics = 0x60000020, // section is read + execute + code Name = ".text", NumberOfLinenumbers = 0, NumberOfRelocations = 0, PointerToLinenumbers = 0, PointerToRawData = BitArithmeticUtilities.Align(sizeOfPeHeaders, _module.FileAlignment), PointerToRelocations = 0, RelativeVirtualAddress = BitArithmeticUtilities.Align(sizeOfPeHeaders, 0x2000), SizeOfRawData = BitArithmeticUtilities.Align(sizeOfTextSection, _module.FileAlignment), VirtualSize = sizeOfTextSection }; } } private void FillInSectionHeaders() { _rdataSection = new SectionHeader { Characteristics = 0x40000040, // section is read + initialized Name = ".rdata", NumberOfLinenumbers = 0, NumberOfRelocations = 0, PointerToLinenumbers = 0, PointerToRawData = _textSection.PointerToRawData + _textSection.SizeOfRawData, PointerToRelocations = 0, RelativeVirtualAddress = BitArithmeticUtilities.Align(_textSection.RelativeVirtualAddress + _textSection.VirtualSize, 0x2000), SizeOfRawData = BitArithmeticUtilities.Align(_rdataWriter.BaseStream.Length, _module.FileAlignment), VirtualSize = _rdataWriter.BaseStream.Length, }; _sdataSection = new SectionHeader { Characteristics = 0xC0000040, // section is write + read + initialized Name = ".sdata", NumberOfLinenumbers = 0, NumberOfRelocations = 0, PointerToLinenumbers = 0, PointerToRawData = _rdataSection.PointerToRawData + _rdataSection.SizeOfRawData, PointerToRelocations = 0, RelativeVirtualAddress = BitArithmeticUtilities.Align(_rdataSection.RelativeVirtualAddress + _rdataSection.VirtualSize, 0x2000), SizeOfRawData = BitArithmeticUtilities.Align(_sdataWriter.BaseStream.Length, _module.FileAlignment), VirtualSize = _sdataWriter.BaseStream.Length, }; _coverSection = new SectionHeader { Characteristics = 0xC8000040, // section is not paged + write + read + initialized Name = ".cover", NumberOfLinenumbers = 0, NumberOfRelocations = 0, PointerToLinenumbers = 0, PointerToRawData = _sdataSection.PointerToRawData + _sdataSection.SizeOfRawData, PointerToRelocations = 0, RelativeVirtualAddress = BitArithmeticUtilities.Align(_sdataSection.RelativeVirtualAddress + _sdataSection.VirtualSize, 0x2000), SizeOfRawData = BitArithmeticUtilities.Align(_coverageDataWriter.BaseStream.Length, _module.FileAlignment), VirtualSize = _coverageDataWriter.BaseStream.Length, }; _tlsSection = new SectionHeader { Characteristics = 0xC0000040, // section is write + read + initialized Name = ".tls", NumberOfLinenumbers = 0, NumberOfRelocations = 0, PointerToLinenumbers = 0, PointerToRawData = _coverSection.PointerToRawData + _coverSection.SizeOfRawData, PointerToRelocations = 0, RelativeVirtualAddress = BitArithmeticUtilities.Align(_coverSection.RelativeVirtualAddress + _coverSection.VirtualSize, 0x2000), SizeOfRawData = BitArithmeticUtilities.Align(_tlsDataWriter.BaseStream.Length, _module.FileAlignment), VirtualSize = _tlsDataWriter.BaseStream.Length, }; uint resourcesRva = BitArithmeticUtilities.Align(_tlsSection.RelativeVirtualAddress + _tlsSection.VirtualSize, 0x2000); uint sizeOfWin32Resources = this.ComputeSizeOfWin32Resources(resourcesRva); _resourceSection = new SectionHeader { Characteristics = 0x40000040, // section is read + initialized Name = 
".rsrc", NumberOfLinenumbers = 0, NumberOfRelocations = 0, PointerToLinenumbers = 0, PointerToRawData = _tlsSection.PointerToRawData + _tlsSection.SizeOfRawData, PointerToRelocations = 0, RelativeVirtualAddress = resourcesRva, SizeOfRawData = BitArithmeticUtilities.Align(sizeOfWin32Resources, _module.FileAlignment), VirtualSize = sizeOfWin32Resources, }; _relocSection = new SectionHeader { Characteristics = 0x42000040, // section is read + discardable + initialized Name = ".reloc", NumberOfLinenumbers = 0, NumberOfRelocations = 0, PointerToLinenumbers = 0, PointerToRawData = _resourceSection.PointerToRawData + _resourceSection.SizeOfRawData, PointerToRelocations = 0, RelativeVirtualAddress = BitArithmeticUtilities.Align(_resourceSection.RelativeVirtualAddress + _resourceSection.VirtualSize, 0x2000), SizeOfRawData = _emitRuntimeStartupStub ? _module.FileAlignment : 0, VirtualSize = _emitRuntimeStartupStub ? (_module.Requires64bits && !_module.RequiresAmdInstructionSet ? 14u : 12u) : 0, }; } private CorFlags GetCorHeaderFlags() { CorFlags result = 0; if (_module.ILOnly) { result |= CorFlags.ILOnly; } if (_module.Requires32bits) { result |= CorFlags.Requires32Bit; } if (_module.StrongNameSigned) { result |= CorFlags.StrongNameSigned; } if (_module.TrackDebugData) { result |= CorFlags.TrackDebugData; } if (_module.Prefers32bits) { result |= CorFlags.Requires32Bit | CorFlags.Prefers32Bit; } return result; } //// //// Resource Format. //// //// //// Resource directory consists of two counts, following by a variable length //// array of directory entries. The first count is the number of entries at //// beginning of the array that have actual names associated with each entry. //// The entries are in ascending order, case insensitive strings. The second //// count is the number of entries that immediately follow the named entries. //// This second count identifies the number of entries that have 16-bit integer //// Ids as their name. These entries are also sorted in ascending order. //// //// This structure allows fast lookup by either name or number, but for any //// given resource entry only one form of lookup is supported, not both. //// This is consistant with the syntax of the .RC file and the .RES file. //// //typedef struct _IMAGE_RESOURCE_DIRECTORY { // DWORD Characteristics; // DWORD TimeDateStamp; // WORD MajorVersion; // WORD MinorVersion; // WORD NumberOfNamedEntries; // WORD NumberOfIdEntries; //// IMAGE_RESOURCE_DIRECTORY_ENTRY DirectoryEntries[]; //} IMAGE_RESOURCE_DIRECTORY, *PIMAGE_RESOURCE_DIRECTORY; //#define IMAGE_RESOURCE_NAME_IS_STRING 0x80000000 //#define IMAGE_RESOURCE_DATA_IS_DIRECTORY 0x80000000 //// //// Each directory contains the 32-bit Name of the entry and an offset, //// relative to the beginning of the resource directory of the data associated //// with this directory entry. If the name of the entry is an actual text //// string instead of an integer Id, then the high order bit of the name field //// is set to one and the low order 31-bits are an offset, relative to the //// beginning of the resource directory of the string, which is of type //// IMAGE_RESOURCE_DIRECTORY_STRING. Otherwise the high bit is clear and the //// low-order 16-bits are the integer Id that identify this resource directory //// entry. If the directory entry is yet another resource directory (i.e. a //// subdirectory), then the high order bit of the offset field will be //// set to indicate this. Otherwise the high bit is clear and the offset //// field points to a resource data entry. 
//// //typedef struct _IMAGE_RESOURCE_DIRECTORY_ENTRY { // union { // struct { // DWORD NameOffset:31; // DWORD NameIsString:1; // } DUMMYSTRUCTNAME; // DWORD Name; // WORD Id; // } DUMMYUNIONNAME; // union { // DWORD OffsetToData; // struct { // DWORD OffsetToDirectory:31; // DWORD DataIsDirectory:1; // } DUMMYSTRUCTNAME2; // } DUMMYUNIONNAME2; //} IMAGE_RESOURCE_DIRECTORY_ENTRY, *PIMAGE_RESOURCE_DIRECTORY_ENTRY; //// //// For resource directory entries that have actual string names, the Name //// field of the directory entry points to an object of the following type. //// All of these string objects are stored together after the last resource //// directory entry and before the first resource data object. This minimizes //// the impact of these variable length objects on the alignment of the fixed //// size directory entry objects. //// //typedef struct _IMAGE_RESOURCE_DIRECTORY_STRING { // WORD Length; // CHAR NameString[ 1 ]; //} IMAGE_RESOURCE_DIRECTORY_STRING, *PIMAGE_RESOURCE_DIRECTORY_STRING; //typedef struct _IMAGE_RESOURCE_DIR_STRING_U { // WORD Length; // WCHAR NameString[ 1 ]; //} IMAGE_RESOURCE_DIR_STRING_U, *PIMAGE_RESOURCE_DIR_STRING_U; //// //// Each resource data entry describes a leaf node in the resource directory //// tree. It contains an offset, relative to the beginning of the resource //// directory of the data for the resource, a size field that gives the number //// of bytes of data at that offset, a CodePage that should be used when //// decoding code point values within the resource data. Typically for new //// applications the code page would be the unicode code page. //// //typedef struct _IMAGE_RESOURCE_DATA_ENTRY { // DWORD OffsetToData; // DWORD Size; // DWORD CodePage; // DWORD Reserved; //} IMAGE_RESOURCE_DATA_ENTRY, *PIMAGE_RESOURCE_DATA_ENTRY; private class Directory { internal readonly string Name; internal readonly int ID; internal ushort NumberOfNamedEntries; internal ushort NumberOfIdEntries; internal readonly List<object> Entries; internal Directory(string name, int id) { this.Name = name; this.ID = id; this.Entries = new List<object>(); } } private static int CompareResources(IWin32Resource left, IWin32Resource right) { int result = CompareResourceIdentifiers(left.TypeId, left.TypeName, right.TypeId, right.TypeName); return (result == 0) ? CompareResourceIdentifiers(left.Id, left.Name, right.Id, right.Name) : result; } //when comparing a string vs ordinal, the string should always be less than the ordinal. Per the spec, //entries identified by string must precede those identified by ordinal. private static int CompareResourceIdentifiers(int xOrdinal, string xString, int yOrdinal, string yString) { if (xString == null) { if (yString == null) { return xOrdinal - yOrdinal; } else { return 1; } } else if (yString == null) { return -1; } else { return String.Compare(xString, yString, StringComparison.OrdinalIgnoreCase); } } //sort the resources by ID least to greatest then by NAME. //Where strings and ordinals are compared, strings are less than ordinals. internal static IEnumerable<IWin32Resource> SortResources(IEnumerable<IWin32Resource> resources) { return resources.OrderBy(CompareResources); } //Win32 resources are supplied to the compiler in one of two forms, .RES (the output of the resource compiler), //or .OBJ (the output of running cvtres.exe on a .RES file). A .RES file is parsed and processed into //a set of objects implementing IWin32Resources. 
These are then ordered and the final image form is constructed //and written to the resource section. Resources in .OBJ form are already very close to their final output //form. Rather than reading them and parsing them into a set of objects similar to those produced by //processing a .RES file, we process them like the native linker would, copy the relevant sections from //the .OBJ into our output and apply some fixups. private void SerializeWin32Resources(uint resourcesRva) { var resourceSection = _module.Win32ResourceSection; if (resourceSection != null) { SerializeWin32Resources(resourceSection, resourcesRva); return; } var theResources = _module.Win32Resources; if (IteratorHelper.EnumerableIsEmpty(theResources)) { return; } SerializeWin32Resources(theResources, resourcesRva); } private void SerializeWin32Resources(IEnumerable<IWin32Resource> theResources, uint resourcesRva) { theResources = SortResources(theResources); Directory typeDirectory = new Directory(string.Empty, 0); Directory nameDirectory = null; Directory languageDirectory = null; int lastTypeID = int.MinValue; string lastTypeName = null; int lastID = int.MinValue; string lastName = null; uint sizeOfDirectoryTree = 16; //EDMAURER note that this list is assumed to be sorted lowest to highest //first by typeId, then by Id. foreach (IWin32Resource r in theResources) { bool typeDifferent = (r.TypeId < 0 && r.TypeName != lastTypeName) || r.TypeId > lastTypeID; if (typeDifferent) { lastTypeID = r.TypeId; lastTypeName = r.TypeName; if (lastTypeID < 0) { Debug.Assert(typeDirectory.NumberOfIdEntries == 0, "Not all Win32 resources with types encoded as strings precede those encoded as ints"); typeDirectory.NumberOfNamedEntries++; } else { typeDirectory.NumberOfIdEntries++; } sizeOfDirectoryTree += 24; typeDirectory.Entries.Add(nameDirectory = new Directory(lastTypeName, lastTypeID)); } if (typeDifferent || (r.Id < 0 && r.Name != lastName) || r.Id > lastID) { lastID = r.Id; lastName = r.Name; if (lastID < 0) { Debug.Assert(nameDirectory.NumberOfIdEntries == 0, "Not all Win32 resources with names encoded as strings precede those encoded as ints"); nameDirectory.NumberOfNamedEntries++; } else { nameDirectory.NumberOfIdEntries++; } sizeOfDirectoryTree += 24; nameDirectory.Entries.Add(languageDirectory = new Directory(lastName, lastID)); } languageDirectory.NumberOfIdEntries++; sizeOfDirectoryTree += 8; languageDirectory.Entries.Add(r); } MemoryStream stream = MemoryStream.GetInstance(); BinaryWriter dataWriter = new BinaryWriter(stream, true); //'dataWriter' is where opaque resource data goes as well as strings that are used as type or name identifiers this.WriteDirectory(typeDirectory, _win32ResourceWriter, 0, 0, sizeOfDirectoryTree, resourcesRva, dataWriter); dataWriter.BaseStream.WriteTo(_win32ResourceWriter.BaseStream); _win32ResourceWriter.WriteByte(0); while ((_win32ResourceWriter.BaseStream.Length % 4) != 0) { _win32ResourceWriter.WriteByte(0); } stream.Free(); } private void WriteDirectory(Directory directory, BinaryWriter writer, uint offset, uint level, uint sizeOfDirectoryTree, uint virtualAddressBase, BinaryWriter dataWriter) { writer.WriteUint(0); // Characteristics writer.WriteUint(0); // Timestamp writer.WriteUint(0); // Version writer.WriteUshort(directory.NumberOfNamedEntries); writer.WriteUshort(directory.NumberOfIdEntries); uint n = (uint)directory.Entries.Count; uint k = offset + 16 + n * 8; for (int i = 0; i < n; i++) { int id; string name; uint nameOffset = dataWriter.BaseStream.Position + sizeOfDirectoryTree; uint 
directoryOffset = k; Directory subDir = directory.Entries[i] as Directory; if (subDir != null) { id = subDir.ID; name = subDir.Name; if (level == 0) { k += SizeOfDirectory(subDir); } else { k += 16 + 8 * (uint)subDir.Entries.Count; } } else { //EDMAURER write out an IMAGE_RESOURCE_DATA_ENTRY followed //immediately by the data that it refers to. This results //in a layout different than that produced by pulling the resources //from an OBJ. In that case all of the data bits of a resource are //contiguous in .rsrc$02. After processing these will end up at //the end of .rsrc following all of the directory //info and IMAGE_RESOURCE_DATA_ENTRYs IWin32Resource r = (IWin32Resource)directory.Entries[i]; id = level == 0 ? r.TypeId : level == 1 ? r.Id : (int)r.LanguageId; name = level == 0 ? r.TypeName : level == 1 ? r.Name : null; dataWriter.WriteUint(virtualAddressBase + sizeOfDirectoryTree + 16 + dataWriter.BaseStream.Position); byte[] data = new List<byte>(r.Data).ToArray(); dataWriter.WriteUint((uint)data.Length); dataWriter.WriteUint(r.CodePage); dataWriter.WriteUint(0); dataWriter.WriteBytes(data); while ((dataWriter.BaseStream.Length % 4) != 0) { dataWriter.WriteByte(0); } } if (id >= 0) { writer.WriteInt(id); } else { if (name == null) { name = string.Empty; } writer.WriteUint(nameOffset | 0x80000000); dataWriter.WriteUshort((ushort)name.Length); dataWriter.WriteChars(name.ToCharArray()); // REVIEW: what happens if the name contains chars that do not fit into a single utf8 code point? } if (subDir != null) { writer.WriteUint(directoryOffset | 0x80000000); } else { writer.WriteUint(nameOffset); } } k = offset + 16 + n * 8; for (int i = 0; i < n; i++) { Directory subDir = directory.Entries[i] as Directory; if (subDir != null) { this.WriteDirectory(subDir, writer, k, level + 1, sizeOfDirectoryTree, virtualAddressBase, dataWriter); if (level == 0) { k += SizeOfDirectory(subDir); } else { k += 16 + 8 * (uint)subDir.Entries.Count; } } } } private static uint SizeOfDirectory(Directory/*!*/ directory) { uint n = (uint)directory.Entries.Count; uint size = 16 + 8 * n; for (int i = 0; i < n; i++) { Directory subDir = directory.Entries[i] as Directory; if (subDir != null) { size += 16 + 8 * (uint)subDir.Entries.Count; } } return size; } private void SerializeWin32Resources(ResourceSection resourceSections, uint resourcesRva) { _win32ResourceWriter.WriteBytes(resourceSections.SectionBytes); var savedPosition = _win32ResourceWriter.BaseStream.Position; var readStream = new System.IO.MemoryStream(resourceSections.SectionBytes); var reader = new BinaryReader(readStream); foreach (int addressToFixup in resourceSections.Relocations) { _win32ResourceWriter.BaseStream.Position = (uint)addressToFixup; reader.BaseStream.Position = addressToFixup; _win32ResourceWriter.WriteUint(reader.ReadUInt32() + resourcesRva); } _win32ResourceWriter.BaseStream.Position = savedPosition; } //#define IMAGE_FILE_RELOCS_STRIPPED 0x0001 // Relocation info stripped from file. //#define IMAGE_FILE_EXECUTABLE_IMAGE 0x0002 // File is executable (i.e. no unresolved externel references). //#define IMAGE_FILE_LINE_NUMS_STRIPPED 0x0004 // Line nunbers stripped from file. //#define IMAGE_FILE_LOCAL_SYMS_STRIPPED 0x0008 // Local symbols stripped from file. //#define IMAGE_FILE_AGGRESIVE_WS_TRIM 0x0010 // Agressively trim working set //#define IMAGE_FILE_LARGE_ADDRESS_AWARE 0x0020 // App can handle >2gb addresses //#define IMAGE_FILE_BYTES_REVERSED_LO 0x0080 // Bytes of machine word are reversed. 
//#define IMAGE_FILE_32BIT_MACHINE 0x0100 // 32 bit word machine. //#define IMAGE_FILE_DEBUG_STRIPPED 0x0200 // Debugging info stripped from file in .DBG file //#define IMAGE_FILE_REMOVABLE_RUN_FROM_SWAP 0x0400 // If Image is on removable media, copy and run from the swap file. //#define IMAGE_FILE_NET_RUN_FROM_SWAP 0x0800 // If Image is on Net, copy and run from the swap file. //#define IMAGE_FILE_SYSTEM 0x1000 // System File. //#define IMAGE_FILE_DLL 0x2000 // File is a DLL. //#define IMAGE_FILE_UP_SYSTEM_ONLY 0x4000 // File should only be run on a UP machine //#define IMAGE_FILE_BYTES_REVERSED_HI 0x8000 // Bytes of machine word are reversed. private static readonly byte[] s_dosHeader = new byte[] { 0x4d, 0x5a, 0x90, 0x00, 0x03, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xff, 0xff, 0x00, 0x00, 0xb8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x00, 0x00, 0x00, 0x0e, 0x1f, 0xba, 0x0e, 0x00, 0xb4, 0x09, 0xcd, 0x21, 0xb8, 0x01, 0x4c, 0xcd, 0x21, 0x54, 0x68, 0x69, 0x73, 0x20, 0x70, 0x72, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x20, 0x63, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x20, 0x62, 0x65, 0x20, 0x72, 0x75, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x44, 0x4f, 0x53, 0x20, 0x6d, 0x6f, 0x64, 0x65, 0x2e, 0x0d, 0x0d, 0x0a, 0x24, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }; private void WriteHeaders(Stream peStream, out long ntHeaderTimestampPosition) { IModule module = _module; NtHeader ntHeader = _ntHeader; BinaryWriter writer = new BinaryWriter(_headerStream); // MS-DOS stub (128 bytes) writer.WriteBytes(s_dosHeader); // TODO: provide an option to suppress the second half of the DOS header? // PE Signature (4 bytes) writer.WriteUint(0x00004550); /* "PE\0\0" */ // COFF Header 20 bytes writer.WriteUshort((ushort)module.Machine); writer.WriteUshort(ntHeader.NumberOfSections); ntHeaderTimestampPosition = writer.BaseStream.Position + peStream.Position; writer.WriteUint(ntHeader.TimeDateStamp); writer.WriteUint(ntHeader.PointerToSymbolTable); writer.WriteUint(0); // NumberOfSymbols writer.WriteUshort((ushort)(!module.Requires64bits ? 224 : 240)); // SizeOfOptionalHeader // ushort characteristics = 0x0002|0x0004|0x0008; // executable | no COFF line nums | no COFF symbols (as required by the standard) ushort characteristics = 0x0002; // executable (as required by the Linker team). if (module.Kind == ModuleKind.DynamicallyLinkedLibrary || module.Kind == ModuleKind.WindowsRuntimeMetadata) { characteristics |= 0x2000; } if (module.Requires32bits) { characteristics |= 0x0100; // 32 bit machine (The standard says to always set this, the linker team says otherwise) //The loader team says that this is not used for anything in the OS. } else { characteristics |= 0x0020; // large address aware (the standard says never to set this, the linker team says otherwise). //The loader team says that this is not overridden for managed binaries and will be respected if set. 
} writer.WriteUshort(characteristics); // PE Header (224 bytes if 32 bits, 240 bytes if 64 bit) if (!module.Requires64bits) { writer.WriteUshort(0x10B); // Magic = PE32 // 2 } else { writer.WriteUshort(0x20B); // Magic = PE32+ // 2 } writer.WriteByte(module.LinkerMajorVersion); // 3 writer.WriteByte(module.LinkerMinorVersion); // 4 writer.WriteUint(ntHeader.SizeOfCode); // 8 writer.WriteUint(ntHeader.SizeOfInitializedData); // 12 writer.WriteUint(ntHeader.SizeOfUninitializedData); // 16 writer.WriteUint(ntHeader.AddressOfEntryPoint); // 20 writer.WriteUint(ntHeader.BaseOfCode); // 24 if (!module.Requires64bits) { writer.WriteUint(ntHeader.BaseOfData); // 28 writer.WriteUint((uint)module.BaseAddress); // 32 } else { writer.WriteUlong(module.BaseAddress); // 32 } writer.WriteUint(0x2000); // SectionAlignment 36 writer.WriteUint(module.FileAlignment); // 40 writer.WriteUshort(4); // MajorOperatingSystemVersion 42 writer.WriteUshort(0); // MinorOperatingSystemVersion 44 writer.WriteUshort(0); // MajorImageVersion 46 writer.WriteUshort(0); // MinorImageVersion 48 writer.WriteUshort(module.MajorSubsystemVersion); // MajorSubsystemVersion 50 writer.WriteUshort(module.MinorSubsystemVersion); // MinorSubsystemVersion 52 writer.WriteUint(0); // Win32VersionValue 56 writer.WriteUint(ntHeader.SizeOfImage); // 60 writer.WriteUint(ntHeader.SizeOfHeaders); // 64 writer.WriteUint(0); // CheckSum 68 switch (module.Kind) { case ModuleKind.ConsoleApplication: case ModuleKind.DynamicallyLinkedLibrary: case ModuleKind.WindowsRuntimeMetadata: writer.WriteUshort(3); // 70 break; case ModuleKind.WindowsApplication: writer.WriteUshort(2); // 70 break; default: writer.WriteUshort(0); // break; } writer.WriteUshort(module.DllCharacteristics); if (!module.Requires64bits) { writer.WriteUint((uint)module.SizeOfStackReserve); // 76 writer.WriteUint((uint)module.SizeOfStackCommit); // 80 writer.WriteUint((uint)module.SizeOfHeapReserve); // 84 writer.WriteUint((uint)module.SizeOfHeapCommit); // 88 } else { writer.WriteUlong(module.SizeOfStackReserve); // 80 writer.WriteUlong(module.SizeOfStackCommit); // 88 writer.WriteUlong(module.SizeOfHeapReserve); // 96 writer.WriteUlong(module.SizeOfHeapCommit); // 104 } writer.WriteUint(0); // LoaderFlags 92|108 writer.WriteUint(16); // numberOfDataDirectories 96|112 writer.WriteUint(ntHeader.ExportTable.RelativeVirtualAddress); // 100|116 writer.WriteUint(ntHeader.ExportTable.Size); // 104|120 writer.WriteUint(ntHeader.ImportTable.RelativeVirtualAddress); // 108|124 writer.WriteUint(ntHeader.ImportTable.Size); // 112|128 writer.WriteUint(ntHeader.ResourceTable.RelativeVirtualAddress); // 116|132 writer.WriteUint(ntHeader.ResourceTable.Size); // 120|136 writer.WriteUint(ntHeader.ExceptionTable.RelativeVirtualAddress); // 124|140 writer.WriteUint(ntHeader.ExceptionTable.Size); // 128|144 writer.WriteUint(ntHeader.CertificateTable.RelativeVirtualAddress); // 132|148 writer.WriteUint(ntHeader.CertificateTable.Size); // 136|152 writer.WriteUint(ntHeader.BaseRelocationTable.RelativeVirtualAddress); // 140|156 writer.WriteUint(ntHeader.BaseRelocationTable.Size); // 144|160 writer.WriteUint(ntHeader.DebugTable.RelativeVirtualAddress); // 148|164 writer.WriteUint(ntHeader.DebugTable.Size); // 152|168 writer.WriteUint(ntHeader.CopyrightTable.RelativeVirtualAddress); // 156|172 writer.WriteUint(ntHeader.CopyrightTable.Size); // 160|176 writer.WriteUint(ntHeader.GlobalPointerTable.RelativeVirtualAddress); // 164|180 writer.WriteUint(ntHeader.GlobalPointerTable.Size); // 168|184 
writer.WriteUint(ntHeader.ThreadLocalStorageTable.RelativeVirtualAddress); // 172|188 writer.WriteUint(ntHeader.ThreadLocalStorageTable.Size); // 176|192 writer.WriteUint(ntHeader.LoadConfigTable.RelativeVirtualAddress); // 180|196 writer.WriteUint(ntHeader.LoadConfigTable.Size); // 184|200 writer.WriteUint(ntHeader.BoundImportTable.RelativeVirtualAddress); // 188|204 writer.WriteUint(ntHeader.BoundImportTable.Size); // 192|208 writer.WriteUint(ntHeader.ImportAddressTable.RelativeVirtualAddress); // 196|212 writer.WriteUint(ntHeader.ImportAddressTable.Size); // 200|216 writer.WriteUint(ntHeader.DelayImportTable.RelativeVirtualAddress); // 204|220 writer.WriteUint(ntHeader.DelayImportTable.Size); // 208|224 writer.WriteUint(ntHeader.CliHeaderTable.RelativeVirtualAddress); // 212|228 writer.WriteUint(ntHeader.CliHeaderTable.Size); // 216|232 writer.WriteUlong(0); // 224|240 // Section Headers WriteSectionHeader(_textSection, writer); WriteSectionHeader(_rdataSection, writer); WriteSectionHeader(_sdataSection, writer); WriteSectionHeader(_coverSection, writer); WriteSectionHeader(_resourceSection, writer); WriteSectionHeader(_relocSection, writer); WriteSectionHeader(_tlsSection, writer); writer.BaseStream.WriteTo(peStream); _headerStream = _emptyStream; } private static void WriteSectionHeader(SectionHeader sectionHeader, BinaryWriter writer) { if (sectionHeader.VirtualSize == 0) { return; } for (int j = 0, m = sectionHeader.Name.Length; j < 8; j++) { if (j < m) { writer.WriteByte((byte)sectionHeader.Name[j]); } else { writer.WriteByte(0); } } writer.WriteUint(sectionHeader.VirtualSize); writer.WriteUint(sectionHeader.RelativeVirtualAddress); writer.WriteUint(sectionHeader.SizeOfRawData); writer.WriteUint(sectionHeader.PointerToRawData); writer.WriteUint(sectionHeader.PointerToRelocations); writer.WriteUint(sectionHeader.PointerToLinenumbers); writer.WriteUshort(sectionHeader.NumberOfRelocations); writer.WriteUshort(sectionHeader.NumberOfLinenumbers); writer.WriteUint(sectionHeader.Characteristics); } private void WriteTextSection( Stream peStream, CorHeader corHeader, MemoryStream metadataStream, MemoryStream ilStream, MemoryStream mappedFieldDataStream, MemoryStream managedResourceStream, MetadataSizes metadataSizes, ContentId pdbContentId, out long metadataPosition) { peStream.Position = _textSection.PointerToRawData; if (_emitRuntimeStartupStub) { this.WriteImportAddressTable(peStream); } WriteCorHeader(peStream, corHeader); WriteIL(peStream, ilStream); metadataPosition = peStream.Position; WriteMetadata(peStream, metadataStream); WriteManagedResources(peStream, managedResourceStream); WriteSpaceForHash(peStream, (int)corHeader.StrongNameSignature.Size); WriteDebugTable(peStream, pdbContentId, metadataSizes); if (_emitRuntimeStartupStub) { WriteImportTable(peStream); WriteNameTable(peStream); WriteRuntimeStartupStub(peStream); } WriteMappedFieldData(peStream, mappedFieldDataStream); } private void WriteImportAddressTable(Stream peStream) { BinaryWriter writer = new BinaryWriter(new MemoryStream(16)); bool use32bitAddresses = !_module.Requires64bits; uint importTableRVA = _ntHeader.ImportTable.RelativeVirtualAddress; uint ilRVA = importTableRVA + 40; uint hintRva = ilRVA + (use32bitAddresses ? 
12u : 16u); // Import Address Table if (use32bitAddresses) { writer.WriteUint(hintRva); // 4 writer.WriteUint(0); // 8 } else { writer.WriteUlong(hintRva); // 8 writer.WriteUlong(0); // 16 } writer.BaseStream.WriteTo(peStream); } private void WriteImportTable(Stream peStream) { BinaryWriter writer = new BinaryWriter(new MemoryStream(70)); bool use32bitAddresses = !_module.Requires64bits; uint importTableRVA = _ntHeader.ImportTable.RelativeVirtualAddress; uint ilRVA = importTableRVA + 40; uint hintRva = ilRVA + (use32bitAddresses ? 12u : 16u); uint nameRva = hintRva + 12 + 2; // Import table writer.WriteUint(ilRVA); // 4 writer.WriteUint(0); // 8 writer.WriteUint(0); // 12 writer.WriteUint(nameRva); // 16 writer.WriteUint(_ntHeader.ImportAddressTable.RelativeVirtualAddress); // 20 writer.BaseStream.Position += 20; // 40 // Import Lookup table if (use32bitAddresses) { writer.WriteUint(hintRva); // 44 writer.WriteUint(0); // 48 writer.WriteUint(0); // 52 } else { writer.WriteUlong(hintRva); // 48 writer.WriteUlong(0); // 56 } // Hint table writer.WriteUshort(0); // Hint 54|58 string entryPointName = (_module.Kind == ModuleKind.DynamicallyLinkedLibrary || _module.Kind == ModuleKind.WindowsRuntimeMetadata) ? "_CorDllMain" : "_CorExeMain"; foreach (char ch in entryPointName) { writer.WriteByte((byte)ch); // 65|69 } writer.WriteByte(0); // 66|70 writer.BaseStream.WriteTo(peStream); } private static void WriteNameTable(Stream peStream) { BinaryWriter writer = new BinaryWriter(new MemoryStream(14)); foreach (char ch in "mscoree.dll") { writer.WriteByte((byte)ch); // 11 } writer.WriteByte(0); // 12 writer.WriteUshort(0); // 14 writer.BaseStream.WriteTo(peStream); } private static void WriteCorHeader(Stream peStream, CorHeader corHeader) { BinaryWriter writer = new BinaryWriter(new MemoryStream(72)); writer.WriteUint(72); // Number of bytes in this header 4 writer.WriteUshort(corHeader.MajorRuntimeVersion); // 6 writer.WriteUshort(corHeader.MinorRuntimeVersion); // 8 writer.WriteUint(corHeader.MetadataDirectory.RelativeVirtualAddress); // 12 writer.WriteUint(corHeader.MetadataDirectory.Size); // 16 writer.WriteUint((uint)corHeader.Flags); // 20 writer.WriteUint(corHeader.EntryPointToken); // 24 writer.WriteUint(corHeader.Resources.Size == 0 ? 0u : corHeader.Resources.RelativeVirtualAddress); // 28 writer.WriteUint(corHeader.Resources.Size); // 32 writer.WriteUint(corHeader.StrongNameSignature.Size == 0 ? 
0u : corHeader.StrongNameSignature.RelativeVirtualAddress); // 36 writer.WriteUint(corHeader.StrongNameSignature.Size); // 40 writer.WriteUint(corHeader.CodeManagerTable.RelativeVirtualAddress); // 44 writer.WriteUint(corHeader.CodeManagerTable.Size); // 48 writer.WriteUint(corHeader.VTableFixups.RelativeVirtualAddress); // 52 writer.WriteUint(corHeader.VTableFixups.Size); // 56 writer.WriteUint(corHeader.ExportAddressTableJumps.RelativeVirtualAddress); // 60 writer.WriteUint(corHeader.ExportAddressTableJumps.Size); // 64 writer.WriteUlong(0); // 72 writer.BaseStream.WriteTo(peStream); } private static void WriteIL(Stream peStream, MemoryStream ilStream) { ilStream.WriteTo(peStream); while (peStream.Position % 4 != 0) { peStream.WriteByte(0); } } private static void WriteMappedFieldData(Stream peStream, MemoryStream dataStream) { dataStream.WriteTo(peStream); while (peStream.Position % 4 != 0) { peStream.WriteByte(0); } } private static void WriteSpaceForHash(Stream peStream, int strongNameSignatureSize) { while (strongNameSignatureSize > 0) { peStream.WriteByte(0); strongNameSignatureSize--; } } private static void WriteMetadata(Stream peStream, MemoryStream metadataStream) { metadataStream.WriteTo(peStream); while (peStream.Position % 4 != 0) { peStream.WriteByte(0); } } private static void WriteManagedResources(Stream peStream, MemoryStream managedResourceStream) { managedResourceStream.WriteTo(peStream); while (peStream.Position % 4 != 0) { peStream.WriteByte(0); } } private void WriteDebugTable(Stream peStream, ContentId pdbContentId, MetadataSizes metadataSizes) { if (!EmitPdb) { return; } MemoryStream stream = new MemoryStream(); BinaryWriter writer = new BinaryWriter(stream); // characteristics: writer.WriteUint(0); // PDB stamp writer.WriteBytes(pdbContentId.Stamp); // version writer.WriteUint(0); // type: const int ImageDebugTypeCodeView = 2; writer.WriteUint(ImageDebugTypeCodeView); // size of data: writer.WriteUint((uint)ComputeSizeOfDebugDirectoryData()); uint dataOffset = (uint)ComputeOffsetToDebugTable(metadataSizes) + ImageDebugDirectoryBaseSize; // PointerToRawData (RVA of the data): writer.WriteUint(_textSection.RelativeVirtualAddress + dataOffset); // AddressOfRawData (position of the data in the PE stream): writer.WriteUint(_textSection.PointerToRawData + dataOffset); writer.WriteByte((byte)'R'); writer.WriteByte((byte)'S'); writer.WriteByte((byte)'D'); writer.WriteByte((byte)'S'); // PDB id: writer.WriteBytes(pdbContentId.Guid); // age writer.WriteUint(PdbWriter.Age); // UTF-8 encoded zero-terminated path to PDB writer.WriteString(_pdbPathOpt, emitNullTerminator: true); writer.BaseStream.WriteTo(peStream); stream.Free(); } private void WriteRuntimeStartupStub(Stream peStream) { BinaryWriter writer = new BinaryWriter(new MemoryStream(16)); // entry point code, consisting of a jump indirect to _CorXXXMain if (!_module.Requires64bits) { //emit 0's (nops) to pad the entry point code so that the target address is aligned on a 4 byte boundary. for (uint i = 0, n = (uint)(BitArithmeticUtilities.Align((uint)peStream.Position, 4) - peStream.Position); i < n; i++) writer.WriteByte(0); writer.WriteUshort(0); writer.WriteByte(0xff); writer.WriteByte(0x25); //4 writer.WriteUint(_ntHeader.ImportAddressTable.RelativeVirtualAddress + (uint)_module.BaseAddress); //8 } else { //emit 0's (nops) to pad the entry point code so that the target address is aligned on a 8 byte boundary. 
for (uint i = 0, n = (uint)(BitArithmeticUtilities.Align((uint)peStream.Position, 8) - peStream.Position); i < n; i++) writer.WriteByte(0); writer.WriteUint(0); writer.WriteUshort(0); writer.WriteByte(0xff); writer.WriteByte(0x25); //8 writer.WriteUlong(_ntHeader.ImportAddressTable.RelativeVirtualAddress + _module.BaseAddress); //16 } writer.BaseStream.WriteTo(peStream); } private void WriteCoverSection(Stream peStream) { peStream.Position = _coverSection.PointerToRawData; _coverageDataWriter.BaseStream.WriteTo(peStream); } private void WriteRdataSection(Stream peStream) { peStream.Position = _rdataSection.PointerToRawData; _rdataWriter.BaseStream.WriteTo(peStream); } private void WriteSdataSection(Stream peStream) { peStream.Position = _sdataSection.PointerToRawData; _sdataWriter.BaseStream.WriteTo(peStream); } private void WriteRelocSection(Stream peStream) { if (!_emitRuntimeStartupStub) { //No need to write out a reloc section, but there is still a need to pad out the peStream so that it is an even multiple of module.FileAlignment if (_relocSection.PointerToRawData != peStream.Position) { //for example, the resource section did not end bang on the alignment boundary peStream.Position = _relocSection.PointerToRawData - 1; peStream.WriteByte(0); } return; } peStream.Position = _relocSection.PointerToRawData; BinaryWriter writer = new BinaryWriter(new MemoryStream(_module.FileAlignment)); writer.WriteUint(((_ntHeader.AddressOfEntryPoint + 2) / 0x1000) * 0x1000); writer.WriteUint(_module.Requires64bits && !_module.RequiresAmdInstructionSet ? 14u : 12u); uint offsetWithinPage = (_ntHeader.AddressOfEntryPoint + 2) % 0x1000; uint relocType = _module.Requires64bits ? 10u : 3u; ushort s = (ushort)((relocType << 12) | offsetWithinPage); writer.WriteUshort(s); if (_module.Requires64bits && !_module.RequiresAmdInstructionSet) { writer.WriteUint(relocType << 12); } writer.WriteUshort(0); // next chunk's RVA writer.BaseStream.Position = _module.FileAlignment; writer.BaseStream.WriteTo(peStream); } private void WriteResourceSection(Stream peStream) { if (_win32ResourceWriter.BaseStream.Length == 0) { return; } peStream.Position = _resourceSection.PointerToRawData; _win32ResourceWriter.BaseStream.WriteTo(peStream); peStream.WriteByte(0); while (peStream.Position % 8 != 0) { peStream.WriteByte(0); } } private void WriteTlsSection(Stream peStream) { peStream.Position = _tlsSection.PointerToRawData; _tlsDataWriter.BaseStream.WriteTo(peStream); } } }
furesoft/roslyn
src/Compilers/Core/Portable/PEWriter/PeWriter.cs
C#
apache-2.0
69,905
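The deterministic path in PeWriter above leaves the module version id and the COFF timestamp zeroed while the image is written, hashes the finished stream, and writes hash-derived bytes back over those placeholders. A minimal Java sketch of that idea, assuming the two placeholder offsets were recorded during header emission (the real writer additionally asserts the placeholders are zero before overwriting them); the class and parameter names here are hypothetical:

import java.io.IOException;
import java.io.RandomAccessFile;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

// A minimal sketch of deterministic stamping, not Roslyn's code: hash the
// image while its MVID and timestamp fields are still zero, then splice
// hash-derived bytes into those placeholder positions.
public class DeterministicStamper {
    public static void stamp(RandomAccessFile pe, long mvidPosition, long timestampPosition)
            throws IOException, NoSuchAlgorithmException {
        byte[] image = new byte[(int) pe.length()];
        pe.seek(0);
        pe.readFully(image);

        byte[] hash = MessageDigest.getInstance("SHA-1").digest(image); // 20 bytes

        pe.seek(mvidPosition);
        pe.write(hash, 0, 16);       // 16 bytes replace the zeroed module version id
        pe.seek(timestampPosition);
        pe.write(hash, 16, 4);       // 4 more bytes replace the zeroed PE timestamp
    }
}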
package gr.forth.ics.graph.event;

public interface GraphListener extends NodeListener, EdgeListener {
}
DimitrisAndreou/flexigraph
src/gr/forth/ics/graph/event/GraphListener.java
Java
apache-2.0
105
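GraphListener above adds no methods of its own; it only unifies the node and edge listener contracts so a single observer object can be registered for both event streams. A sketch of that composition pattern with hypothetical callback names, since the actual NodeListener and EdgeListener signatures are not shown here:

// Hypothetical stand-ins for the parent interfaces; flexigraph's real
// callbacks may differ in name and parameters.
interface NodeEvents { void nodeAdded(Object node); }
interface EdgeEvents { void edgeAdded(Object edge); }
interface CombinedGraphEvents extends NodeEvents, EdgeEvents { }

class PrintingListener implements CombinedGraphEvents {
    @Override public void nodeAdded(Object node) { System.out.println("node added: " + node); }
    @Override public void edgeAdded(Object edge) { System.out.println("edge added: " + edge); }
}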
/* * Copyright 2016-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.rules; import com.facebook.buck.rules.keys.DefaultRuleKeyCache; import com.facebook.buck.rules.keys.RuleKeyFactories; import com.facebook.buck.step.DefaultStepRunner; import com.facebook.buck.testutil.DummyFileHashCache; import com.facebook.buck.util.cache.FileHashCacheMode; import com.facebook.buck.util.concurrent.ListeningMultiSemaphore; import com.facebook.buck.util.concurrent.ResourceAllocationFairness; import com.facebook.buck.util.concurrent.ResourceAmounts; import com.facebook.buck.util.concurrent.WeightedListeningExecutorService; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import java.util.Optional; /** Handy way to create new {@link CachingBuildEngine} instances for test purposes. */ public class CachingBuildEngineFactory { private CachingBuildEngine.BuildMode buildMode = CachingBuildEngine.BuildMode.SHALLOW; private CachingBuildEngine.MetadataStorage metadataStorage = CachingBuildEngine.MetadataStorage.FILESYSTEM; private CachingBuildEngine.DepFiles depFiles = CachingBuildEngine.DepFiles.ENABLED; private long maxDepFileCacheEntries = 256L; private Optional<Long> artifactCacheSizeLimit = Optional.empty(); private long inputFileSizeLimit = Long.MAX_VALUE; private Optional<RuleKeyFactories> ruleKeyFactories = Optional.empty(); private CachingBuildEngineDelegate cachingBuildEngineDelegate; private WeightedListeningExecutorService executorService; private BuildRuleResolver buildRuleResolver; private ResourceAwareSchedulingInfo resourceAwareSchedulingInfo = ResourceAwareSchedulingInfo.NON_AWARE_SCHEDULING_INFO; private boolean logBuildRuleFailuresInline = true; private BuildInfoStoreManager buildInfoStoreManager; private FileHashCacheMode fileHashCacheMode = FileHashCacheMode.DEFAULT; public CachingBuildEngineFactory( BuildRuleResolver buildRuleResolver, BuildInfoStoreManager buildInfoStoreManager) { this.cachingBuildEngineDelegate = new LocalCachingBuildEngineDelegate(new DummyFileHashCache()); this.executorService = toWeighted(MoreExecutors.newDirectExecutorService()); this.buildRuleResolver = buildRuleResolver; this.buildInfoStoreManager = buildInfoStoreManager; } public CachingBuildEngineFactory setBuildMode(CachingBuildEngine.BuildMode buildMode) { this.buildMode = buildMode; return this; } public CachingBuildEngineFactory setFileHashCachMode(FileHashCacheMode fileHashCachMode) { this.fileHashCacheMode = fileHashCachMode; return this; } public CachingBuildEngineFactory setDepFiles(CachingBuildEngine.DepFiles depFiles) { this.depFiles = depFiles; return this; } public CachingBuildEngineFactory setMaxDepFileCacheEntries(long maxDepFileCacheEntries) { this.maxDepFileCacheEntries = maxDepFileCacheEntries; return this; } public CachingBuildEngineFactory setArtifactCacheSizeLimit( Optional<Long> artifactCacheSizeLimit) { this.artifactCacheSizeLimit = artifactCacheSizeLimit; return this; } public 
CachingBuildEngineFactory setCachingBuildEngineDelegate( CachingBuildEngineDelegate cachingBuildEngineDelegate) { this.cachingBuildEngineDelegate = cachingBuildEngineDelegate; return this; } public CachingBuildEngineFactory setExecutorService(ListeningExecutorService executorService) { this.executorService = toWeighted(executorService); return this; } public CachingBuildEngineFactory setExecutorService( WeightedListeningExecutorService executorService) { this.executorService = executorService; return this; } public CachingBuildEngineFactory setRuleKeyFactories(RuleKeyFactories ruleKeyFactories) { this.ruleKeyFactories = Optional.of(ruleKeyFactories); return this; } public CachingBuildEngineFactory setLogBuildRuleFailuresInline( boolean logBuildRuleFailuresInline) { this.logBuildRuleFailuresInline = logBuildRuleFailuresInline; return this; } public CachingBuildEngine build() { if (ruleKeyFactories.isPresent()) { SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(buildRuleResolver); return new CachingBuildEngine( cachingBuildEngineDelegate, executorService, new DefaultStepRunner(), buildMode, metadataStorage, depFiles, maxDepFileCacheEntries, artifactCacheSizeLimit, buildRuleResolver, buildInfoStoreManager, ruleFinder, DefaultSourcePathResolver.from(ruleFinder), ruleKeyFactories.get(), resourceAwareSchedulingInfo, logBuildRuleFailuresInline, fileHashCacheMode); } return new CachingBuildEngine( cachingBuildEngineDelegate, executorService, new DefaultStepRunner(), buildMode, metadataStorage, depFiles, maxDepFileCacheEntries, artifactCacheSizeLimit, buildRuleResolver, buildInfoStoreManager, resourceAwareSchedulingInfo, logBuildRuleFailuresInline, RuleKeyFactories.of( 0, cachingBuildEngineDelegate.getFileHashCache(), buildRuleResolver, inputFileSizeLimit, new DefaultRuleKeyCache<>()), fileHashCacheMode); } private static WeightedListeningExecutorService toWeighted(ListeningExecutorService service) { return new WeightedListeningExecutorService( new ListeningMultiSemaphore( ResourceAmounts.of(Integer.MAX_VALUE, 0, 0, 0), ResourceAllocationFairness.FAIR), /* defaultPermits */ ResourceAmounts.of(1, 0, 0, 0), service); } }
k21/buck
test/com/facebook/buck/rules/CachingBuildEngineFactory.java
Java
apache-2.0
6,351
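The factory above exists so tests can override a handful of the CachingBuildEngine constructor's many parameters and inherit defaults for everything else; each setter returns this for chaining. A usage sketch, assuming buildRuleResolver and buildInfoStoreManager are in scope from the surrounding test fixture and treating the DEEP/CACHE constants as illustrative overrides:

// Sketch: chain only the overrides a test cares about, then build.
CachingBuildEngine engine =
    new CachingBuildEngineFactory(buildRuleResolver, buildInfoStoreManager)
        .setBuildMode(CachingBuildEngine.BuildMode.DEEP)
        .setDepFiles(CachingBuildEngine.DepFiles.CACHE)
        .setMaxDepFileCacheEntries(1024L)
        .build();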
package org.finos.waltz.integration_test.inmem.helpers;

import org.finos.waltz.model.EntityKind;
import org.finos.waltz.model.EntityReference;
import org.finos.waltz.model.Operation;
import org.finos.waltz.model.involvement.EntityInvolvementChangeCommand;
import org.finos.waltz.model.involvement.ImmutableEntityInvolvementChangeCommand;
import org.finos.waltz.model.involvement_kind.ImmutableInvolvementKindCreateCommand;
import org.finos.waltz.model.involvement_kind.InvolvementKindCreateCommand;
import org.finos.waltz.service.involvement.InvolvementService;
import org.finos.waltz.service.involvement_kind.InvolvementKindService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import static org.finos.waltz.model.EntityReference.mkRef;

@Service
public class InvolvementHelper {

    private final InvolvementService involvementService;
    private final InvolvementKindService involvementKindService;

    @Autowired
    public InvolvementHelper(InvolvementService involvementService,
                             InvolvementKindService involvementKindService) {
        this.involvementService = involvementService;
        this.involvementKindService = involvementKindService;
    }

    public long mkInvolvementKind(String name) {
        InvolvementKindCreateCommand cmd = ImmutableInvolvementKindCreateCommand.builder()
                .description(name)
                .name(name)
                .externalId(name)
                .build();
        return involvementKindService.create(cmd, NameHelper.mkUserId("involvementHelper"));
    }

    public void createInvolvement(Long pId, long invId, EntityReference entity) {
        EntityInvolvementChangeCommand cmd = ImmutableEntityInvolvementChangeCommand.builder()
                .involvementKindId((int) invId)
                .personEntityRef(mkRef(EntityKind.PERSON, pId))
                .operation(Operation.ADD)
                .build();
        involvementService.addEntityInvolvement(NameHelper.mkUserId(), entity, cmd);
    }
}
khartec/waltz
waltz-integration-test/src/test/java/org/finos/waltz/integration_test/inmem/helpers/InvolvementHelper.java
Java
apache-2.0
2,071
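A usage sketch for the helper above, using only the two methods it defines and assuming involvementService and involvementKindService are injected by the test context; the person id and the target entity reference are made up for illustration:

// Sketch: register a new involvement kind, then link person 42 to an entity with it.
InvolvementHelper helper = new InvolvementHelper(involvementService, involvementKindService);
long architectKindId = helper.mkInvolvementKind("Architect");
EntityReference app = mkRef(EntityKind.APPLICATION, 123L);
helper.createInvolvement(42L, architectKindId, app);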
/**
 * Copyright &copy; 2012-2016 <a href="https://github.com/tlkzzz/jeesite">JeeSite</a> All rights reserved.
 */
package com.tlkzzz.jeesite.modules.sys.service;

import java.util.List;

import com.tlkzzz.jeesite.common.service.CrudService;
import com.tlkzzz.jeesite.common.utils.CacheUtils;
import com.tlkzzz.jeesite.modules.sys.entity.Dict;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import com.tlkzzz.jeesite.modules.sys.dao.DictDao;
import com.tlkzzz.jeesite.modules.sys.utils.DictUtils;

/**
 * Dictionary service.
 * @author tlkzzz
 * @version 2014-05-16
 */
@Service
@Transactional(readOnly = true)
public class DictService extends CrudService<DictDao, Dict> {

    /**
     * Queries the list of dictionary types.
     * @return the list of distinct dictionary type names
     */
    public List<String> findTypeList() {
        return dao.findTypeList(new Dict());
    }

    @Transactional(readOnly = false)
    public void save(Dict dict) {
        super.save(dict);
        CacheUtils.remove(DictUtils.CACHE_DICT_MAP); // evict the cached dictionary map so readers reload
    }

    @Transactional(readOnly = false)
    public void delete(Dict dict) {
        super.delete(dict);
        CacheUtils.remove(DictUtils.CACHE_DICT_MAP);
    }
}
tlkzzz/xpjfx
src/main/java/com/tlkzzz/jeesite/modules/sys/service/DictService.java
Java
apache-2.0
1,144
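DictService couples every write to an eviction of the whole cached dictionary map, so readers lazily repopulate the cache on the next lookup rather than the service patching entries in place. A self-contained sketch of that write-then-invalidate pattern, with hypothetical cache and DAO types:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Hypothetical types; the point is the ordering: persist first, then evict.
class CachedStore<T> {
    interface Dao<V> { V load(String key); void store(String key, V value); }

    private final Map<String, T> cache = new ConcurrentHashMap<>();
    private final Dao<T> dao;

    CachedStore(Dao<T> dao) { this.dao = dao; }

    T get(String key) {
        return cache.computeIfAbsent(key, dao::load); // lazy repopulation after eviction
    }

    void save(String key, T value) {
        dao.store(key, value); // persist first
        cache.remove(key);     // then invalidate so the next get() reloads fresh data
    }
}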
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.security.authorization.accesscontrol; import java.security.Principal; import javax.jcr.PropertyType; import javax.jcr.RepositoryException; import javax.jcr.UnsupportedRepositoryOperationException; import javax.jcr.ValueFactory; import javax.jcr.security.AccessControlEntry; import javax.jcr.security.AccessControlList; import javax.jcr.security.AccessControlPolicy; import com.google.common.collect.ImmutableMap; import org.apache.jackrabbit.api.security.JackrabbitAccessControlManager; import org.apache.jackrabbit.api.security.principal.JackrabbitPrincipal; import org.apache.jackrabbit.commons.jackrabbit.authorization.AccessControlUtils; import org.apache.jackrabbit.oak.spi.security.principal.EveryonePrincipal; import org.apache.jackrabbit.oak.spi.security.principal.PrincipalImpl; import org.jetbrains.annotations.NotNull; import org.junit.Before; import org.junit.Test; import static org.apache.jackrabbit.oak.spi.security.authorization.accesscontrol.AccessControlConstants.REP_GLOB; import static org.apache.jackrabbit.oak.spi.security.authorization.accesscontrol.AccessControlConstants.REP_NODE_PATH; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; public class PrincipalACLTest extends AbstractAccessControlTest { private ACL principalAcl; @Override @Before public void before() throws Exception { super.before(); JackrabbitAccessControlManager acMgr = getAccessControlManager(root); AccessControlList policy = AccessControlUtils.getAccessControlList(acMgr, TEST_PATH); policy.addAccessControlEntry(testPrincipal, testPrivileges); policy.addAccessControlEntry(EveryonePrincipal.getInstance(), testPrivileges); acMgr.setPolicy(TEST_PATH, policy); root.commit(); principalAcl = getPrincipalAcl(acMgr, testPrincipal); } @NotNull private static ACL getPrincipalAcl(@NotNull JackrabbitAccessControlManager acMgr, @NotNull Principal testPrincipal) throws RepositoryException { for (AccessControlPolicy acp : acMgr.getPolicies(testPrincipal)) { if (acp instanceof ACL) { return (ACL) acp; } } throw new RuntimeException("no principal acl found"); } @Test(expected = UnsupportedRepositoryOperationException.class) public void testReorder() throws Exception { AccessControlEntry[] entries = principalAcl.getAccessControlEntries(); principalAcl.orderBefore(entries[0], null); } @Test public void testEquals() throws Exception { assertEquals(principalAcl, principalAcl); assertEquals(principalAcl, getPrincipalAcl(getAccessControlManager(root), testPrincipal)); } @Test public void testEqualsDifferentPrincipal() throws Exception { assertNotEquals(principalAcl, getPrincipalAcl(getAccessControlManager(root), EveryonePrincipal.getInstance())); } @Test public void testEqualsDifferentACL() 
throws Exception { assertNotEquals(principalAcl, AccessControlUtils.getAccessControlList(getAccessControlManager(root), TEST_PATH)); } @Test public void testEqualsDifferentPath() throws Exception { ACL acl = getPrincipalAcl(getAccessControlManager(root), new PrincipalImpl(testPrincipal.getName())); assertNotEquals(principalAcl, acl); } @Test public void testEqualsDifferentEntries() throws Exception { ValueFactory vf = getValueFactory(root); ACL acl = getPrincipalAcl(getAccessControlManager(root), testPrincipal); acl.addEntry(testPrincipal, privilegesFromNames(JCR_VERSION_MANAGEMENT), true, ImmutableMap.of(REP_GLOB, vf.createValue("/subtree/*"), REP_NODE_PATH, vf.createValue(TEST_PATH))); assertNotEquals(principalAcl, acl); } @Test public void testHashCode() { assertEquals(0, principalAcl.hashCode()); } }
trekawek/jackrabbit-oak
oak-core/src/test/java/org/apache/jackrabbit/oak/security/authorization/accesscontrol/PrincipalACLTest.java
Java
apache-2.0
4,767
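The test above locates a principal-based ACL by scanning the policies bound to a principal. As a minimal sketch of that lookup pattern against the public Jackrabbit API (the helper class name is illustrative, and a real caller would obtain the access control manager from an open session):

import java.security.Principal;
import javax.jcr.RepositoryException;
import javax.jcr.security.AccessControlPolicy;
import org.apache.jackrabbit.api.security.JackrabbitAccessControlList;
import org.apache.jackrabbit.api.security.JackrabbitAccessControlManager;

final class PrincipalAclLookup {
    // Scans the principal-bound policies and returns the first ACL, or null if none is bound.
    static JackrabbitAccessControlList findPrincipalAcl(JackrabbitAccessControlManager acMgr,
                                                        Principal principal) throws RepositoryException {
        for (AccessControlPolicy policy : acMgr.getPolicies(principal)) {
            if (policy instanceof JackrabbitAccessControlList) {
                return (JackrabbitAccessControlList) policy;
            }
        }
        return null;
    }
}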
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.codeInspection.dataFlow.inliner; import com.intellij.codeInspection.dataFlow.CFGBuilder; import com.intellij.codeInspection.dataFlow.Nullness; import com.intellij.codeInspection.dataFlow.SpecialField; import com.intellij.codeInspection.dataFlow.value.DfaValueFactory; import com.intellij.codeInspection.dataFlow.value.DfaVariableValue; import com.intellij.psi.PsiExpression; import com.intellij.psi.PsiMethodCallExpression; import com.intellij.psi.PsiVariable; import com.siyeh.ig.callMatcher.CallMapper; import org.jetbrains.annotations.NotNull; import static com.intellij.codeInspection.dataFlow.SpecialField.COLLECTION_SIZE; import static com.intellij.codeInspection.dataFlow.SpecialField.MAP_SIZE; import static com.intellij.psi.CommonClassNames.JAVA_UTIL_COLLECTIONS; import static com.siyeh.ig.callMatcher.CallMatcher.staticCall; public class CollectionFactoryInliner implements CallInliner { static final class FactoryInfo { int mySize; SpecialField mySizeField; public FactoryInfo(int size, SpecialField sizeField) { mySize = size; mySizeField = sizeField; } } private static final CallMapper<FactoryInfo> STATIC_FACTORIES = new CallMapper<FactoryInfo>() .register(staticCall(JAVA_UTIL_COLLECTIONS, "emptyList", "emptySet").parameterCount(0), new FactoryInfo(0, COLLECTION_SIZE)) .register(staticCall(JAVA_UTIL_COLLECTIONS, "singletonList", "singleton").parameterCount(1), new FactoryInfo(1, COLLECTION_SIZE)) .register(staticCall(JAVA_UTIL_COLLECTIONS, "emptyMap").parameterCount(0), new FactoryInfo(0, MAP_SIZE)) .register(staticCall(JAVA_UTIL_COLLECTIONS, "singletonMap").parameterCount(2), new FactoryInfo(1, MAP_SIZE)); @Override public boolean tryInlineCall(@NotNull CFGBuilder builder, @NotNull PsiMethodCallExpression call) { FactoryInfo factoryInfo = STATIC_FACTORIES.mapFirst(call); if (factoryInfo == null) return false; PsiExpression[] args = call.getArgumentList().getExpressions(); for (PsiExpression arg : args) { builder.pushExpression(arg).pop(); } PsiVariable variable = builder.createTempVariable(call.getType()); DfaValueFactory factory = builder.getFactory(); DfaVariableValue variableValue = factory.getVarFactory().createVariableValue(variable, false); builder.pushVariable(variable) // tmpVar = <Value of collection type> .push(factory.createTypeValue(call.getType(), Nullness.NOT_NULL)) .assign() // leave tmpVar on stack: it's result of method call .push(factoryInfo.mySizeField.createValue(factory, variableValue)) // tmpVar.size = <size> .push(factory.getInt(factoryInfo.mySize)) .assign() .pop(); return true; } }
apixandru/intellij-community
java/java-analysis-impl/src/com/intellij/codeInspection/dataFlow/inliner/CollectionFactoryInliner.java
Java
apache-2.0
3,317
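The CallMapper above rests on one fact per factory: the size of the returned collection is a constant (0 or 1) guaranteed by the java.util.Collections contract. A self-contained check of exactly those invariants, runnable with java -ea to enable assertions:

import java.util.Collections;

public class FactorySizeInvariants {
    public static void main(String[] args) {
        // Sizes the inliner hard-codes for the zero-argument factories.
        assert Collections.emptyList().size() == 0;
        assert Collections.emptySet().size() == 0;
        assert Collections.emptyMap().size() == 0;
        // Sizes it hard-codes for the singleton factories.
        assert Collections.singletonList("x").size() == 1;
        assert Collections.singleton("x").size() == 1;
        assert Collections.singletonMap("k", "v").size() == 1;
        System.out.println("all factory size invariants hold");
    }
}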
/* * Copyright (C) 2014 The Android Open Source Project * Copyright (c) 1996, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package java.io; import com.google.j2objc.WeakProxy; import java.util.Formatter; import java.util.Locale; import java.nio.charset.Charset; import java.nio.charset.IllegalCharsetNameException; import java.nio.charset.UnsupportedCharsetException; /** * A <code>PrintStream</code> adds functionality to another output stream, * namely the ability to print representations of various data values * conveniently. Two other features are provided as well. Unlike other output * streams, a <code>PrintStream</code> never throws an * <code>IOException</code>; instead, exceptional situations merely set an * internal flag that can be tested via the <code>checkError</code> method. * Optionally, a <code>PrintStream</code> can be created so as to flush * automatically; this means that the <code>flush</code> method is * automatically invoked after a byte array is written, one of the * <code>println</code> methods is invoked, or a newline character or byte * (<code>'\n'</code>) is written. * * <p> All characters printed by a <code>PrintStream</code> are converted into * bytes using the platform's default character encoding. The <code>{@link * PrintWriter}</code> class should be used in situations that require writing * characters rather than bytes. * * @author Frank Yellin * @author Mark Reinhold * @since JDK1.0 */ public class PrintStream extends FilterOutputStream implements Appendable, Closeable { private final boolean autoFlush; private boolean trouble = false; private Formatter formatter; /** * Track both the text- and character-output streams, so that their buffers * can be flushed without flushing the entire stream. */ private BufferedWriter textOut; private OutputStreamWriter charOut; private Charset charset; /** * requireNonNull is explicitly declared here so as not to create an extra * dependency on java.util.Objects.requireNonNull. PrintStream is loaded * early during system initialization. */ private static <T> T requireNonNull(T obj, String message) { if (obj == null) throw new NullPointerException(message); return obj; } /** * Returns a charset object for the given charset name. 
* @throws NullPointerException if csn is null * @throws UnsupportedEncodingException if the charset is not supported */ private static Charset toCharset(String csn) throws UnsupportedEncodingException { requireNonNull(csn, "charsetName"); try { return Charset.forName(csn); } catch (IllegalCharsetNameException|UnsupportedCharsetException unused) { // UnsupportedEncodingException should be thrown throw new UnsupportedEncodingException(csn); } } /* Private constructors */ private PrintStream(boolean autoFlush, OutputStream out) { super(out); this.autoFlush = autoFlush; } private PrintStream(boolean autoFlush, OutputStream out, Charset charset) { super(out); this.autoFlush = autoFlush; this.charset = charset; } /* Variant of the private constructor so that the given charset name * can be verified before evaluating the OutputStream argument. Used * by constructors creating a FileOutputStream that also take a * charset name. */ private PrintStream(boolean autoFlush, Charset charset, OutputStream out) throws UnsupportedEncodingException { this(autoFlush, out, charset); } /** * Creates a new print stream. This stream will not flush automatically. * * @param out The output stream to which values and objects will be * printed * * @see java.io.PrintWriter#PrintWriter(java.io.OutputStream) */ public PrintStream(OutputStream out) { this(out, false); } /** * Creates a new print stream. * * @param out The output stream to which values and objects will be * printed * @param autoFlush A boolean; if true, the output buffer will be flushed * whenever a byte array is written, one of the * <code>println</code> methods is invoked, or a newline * character or byte (<code>'\n'</code>) is written * * @see java.io.PrintWriter#PrintWriter(java.io.OutputStream, boolean) */ public PrintStream(OutputStream out, boolean autoFlush) { this(autoFlush, requireNonNull(out, "Null output stream")); } /** * Creates a new print stream. * * @param out The output stream to which values and objects will be * printed * @param autoFlush A boolean; if true, the output buffer will be flushed * whenever a byte array is written, one of the * <code>println</code> methods is invoked, or a newline * character or byte (<code>'\n'</code>) is written * @param encoding The name of a supported * <a href="../lang/package-summary.html#charenc"> * character encoding</a> * * @throws UnsupportedEncodingException * If the named encoding is not supported * * @since 1.4 */ public PrintStream(OutputStream out, boolean autoFlush, String encoding) throws UnsupportedEncodingException { this(autoFlush, requireNonNull(out, "Null output stream"), toCharset(encoding)); } /** * Creates a new print stream, without automatic line flushing, with the * specified file name. This convenience constructor creates * the necessary intermediate {@link java.io.OutputStreamWriter * OutputStreamWriter}, which will encode characters using the * {@linkplain java.nio.charset.Charset#defaultCharset() default charset} * for this instance of the Java virtual machine. * * @param fileName * The name of the file to use as the destination of this print * stream. If the file exists, then it will be truncated to * zero size; otherwise, a new file will be created. The output * will be written to the file and is buffered.
* * @throws FileNotFoundException * If the given file object does not denote an existing, writable * regular file and a new regular file of that name cannot be * created, or if some other error occurs while opening or * creating the file * * @throws SecurityException * If a security manager is present and {@link * SecurityManager#checkWrite checkWrite(fileName)} denies write * access to the file * * @since 1.5 */ public PrintStream(String fileName) throws FileNotFoundException { this(false, new FileOutputStream(fileName)); } /** * Creates a new print stream, without automatic line flushing, with the * specified file name and charset. This convenience constructor creates * the necessary intermediate {@link java.io.OutputStreamWriter * OutputStreamWriter}, which will encode characters using the provided * charset. * * @param fileName * The name of the file to use as the destination of this print * stream. If the file exists, then it will be truncated to * zero size; otherwise, a new file will be created. The output * will be written to the file and is buffered. * * @param csn * The name of a supported {@linkplain java.nio.charset.Charset * charset} * * @throws FileNotFoundException * If the given file object does not denote an existing, writable * regular file and a new regular file of that name cannot be * created, or if some other error occurs while opening or * creating the file * * @throws SecurityException * If a security manager is present and {@link * SecurityManager#checkWrite checkWrite(fileName)} denies write * access to the file * * @throws UnsupportedEncodingException * If the named charset is not supported * * @since 1.5 */ public PrintStream(String fileName, String csn) throws FileNotFoundException, UnsupportedEncodingException { // ensure charset is checked before the file is opened this(false, toCharset(csn), new FileOutputStream(fileName)); } /** * Creates a new print stream, without automatic line flushing, with the * specified file. This convenience constructor creates the necessary * intermediate {@link java.io.OutputStreamWriter OutputStreamWriter}, * which will encode characters using the {@linkplain * java.nio.charset.Charset#defaultCharset() default charset} for this * instance of the Java virtual machine. * * @param file * The file to use as the destination of this print stream. If the * file exists, then it will be truncated to zero size; otherwise, * a new file will be created. The output will be written to the * file and is buffered. * * @throws FileNotFoundException * If the given file object does not denote an existing, writable * regular file and a new regular file of that name cannot be * created, or if some other error occurs while opening or * creating the file * * @throws SecurityException * If a security manager is present and {@link * SecurityManager#checkWrite checkWrite(file.getPath())} * denies write access to the file * * @since 1.5 */ public PrintStream(File file) throws FileNotFoundException { this(false, new FileOutputStream(file)); } /** * Creates a new print stream, without automatic line flushing, with the * specified file and charset. This convenience constructor creates * the necessary intermediate {@link java.io.OutputStreamWriter * OutputStreamWriter}, which will encode characters using the provided * charset. * * @param file * The file to use as the destination of this print stream. If the * file exists, then it will be truncated to zero size; otherwise, * a new file will be created. The output will be written to the * file and is buffered. 
* * @param csn * The name of a supported {@linkplain java.nio.charset.Charset * charset} * * @throws FileNotFoundException * If the given file object does not denote an existing, writable * regular file and a new regular file of that name cannot be * created, or if some other error occurs while opening or * creating the file * * @throws SecurityException * If a security manager is present and {@link * SecurityManager#checkWrite checkWrite(file.getPath())} * denies write access to the file * * @throws UnsupportedEncodingException * If the named charset is not supported * * @since 1.5 */ public PrintStream(File file, String csn) throws FileNotFoundException, UnsupportedEncodingException { // ensure charset is checked before the file is opened this(false, toCharset(csn), new FileOutputStream(file)); } /** Check to make sure that the stream has not been closed */ private void ensureOpen() throws IOException { if (out == null) throw new IOException("Stream closed"); } /** * Flushes the stream. This is done by writing any buffered output bytes to * the underlying output stream and then flushing that stream. * * @see java.io.OutputStream#flush() */ public void flush() { synchronized (this) { try { ensureOpen(); out.flush(); } catch (IOException x) { trouble = true; } } } private boolean closing = false; /* To avoid recursive closing */ // Android-changed: Lazily initialize textOut. private BufferedWriter getTextOut() { if (textOut == null) { PrintStream proxy = WeakProxy.forObject(this); charOut = charset != null ? new OutputStreamWriter(proxy, charset) : new OutputStreamWriter(proxy); textOut = new BufferedWriter(charOut); } return textOut; } /** * Closes the stream. This is done by flushing the stream and then closing * the underlying output stream. * * @see java.io.OutputStream#close() */ public void close() { synchronized (this) { if (! closing) { closing = true; try { // Android-changed: Lazily initialized. if (textOut != null) { textOut.close(); } out.close(); } catch (IOException x) { trouble = true; } textOut = null; charOut = null; out = null; } } } /** * Flushes the stream and checks its error state. The internal error state * is set to <code>true</code> when the underlying output stream throws an * <code>IOException</code> other than <code>InterruptedIOException</code>, * and when the <code>setError</code> method is invoked. If an operation * on the underlying output stream throws an * <code>InterruptedIOException</code>, then the <code>PrintStream</code> * converts the exception back into an interrupt by doing: * <pre> * Thread.currentThread().interrupt(); * </pre> * or the equivalent. * * @return <code>true</code> if and only if this stream has encountered an * <code>IOException</code> other than * <code>InterruptedIOException</code>, or the * <code>setError</code> method has been invoked */ public boolean checkError() { if (out != null) flush(); if (out instanceof java.io.PrintStream) { PrintStream ps = (PrintStream) out; return ps.checkError(); } return trouble; } /** * Sets the error state of the stream to <code>true</code>. * * <p> This method will cause subsequent invocations of {@link * #checkError()} to return <tt>true</tt> until {@link * #clearError()} is invoked. * * @since JDK1.1 */ protected void setError() { trouble = true; } /** * Clears the internal error state of this stream. * * <p> This method will cause subsequent invocations of {@link * #checkError()} to return <tt>false</tt> until another write * operation fails and invokes {@link #setError()}. 
* * @since 1.6 */ protected void clearError() { trouble = false; } /* * Exception-catching, synchronized output operations, * which also implement the write() methods of OutputStream */ /** * Writes the specified byte to this stream. If the byte is a newline and * automatic flushing is enabled then the <code>flush</code> method will be * invoked. * * <p> Note that the byte is written as given; to write a character that * will be translated according to the platform's default character * encoding, use the <code>print(char)</code> or <code>println(char)</code> * methods. * * @param b The byte to be written * @see #print(char) * @see #println(char) */ public void write(int b) { try { synchronized (this) { ensureOpen(); out.write(b); if ((b == '\n') && autoFlush) out.flush(); } } catch (InterruptedIOException x) { Thread.currentThread().interrupt(); } catch (IOException x) { trouble = true; } } /** * Writes <code>len</code> bytes from the specified byte array starting at * offset <code>off</code> to this stream. If automatic flushing is * enabled then the <code>flush</code> method will be invoked. * * <p> Note that the bytes will be written as given; to write characters * that will be translated according to the platform's default character * encoding, use the <code>print(char)</code> or <code>println(char)</code> * methods. * * @param buf A byte array * @param off Offset from which to start taking bytes * @param len Number of bytes to write */ public void write(byte buf[], int off, int len) { try { synchronized (this) { ensureOpen(); out.write(buf, off, len); if (autoFlush) out.flush(); } } catch (InterruptedIOException x) { Thread.currentThread().interrupt(); } catch (IOException x) { trouble = true; } } /* * The following private methods on the text- and character-output streams * always flush the stream buffers, so that writes to the underlying byte * stream occur as promptly as with the original PrintStream. */ private void write(char buf[]) { try { synchronized (this) { ensureOpen(); // Android-changed: Lazily initialized. BufferedWriter textOut = getTextOut(); textOut.write(buf); textOut.flushBuffer(); charOut.flushBuffer(); if (autoFlush) { for (int i = 0; i < buf.length; i++) if (buf[i] == '\n') out.flush(); } } } catch (InterruptedIOException x) { Thread.currentThread().interrupt(); } catch (IOException x) { trouble = true; } } private void write(String s) { try { synchronized (this) { ensureOpen(); // Android-changed: Lazily initialized. BufferedWriter textOut = getTextOut(); textOut.write(s); textOut.flushBuffer(); charOut.flushBuffer(); if (autoFlush && (s.indexOf('\n') >= 0)) out.flush(); } } catch (InterruptedIOException x) { Thread.currentThread().interrupt(); } catch (IOException x) { trouble = true; } } private void newLine() { try { synchronized (this) { ensureOpen(); // Android-changed: Lazily initialized. BufferedWriter textOut = getTextOut(); textOut.newLine(); textOut.flushBuffer(); charOut.flushBuffer(); if (autoFlush) out.flush(); } } catch (InterruptedIOException x) { Thread.currentThread().interrupt(); } catch (IOException x) { trouble = true; } } /* Methods that do not terminate lines */ /** * Prints a boolean value. The string produced by <code>{@link * java.lang.String#valueOf(boolean)}</code> is translated into bytes * according to the platform's default character encoding, and these bytes * are written in exactly the manner of the * <code>{@link #write(int)}</code> method. 
* * @param b The <code>boolean</code> to be printed */ public void print(boolean b) { write(b ? "true" : "false"); } /** * Prints a character. The character is translated into one or more bytes * according to the platform's default character encoding, and these bytes * are written in exactly the manner of the * <code>{@link #write(int)}</code> method. * * @param c The <code>char</code> to be printed */ public void print(char c) { write(String.valueOf(c)); } /** * Prints an integer. The string produced by <code>{@link * java.lang.String#valueOf(int)}</code> is translated into bytes * according to the platform's default character encoding, and these bytes * are written in exactly the manner of the * <code>{@link #write(int)}</code> method. * * @param i The <code>int</code> to be printed * @see java.lang.Integer#toString(int) */ public void print(int i) { write(String.valueOf(i)); } /** * Prints a long integer. The string produced by <code>{@link * java.lang.String#valueOf(long)}</code> is translated into bytes * according to the platform's default character encoding, and these bytes * are written in exactly the manner of the * <code>{@link #write(int)}</code> method. * * @param l The <code>long</code> to be printed * @see java.lang.Long#toString(long) */ public void print(long l) { write(String.valueOf(l)); } /** * Prints a floating-point number. The string produced by <code>{@link * java.lang.String#valueOf(float)}</code> is translated into bytes * according to the platform's default character encoding, and these bytes * are written in exactly the manner of the * <code>{@link #write(int)}</code> method. * * @param f The <code>float</code> to be printed * @see java.lang.Float#toString(float) */ public void print(float f) { write(String.valueOf(f)); } /** * Prints a double-precision floating-point number. The string produced by * <code>{@link java.lang.String#valueOf(double)}</code> is translated into * bytes according to the platform's default character encoding, and these * bytes are written in exactly the manner of the <code>{@link * #write(int)}</code> method. * * @param d The <code>double</code> to be printed * @see java.lang.Double#toString(double) */ public void print(double d) { write(String.valueOf(d)); } /** * Prints an array of characters. The characters are converted into bytes * according to the platform's default character encoding, and these bytes * are written in exactly the manner of the * <code>{@link #write(int)}</code> method. * * @param s The array of chars to be printed * * @throws NullPointerException If <code>s</code> is <code>null</code> */ public void print(char s[]) { write(s); } /** * Prints a string. If the argument is <code>null</code> then the string * <code>"null"</code> is printed. Otherwise, the string's characters are * converted into bytes according to the platform's default character * encoding, and these bytes are written in exactly the manner of the * <code>{@link #write(int)}</code> method. * * @param s The <code>String</code> to be printed */ public void print(String s) { if (s == null) { s = "null"; } write(s); } /** * Prints an object. The string produced by the <code>{@link * java.lang.String#valueOf(Object)}</code> method is translated into bytes * according to the platform's default character encoding, and these bytes * are written in exactly the manner of the * <code>{@link #write(int)}</code> method. 
* * @param obj The <code>Object</code> to be printed * @see java.lang.Object#toString() */ public void print(Object obj) { write(String.valueOf(obj)); } /* Methods that do terminate lines */ /** * Terminates the current line by writing the line separator string. The * line separator string is defined by the system property * <code>line.separator</code>, and is not necessarily a single newline * character (<code>'\n'</code>). */ public void println() { newLine(); } /** * Prints a boolean and then terminate the line. This method behaves as * though it invokes <code>{@link #print(boolean)}</code> and then * <code>{@link #println()}</code>. * * @param x The <code>boolean</code> to be printed */ public void println(boolean x) { synchronized (this) { print(x); newLine(); } } /** * Prints a character and then terminate the line. This method behaves as * though it invokes <code>{@link #print(char)}</code> and then * <code>{@link #println()}</code>. * * @param x The <code>char</code> to be printed. */ public void println(char x) { synchronized (this) { print(x); newLine(); } } /** * Prints an integer and then terminate the line. This method behaves as * though it invokes <code>{@link #print(int)}</code> and then * <code>{@link #println()}</code>. * * @param x The <code>int</code> to be printed. */ public void println(int x) { synchronized (this) { print(x); newLine(); } } /** * Prints a long and then terminate the line. This method behaves as * though it invokes <code>{@link #print(long)}</code> and then * <code>{@link #println()}</code>. * * @param x a The <code>long</code> to be printed. */ public void println(long x) { synchronized (this) { print(x); newLine(); } } /** * Prints a float and then terminate the line. This method behaves as * though it invokes <code>{@link #print(float)}</code> and then * <code>{@link #println()}</code>. * * @param x The <code>float</code> to be printed. */ public void println(float x) { synchronized (this) { print(x); newLine(); } } /** * Prints a double and then terminate the line. This method behaves as * though it invokes <code>{@link #print(double)}</code> and then * <code>{@link #println()}</code>. * * @param x The <code>double</code> to be printed. */ public void println(double x) { synchronized (this) { print(x); newLine(); } } /** * Prints an array of characters and then terminate the line. This method * behaves as though it invokes <code>{@link #print(char[])}</code> and * then <code>{@link #println()}</code>. * * @param x an array of chars to print. */ public void println(char x[]) { synchronized (this) { print(x); newLine(); } } /** * Prints a String and then terminate the line. This method behaves as * though it invokes <code>{@link #print(String)}</code> and then * <code>{@link #println()}</code>. * * @param x The <code>String</code> to be printed. */ public void println(String x) { synchronized (this) { print(x); newLine(); } } /** * Prints an Object and then terminate the line. This method calls * at first String.valueOf(x) to get the printed object's string value, * then behaves as * though it invokes <code>{@link #print(String)}</code> and then * <code>{@link #println()}</code>. * * @param x The <code>Object</code> to be printed. */ public void println(Object x) { String s = String.valueOf(x); synchronized (this) { print(s); newLine(); } } /** * A convenience method to write a formatted string to this output stream * using the specified format string and arguments. 
* * <p> An invocation of this method of the form <tt>out.printf(format, * args)</tt> behaves in exactly the same way as the invocation * * <pre> * out.format(format, args) </pre> * * @param format * A format string as described in <a * href="../util/Formatter.html#syntax">Format string syntax</a> * * @param args * Arguments referenced by the format specifiers in the format * string. If there are more arguments than format specifiers, the * extra arguments are ignored. The number of arguments is * variable and may be zero. The maximum number of arguments is * limited by the maximum dimension of a Java array as defined by * <cite>The Java&trade; Virtual Machine Specification</cite>. * The behaviour on a * <tt>null</tt> argument depends on the <a * href="../util/Formatter.html#syntax">conversion</a>. * * @throws java.util.IllegalFormatException * If a format string contains an illegal syntax, a format * specifier that is incompatible with the given arguments, * insufficient arguments given the format string, or other * illegal conditions. For specification of all possible * formatting errors, see the <a * href="../util/Formatter.html#detail">Details</a> section of the * formatter class specification. * * @throws NullPointerException * If the <tt>format</tt> is <tt>null</tt> * * @return This output stream * * @since 1.5 */ public PrintStream printf(String format, Object ... args) { return format(format, args); } /** * A convenience method to write a formatted string to this output stream * using the specified format string and arguments. * * <p> An invocation of this method of the form <tt>out.printf(l, format, * args)</tt> behaves in exactly the same way as the invocation * * <pre> * out.format(l, format, args) </pre> * * @param l * The {@linkplain java.util.Locale locale} to apply during * formatting. If <tt>l</tt> is <tt>null</tt> then no localization * is applied. * * @param format * A format string as described in <a * href="../util/Formatter.html#syntax">Format string syntax</a> * * @param args * Arguments referenced by the format specifiers in the format * string. If there are more arguments than format specifiers, the * extra arguments are ignored. The number of arguments is * variable and may be zero. The maximum number of arguments is * limited by the maximum dimension of a Java array as defined by * <cite>The Java&trade; Virtual Machine Specification</cite>. * The behaviour on a * <tt>null</tt> argument depends on the <a * href="../util/Formatter.html#syntax">conversion</a>. * * @throws java.util.IllegalFormatException * If a format string contains an illegal syntax, a format * specifier that is incompatible with the given arguments, * insufficient arguments given the format string, or other * illegal conditions. For specification of all possible * formatting errors, see the <a * href="../util/Formatter.html#detail">Details</a> section of the * formatter class specification. * * @throws NullPointerException * If the <tt>format</tt> is <tt>null</tt> * * @return This output stream * * @since 1.5 */ public PrintStream printf(Locale l, String format, Object ... args) { return format(l, format, args); } /** * Writes a formatted string to this output stream using the specified * format string and arguments. * * <p> The locale always used is the one returned by {@link * java.util.Locale#getDefault() Locale.getDefault()}, regardless of any * previous invocations of other formatting methods on this object. 
* * @param format * A format string as described in <a * href="../util/Formatter.html#syntax">Format string syntax</a> * * @param args * Arguments referenced by the format specifiers in the format * string. If there are more arguments than format specifiers, the * extra arguments are ignored. The number of arguments is * variable and may be zero. The maximum number of arguments is * limited by the maximum dimension of a Java array as defined by * <cite>The Java&trade; Virtual Machine Specification</cite>. * The behaviour on a * <tt>null</tt> argument depends on the <a * href="../util/Formatter.html#syntax">conversion</a>. * * @throws java.util.IllegalFormatException * If a format string contains an illegal syntax, a format * specifier that is incompatible with the given arguments, * insufficient arguments given the format string, or other * illegal conditions. For specification of all possible * formatting errors, see the <a * href="../util/Formatter.html#detail">Details</a> section of the * formatter class specification. * * @throws NullPointerException * If the <tt>format</tt> is <tt>null</tt> * * @return This output stream * * @since 1.5 */ public PrintStream format(String format, Object ... args) { try { synchronized (this) { ensureOpen(); if ((formatter == null) || (formatter.locale() != Locale.getDefault())) formatter = new Formatter((Appendable) WeakProxy.forObject(this)); formatter.format(Locale.getDefault(), format, args); } } catch (InterruptedIOException x) { Thread.currentThread().interrupt(); } catch (IOException x) { trouble = true; } return this; } /** * Writes a formatted string to this output stream using the specified * format string and arguments. * * @param l * The {@linkplain java.util.Locale locale} to apply during * formatting. If <tt>l</tt> is <tt>null</tt> then no localization * is applied. * * @param format * A format string as described in <a * href="../util/Formatter.html#syntax">Format string syntax</a> * * @param args * Arguments referenced by the format specifiers in the format * string. If there are more arguments than format specifiers, the * extra arguments are ignored. The number of arguments is * variable and may be zero. The maximum number of arguments is * limited by the maximum dimension of a Java array as defined by * <cite>The Java&trade; Virtual Machine Specification</cite>. * The behaviour on a * <tt>null</tt> argument depends on the <a * href="../util/Formatter.html#syntax">conversion</a>. * * @throws java.util.IllegalFormatException * If a format string contains an illegal syntax, a format * specifier that is incompatible with the given arguments, * insufficient arguments given the format string, or other * illegal conditions. For specification of all possible * formatting errors, see the <a * href="../util/Formatter.html#detail">Details</a> section of the * formatter class specification. * * @throws NullPointerException * If the <tt>format</tt> is <tt>null</tt> * * @return This output stream * * @since 1.5 */ public PrintStream format(Locale l, String format, Object ... args) { try { synchronized (this) { ensureOpen(); if ((formatter == null) || (formatter.locale() != l)) formatter = new Formatter(WeakProxy.forObject(this), l); formatter.format(l, format, args); } } catch (InterruptedIOException x) { Thread.currentThread().interrupt(); } catch (IOException x) { trouble = true; } return this; } /** * Appends the specified character sequence to this output stream. 
* * <p> An invocation of this method of the form <tt>out.append(csq)</tt> * behaves in exactly the same way as the invocation * * <pre> * out.print(csq.toString()) </pre> * * <p> Depending on the specification of <tt>toString</tt> for the * character sequence <tt>csq</tt>, the entire sequence may not be * appended. For instance, invoking then <tt>toString</tt> method of a * character buffer will return a subsequence whose content depends upon * the buffer's position and limit. * * @param csq * The character sequence to append. If <tt>csq</tt> is * <tt>null</tt>, then the four characters <tt>"null"</tt> are * appended to this output stream. * * @return This output stream * * @since 1.5 */ public PrintStream append(CharSequence csq) { if (csq == null) print("null"); else print(csq.toString()); return this; } /** * Appends a subsequence of the specified character sequence to this output * stream. * * <p> An invocation of this method of the form <tt>out.append(csq, start, * end)</tt> when <tt>csq</tt> is not <tt>null</tt>, behaves in * exactly the same way as the invocation * * <pre> * out.print(csq.subSequence(start, end).toString()) </pre> * * @param csq * The character sequence from which a subsequence will be * appended. If <tt>csq</tt> is <tt>null</tt>, then characters * will be appended as if <tt>csq</tt> contained the four * characters <tt>"null"</tt>. * * @param start * The index of the first character in the subsequence * * @param end * The index of the character following the last character in the * subsequence * * @return This output stream * * @throws IndexOutOfBoundsException * If <tt>start</tt> or <tt>end</tt> are negative, <tt>start</tt> * is greater than <tt>end</tt>, or <tt>end</tt> is greater than * <tt>csq.length()</tt> * * @since 1.5 */ public PrintStream append(CharSequence csq, int start, int end) { CharSequence cs = (csq == null ? "null" : csq); write(cs.subSequence(start, end).toString()); return this; } /** * Appends the specified character to this output stream. * * <p> An invocation of this method of the form <tt>out.append(c)</tt> * behaves in exactly the same way as the invocation * * <pre> * out.print(c) </pre> * * @param c * The 16-bit character to append * * @return This output stream * * @since 1.5 */ public PrintStream append(char c) { print(c); return this; } }
life-beam/j2objc
jre_emul/android/platform/libcore/ojluni/src/main/java/java/io/PrintStream.java
Java
apache-2.0
41,821
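The class documentation above makes three behavioral promises: no IOException ever escapes, failures latch into a flag readable via checkError(), and with autoFlush enabled the buffer is flushed on println and on newline bytes. A short, self-contained sketch exercising those promises (the sink and strings are illustrative):

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;

public class PrintStreamDemo {
    public static void main(String[] args) throws UnsupportedEncodingException {
        ByteArrayOutputStream sink = new ByteArrayOutputStream();
        // autoFlush = true: flushed on println calls and on '\n' bytes.
        PrintStream ps = new PrintStream(sink, true, "UTF-8");
        ps.printf("%s scored %.1f%n", "alice", 9.5);
        ps.println(42);
        // No IOException is thrown anywhere above; errors latch instead.
        System.out.println("error state: " + ps.checkError());
        System.out.println(sink.toString("UTF-8"));
    }
}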
/* Copyright 2015 The Kubernetes Authors All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package e2e import ( "bytes" "fmt" "math" "sort" "strconv" "strings" "sync" "text/tabwriter" "time" . "github.com/onsi/gomega" "k8s.io/kubernetes/pkg/api" client "k8s.io/kubernetes/pkg/client/unversioned" utilruntime "k8s.io/kubernetes/pkg/util/runtime" ) const ( resourceDataGatheringPeriodSeconds = 60 ) type resourceConstraint struct { cpuConstraint float64 memoryConstraint uint64 } type containerResourceGatherer struct { usageTimeseries map[time.Time]resourceUsagePerContainer stopCh chan struct{} wg sync.WaitGroup } type SingleContainerSummary struct { Name string Cpu float64 Mem uint64 } // we can't have int here, as JSON does not accept integer keys. type ResourceUsageSummary map[string][]SingleContainerSummary func (s *ResourceUsageSummary) PrintHumanReadable() string { buf := &bytes.Buffer{} w := tabwriter.NewWriter(buf, 1, 0, 1, ' ', 0) for perc, summaries := range *s { buf.WriteString(fmt.Sprintf("%v percentile:\n", perc)) fmt.Fprintf(w, "container\tcpu(cores)\tmemory(MB)\n") for _, summary := range summaries { fmt.Fprintf(w, "%q\t%.3f\t%.2f\n", summary.Name, summary.Cpu, float64(summary.Mem)/(1024*1024)) } w.Flush() } return buf.String() } func (s *ResourceUsageSummary) PrintJSON() string { return prettyPrintJSON(*s) } func (g *containerResourceGatherer) startGatheringData(c *client.Client, period time.Duration) { g.usageTimeseries = make(map[time.Time]resourceUsagePerContainer) g.wg.Add(1) g.stopCh = make(chan struct{}) go func() error { defer utilruntime.HandleCrash() defer g.wg.Done() for { select { case <-time.After(period): now := time.Now() data, err := g.getKubeSystemContainersResourceUsage(c) if err != nil { Logf("Error while getting resource usage: %v", err) continue } g.usageTimeseries[now] = data case <-g.stopCh: Logf("Stop channel is closed. Stopping gatherer.") return nil } } }() } func (g *containerResourceGatherer) stopAndSummarize(percentiles []int, constraints map[string]resourceConstraint) *ResourceUsageSummary { close(g.stopCh) Logf("Closed stop channel.") g.wg.Wait() Logf("Waitgroup finished.") if len(percentiles) == 0 { Logf("Warning! 
Empty percentile list for stopAndPrintData.") return &ResourceUsageSummary{} } stats := g.computePercentiles(g.usageTimeseries, percentiles) sortedKeys := []string{} for name := range stats[percentiles[0]] { sortedKeys = append(sortedKeys, name) } sort.Strings(sortedKeys) violatedConstraints := make([]string, 0) summary := make(ResourceUsageSummary) for _, perc := range percentiles { for _, name := range sortedKeys { usage := stats[perc][name] summary[strconv.Itoa(perc)] = append(summary[strconv.Itoa(perc)], SingleContainerSummary{ Name: name, Cpu: usage.CPUUsageInCores, Mem: usage.MemoryWorkingSetInBytes, }) // Verifying 99th percentile of resource usage if perc == 99 { // Name has a form: <pod_name>/<container_name> containerName := strings.Split(name, "/")[1] if constraint, ok := constraints[containerName]; ok { if usage.CPUUsageInCores > constraint.cpuConstraint { violatedConstraints = append( violatedConstraints, fmt.Sprintf("Container %v is using %v/%v CPU", name, usage.CPUUsageInCores, constraint.cpuConstraint, ), ) } if usage.MemoryWorkingSetInBytes > constraint.memoryConstraint { violatedConstraints = append( violatedConstraints, fmt.Sprintf("Container %v is using %v/%v MB of memory", name, float64(usage.MemoryWorkingSetInBytes)/(1024*1024), float64(constraint.memoryConstraint)/(1024*1024), ), ) } } } } } Expect(violatedConstraints).To(BeEmpty()) return &summary } func (g *containerResourceGatherer) computePercentiles(timeSeries map[time.Time]resourceUsagePerContainer, percentilesToCompute []int) map[int]resourceUsagePerContainer { if len(timeSeries) == 0 { return make(map[int]resourceUsagePerContainer) } dataMap := make(map[string]*usageDataPerContainer) for _, singleStatistic := range timeSeries { for name, data := range singleStatistic { if dataMap[name] == nil { dataMap[name] = &usageDataPerContainer{ cpuData: make([]float64, 0, len(timeSeries)), memUseData: make([]uint64, 0, len(timeSeries)), memWorkSetData: make([]uint64, 0, len(timeSeries)), } } dataMap[name].cpuData = append(dataMap[name].cpuData, data.CPUUsageInCores) dataMap[name].memUseData = append(dataMap[name].memUseData, data.MemoryUsageInBytes) dataMap[name].memWorkSetData = append(dataMap[name].memWorkSetData, data.MemoryWorkingSetInBytes) } } for _, v := range dataMap { sort.Float64s(v.cpuData) sort.Sort(uint64arr(v.memUseData)) sort.Sort(uint64arr(v.memWorkSetData)) } result := make(map[int]resourceUsagePerContainer) for _, perc := range percentilesToCompute { data := make(resourceUsagePerContainer) for k, v := range dataMap { percentileIndex := int(math.Ceil(float64(len(v.cpuData)*perc)/100)) - 1 data[k] = &containerResourceUsage{ Name: k, CPUUsageInCores: v.cpuData[percentileIndex], MemoryUsageInBytes: v.memUseData[percentileIndex], MemoryWorkingSetInBytes: v.memWorkSetData[percentileIndex], } } result[perc] = data } return result } func (g *containerResourceGatherer) getKubeSystemContainersResourceUsage(c *client.Client) (resourceUsagePerContainer, error) { pods, err := c.Pods("kube-system").List(api.ListOptions{}) if err != nil { return resourceUsagePerContainer{}, err } nodes, err := c.Nodes().List(api.ListOptions{}) if err != nil { return resourceUsagePerContainer{}, err } containerIDToNameMap := make(map[string]string) containerIDs := make([]string, 0) for _, pod := range pods.Items { for _, container := range pod.Status.ContainerStatuses { containerID := strings.TrimPrefix(container.ContainerID, "docker:/") containerIDToNameMap[containerID] = pod.Name + "/" + container.Name containerIDs = append(containerIDs,
containerID) } } mutex := sync.Mutex{} wg := sync.WaitGroup{} wg.Add(len(nodes.Items)) errors := make([]error, 0) nameToUsageMap := make(resourceUsagePerContainer, len(containerIDToNameMap)) for _, node := range nodes.Items { go func(nodeName string) { defer utilruntime.HandleCrash() defer wg.Done() nodeUsage, err := getOneTimeResourceUsageOnNode(c, nodeName, 15*time.Second, func() []string { return containerIDs }, true) mutex.Lock() defer mutex.Unlock() if err != nil { errors = append(errors, err) return } for k, v := range nodeUsage { nameToUsageMap[containerIDToNameMap[k]] = v } }(node.Name) } wg.Wait() if len(errors) != 0 { return resourceUsagePerContainer{}, fmt.Errorf("Errors while gathering usage data: %v", errors) } return nameToUsageMap, nil }
swagiaal/kubernetes
test/e2e/resource_usage_gatherer.go
GO
apache-2.0
7,581
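computePercentiles above selects the p-th percentile by the nearest-rank rule: sort the series and take index ceil(n*p/100)-1. The same selection in Java, as a sketch of the formula only (none of the Kubernetes client types are involved):

import java.util.Arrays;

public class NearestRankPercentile {
    // Nearest-rank selection: index = ceil(n * p / 100) - 1 on sorted data.
    static double percentile(double[] samples, int p) {
        double[] sorted = samples.clone();
        Arrays.sort(sorted);
        int index = (int) Math.ceil(sorted.length * p / 100.0) - 1;
        return sorted[Math.max(index, 0)];
    }

    public static void main(String[] args) {
        double[] cpuCores = {0.12, 0.40, 0.07, 0.95, 0.33};
        System.out.println("p50 = " + percentile(cpuCores, 50)); // 0.33
        System.out.println("p99 = " + percentile(cpuCores, 99)); // 0.95
    }
}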
/* * Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ #include <aws/clouddirectory/model/BatchUpdateObjectAttributesResponse.h> #include <aws/core/utils/json/JsonSerializer.h> #include <utility> using namespace Aws::Utils::Json; using namespace Aws::Utils; namespace Aws { namespace CloudDirectory { namespace Model { BatchUpdateObjectAttributesResponse::BatchUpdateObjectAttributesResponse() : m_objectIdentifierHasBeenSet(false) { } BatchUpdateObjectAttributesResponse::BatchUpdateObjectAttributesResponse(JsonView jsonValue) : m_objectIdentifierHasBeenSet(false) { *this = jsonValue; } BatchUpdateObjectAttributesResponse& BatchUpdateObjectAttributesResponse::operator =(JsonView jsonValue) { if(jsonValue.ValueExists("ObjectIdentifier")) { m_objectIdentifier = jsonValue.GetString("ObjectIdentifier"); m_objectIdentifierHasBeenSet = true; } return *this; } JsonValue BatchUpdateObjectAttributesResponse::Jsonize() const { JsonValue payload; if(m_objectIdentifierHasBeenSet) { payload.WithString("ObjectIdentifier", m_objectIdentifier); } return payload; } } // namespace Model } // namespace CloudDirectory } // namespace Aws
cedral/aws-sdk-cpp
aws-cpp-sdk-clouddirectory/source/model/BatchUpdateObjectAttributesResponse.cpp
C++
apache-2.0
1,706
package com.ctrip.zeus.task.clean; import com.ctrip.zeus.clean.CleanDbManager; import com.ctrip.zeus.task.AbstractTask; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; import javax.annotation.Resource; /** * Created by fanqq on 2016/1/5. */ @Component("cleanDbTask") public class CleanDbTask extends AbstractTask { Logger logger = LoggerFactory.getLogger(this.getClass()); @Resource private CleanDbManager cleanDbManager; @Override public void start() { } @Override public void run() throws Exception { try { logger.info("[CleanDbTask] clean db task started."); cleanDbManager.run(); logger.info("[CleanDbTask] clean db task finished."); } catch (Exception e) { logger.warn("[clean db job] clean db exception." + e.getMessage(), e); } } @Override public void stop() { } @Override public long getInterval() { return 60000 * 30; } }
sdgdsffdsfff/zeus
slb/src/main/java/com/ctrip/zeus/task/clean/CleanDbTask.java
Java
apache-2.0
1,085
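CleanDbTask only supplies run() and a 30-minute getInterval(); the scheduling itself lives elsewhere in Zeus. A hypothetical driver showing how such a task could be polled (the PeriodicTask interface below is a stand-in, since AbstractTask's definition is not part of this file):

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class TaskDriver {
    // Stand-in for the run()/getInterval() contract used by CleanDbTask.
    interface PeriodicTask {
        void run() throws Exception;
        long getIntervalMillis();
    }

    static ScheduledExecutorService schedule(PeriodicTask task) {
        ScheduledExecutorService pool = Executors.newSingleThreadScheduledExecutor();
        pool.scheduleWithFixedDelay(() -> {
            try {
                task.run();
            } catch (Exception e) {
                // Mirror the task's own policy: log the failure, keep the schedule alive.
                e.printStackTrace();
            }
        }, 0, task.getIntervalMillis(), TimeUnit.MILLISECONDS);
        return pool;
    }
}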
/* * Copyright (c) 2005-2010 Grameen Foundation USA * All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. * * See also http://www.apache.org/licenses/LICENSE-2.0.html for an * explanation of the license and how it is applied. */ package org.mifos.domain.builders; import org.mifos.dto.screen.ClientPersonalDetailDto; public class ClientPersonalDetailDtoBuilder { private Integer ethinicity; private Integer citizenship; private Integer handicapped; private Integer businessActivities; private Integer maritalStatus; private Integer educationLevel; private Short numChildren; private Short gender; private Short povertyStatus; public ClientPersonalDetailDto build() { return new ClientPersonalDetailDto(ethinicity, citizenship, handicapped, businessActivities, maritalStatus, educationLevel, numChildren, gender, povertyStatus); } }
vorburger/mifos-head
application/src/test/java/org/mifos/domain/builders/ClientPersonalDetailDtoBuilder.java
Java
apache-2.0
1,421
/* * Copyright 2021 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.fixture; import com.thoughtworks.go.util.GoConstants; import com.thoughtworks.go.util.SystemEnvironment; public class DatabaseDiskIsLow implements PreCondition { @Override public void onSetUp() throws Exception { new SystemEnvironment().setProperty(SystemEnvironment.DATABASE_FULL_SIZE_LIMIT, "1m"); new SystemEnvironment().setProperty(SystemEnvironment.DATABASE_WARNING_SIZE_LIMIT, "11222334m"); } @Override public void onTearDown() throws Exception { new SystemEnvironment().clearProperty(SystemEnvironment.DATABASE_FULL_SIZE_LIMIT); new SystemEnvironment().clearProperty(SystemEnvironment.DATABASE_WARNING_SIZE_LIMIT); } public long getLowLimit() { return 1 * GoConstants.MEGA_BYTE; } }
marques-work/gocd
server/src/test-integration/java/com/thoughtworks/go/fixture/DatabaseDiskIsLow.java
Java
apache-2.0
1,397
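The fixture above works by flipping system properties, so correctness depends on tearing down even when the test body throws. A hedged usage sketch of that setup/teardown discipline, assuming the fixture class is on the test classpath:

public class DatabaseDiskIsLowUsage {
    public static void main(String[] args) throws Exception {
        DatabaseDiskIsLow lowDisk = new DatabaseDiskIsLow();
        lowDisk.onSetUp();
        try {
            // ... exercise server behavior under the 1 MB full-size limit ...
            System.out.println("low limit in bytes: " + lowDisk.getLowLimit());
        } finally {
            lowDisk.onTearDown(); // always restore the real limits
        }
    }
}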
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #if defined(XALAN_BUILD_DEPRECATED_DOM_BRIDGE) #include "XercesBridgeHelper.hpp" #if XERCES_VERSION_MAJOR >= 2 #include <xercesc/dom/deprecated/DOM_CharacterData.hpp> #else #include <xercesc/dom/DOM_CharacterData.hpp> #endif #include <xalanc/XercesParserLiaison/XercesDOMException.hpp> namespace XALAN_CPP_NAMESPACE { void XercesBridgeHelper::setNodeValue( DOM_NodeType& theXercesNode, const XalanDOMString& nodeValue) { try { theXercesNode.setNodeValue(XalanDOMStringToXercesDOMString(nodeValue)); } catch(const DOM_DOMExceptionType& theException) { throw XercesDOMException(theException); } } void XercesBridgeHelper::normalize(DOM_NodeType& theXercesNode) { try { theXercesNode.normalize(); } catch(const DOM_DOMExceptionType& theException) { throw XercesDOMException(theException); } } bool XercesBridgeHelper::isSupported( const DOM_NodeType& theXercesNode, const XalanDOMString& feature, const XalanDOMString& version) { return theXercesNode.isSupported( XalanDOMStringToXercesDOMString(feature), XalanDOMStringToXercesDOMString(version)); } void XercesBridgeHelper::setPrefix( DOM_NodeType& theXercesNode, const XalanDOMString& prefix) { try { theXercesNode.setPrefix(XalanDOMStringToXercesDOMString(prefix)); } catch(const DOM_DOMExceptionType& theException) { throw XercesDOMException(theException); } } const XalanDOMString XercesBridgeHelper::substringData( const DOM_CharacterDataType& theXercesNode, unsigned int offset, unsigned int count) { try { const DOMStringType theString(theXercesNode.substringData(offset, count)); return XalanDOMString(theString.rawBuffer(), theString.length()); } catch(const DOM_DOMExceptionType& theException) { throw XercesDOMException(theException); } } void XercesBridgeHelper::appendData( DOM_CharacterDataType& theXercesNode, const XalanDOMString& arg) { try { theXercesNode.appendData(XalanDOMStringToXercesDOMString(arg)); } catch(const DOM_DOMExceptionType& theException) { throw XercesDOMException(theException); } } void XercesBridgeHelper::insertData( DOM_CharacterDataType& theXercesNode, unsigned int offset, const XalanDOMString& arg) { try { theXercesNode.insertData(offset, XalanDOMStringToXercesDOMString(arg)); } catch(const DOM_DOMExceptionType& theException) { throw XercesDOMException(theException); } } void XercesBridgeHelper::deleteData( DOM_CharacterDataType& theXercesNode, unsigned int offset, unsigned int count) { try { theXercesNode.deleteData(offset, count); } catch(const DOM_DOMExceptionType& theException) { throw XercesDOMException(theException); } } void XercesBridgeHelper::replaceData( DOM_CharacterDataType& theXercesNode, unsigned int offset, unsigned int count, const XalanDOMString& arg) { try { theXercesNode.replaceData(offset, count, XalanDOMStringToXercesDOMString(arg)); } 
catch(const DOM_DOMExceptionType& theException) { throw XercesDOMException(theException); } } } #endif //XALAN_BUILD_DEPRECATED_DOM_BRIDGE
apache/xalan-c
src/xalanc/XercesParserLiaison/Deprecated/XercesBridgeHelper.cpp
C++
apache-2.0
4,549
/* * Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ #include <aws/ds/model/UpdateConditionalForwarderRequest.h> #include <aws/core/utils/json/JsonSerializer.h> #include <utility> using namespace Aws::DirectoryService::Model; using namespace Aws::Utils::Json; using namespace Aws::Utils; UpdateConditionalForwarderRequest::UpdateConditionalForwarderRequest() : m_directoryIdHasBeenSet(false), m_remoteDomainNameHasBeenSet(false), m_dnsIpAddrsHasBeenSet(false) { } Aws::String UpdateConditionalForwarderRequest::SerializePayload() const { JsonValue payload; if(m_directoryIdHasBeenSet) { payload.WithString("DirectoryId", m_directoryId); } if(m_remoteDomainNameHasBeenSet) { payload.WithString("RemoteDomainName", m_remoteDomainName); } if(m_dnsIpAddrsHasBeenSet) { Array<JsonValue> dnsIpAddrsJsonList(m_dnsIpAddrs.size()); for(unsigned dnsIpAddrsIndex = 0; dnsIpAddrsIndex < dnsIpAddrsJsonList.GetLength(); ++dnsIpAddrsIndex) { dnsIpAddrsJsonList[dnsIpAddrsIndex].AsString(m_dnsIpAddrs[dnsIpAddrsIndex]); } payload.WithArray("DnsIpAddrs", std::move(dnsIpAddrsJsonList)); } return payload.View().WriteReadable(); } Aws::Http::HeaderValueCollection UpdateConditionalForwarderRequest::GetRequestSpecificHeaders() const { Aws::Http::HeaderValueCollection headers; headers.insert(Aws::Http::HeaderValuePair("X-Amz-Target", "DirectoryService_20150416.UpdateConditionalForwarder")); return headers; }
cedral/aws-sdk-cpp
aws-cpp-sdk-ds/source/model/UpdateConditionalForwarderRequest.cpp
C++
apache-2.0
2,001
/*
 * Copyright 2017-present Open Networking Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.atomix.utils.logging;

import org.slf4j.Logger;
import org.slf4j.Marker;

import static com.google.common.base.MoreObjects.toStringHelper;

/**
 * Delegating logger.
 */
public class DelegatingLogger implements Logger {
  private final Logger delegate;

  public DelegatingLogger(Logger delegate) {
    this.delegate = delegate;
  }

  @Override
  public String getName() {
    return delegate.getName();
  }

  @Override
  public boolean isTraceEnabled() {
    return delegate.isTraceEnabled();
  }

  @Override
  public void trace(String msg) {
    delegate.trace(msg);
  }

  @Override
  public void trace(String format, Object arg) {
    delegate.trace(format, arg);
  }

  @Override
  public void trace(String format, Object arg1, Object arg2) {
    delegate.trace(format, arg1, arg2);
  }

  @Override
  public void trace(String format, Object... arguments) {
    delegate.trace(format, arguments);
  }

  @Override
  public void trace(String msg, Throwable t) {
    delegate.trace(msg, t);
  }

  @Override
  public boolean isTraceEnabled(Marker marker) {
    return delegate.isTraceEnabled(marker);
  }

  @Override
  public void trace(Marker marker, String msg) {
    delegate.trace(marker, msg);
  }

  @Override
  public void trace(Marker marker, String format, Object arg) {
    delegate.trace(marker, format, arg);
  }

  @Override
  public void trace(Marker marker, String format, Object arg1, Object arg2) {
    delegate.trace(marker, format, arg1, arg2);
  }

  @Override
  public void trace(Marker marker, String format, Object... argArray) {
    delegate.trace(marker, format, argArray);
  }

  @Override
  public void trace(Marker marker, String msg, Throwable t) {
    delegate.trace(marker, msg, t);
  }

  @Override
  public boolean isDebugEnabled() {
    return delegate.isDebugEnabled();
  }

  @Override
  public void debug(String msg) {
    delegate.debug(msg);
  }

  @Override
  public void debug(String format, Object arg) {
    delegate.debug(format, arg);
  }

  @Override
  public void debug(String format, Object arg1, Object arg2) {
    delegate.debug(format, arg1, arg2);
  }

  @Override
  public void debug(String format, Object... arguments) {
    delegate.debug(format, arguments);
  }

  @Override
  public void debug(String msg, Throwable t) {
    delegate.debug(msg, t);
  }

  @Override
  public boolean isDebugEnabled(Marker marker) {
    return delegate.isDebugEnabled(marker);
  }

  @Override
  public void debug(Marker marker, String msg) {
    delegate.debug(marker, msg);
  }

  @Override
  public void debug(Marker marker, String format, Object arg) {
    delegate.debug(marker, format, arg);
  }

  @Override
  public void debug(Marker marker, String format, Object arg1, Object arg2) {
    delegate.debug(marker, format, arg1, arg2);
  }

  @Override
  public void debug(Marker marker, String format, Object... arguments) {
    delegate.debug(marker, format, arguments);
  }

  @Override
  public void debug(Marker marker, String msg, Throwable t) {
    delegate.debug(marker, msg, t);
  }

  @Override
  public boolean isInfoEnabled() {
    return delegate.isInfoEnabled();
  }

  @Override
  public void info(String msg) {
    delegate.info(msg);
  }

  @Override
  public void info(String format, Object arg) {
    delegate.info(format, arg);
  }

  @Override
  public void info(String format, Object arg1, Object arg2) {
    delegate.info(format, arg1, arg2);
  }

  @Override
  public void info(String format, Object... arguments) {
    delegate.info(format, arguments);
  }

  @Override
  public void info(String msg, Throwable t) {
    delegate.info(msg, t);
  }

  @Override
  public boolean isInfoEnabled(Marker marker) {
    return delegate.isInfoEnabled(marker);
  }

  @Override
  public void info(Marker marker, String msg) {
    delegate.info(marker, msg);
  }

  @Override
  public void info(Marker marker, String format, Object arg) {
    delegate.info(marker, format, arg);
  }

  @Override
  public void info(Marker marker, String format, Object arg1, Object arg2) {
    delegate.info(marker, format, arg1, arg2);
  }

  @Override
  public void info(Marker marker, String format, Object... arguments) {
    delegate.info(marker, format, arguments);
  }

  @Override
  public void info(Marker marker, String msg, Throwable t) {
    delegate.info(marker, msg, t);
  }

  @Override
  public boolean isWarnEnabled() {
    return delegate.isWarnEnabled();
  }

  @Override
  public void warn(String msg) {
    delegate.warn(msg);
  }

  @Override
  public void warn(String format, Object arg) {
    delegate.warn(format, arg);
  }

  @Override
  public void warn(String format, Object... arguments) {
    delegate.warn(format, arguments);
  }

  @Override
  public void warn(String format, Object arg1, Object arg2) {
    delegate.warn(format, arg1, arg2);
  }

  @Override
  public void warn(String msg, Throwable t) {
    delegate.warn(msg, t);
  }

  @Override
  public boolean isWarnEnabled(Marker marker) {
    return delegate.isWarnEnabled(marker);
  }

  @Override
  public void warn(Marker marker, String msg) {
    delegate.warn(marker, msg);
  }

  @Override
  public void warn(Marker marker, String format, Object arg) {
    delegate.warn(marker, format, arg);
  }

  @Override
  public void warn(Marker marker, String format, Object arg1, Object arg2) {
    delegate.warn(marker, format, arg1, arg2);
  }

  @Override
  public void warn(Marker marker, String format, Object... arguments) {
    delegate.warn(marker, format, arguments);
  }

  @Override
  public void warn(Marker marker, String msg, Throwable t) {
    delegate.warn(marker, msg, t);
  }

  @Override
  public boolean isErrorEnabled() {
    return delegate.isErrorEnabled();
  }

  @Override
  public void error(String msg) {
    delegate.error(msg);
  }

  @Override
  public void error(String format, Object arg) {
    delegate.error(format, arg);
  }

  @Override
  public void error(String format, Object arg1, Object arg2) {
    delegate.error(format, arg1, arg2);
  }

  @Override
  public void error(String format, Object... arguments) {
    delegate.error(format, arguments);
  }

  @Override
  public void error(String msg, Throwable t) {
    delegate.error(msg, t);
  }

  @Override
  public boolean isErrorEnabled(Marker marker) {
    return delegate.isErrorEnabled(marker);
  }

  @Override
  public void error(Marker marker, String msg) {
    delegate.error(marker, msg);
  }

  @Override
  public void error(Marker marker, String format, Object arg) {
    delegate.error(marker, format, arg);
  }

  @Override
  public void error(Marker marker, String format, Object arg1, Object arg2) {
    delegate.error(marker, format, arg1, arg2);
  }

  @Override
  public void error(Marker marker, String format, Object... arguments) {
    delegate.error(marker, format, arguments);
  }

  @Override
  public void error(Marker marker, String msg, Throwable t) {
    delegate.error(marker, msg, t);
  }

  @Override
  public String toString() {
    return toStringHelper(this)
        .addValue(delegate)
        .toString();
  }
}
atomix/atomix
utils/src/main/java/io/atomix/utils/logging/DelegatingLogger.java
Java
apache-2.0
7,703
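DelegatingLogger is a pure decorator: every SLF4J Logger method forwards to the wrapped delegate, so a subclass only needs to override the overloads it wants to change. A minimal sketch of that use, assuming only SLF4J on the classpath; the PrefixedLogger name and its prefixing behavior are invented for illustration:

import io.atomix.utils.logging.DelegatingLogger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical subclass: prefixes plain info() messages and inherits the
// forwarding behavior of every other overload from DelegatingLogger.
public class PrefixedLogger extends DelegatingLogger {
  private final String prefix;

  public PrefixedLogger(Logger delegate, String prefix) {
    super(delegate);
    this.prefix = prefix;
  }

  @Override
  public void info(String msg) {
    super.info(prefix + msg);
  }

  public static void main(String[] args) {
    Logger log = new PrefixedLogger(LoggerFactory.getLogger("demo"), "[atomix] ");
    log.info("started");                  // logged as "[atomix] started"
    log.debug("forwarded as-is: {}", 1);  // other overloads pass through unchanged
  }
}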
// Decompiled by Jad v1.5.8e. Copyright 2001 Pavel Kouznetsov.
// Jad home page: http://www.geocities.com/kpdus/jad.html
// Decompiler options: braces fieldsfirst space lnc

package com.xiaomi.infra.galaxy.common.model;

import com.xiaomi.infra.galaxy.common.GalaxyClientException;
import com.xiaomi.infra.galaxy.common.constants.ReturnCode;

import java.util.ArrayList;
import java.util.List;

// Referenced classes of package com.xiaomi.infra.galaxy.common.model:
//            AttributeValue, Comparator, AttributeType, Verifier

public class Condition {

    private String attributeName;
    private List attributeValues;
    private String comparator;

    public Condition() {
    }

    public Condition addAttributeValue(Object obj) {
        if (attributeValues == null) {
            attributeValues = new ArrayList();
        }
        AttributeValue.putAttributeValueInList(attributeValues, obj);
        return this;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof Condition)) {
            return false;
        }
        Condition other = (Condition) obj;
        // Comparators must both be null or both be non-null and equal.
        if ((other.getComparator() == null) != (getComparator() == null)) {
            return false;
        }
        if (other.getComparator() != null && !other.getComparator().equals(getComparator())) {
            return false;
        }
        // Same null-safe comparison for the attribute values.
        if ((other.getAttributeValues() == null) != (getAttributeValues() == null)) {
            return false;
        }
        return other.getAttributeValues() == null
                || other.getAttributeValues().equals(getAttributeValues());
    }

    public String getAttributeName() {
        return attributeName;
    }

    public List getAttributeValues() {
        return attributeValues;
    }

    public String getComparator() {
        return comparator;
    }

    @Override
    public int hashCode() {
        int comparatorHash = getComparator() == null ? 0 : getComparator().hashCode();
        int result = 31 * (comparatorHash + 31);
        if (getAttributeValues() != null) {
            result += getAttributeValues().hashCode();
        }
        return result;
    }

    public void setAttributeName(String attributeName) {
        this.attributeName = attributeName;
    }

    public void setAttributeValues(List attributeValues) {
        this.attributeValues = attributeValues;
    }

    public void setComparator(String comparator) {
        this.comparator = comparator;
    }

    public void validate() {
        Comparator cmp = Comparator.fromValue(comparator);
        if (attributeValues == null || attributeValues.size() != 1) {
            throw new GalaxyClientException(ReturnCode.UNEXPECTED_NUMBER_OF_OPERANDS,
                    "comparator can only use one attribute value");
        }
        AttributeValue value = (AttributeValue) attributeValues.get(0);
        AttributeType type = AttributeType.fromValue(value.getType());
        Verifier.validateAttributeValue(attributeName, value, false);

        // EQ/NE accept any attribute type; the ordering comparators only
        // accept binary, string and integer operands.
        switch (cmp) {
            case EQ:
            case NE:
                return;
            case GT:
            case GE:
            case LT:
            case LE:
                if (type == AttributeType.BINARY || type == AttributeType.STRING
                        || type == AttributeType.INT8 || type == AttributeType.INT16
                        || type == AttributeType.INT32 || type == AttributeType.INT64) {
                    return;
                }
                break;
            default:
                break;
        }
        throw new GalaxyClientException(ReturnCode.UNEXPECTED_OPERAND_TYPE,
                type.name() + "(comparator:" + cmp + " ,attributeType" + type + ")");
    }

    public Condition withAttributeValues(List attributeValues) {
        setAttributeValues(attributeValues);
        return this;
    }

    public Condition withComparator(Comparator comparator) {
        setComparator(comparator.name());
        return this;
    }
}
vishnudevk/MiBandDecompiled
Original Files/source/src/com/xiaomi/infra/galaxy/common/model/Condition.java
Java
apache-2.0
8,239
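Read together with the reconstructed validate() above: a condition carries exactly one operand, EQ/NE accept any attribute type, and the ordering comparators are limited to binary, string and integer operands. A hedged usage sketch; the attribute name and value are invented, and Comparator/AttributeValue/GalaxyClientException come from the same decompiled package:

// Hypothetical call site: build a condition fluently, then validate before use.
Condition cond = new Condition()
        .withComparator(Comparator.GT)   // ordering comparator
        .addAttributeValue(42);          // must resolve to an integer/string/binary type
cond.setAttributeName("age");
cond.validate();  // throws GalaxyClientException on a bad operand count or type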
package com.marvinlabs.widget.progresspanel.demo;

import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentActivity;

import com.marvinlabs.widget.progresspanel.demo.R;

public class DemoListActivity extends FragmentActivity implements DemoListFragment.OnDemoSelectedListener {

    private static final String TAG_LIST_FRAGMENT = "ListFragment";
    private static final String TAG_DEMO_FRAGMENT = "DemoFragment";

    private DemoListFragment listFragment;
    private boolean isDualPane;
    private int currentDemo;

    /**
     * Creates a fragment given a fully qualified class name and some arguments
     */
    public static Fragment newFragmentInstance(String className, Bundle args) {
        try {
            Class<?> c = Class.forName(className);
            Fragment f = (Fragment) c.newInstance();
            f.setArguments(args);
            return f;
        } catch (ClassNotFoundException e) {
            throw new RuntimeException("Cannot create fragment", e);
        } catch (InstantiationException e) {
            throw new RuntimeException("Cannot create fragment", e);
        } catch (IllegalAccessException e) {
            throw new RuntimeException("Cannot create fragment", e);
        }
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_demolist);

        listFragment = (DemoListFragment) getSupportFragmentManager().findFragmentByTag(TAG_LIST_FRAGMENT);
        if (listFragment == null) {
            listFragment = DemoListFragment.newInstance();
            getSupportFragmentManager().beginTransaction().replace(R.id.main_container, listFragment, TAG_LIST_FRAGMENT).commitAllowingStateLoss();
        }
    }

    @Override
    protected void onStart() {
        super.onStart();
        listFragment.setSelectable(isDualPane);
    }

    @Override
    public void onDemoSelected(String fragmentClass, Bundle args) {
        Intent i = new Intent(this, DemoActivity.class);
        i.putExtra(DemoActivity.EXTRA_FRAGMENT_CLASS, fragmentClass);
        i.putExtra(DemoActivity.EXTRA_FRAGMENT_ARGS, args);
        startActivity(i);
    }
}
0359xiaodong/android-progress-panel
demo/src/main/java/com/marvinlabs/widget/progresspanel/demo/DemoListActivity.java
Java
apache-2.0
2,294
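newFragmentInstance() is the reflective counterpart of the class-name extra that onDemoSelected() packs into the Intent: the receiving activity can turn the string back into a live Fragment. A sketch of that receiving side, assumed to run inside an activity's onCreate(); the DemoFragmentOne class name is invented:

// Hypothetical receiving side (e.g. in DemoActivity.onCreate):
Bundle args = new Bundle();
args.putString("title", "First demo");
Fragment f = DemoListActivity.newFragmentInstance(
        "com.marvinlabs.widget.progresspanel.demo.DemoFragmentOne", args);
getSupportFragmentManager().beginTransaction()
        .replace(R.id.main_container, f)
        .commit();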
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://github.com/googleapis/google-api-java-client-services/
 * Modify at your own risk.
 */

package com.google.api.services.containeranalysis.v1alpha1.model;

/**
 * Basis describes the base image portion (Note) of the DockerImage relationship. Linked occurrences
 * are derived from this or an equivalent image via: FROM Or an equivalent reference, e.g. a tag of
 * the resource_url.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Container Analysis API. For a detailed explanation
 * see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class Basis extends com.google.api.client.json.GenericJson {

  /**
   * The fingerprint of the base image.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Fingerprint fingerprint;

  /**
   * The resource_url for the resource representing the basis of associated occurrence images.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String resourceUrl;

  /**
   * The fingerprint of the base image.
   * @return value or {@code null} for none
   */
  public Fingerprint getFingerprint() {
    return fingerprint;
  }

  /**
   * The fingerprint of the base image.
   * @param fingerprint fingerprint or {@code null} for none
   */
  public Basis setFingerprint(Fingerprint fingerprint) {
    this.fingerprint = fingerprint;
    return this;
  }

  /**
   * The resource_url for the resource representing the basis of associated occurrence images.
   * @return value or {@code null} for none
   */
  public java.lang.String getResourceUrl() {
    return resourceUrl;
  }

  /**
   * The resource_url for the resource representing the basis of associated occurrence images.
   * @param resourceUrl resourceUrl or {@code null} for none
   */
  public Basis setResourceUrl(java.lang.String resourceUrl) {
    this.resourceUrl = resourceUrl;
    return this;
  }

  @Override
  public Basis set(String fieldName, Object value) {
    return (Basis) super.set(fieldName, value);
  }

  @Override
  public Basis clone() {
    return (Basis) super.clone();
  }
}
googleapis/google-api-java-client-services
clients/google-api-services-containeranalysis/v1alpha1/1.31.0/com/google/api/services/containeranalysis/v1alpha1/model/Basis.java
Java
apache-2.0
2,988
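Because Basis extends GenericJson, the typed fluent setters and the untyped set(fieldName, value) override write into the same underlying JSON map. A small sketch of building one; the resource URL and digest are invented values:

// Hypothetical construction of a Basis note payload.
Basis basis = new Basis()
    .setResourceUrl("https://gcr.io/my-project/my-image@sha256:abc123")
    .setFingerprint(new Fingerprint());
// Untyped access by JSON field name reaches the same data:
basis.set("resourceUrl", "https://gcr.io/my-project/other-image");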