| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
| stringlengths 3 to 1.05M | stringlengths 4 to 116 | stringlengths 4 to 991 | stringclasses 9 values | stringclasses 15 values | int32 3 to 1.05M |
"""
WSGI config for OIPA project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "OIPA.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "OIPA.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
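# A hypothetical sketch of such a wrapper (not part of the original file;
# StripPathPrefixMiddleware is an assumed name, not an OIPA or Django API):
#
# class StripPathPrefixMiddleware(object):
#     """Delegate to the Django application after trimming a path prefix."""
#     def __init__(self, app, prefix="/oipa"):
#         self.app = app
#         self.prefix = prefix
#
#     def __call__(self, environ, start_response):
#         # Rewrite PATH_INFO before handing the request to Django.
#         path = environ.get("PATH_INFO", "")
#         if path.startswith(self.prefix):
#             environ["PATH_INFO"] = path[len(self.prefix):] or "/"
#         return self.app(environ, start_response)
#
# application = StripPathPrefixMiddleware(application)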
| schlos/OIPA-V2.1 | OIPA/OIPA/wsgi.py | Python | agpl-3.0 | 1,413 |
<?php
/**
* Shopware 5
* Copyright (c) shopware AG
*
* According to our dual licensing model, this program can be used either
* under the terms of the GNU Affero General Public License, version 3,
* or under a proprietary license.
*
* The texts of the GNU Affero General Public License with an additional
* permission and of our proprietary license can be found at and
* in the LICENSE file you have received along with this program.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* "Shopware" is a registered trademark of shopware AG.
* The licensing of the program under the AGPLv3 does not imply a
* trademark license. Therefore any rights, title and interest in
* our trademarks remain entirely with us.
*/
namespace Shopware\Recovery\Install\Service;
use Shopware\Recovery\Install\Struct\Shop;
/**
* @category Shopware
*
* @copyright Copyright (c) shopware AG (http://www.shopware.de)
*/
class ShopService
{
/**
* @var \PDO
*/
private $connection;
/**
* @param \PDO $connection
*/
public function __construct(\PDO $connection)
{
$this->connection = $connection;
}
/**
* @param Shop $shop
*
* @throws \RuntimeException
*/
public function updateShop(Shop $shop)
{
if (empty($shop->locale)
|| empty($shop->host)
) {
throw new \RuntimeException('Please fill in all required fields. (shop configuration)');
}
try {
$fetchLanguageId = $this->getLocaleIdByLocale($shop->locale);
// Update s_core_shops
$sql = <<<'EOT'
UPDATE
s_core_shops
SET
`name` = ?,
locale_id = ?,
host = ?,
base_path = ?,
hosts = ?
WHERE
`default` = 1
EOT;
$prepareStatement = $this->connection->prepare($sql);
$prepareStatement->execute([
$shop->name,
$fetchLanguageId,
$shop->host,
$shop->basePath,
$shop->host,
]);
} catch (\Exception $e) {
throw new \RuntimeException($e->getMessage(), 0, $e);
}
}
/**
* @param Shop $shop
*
* @throws \RuntimeException
*/
public function updateConfig(Shop $shop)
{
// Do update on shop-configuration
if (empty($shop->name) || empty($shop->email)) {
throw new \RuntimeException('Please fill in all required fields. (shop configuration#2)');
}
$this->updateMailAddress($shop);
$this->updateShopName($shop);
}
/**
* @param string $locale
*
* @return int
*/
protected function getLocaleIdByLocale($locale)
{
$fetchLanguageId = $this->connection->prepare(
'SELECT id FROM s_core_locales WHERE locale = ?'
);
$fetchLanguageId->execute([$locale]);
$fetchLanguageId = $fetchLanguageId->fetchColumn();
if (!$fetchLanguageId) {
throw new \RuntimeException('Language with locale ' . $locale . ' not found');
}
return (int) $fetchLanguageId;
}
/**
* @param Shop $shop
*/
private function updateMailAddress(Shop $shop)
{
$this->updateConfigValue('mail', $shop->email);
}
/**
* @param Shop $shop
*/
private function updateShopName(Shop $shop)
{
$this->updateConfigValue('shopName', $shop->name);
}
/**
* @param string $elementName
* @param mixed $value
*/
private function updateConfigValue($elementName, $value)
{
$sql = <<<'EOT'
DELETE
FROM s_core_config_values
WHERE element_id =
(SELECT id FROM s_core_config_elements WHERE name=:elementName)
AND shop_id = 1
EOT;
$this->connection->prepare($sql)->execute([
'elementName' => $elementName,
]);
$sql = <<<'EOT'
INSERT INTO `s_core_config_values`
(`id`, `element_id`, `shop_id`, `value`) VALUES
(NULL, (SELECT id FROM s_core_config_elements WHERE name=:elementName), 1, :value);
EOT;
$prepared = $this->connection->prepare($sql);
$prepared->execute([
'elementName' => $elementName,
'value' => serialize($value),
]);
}
}
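// A minimal usage sketch (assumed wiring, not part of the original class);
// the DSN, credentials and Shop field values are placeholders, and Shop is
// assumed to allow bare property assignment:
//
// $connection = new \PDO('mysql:host=localhost;dbname=shopware', 'user', 'secret');
// $shop = new Shop();
// $shop->name = 'Demo shop';
// $shop->email = 'info@example.com';
// $shop->locale = 'de_DE';
// $shop->host = 'shop.example.com';
// $shop->basePath = '/';
// $service = new ShopService($connection);
// $service->updateShop($shop);   // writes s_core_shops
// $service->updateConfig($shop); // writes the mail + shopName config values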
| egoistIT/shopware | recovery/install/src/Service/ShopService.php | PHP | agpl-3.0 | 4,461 |
/**
* tapcfg - A cross-platform configuration utility for TAP driver
* Copyright (C) 2008-2011 Juho Vähä-Herttua
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*/
using TAPNet;
using System;
using System.Net;
using System.Threading;
public class TAPNetTest {
private static void Main(string[] args) {
VirtualDevice dev = new VirtualDevice();
dev.LogCallback = new LogCallback(LogCallback);
dev.Start("Device name", true);
Console.WriteLine("Got device name: {0}", dev.DeviceName);
Console.WriteLine("Got device hwaddr: {0}", BitConverter.ToString(dev.HWAddress));
dev.HWAddress = new byte[] { 0x00, 0x01, 0x23, 0x45, 0x67, 0x89 };
dev.MTU = 1280;
dev.SetAddress(IPAddress.Parse("192.168.10.1"), 16);
dev.Enabled = true;
while (true) {
Thread.Sleep(1000);
}
}
private static void LogCallback(LogLevel level, string msg) {
Console.WriteLine(level + ": " + msg);
}
}
| juhovh/tapcfg | src/demos/TAPNetTest.cs | C# | lgpl-2.1 | 1,388 |
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
import os
class Psi4(CMakePackage):
"""Psi4 is an open-source suite of ab initio quantum chemistry
programs designed for efficient, high-accuracy simulations of
a variety of molecular properties."""
homepage = "http://www.psicode.org/"
url = "https://github.com/psi4/psi4/archive/0.5.tar.gz"
version('0.5', '53041b8a9be3958384171d0d22f9fdd0')
variant('build_type', default='Release',
description='The build type to build',
values=('Debug', 'Release'))
# Required dependencies
depends_on('blas')
depends_on('lapack')
depends_on('boost+chrono+filesystem+python+regex+serialization+system+timer+thread')
depends_on('python')
depends_on('cmake@3.3:', type='build')
depends_on('py-numpy', type=('build', 'run'))
# Optional dependencies
# TODO: add packages for these
# depends_on('perl')
# depends_on('erd')
# depends_on('pcm-solver')
# depends_on('chemps2')
def cmake_args(self):
spec = self.spec
return [
'-DBLAS_TYPE={0}'.format(spec['blas'].name.upper()),
'-DBLAS_LIBRARIES={0}'.format(spec['blas'].libs.joined()),
'-DLAPACK_TYPE={0}'.format(spec['lapack'].name.upper()),
'-DLAPACK_LIBRARIES={0}'.format(
spec['lapack'].libs.joined()),
'-DBOOST_INCLUDEDIR={0}'.format(spec['boost'].prefix.include),
'-DBOOST_LIBRARYDIR={0}'.format(spec['boost'].prefix.lib),
'-DENABLE_CHEMPS2=OFF'
]
@run_after('install')
def filter_compilers(self, spec, prefix):
"""Run after install to tell the configuration files to
use the compilers that Spack built the package with.
If this isn't done, they'll have PLUGIN_CXX set to
Spack's generic cxx. We want it to be bound to
whatever compiler it was built with."""
kwargs = {'ignore_absent': True, 'backup': False, 'string': True}
cc_files = ['bin/psi4-config']
cxx_files = ['bin/psi4-config', 'include/psi4/psiconfig.h']
template = 'share/psi4/plugin/Makefile.template'
for filename in cc_files:
filter_file(os.environ['CC'], self.compiler.cc,
os.path.join(prefix, filename), **kwargs)
for filename in cxx_files:
filter_file(os.environ['CXX'], self.compiler.cxx,
os.path.join(prefix, filename), **kwargs)
# The binary still keeps track of the compiler used to install Psi4
# and uses it when creating a plugin template
filter_file('@PLUGIN_CXX@', self.compiler.cxx,
os.path.join(prefix, template), **kwargs)
# The binary links to the build include directory instead of the
# installation include directory:
# https://github.com/psi4/psi4/issues/410
filter_file('@PLUGIN_INCLUDES@', '-I{0}'.format(
' -I'.join([
os.path.join(spec['psi4'].prefix.include, 'psi4'),
os.path.join(spec['boost'].prefix.include, 'boost'),
os.path.join(spec['python'].headers.directories[0]),
spec['lapack'].prefix.include,
spec['blas'].prefix.include,
'/usr/include'
])
), os.path.join(prefix, template), **kwargs)
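# A brief usage sketch (assumed, not part of the recipe): with this file on a
# Spack instance's builtin repository path, installation is a one-liner, e.g.
#
#   spack install psi4@0.5 ^openblas
#
# where "^openblas" is just one possible provider for the virtual blas/lapack
# dependencies declared above.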
| EmreAtes/spack | var/spack/repos/builtin/packages/psi4/package.py | Python | lgpl-2.1 | 4,596 |
/*****************************************/
/* Written by andrew.wilkins@csiro.au */
/* Please contact me if you make changes */
/*****************************************/
#include "RichardsExcavFlow.h"
#include "Function.h"
#include "Material.h"
template<>
InputParameters validParams<RichardsExcavFlow>()
{
InputParameters params = validParams<SideIntegralVariablePostprocessor>();
params.addRequiredParam<FunctionName>("excav_geom_function", "The function describing the excavation geometry (type RichardsExcavGeom)");
params.addRequiredParam<UserObjectName>("richardsVarNames_UO", "The UserObject that holds the list of Richards variable names.");
params.addClassDescription("Records total flow INTO an excavation (if quantity is positive then flow has occured from rock into excavation void)");
return params;
}
RichardsExcavFlow::RichardsExcavFlow(const std::string & name, InputParameters parameters) :
SideIntegralVariablePostprocessor(name, parameters),
_richards_name_UO(getUserObject<RichardsVarNames>("richardsVarNames_UO")),
_pvar(_richards_name_UO.richards_var_num(_var.number())),
_flux(getMaterialProperty<std::vector<RealVectorValue> >("flux")),
_func(getFunction("excav_geom_function"))
{}
Real
RichardsExcavFlow::computeQpIntegral()
{
return -_func.value(_t, _q_point[_qp])*_normals[_qp]*_flux[_qp][_pvar]*_dt;
}
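// A hypothetical input-file block using this postprocessor (sketch only; the
// object and boundary names are placeholders, while the two custom parameter
// names match the validParams declaration above):
//
// [Postprocessors]
//   [./excav_flow]
//     type = RichardsExcavFlow
//     variable = pressure
//     boundary = excav_boundary
//     excav_geom_function = excav_geom
//     richardsVarNames_UO = richards_vars
//   [../]
// []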
| cpritam/moose | modules/richards/src/postprocessors/RichardsExcavFlow.C | C++ | lgpl-2.1 | 1,375 |
/****************************************************************/
/* MOOSE - Multiphysics Object Oriented Simulation Environment */
/* */
/* All contents are licensed under LGPL V2.1 */
/* See LICENSE for full restrictions */
/****************************************************************/
#include "RichardsExcavFlow.h"
#include "Function.h"
#include "Material.h"
template<>
InputParameters validParams<RichardsExcavFlow>()
{
InputParameters params = validParams<SideIntegralVariablePostprocessor>();
params.addRequiredParam<FunctionName>("excav_geom_function", "The function describing the excavation geometry (type RichardsExcavGeom)");
params.addRequiredParam<UserObjectName>("richardsVarNames_UO", "The UserObject that holds the list of Richards variable names.");
params.addClassDescription("Records total flow INTO an excavation (if quantity is positive then flow has occured from rock into excavation void)");
return params;
}
RichardsExcavFlow::RichardsExcavFlow(const std::string & name, InputParameters parameters) :
SideIntegralVariablePostprocessor(name, parameters),
_richards_name_UO(getUserObject<RichardsVarNames>("richardsVarNames_UO")),
_pvar(_richards_name_UO.richards_var_num(_var.number())),
_flux(getMaterialProperty<std::vector<RealVectorValue> >("flux")),
_func(getFunction("excav_geom_function"))
{}
Real
RichardsExcavFlow::computeQpIntegral()
{
return -_func.value(_t, _q_point[_qp])*_normals[_qp]*_flux[_qp][_pvar]*_dt;
}
| gleicher27/Tardigrade | moose/modules/richards/src/postprocessors/RichardsExcavFlow.C | C++ | lgpl-2.1 | 1,602 |
/*
* Jitsi, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.gui.main.contactlist.contactsource;
import java.util.*;
import net.java.sip.communicator.impl.gui.*;
import net.java.sip.communicator.service.contactsource.*;
import net.java.sip.communicator.service.protocol.*;
/**
* The <tt>StringContactSourceServiceImpl</tt> is an implementation of the
* <tt>ContactSourceService</tt> that returns the searched string as a result
* contact.
*
* @author Yana Stamcheva
*/
public class StringContactSourceServiceImpl
implements ContactSourceService
{
/**
* The protocol provider to be used with this string contact source.
*/
private final ProtocolProviderService protocolProvider;
/**
* The operation set supported by this string contact source.
*/
private final Class<? extends OperationSet> opSetClass;
/**
* Indicates whether adding display details for source contacts is disabled.
*/
private boolean disableDisplayDetails = true;
/**
* Creates an instance of <tt>StringContactSourceServiceImpl</tt>.
*
* @param protocolProvider the protocol provider to be used with this string
* contact source
* @param opSet the operation set supported by this string contact source
*/
public StringContactSourceServiceImpl(
ProtocolProviderService protocolProvider,
Class<? extends OperationSet> opSet)
{
this.protocolProvider = protocolProvider;
this.opSetClass = opSet;
}
/**
* Returns the type of this contact source.
*
* @return the type of this contact source
*/
public int getType()
{
return SEARCH_TYPE;
}
/**
* Returns a user-friendly string that identifies this contact source.
*
* @return the display name of this contact source
*/
public String getDisplayName()
{
return GuiActivator.getResources().getI18NString(
"service.gui.SEARCH_STRING_CONTACT_SOURCE");
}
/**
* Creates query for the given <tt>queryString</tt>.
*
* @param queryString the string to search for
* @return the created query
*/
public ContactQuery createContactQuery(String queryString)
{
return createContactQuery(queryString, -1);
}
/**
* Creates query for the given <tt>queryString</tt>.
*
* @param queryString the string to search for
* @param contactCount the maximum count of result contacts
* @return the created query
*/
public ContactQuery createContactQuery( String queryString,
int contactCount)
{
return new StringQuery(queryString);
}
/**
* Changes whether to add display details for contact sources.
* @param disableDisplayDetails <tt>true</tt> to disable adding display details for source contacts
*/
public void setDisableDisplayDetails(boolean disableDisplayDetails)
{
this.disableDisplayDetails = disableDisplayDetails;
}
/**
* Returns the source contact corresponding to the query string.
*
* @return the source contact corresponding to the query string
*/
public SourceContact createSourceContact(String queryString)
{
ArrayList<ContactDetail> contactDetails
= new ArrayList<ContactDetail>();
ContactDetail contactDetail = new ContactDetail(queryString);
// Init supported operation sets.
ArrayList<Class<? extends OperationSet>>
supportedOpSets
= new ArrayList<Class<? extends OperationSet>>();
supportedOpSets.add(opSetClass);
contactDetail.setSupportedOpSets(supportedOpSets);
// Init preferred protocol providers.
Map<Class<? extends OperationSet>,ProtocolProviderService>
providers = new HashMap<Class<? extends OperationSet>,
ProtocolProviderService>();
providers.put(opSetClass, protocolProvider);
contactDetail.setPreferredProviders(providers);
contactDetails.add(contactDetail);
GenericSourceContact sourceContact
= new GenericSourceContact( StringContactSourceServiceImpl.this,
queryString,
contactDetails);
if(disableDisplayDetails)
{
sourceContact.setDisplayDetails(
GuiActivator.getResources().getI18NString(
"service.gui.CALL_VIA")
+ " "
+ protocolProvider.getAccountID().getDisplayName());
}
return sourceContact;
}
/**
* The query implementation.
*/
private class StringQuery
extends AbstractContactQuery<ContactSourceService>
{
/**
* The query string.
*/
private String queryString;
/**
* The query result list.
*/
private final List<SourceContact> results;
/**
* Creates an instance of this query implementation.
*
* @param queryString the string to query
*/
public StringQuery(String queryString)
{
super(StringContactSourceServiceImpl.this);
this.queryString = queryString;
this.results = new ArrayList<SourceContact>();
}
/**
* Returns the query string.
*
* @return the query string
*/
public String getQueryString()
{
return queryString;
}
/**
* Returns the list of query results.
*
* @return the list of query results
*/
public List<SourceContact> getQueryResults()
{
return results;
}
@Override
public void start()
{
SourceContact contact = createSourceContact(queryString);
results.add(contact);
fireContactReceived(contact);
if (getStatus() != QUERY_CANCELED)
setStatus(QUERY_COMPLETED);
}
}
/**
* Returns the index of the contact source in the result list.
*
* @return the index of the contact source in the result list
*/
public int getIndex()
{
return 0;
}
}
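// A minimal usage sketch (assumed caller code, not part of the original file);
// "provider" stands for any registered ProtocolProviderService instance:
//
// ContactSourceService source = new StringContactSourceServiceImpl(
//     provider, OperationSetBasicTelephony.class);
// ContactQuery query = source.createContactQuery("1234");
// query.start(); // fires contactReceived with a contact wrapping "1234"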
| marclaporte/jitsi | src/net/java/sip/communicator/impl/gui/main/contactlist/contactsource/StringContactSourceServiceImpl.java | Java | lgpl-2.1 | 6,347 |
/* Soot - a J*va Optimization Framework
* Copyright (C) 2002 Florian Loitsch
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
/*
* Modified by the Sable Research Group and others 1997-1999.
* See the 'credits' file distributed with Soot for the complete list of
* contributors. (Soot is distributed at http://www.sable.mcgill.ca/soot)
*/
package soot.jimple.toolkits.scalar.pre;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import soot.Body;
import soot.BodyTransformer;
import soot.EquivalentValue;
import soot.G;
import soot.Local;
import soot.Scene;
import soot.SideEffectTester;
import soot.Singletons;
import soot.Unit;
import soot.Value;
import soot.jimple.AssignStmt;
import soot.jimple.IdentityStmt;
import soot.jimple.Jimple;
import soot.jimple.NaiveSideEffectTester;
import soot.jimple.toolkits.graph.CriticalEdgeRemover;
import soot.jimple.toolkits.pointer.PASideEffectTester;
import soot.jimple.toolkits.scalar.LocalCreation;
import soot.options.BCMOptions;
import soot.options.Options;
import soot.toolkits.graph.BriefUnitGraph;
import soot.toolkits.graph.UnitGraph;
import soot.util.Chain;
import soot.util.UnitMap;
/**
* Performs a partial redundancy elimination (= code motion). This is done by
* moving <b>every</b> computation as high as possible (it is easy to show that
* they are computationally optimal), and then replacing the original
* computation by a reference to this new, higher computation. This implies that
* we introduce <b>many</b> new helper-variables (which can easily be eliminated
* afterwards).<br>
* In order to catch every redundant expression, this transformation must be
* done on a graph without critical edges. Therefore the first thing we do, is
* removing them. A subsequent pass can then easily remove the synthetic nodes
* we have introduced.<br>
* The term "busy" refers to the fact that we <b>always</b> move computations
* as high as possible, even if this is not necessary.
*
* @see soot.jimple.toolkits.graph.CriticalEdgeRemover
*/
public class BusyCodeMotion extends BodyTransformer {
public BusyCodeMotion(Singletons.Global g) {
}
public static BusyCodeMotion v() {
return G.v().soot_jimple_toolkits_scalar_pre_BusyCodeMotion();
}
private static final String PREFIX = "$bcm";
/**
* performs the busy code motion.
*/
protected void internalTransform(Body b, String phaseName, Map<String, String> opts) {
BCMOptions options = new BCMOptions(opts);
HashMap<EquivalentValue, Local> expToHelper = new HashMap<EquivalentValue, Local>();
Chain<Unit> unitChain = b.getUnits();
if (Options.v().verbose())
G.v().out.println("[" + b.getMethod().getName() + "] performing Busy Code Motion...");
CriticalEdgeRemover.v().transform(b, phaseName + ".cer");
UnitGraph graph = new BriefUnitGraph(b);
/* map each unit to its RHS. only take binary expressions */
Map<Unit, EquivalentValue> unitToEquivRhs = new UnitMap<EquivalentValue>(b, graph.size() + 1, 0.7f) {
protected EquivalentValue mapTo(Unit unit) {
Value tmp = SootFilter.noInvokeRhs(unit);
Value tmp2 = SootFilter.binop(tmp);
if (tmp2 == null)
tmp2 = SootFilter.concreteRef(tmp);
return SootFilter.equiVal(tmp2);
}
};
/* same as before, but without exception-throwing expressions */
Map<Unit, EquivalentValue> unitToNoExceptionEquivRhs = new UnitMap<EquivalentValue>(b, graph.size() + 1, 0.7f) {
protected EquivalentValue mapTo(Unit unit) {
Value tmp = SootFilter.binopRhs(unit);
tmp = SootFilter.noExceptionThrowing(tmp);
return SootFilter.equiVal(tmp);
}
};
/* if a more precise side-effect tester comes out, please change it here! */
SideEffectTester sideEffect;
if (Scene.v().hasCallGraph() && !options.naive_side_effect()) {
sideEffect = new PASideEffectTester();
} else {
sideEffect = new NaiveSideEffectTester();
}
sideEffect.newMethod(b.getMethod());
UpSafetyAnalysis upSafe = new UpSafetyAnalysis(graph, unitToEquivRhs, sideEffect);
DownSafetyAnalysis downSafe = new DownSafetyAnalysis(graph, unitToNoExceptionEquivRhs, sideEffect);
EarliestnessComputation earliest = new EarliestnessComputation(graph, upSafe, downSafe, sideEffect);
LocalCreation localCreation = new LocalCreation(b.getLocals(), PREFIX);
Iterator<Unit> unitIt = unitChain.snapshotIterator();
{ /* insert the computations at the earliest positions */
while (unitIt.hasNext()) {
Unit currentUnit = unitIt.next();
for (EquivalentValue equiVal : earliest.getFlowBefore(currentUnit)) {
// Value exp = equiVal.getValue();
/* get the unique helper-name for this expression */
Local helper = expToHelper.get(equiVal);
// Make sure not to place any stuff inside the identity block at
// the beginning of the method
if (currentUnit instanceof IdentityStmt)
currentUnit = getFirstNonIdentityStmt(b);
if (helper == null) {
helper = localCreation.newLocal(equiVal.getType());
expToHelper.put(equiVal, helper);
}
/* insert a new Assignment-stmt before the currentUnit */
Value insertValue = Jimple.cloneIfNecessary(equiVal.getValue());
Unit firstComp = Jimple.v().newAssignStmt(helper, insertValue);
unitChain.insertBefore(firstComp, currentUnit);
}
}
}
{ /* replace old computations by the helper-vars */
unitIt = unitChain.iterator();
while (unitIt.hasNext()) {
Unit currentUnit = unitIt.next();
EquivalentValue rhs = unitToEquivRhs.get(currentUnit);
if (rhs != null) {
Local helper = expToHelper.get(rhs);
if (helper != null)
((AssignStmt) currentUnit).setRightOp(helper);
}
}
}
if (Options.v().verbose())
G.v().out.println("[" + b.getMethod().getName() + "] Busy Code Motion done!");
}
private Unit getFirstNonIdentityStmt(Body b) {
for (Unit u : b.getUnits())
if (!(u instanceof IdentityStmt))
return u;
return null;
}
}
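// A brief usage sketch (assumed invocation, not part of the original file);
// "jop.bcm" is the Jimple-optimization-pack phase name this transformer is
// assumed to be registered under:
//
// Body body = method.retrieveActiveBody();
// BusyCodeMotion.v().transform(body, "jop.bcm");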
| cfallin/soot | src/soot/jimple/toolkits/scalar/pre/BusyCodeMotion.java | Java | lgpl-2.1 | 6,656 |
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.cache;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
import org.alfresco.repo.cache.TransactionStats.OpType;
import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
/**
* Simple non-persistent implementation of {@link CacheStatistics}. Statistics
* are empty at repository startup.
*
* @since 5.0
* @author Matt Ward
*/
public class InMemoryCacheStatistics implements CacheStatistics, ApplicationContextAware
{
/** Read/Write locks by cache name */
private final ConcurrentMap<String, ReentrantReadWriteLock> locks = new ConcurrentHashMap<>();
private Map<String, Map<OpType, OperationStats>> cacheToStatsMap = new HashMap<>();
private ApplicationContext applicationContext;
@Override
public long count(String cacheName, OpType opType)
{
ReadLock readLock = getReadLock(cacheName);
readLock.lock();
try
{
Map<OpType, OperationStats> cacheStats = cacheToStatsMap.get(cacheName);
if (cacheStats == null)
{
throw new NoStatsForCache(cacheName);
}
OperationStats opStats = cacheStats.get(opType);
return opStats.getCount();
}
finally
{
readLock.unlock();
}
}
@Override
public double meanTime(String cacheName, OpType opType)
{
ReadLock readLock = getReadLock(cacheName);
readLock.lock();
try
{
Map<OpType, OperationStats> cacheStats = cacheToStatsMap.get(cacheName);
if (cacheStats == null)
{
throw new NoStatsForCache(cacheName);
}
OperationStats opStats = cacheStats.get(opType);
return opStats.meanTime();
}
finally
{
readLock.unlock();
}
}
@Override
public void add(String cacheName, TransactionStats txStats)
{
boolean registerCacheStats = false;
WriteLock writeLock = getWriteLock(cacheName);
writeLock.lock();
try
{
// Are we adding new stats for a previously unseen cache?
registerCacheStats = !cacheToStatsMap.containsKey(cacheName);
if (registerCacheStats)
{
// There are no statistics yet for this cache.
cacheToStatsMap.put(cacheName, new HashMap<OpType, OperationStats>());
}
Map<OpType, OperationStats> cacheStats = cacheToStatsMap.get(cacheName);
for (OpType opType : OpType.values())
{
SummaryStatistics txOpSummary = txStats.getTimings(opType);
long count = txOpSummary.getN();
double totalTime = txOpSummary.getSum();
OperationStats oldStats = cacheStats.get(opType);
OperationStats newStats;
if (oldStats == null)
{
newStats = new OperationStats(totalTime, count);
}
else
{
newStats = new OperationStats(oldStats, totalTime, count);
}
cacheStats.put(opType, newStats);
}
}
finally
{
writeLock.unlock();
}
if (registerCacheStats)
{
// We've added stats for a previously unseen cache, raise an event
// so that an MBean for the cache may be registered, for example.
applicationContext.publishEvent(new CacheStatisticsCreated(this, cacheName));
}
}
@Override
public double hitMissRatio(String cacheName)
{
ReadLock readLock = getReadLock(cacheName);
readLock.lock();
try
{
Map<OpType, OperationStats> cacheStats = cacheToStatsMap.get(cacheName);
if (cacheStats == null)
{
throw new NoStatsForCache(cacheName);
}
long hits = cacheStats.get(OpType.GET_HIT).getCount();
long misses = cacheStats.get(OpType.GET_MISS).getCount();
return (double)hits / (hits+misses);
}
finally
{
readLock.unlock();
}
}
@Override
public long numGets(String cacheName)
{
ReadLock readLock = getReadLock(cacheName);
readLock.lock();
try
{
Map<OpType, OperationStats> cacheStats = cacheToStatsMap.get(cacheName);
if (cacheStats == null)
{
throw new NoStatsForCache(cacheName);
}
long hits = cacheStats.get(OpType.GET_HIT).getCount();
long misses = cacheStats.get(OpType.GET_MISS).getCount();
return hits+misses;
}
finally
{
readLock.unlock();
}
}
@Override
public Map<OpType, OperationStats> allStats(String cacheName)
{
ReadLock readLock = getReadLock(cacheName);
readLock.lock();
try
{
Map<OpType, OperationStats> cacheStats = cacheToStatsMap.get(cacheName);
if (cacheStats == null)
{
throw new NoStatsForCache(cacheName);
}
return new HashMap<>(cacheStats);
}
finally
{
readLock.unlock();
}
}
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException
{
this.applicationContext = applicationContext;
}
/**
* Gets a {@link ReentrantReadWriteLock} for a specific cache, lazily
* creating the lock if necessary. Locks may be created per cache
* (rather than hashing to a smaller pool) since the number of
* caches is not too large.
*
* @param cacheName Cache name to obtain lock for.
* @return ReentrantReadWriteLock
*/
private ReentrantReadWriteLock getLock(String cacheName)
{
if (!locks.containsKey(cacheName))
{
ReentrantReadWriteLock newLock = new ReentrantReadWriteLock();
if (locks.putIfAbsent(cacheName, newLock) == null)
{
// Lock was successfully added to map.
return newLock;
}
}
return locks.get(cacheName);
}
private ReadLock getReadLock(String cacheName)
{
ReadLock readLock = getLock(cacheName).readLock();
return readLock;
}
private WriteLock getWriteLock(String cacheName)
{
WriteLock writeLock = getLock(cacheName).writeLock();
return writeLock;
}
}
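// A minimal read-side sketch (assumed caller code, not part of the original
// file); "contentCache" is a placeholder name, and stats for it must already
// have been recorded via add(...) or these calls throw NoStatsForCache:
//
// CacheStatistics stats = new InMemoryCacheStatistics();
// long gets = stats.numGets("contentCache");
// double ratio = stats.hitMissRatio("contentCache");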
| Alfresco/alfresco-repository | src/main/java/org/alfresco/repo/cache/InMemoryCacheStatistics.java | Java | lgpl-3.0 | 8,474 |
/* ****************************************************************************
*
* Copyright (c) Microsoft Corporation.
*
* This source code is subject to terms and conditions of the Apache License, Version 2.0. A
* copy of the license can be found in the License.html file at the root of this distribution. If
* you cannot locate the Apache License, Version 2.0, please send an email to
* dlr@microsoft.com. By using this source code in any fashion, you are agreeing to be bound
* by the terms of the Apache License, Version 2.0.
*
* You must not remove this notice, or any other, from this software.
*
*
* ***************************************************************************/
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics;
using System.Dynamic.Utils;
using System.Runtime.CompilerServices;
#if !FEATURE_CORE_DLR
namespace Microsoft.Scripting.Ast {
#else
namespace System.Linq.Expressions {
#endif
/// <summary>
/// Represents creating a new array and possibly initializing the elements of the new array.
/// </summary>
[DebuggerTypeProxy(typeof(Expression.NewArrayExpressionProxy))]
public class NewArrayExpression : Expression {
private readonly ReadOnlyCollection<Expression> _expressions;
private readonly Type _type;
internal NewArrayExpression(Type type, ReadOnlyCollection<Expression> expressions) {
_expressions = expressions;
_type = type;
}
internal static NewArrayExpression Make(ExpressionType nodeType, Type type, ReadOnlyCollection<Expression> expressions) {
if (nodeType == ExpressionType.NewArrayInit) {
return new NewArrayInitExpression(type, expressions);
} else {
return new NewArrayBoundsExpression(type, expressions);
}
}
/// <summary>
/// Gets the static type of the expression that this <see cref="Expression" /> represents. (Inherited from <see cref="Expression"/>.)
/// </summary>
/// <returns>The <see cref="Type"/> that represents the static type of the expression.</returns>
public sealed override Type Type {
get { return _type; }
}
/// <summary>
/// Gets the bounds of the array if the value of the <see cref="P:NodeType"/> property is NewArrayBounds, or the values to initialize the elements of the new array if the value of the <see cref="P:NodeType"/> property is NewArrayInit.
/// </summary>
public ReadOnlyCollection<Expression> Expressions {
get { return _expressions; }
}
/// <summary>
/// Dispatches to the specific visit method for this node type.
/// </summary>
protected internal override Expression Accept(ExpressionVisitor visitor) {
return visitor.VisitNewArray(this);
}
/// <summary>
/// Creates a new expression that is like this one, but using the
/// supplied children. If all of the children are the same, it will
/// return this expression.
/// </summary>
/// <param name="expressions">The <see cref="Expressions" /> property of the result.</param>
/// <returns>This expression if no children changed, or an expression with the updated children.</returns>
public NewArrayExpression Update(IEnumerable<Expression> expressions) {
if (expressions == Expressions) {
return this;
}
if (NodeType == ExpressionType.NewArrayInit) {
return Expression.NewArrayInit(Type.GetElementType(), expressions);
}
return Expression.NewArrayBounds(Type.GetElementType(), expressions);
}
}
internal sealed class NewArrayInitExpression : NewArrayExpression {
internal NewArrayInitExpression(Type type, ReadOnlyCollection<Expression> expressions)
: base(type, expressions) {
}
/// <summary>
/// Returns the node type of this <see cref="Expression" />. (Inherited from <see cref="Expression" />.)
/// </summary>
/// <returns>The <see cref="ExpressionType"/> that represents this expression.</returns>
public sealed override ExpressionType NodeType {
get { return ExpressionType.NewArrayInit; }
}
}
internal sealed class NewArrayBoundsExpression : NewArrayExpression {
internal NewArrayBoundsExpression(Type type, ReadOnlyCollection<Expression> expressions)
: base(type, expressions) {
}
/// <summary>
/// Returns the node type of this <see cref="Expression" />. (Inherited from <see cref="Expression" />.)
/// </summary>
/// <returns>The <see cref="ExpressionType"/> that represents this expression.</returns>
public sealed override ExpressionType NodeType {
get { return ExpressionType.NewArrayBounds; }
}
}
public partial class Expression {
#region NewArrayInit
/// <summary>
/// Creates a new array expression of the specified type from the provided initializers.
/// </summary>
/// <param name="type">A Type that represents the element type of the array.</param>
/// <param name="initializers">The expressions used to create the array elements.</param>
/// <returns>An instance of the <see cref="NewArrayExpression"/>.</returns>
public static NewArrayExpression NewArrayInit(Type type, params Expression[] initializers) {
return NewArrayInit(type, (IEnumerable<Expression>)initializers);
}
/// <summary>
/// Creates a new array expression of the specified type from the provided initializers.
/// </summary>
/// <param name="type">A Type that represents the element type of the array.</param>
/// <param name="initializers">The expressions used to create the array elements.</param>
/// <returns>An instance of the <see cref="NewArrayExpression"/>.</returns>
public static NewArrayExpression NewArrayInit(Type type, IEnumerable<Expression> initializers) {
ContractUtils.RequiresNotNull(type, "type");
ContractUtils.RequiresNotNull(initializers, "initializers");
if (type.Equals(typeof(void))) {
throw Error.ArgumentCannotBeOfTypeVoid();
}
ReadOnlyCollection<Expression> initializerList = initializers.ToReadOnly();
Expression[] newList = null;
for (int i = 0, n = initializerList.Count; i < n; i++) {
Expression expr = initializerList[i];
RequiresCanRead(expr, "initializers");
if (!TypeUtils.AreReferenceAssignable(type, expr.Type)) {
if (!TryQuote(type, ref expr)){
throw Error.ExpressionTypeCannotInitializeArrayType(expr.Type, type);
}
if (newList == null) {
newList = new Expression[initializerList.Count];
for (int j = 0; j < i; j++) {
newList[j] = initializerList[j];
}
}
}
if (newList != null) {
newList[i] = expr;
}
}
if (newList != null) {
initializerList = new TrueReadOnlyCollection<Expression>(newList);
}
return NewArrayExpression.Make(ExpressionType.NewArrayInit, type.MakeArrayType(), initializerList);
}
#endregion
#region NewArrayBounds
/// <summary>
/// Creates a <see cref="NewArrayExpression"/> that represents creating an array that has a specified rank.
/// </summary>
/// <param name="type">A <see cref="Type"/> that represents the element type of the array.</param>
/// <param name="bounds">An array that contains Expression objects to use to populate the Expressions collection.</param>
/// <returns>A <see cref="NewArrayExpression"/> that has the <see cref="P:NodeType"/> property equal to type and the <see cref="P:Expressions"/> property set to the specified value.</returns>
public static NewArrayExpression NewArrayBounds(Type type, params Expression[] bounds) {
return NewArrayBounds(type, (IEnumerable<Expression>)bounds);
}
/// <summary>
/// Creates a <see cref="NewArrayExpression"/> that represents creating an array that has a specified rank.
/// </summary>
/// <param name="type">A <see cref="Type"/> that represents the element type of the array.</param>
/// <param name="bounds">An IEnumerable{T} that contains Expression objects to use to populate the Expressions collection.</param>
/// <returns>A <see cref="NewArrayExpression"/> that has the <see cref="P:NodeType"/> property equal to type and the <see cref="P:Expressions"/> property set to the specified value.</returns>
public static NewArrayExpression NewArrayBounds(Type type, IEnumerable<Expression> bounds) {
ContractUtils.RequiresNotNull(type, "type");
ContractUtils.RequiresNotNull(bounds, "bounds");
if (type.Equals(typeof(void))) {
throw Error.ArgumentCannotBeOfTypeVoid();
}
ReadOnlyCollection<Expression> boundsList = bounds.ToReadOnly();
int dimensions = boundsList.Count;
if (dimensions <= 0) throw Error.BoundsCannotBeLessThanOne();
for (int i = 0; i < dimensions; i++) {
Expression expr = boundsList[i];
RequiresCanRead(expr, "bounds");
if (!TypeUtils.IsInteger(expr.Type)) {
throw Error.ArgumentMustBeInteger();
}
}
Type arrayType;
if (dimensions == 1) {
//To get a vector, we need to call Type.MakeArrayType().
//Type.MakeArrayType(1) gives a non-vector array, which will cause a type-check error.
arrayType = type.MakeArrayType();
} else {
arrayType = type.MakeArrayType(dimensions);
}
return NewArrayExpression.Make(ExpressionType.NewArrayBounds, arrayType, bounds.ToReadOnly());
}
#endregion
}
}
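// A small usage sketch of the two factories (standard expression-tree API,
// not part of this file):
//
// NewArrayExpression vector = Expression.NewArrayInit(
//     typeof(int), Expression.Constant(1), Expression.Constant(2));
// NewArrayExpression matrix = Expression.NewArrayBounds(
//     typeof(double), Expression.Constant(3), Expression.Constant(4));
// // vector.NodeType is NewArrayInit; matrix.NodeType is NewArrayBounds.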
| edwinspire/VSharp | class/dlr/Runtime/Microsoft.Scripting.Core/Ast/NewArrayExpression.cs | C# | lgpl-3.0 | 10,491 |
/**
* Created : Mar 26, 2012
*
* @author pquiring
*/
import java.awt.*;
import java.awt.event.*;
import javax.swing.*;
public class ProgrammerPanel extends javax.swing.JPanel implements Display, ActionListener {
/**
* Creates new form MainPanel
*/
public ProgrammerPanel(Backend backend) {
initComponents();
divide.setText("\u00f7");
this.backend = backend;
Insets zero = new Insets(0, 0, 0, 0);
JButton b;
for(int a=0;a<getComponentCount();a++) {
Component c = getComponent(a);
if (c instanceof JButton) {
b = (JButton)c;
b.addActionListener(this);
b.setMargin(zero);
}
}
backend.setRadix(10);
}
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
buttonGroup1 = new javax.swing.ButtonGroup();
output = new javax.swing.JTextField();
n7 = new javax.swing.JButton();
n8 = new javax.swing.JButton();
n9 = new javax.swing.JButton();
n4 = new javax.swing.JButton();
n5 = new javax.swing.JButton();
n6 = new javax.swing.JButton();
n1 = new javax.swing.JButton();
n2 = new javax.swing.JButton();
n3 = new javax.swing.JButton();
n0 = new javax.swing.JButton();
divide = new javax.swing.JButton();
multiple = new javax.swing.JButton();
minus = new javax.swing.JButton();
plus = new javax.swing.JButton();
allClear = new javax.swing.JButton();
open = new javax.swing.JButton();
close = new javax.swing.JButton();
eq = new javax.swing.JButton();
open1 = new javax.swing.JButton();
open2 = new javax.swing.JButton();
open3 = new javax.swing.JButton();
jPanel1 = new javax.swing.JPanel();
hex = new javax.swing.JRadioButton();
decimal = new javax.swing.JRadioButton();
oct = new javax.swing.JRadioButton();
bin = new javax.swing.JRadioButton();
n10 = new javax.swing.JButton();
n11 = new javax.swing.JButton();
n12 = new javax.swing.JButton();
n13 = new javax.swing.JButton();
n14 = new javax.swing.JButton();
n15 = new javax.swing.JButton();
open4 = new javax.swing.JButton();
open5 = new javax.swing.JButton();
dec1 = new javax.swing.JButton();
allClear1 = new javax.swing.JButton();
output.setEditable(false);
output.setHorizontalAlignment(javax.swing.JTextField.RIGHT);
n7.setText("7");
n8.setText("8");
n9.setText("9");
n4.setText("4");
n5.setText("5");
n6.setText("6");
n1.setText("1");
n2.setText("2");
n3.setText("3");
n0.setText("0");
divide.setText("/");
multiple.setText("x");
minus.setText("-");
plus.setText("+");
allClear.setText("AC");
allClear.setToolTipText("All Clear");
open.setText("(");
close.setText(")");
eq.setText("=");
open1.setText("XOR");
open2.setText("AND");
open3.setText("NOT");
buttonGroup1.add(hex);
hex.setText("Hex");
hex.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
hexActionPerformed(evt);
}
});
buttonGroup1.add(decimal);
decimal.setSelected(true);
decimal.setText("Dec");
decimal.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
decimalActionPerformed(evt);
}
});
buttonGroup1.add(oct);
oct.setText("Oct");
oct.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
octActionPerformed(evt);
}
});
buttonGroup1.add(bin);
bin.setText("Bin");
bin.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
binActionPerformed(evt);
}
});
javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1);
jPanel1.setLayout(jPanel1Layout);
jPanel1Layout.setHorizontalGroup(
jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel1Layout.createSequentialGroup()
.addContainerGap()
.addComponent(hex)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(decimal)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(oct)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(bin)
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
);
jPanel1Layout.setVerticalGroup(
jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel1Layout.createSequentialGroup()
.addContainerGap()
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(hex)
.addComponent(decimal)
.addComponent(oct)
.addComponent(bin))
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
);
n10.setText("A");
n11.setText("B");
n12.setText("C");
n13.setText("D");
n14.setText("E");
n15.setText("F");
open4.setText("MOD");
open5.setText("OR");
dec1.setText("+/-");
allClear1.setText("<");
allClear1.setToolTipText("All Clear");
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
this.setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(output, javax.swing.GroupLayout.DEFAULT_SIZE, 386, Short.MAX_VALUE)
.addComponent(jPanel1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addGroup(layout.createSequentialGroup()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING, false)
.addComponent(n10, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(n0, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(n1, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(n4, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(n7, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, 50, Short.MAX_VALUE))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
.addGroup(layout.createSequentialGroup()
.addComponent(n8, javax.swing.GroupLayout.PREFERRED_SIZE, 50, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(n9, javax.swing.GroupLayout.PREFERRED_SIZE, 50, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(divide, javax.swing.GroupLayout.PREFERRED_SIZE, 50, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(allClear, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
.addGroup(layout.createSequentialGroup()
.addComponent(dec1, javax.swing.GroupLayout.PREFERRED_SIZE, 50, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(62, 62, 62)
.addComponent(plus, javax.swing.GroupLayout.PREFERRED_SIZE, 50, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(eq, javax.swing.GroupLayout.PREFERRED_SIZE, 50, javax.swing.GroupLayout.PREFERRED_SIZE))
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
.addComponent(n2, javax.swing.GroupLayout.PREFERRED_SIZE, 50, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(n3, javax.swing.GroupLayout.PREFERRED_SIZE, 50, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(minus, javax.swing.GroupLayout.PREFERRED_SIZE, 50, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(56, 56, 56))
.addGroup(layout.createSequentialGroup()
.addComponent(n5, javax.swing.GroupLayout.PREFERRED_SIZE, 50, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(n6, javax.swing.GroupLayout.PREFERRED_SIZE, 50, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(multiple, javax.swing.GroupLayout.PREFERRED_SIZE, 50, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(open, javax.swing.GroupLayout.PREFERRED_SIZE, 50, javax.swing.GroupLayout.PREFERRED_SIZE)))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
.addComponent(close, javax.swing.GroupLayout.PREFERRED_SIZE, 50, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(allClear1, javax.swing.GroupLayout.PREFERRED_SIZE, 50, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(open4, javax.swing.GroupLayout.PREFERRED_SIZE, 50, javax.swing.GroupLayout.PREFERRED_SIZE))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(open5, javax.swing.GroupLayout.PREFERRED_SIZE, 50, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(open1, javax.swing.GroupLayout.PREFERRED_SIZE, 50, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(open2, javax.swing.GroupLayout.PREFERRED_SIZE, 50, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(open3, javax.swing.GroupLayout.PREFERRED_SIZE, 50, javax.swing.GroupLayout.PREFERRED_SIZE)))
.addGroup(layout.createSequentialGroup()
.addComponent(n11, javax.swing.GroupLayout.PREFERRED_SIZE, 50, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(n12, javax.swing.GroupLayout.PREFERRED_SIZE, 50, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(n13, javax.swing.GroupLayout.PREFERRED_SIZE, 50, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(n14, javax.swing.GroupLayout.PREFERRED_SIZE, 50, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(n15, javax.swing.GroupLayout.PREFERRED_SIZE, 50, javax.swing.GroupLayout.PREFERRED_SIZE)))
.addGap(0, 0, Short.MAX_VALUE)))
.addContainerGap())
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addComponent(output, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jPanel1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(n7)
.addComponent(n8)
.addComponent(n9)
.addComponent(divide)
.addComponent(allClear)
.addComponent(open2)
.addComponent(allClear1))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(n4)
.addComponent(n5)
.addComponent(n6)
.addComponent(multiple)
.addComponent(close)
.addComponent(open5)
.addComponent(open))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(n1)
.addComponent(n2)
.addComponent(n3)
.addComponent(minus)
.addComponent(open1)
.addComponent(open4))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(n0)
.addComponent(plus)
.addComponent(dec1)
.addComponent(open3)
.addComponent(eq))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(n10)
.addComponent(n11)
.addComponent(n12)
.addComponent(n13)
.addComponent(n14)
.addComponent(n15))
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
);
}// </editor-fold>//GEN-END:initComponents
private void hexActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_hexActionPerformed
backend.setRadix(16);
}//GEN-LAST:event_hexActionPerformed
private void decimalActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_decimalActionPerformed
backend.setRadix(10);
}//GEN-LAST:event_decimalActionPerformed
private void octActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_octActionPerformed
backend.setRadix(8);
}//GEN-LAST:event_octActionPerformed
private void binActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_binActionPerformed
backend.setRadix(2);
}//GEN-LAST:event_binActionPerformed
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JButton allClear;
private javax.swing.JButton allClear1;
private javax.swing.JRadioButton bin;
private javax.swing.ButtonGroup buttonGroup1;
private javax.swing.JButton close;
private javax.swing.JButton dec1;
private javax.swing.JRadioButton decimal;
private javax.swing.JButton divide;
private javax.swing.JButton eq;
private javax.swing.JRadioButton hex;
private javax.swing.JPanel jPanel1;
private javax.swing.JButton minus;
private javax.swing.JButton multiple;
private javax.swing.JButton n0;
private javax.swing.JButton n1;
private javax.swing.JButton n10;
private javax.swing.JButton n11;
private javax.swing.JButton n12;
private javax.swing.JButton n13;
private javax.swing.JButton n14;
private javax.swing.JButton n15;
private javax.swing.JButton n2;
private javax.swing.JButton n3;
private javax.swing.JButton n4;
private javax.swing.JButton n5;
private javax.swing.JButton n6;
private javax.swing.JButton n7;
private javax.swing.JButton n8;
private javax.swing.JButton n9;
private javax.swing.JRadioButton oct;
private javax.swing.JButton open;
private javax.swing.JButton open1;
private javax.swing.JButton open2;
private javax.swing.JButton open3;
private javax.swing.JButton open4;
private javax.swing.JButton open5;
private javax.swing.JTextField output;
private javax.swing.JButton plus;
// End of variables declaration//GEN-END:variables
private Backend backend;
public void setDisplay(String str) {
int idx = str.indexOf(',');
if (idx != -1) {
output.setText(str.substring(0,idx)); //remove radix
} else {
output.setText(str);
}
}
public void actionPerformed(ActionEvent ae) {
JButton b = (JButton)ae.getSource();
String txt = b.getText();
if (txt.length() == 1) {
char first = txt.charAt(0);
if (((first >= '0') && (first <= '9')) || (first == '.') || ((first >= 'A') && (first <= 'F'))) {
backend.addDigit(first);
return;
}
}
backend.addOperation(txt);
}
public void cut() {
output.cut();
}
public void copy() {
output.copy();
}
public void setRadix(int rx) {
switch (rx) {
case 16: hex.setSelected(true); break;
case 10: decimal.setSelected(true); break;
case 8: oct.setSelected(true); break;
case 2: bin.setSelected(true); break;
}
backend.setRadix(rx);
}
}
| ericomattos/javaforce | projects/jfcalc/src/ProgrammerPanel.java | Java | lgpl-3.0 | 20,790 |
/*
Copyright 2011, AUTHORS.txt (http://ui.operamasks.org/about)
Dual licensed under the MIT or LGPL Version 2 licenses.
*/
/**
* @file Spell checker
*/
// Register a plugin named "wsc".
OMEDITOR.plugins.add( 'wsc',
{
requires : [ 'dialog' ],
init : function( editor )
{
var commandName = 'checkspell';
var command = editor.addCommand( commandName, new OMEDITOR.dialogCommand( commandName ) );
// SpellChecker doesn't work in Opera and with custom domain
command.modes = { wysiwyg : ( !OMEDITOR.env.opera && !OMEDITOR.env.air && document.domain == window.location.hostname ) };
editor.ui.addButton( 'SpellChecker',
{
label : editor.lang.spellCheck.toolbar,
command : commandName
});
OMEDITOR.dialog.add( commandName, this.path + 'dialogs/wsc.js' );
}
});
OMEDITOR.config.wsc_customerId = OMEDITOR.config.wsc_customerId || '1:ua3xw1-2XyGJ3-GWruD3-6OFNT1-oXcuB1-nR6Bp4-hgQHc-EcYng3-sdRXG3-NOfFk' ;
OMEDITOR.config.wsc_customLoaderScript = OMEDITOR.config.wsc_customLoaderScript || null;
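// Deployments can override either value before this plugin file is loaded,
// e.g. (illustrative values):
// OMEDITOR.config.wsc_customerId = 'your-spellchecker-customer-id';
// OMEDITOR.config.wsc_customLoaderScript = 'http://example.com/custom_loader.js';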
| yonghuang/fastui | samplecenter/basic/timeTest/operamasks-ui-2.0/development-bundle/ui/editor/_source/plugins/wsc/plugin.js | JavaScript | lgpl-3.0 | 1,027 |
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
export default [
[
[
'நள்.', 'நண்.', 'அதி.', 'கா.', 'மதி.', 'பிற்.', 'மா.',
'அந்தி மா.', 'இர.'
],
[
'நள்ளிரவு', 'நண்பகல்', 'அதிகாலை', 'காலை',
'மதியம்', 'பிற்பகல்', 'மாலை',
'அந்தி மாலை', 'இரவு'
],
],
[
[
'நள்.', 'நண்.', 'அதி.', 'கா.', 'மதி.', 'பிற்.', 'மா.',
'அந்தி மா.', 'இ.'
],
[
'நள்ளிரவு', 'நண்பகல்', 'அதிகாலை', 'காலை',
'மதியம்', 'பிற்பகல்', 'மாலை',
'அந்தி மாலை', 'இரவு'
],
],
[
'00:00', '12:00', ['03:00', '05:00'], ['05:00', '12:00'], ['12:00', '14:00'],
['14:00', '16:00'], ['16:00', '18:00'], ['18:00', '21:00'], ['21:00', '03:00']
]
];
//# sourceMappingURL=ta-MY.js.map | rospilot/rospilot | share/web_assets/nodejs_deps/node_modules/@angular/common/locales/extra/ta-MY.js | JavaScript | apache-2.0 | 1,346 |
/*
* Copyright (c) 2014 Wael Chatila / Icegreen Technologies. All Rights Reserved.
* This software is released under the Apache license 2.0
* This file has been modified by the copyright holder.
* Original file can be found at http://james.apache.org
*/
package com.icegreen.greenmail.imap;
/**
* @author Darrell DeBoer <darrell@apache.org>
* @version $Revision: 109034 $
*/
public class ProtocolException extends Exception {
public ProtocolException(String s) {
super(s);
}
public ProtocolException(String s, Throwable cause) {
        super(s, cause);
}
}
| buildscientist/greenmail | greenmail-core/src/main/java/com/icegreen/greenmail/imap/ProtocolException.java | Java | apache-2.0 | 610 |
/*
* Copyright 2015, Yahoo Inc.
* Copyrights licensed under the Apache License.
* See the accompanying LICENSE file for terms.
*/
package com.yahoo.dba.perf.myperf.common;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
public class MyDatabases implements java.io.Serializable{
private static final long serialVersionUID = -8586381924495834726L;
private Set<String> myDbSet = new TreeSet<String>();
synchronized public Set<String> getMyDbList()
{
return java.util.Collections.unmodifiableSet(this.myDbSet);
}
synchronized public void addDb(String name)
{
if(!this.myDbSet.contains(name))
this.myDbSet.add(name);
}
synchronized public void addDbs(List<String> names)
{
for (String name:names)
{
if(!this.myDbSet.contains(name))
this.myDbSet.add(name);
}
}
synchronized public void removeDb(String name)
{
if(this.myDbSet.contains(name))
this.myDbSet.remove(name);
}
  synchronized public void replaceDb(String oldName, String newName)
  {
    if(this.myDbSet.contains(oldName))
    {
      this.myDbSet.remove(oldName);
      this.myDbSet.add(newName);
    }
  }
synchronized public int size()
{
return this.myDbSet.size();
}
}
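// Illustrative usage (a sketch, not part of the original source): the class is
// a small synchronized wrapper around a sorted set, so one instance can be
// shared across threads without external locking.
//
//   MyDatabases dbs = new MyDatabases();
//   dbs.addDb("orders_db");
//   dbs.replaceDb("orders_db", "orders_db_v2"); // swap a single entry
//   Set<String> snapshot = dbs.getMyDbList();   // unmodifiable, sorted view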
| wgpshashank/mysql_perf_analyzer | myperf/src/main/java/com/yahoo/dba/perf/myperf/common/MyDatabases.java | Java | apache-2.0 | 1,264 |
/*
* Copyright 2010 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.guvnor.tools.actions;
import org.eclipse.jface.action.IAction;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.jface.wizard.WizardDialog;
import org.eclipse.swt.widgets.Display;
import org.eclipse.ui.IObjectActionDelegate;
import org.eclipse.ui.IWorkbenchPart;
import org.guvnor.tools.Activator;
import org.guvnor.tools.GuvnorRepository;
import org.guvnor.tools.views.model.TreeObject;
import org.guvnor.tools.wizards.EditRepLocationWizard;
/**
 * Edits the connection details of a Guvnor repository location.
 */
public class EditConnectionAction implements IObjectActionDelegate {
private GuvnorRepository rep;
public EditConnectionAction() {
super();
}
/*
* (non-Javadoc)
* @see org.eclipse.ui.IObjectActionDelegate#setActivePart(org.eclipse.jface.action.IAction, org.eclipse.ui.IWorkbenchPart)
*/
public void setActivePart(IAction action, IWorkbenchPart targetPart) {
}
/*
* (non-Javadoc)
* @see org.eclipse.ui.IActionDelegate#run(org.eclipse.jface.action.IAction)
*/
public void run(IAction action) {
EditRepLocationWizard editWizard = new EditRepLocationWizard(rep);
editWizard.init(Activator.getDefault().getWorkbench(), null);
WizardDialog dialog =
new WizardDialog(Display.getCurrent().getActiveShell(), editWizard);
dialog.create();
dialog.open();
}
/*
* (non-Javadoc)
* @see org.eclipse.ui.IActionDelegate#selectionChanged(org.eclipse.jface.action.IAction, org.eclipse.jface.viewers.ISelection)
*/
public void selectionChanged(IAction action, ISelection selection) {
// Reset state to default
action.setEnabled(false);
if (!(selection instanceof IStructuredSelection)) {
return;
}
IStructuredSelection sel = (IStructuredSelection)selection;
if (sel.size() != 1) {
return;
}
if (sel.getFirstElement() instanceof TreeObject) {
if (((TreeObject)sel.getFirstElement()).getNodeType() == TreeObject.Type.REPOSITORY) {
rep = ((TreeObject)sel.getFirstElement()).getGuvnorRepository();
action.setEnabled(true);
}
}
}
}
| droolsjbpm/droolsjbpm-tools | drools-eclipse/org.guvnor.tools/src/org/guvnor/tools/actions/EditConnectionAction.java | Java | apache-2.0 | 3,786 |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
# $example on$
from pyspark.ml.feature import IndexToString, StringIndexer
# $example off$
from pyspark.sql import SparkSession
if __name__ == "__main__":
spark = SparkSession\
.builder\
.appName("IndexToStringExample")\
.getOrCreate()
# $example on$
df = spark.createDataFrame(
[(0, "a"), (1, "b"), (2, "c"), (3, "a"), (4, "a"), (5, "c")],
["id", "category"])
stringIndexer = StringIndexer(inputCol="category", outputCol="categoryIndex")
model = stringIndexer.fit(df)
indexed = model.transform(df)
converter = IndexToString(inputCol="categoryIndex", outputCol="originalCategory")
converted = converter.transform(indexed)
converted.select("id", "originalCategory").show()
# $example off$
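    # Expected output (illustrative): "originalCategory" round-trips to the
    # original "category" values, e.g. (0, a), (1, b), (2, c), (3, a), (4, a),
    # (5, c), because IndexToString inverts the StringIndexer mapping stored in
    # the column metadata.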
spark.stop()
| mrchristine/spark-examples-dbc | src/main/python/ml/index_to_string_example.py | Python | apache-2.0 | 1,615 |
/*******************************************************************************
* Copyright (c) 2015-2016, WSO2.Telco Inc. (http://www.wso2telco.com) All Rights Reserved.
*
* WSO2.Telco Inc. licences this file to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.wso2telco.dep.oneapivalidation.service.impl.smsmessaging.northbound;
import org.json.JSONObject;
import com.wso2telco.dep.oneapivalidation.exceptions.CustomException;
import com.wso2telco.dep.oneapivalidation.service.IServiceValidate;
import com.wso2telco.dep.oneapivalidation.util.UrlValidator;
import com.wso2telco.dep.oneapivalidation.util.Validation;
import com.wso2telco.dep.oneapivalidation.util.ValidationRule;
/**
*
* @author WSO2telco
*/
public class ValidateNBDeliveryInfoNotification implements IServiceValidate {
private final String[] validationRules = { "DeliveryInfoNotification" };
public void validate(String json) throws CustomException {
String callbackData = null;
String address = null;
String operatorCode = null;
String filterCriteria = null;
String deliveryStatus = null;
try {
JSONObject objJSONObject = new JSONObject(json);
JSONObject objDeliveryInfoNotification = (JSONObject) objJSONObject
.get("deliveryInfoNotification");
if (!objDeliveryInfoNotification.isNull("callbackData")) {
callbackData = nullOrTrimmed(objDeliveryInfoNotification
.getString("callbackData"));
}
JSONObject objDeliveryInfo = (JSONObject) objDeliveryInfoNotification
.get("deliveryInfo");
if (objDeliveryInfo.get("address") != null) {
address = nullOrTrimmed(objDeliveryInfo.getString("address"));
}
if (objDeliveryInfo.get("operatorCode") != null) {
operatorCode = nullOrTrimmed(objDeliveryInfo
.getString("operatorCode"));
}
if (!objDeliveryInfo.isNull("filterCriteria")) {
filterCriteria = nullOrTrimmed(objDeliveryInfo
.getString("filterCriteria"));
}
if (objDeliveryInfo.get("deliveryStatus") != null) {
deliveryStatus = nullOrTrimmed(objDeliveryInfo
.getString("deliveryStatus"));
}
} catch (Exception e) {
throw new CustomException("POL0299", "Unexpected Error",
new String[] { "" });
}
ValidationRule[] rules = null;
rules = new ValidationRule[] {
new ValidationRule(ValidationRule.VALIDATION_TYPE_OPTIONAL,
"callbackData", callbackData),
new ValidationRule(
ValidationRule.VALIDATION_TYPE_MANDATORY_TEL,
"address", address),
new ValidationRule(ValidationRule.VALIDATION_TYPE_MANDATORY,
"operatorCode", operatorCode),
new ValidationRule(ValidationRule.VALIDATION_TYPE_OPTIONAL,
"filterCriteria", filterCriteria),
new ValidationRule(ValidationRule.VALIDATION_TYPE_MANDATORY,
"deliveryStatus", deliveryStatus) };
Validation.checkRequestParams(rules);
}
public void validateUrl(String pathInfo) throws CustomException {
String[] requestParts = null;
if (pathInfo != null) {
if (pathInfo.startsWith("/")) {
pathInfo = pathInfo.substring(1);
}
requestParts = pathInfo.split("/");
}
UrlValidator.validateRequest(requestParts, validationRules);
}
private static String nullOrTrimmed(String s) {
String rv = null;
if (s != null && s.trim().length() > 0) {
rv = s.trim();
}
return rv;
}
public void validate(String[] params) throws CustomException {
throw new UnsupportedOperationException("Not supported yet.");
}
}
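// Illustrative JSON accepted by validate() above (field names follow the
// parsing logic; the values are made up):
//
// {
//   "deliveryInfoNotification": {
//     "callbackData": "12345",
//     "deliveryInfo": {
//       "address": "tel:+94771234567",
//       "operatorCode": "operator1",
//       "filterCriteria": "tel:+94771234567",
//       "deliveryStatus": "DeliveredToTerminal"
//     }
//   }
// }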
| WSO2Telco/component-dep | components/oneapi-validation/src/main/java/com/wso2telco/dep/oneapivalidation/service/impl/smsmessaging/northbound/ValidateNBDeliveryInfoNotification.java | Java | apache-2.0 | 4,034 |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.core.ml.dataframe;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.xpack.core.ml.job.messages.Messages;
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.core.ml.utils.QueryProvider;
import org.elasticsearch.xpack.core.ml.utils.RuntimeMappingsValidator;
import org.elasticsearch.xpack.core.ml.utils.XContentObjectTransformer;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
public class DataFrameAnalyticsSource implements Writeable, ToXContentObject {
public static final ParseField INDEX = new ParseField("index");
public static final ParseField QUERY = new ParseField("query");
public static final ParseField _SOURCE = new ParseField("_source");
@SuppressWarnings({ "unchecked"})
public static ConstructingObjectParser<DataFrameAnalyticsSource, Void> createParser(boolean ignoreUnknownFields) {
ConstructingObjectParser<DataFrameAnalyticsSource, Void> parser = new ConstructingObjectParser<>("data_frame_analytics_source",
ignoreUnknownFields, a -> new DataFrameAnalyticsSource(
((List<String>) a[0]).toArray(new String[0]),
(QueryProvider) a[1],
(FetchSourceContext) a[2],
(Map<String, Object>) a[3]));
parser.declareStringArray(ConstructingObjectParser.constructorArg(), INDEX);
parser.declareObject(ConstructingObjectParser.optionalConstructorArg(),
(p, c) -> QueryProvider.fromXContent(p, ignoreUnknownFields, Messages.DATA_FRAME_ANALYTICS_BAD_QUERY_FORMAT), QUERY);
parser.declareField(ConstructingObjectParser.optionalConstructorArg(),
(p, c) -> FetchSourceContext.fromXContent(p),
_SOURCE,
ObjectParser.ValueType.OBJECT_ARRAY_BOOLEAN_OR_STRING);
parser.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> p.map(),
SearchSourceBuilder.RUNTIME_MAPPINGS_FIELD);
return parser;
}
private final String[] index;
private final QueryProvider queryProvider;
private final FetchSourceContext sourceFiltering;
private final Map<String, Object> runtimeMappings;
public DataFrameAnalyticsSource(String[] index, @Nullable QueryProvider queryProvider, @Nullable FetchSourceContext sourceFiltering,
@Nullable Map<String, Object> runtimeMappings) {
this.index = ExceptionsHelper.requireNonNull(index, INDEX);
if (index.length == 0) {
throw new IllegalArgumentException("source.index must specify at least one index");
}
if (Arrays.stream(index).anyMatch(Strings::isNullOrEmpty)) {
throw new IllegalArgumentException("source.index must contain non-null and non-empty strings");
}
this.queryProvider = queryProvider == null ? QueryProvider.defaultQuery() : queryProvider;
if (sourceFiltering != null && sourceFiltering.fetchSource() == false) {
throw new IllegalArgumentException("source._source cannot be disabled");
}
this.sourceFiltering = sourceFiltering;
this.runtimeMappings = runtimeMappings == null ? Collections.emptyMap() : Collections.unmodifiableMap(runtimeMappings);
RuntimeMappingsValidator.validate(this.runtimeMappings);
}
public DataFrameAnalyticsSource(StreamInput in) throws IOException {
index = in.readStringArray();
queryProvider = QueryProvider.fromStream(in);
sourceFiltering = in.readOptionalWriteable(FetchSourceContext::new);
runtimeMappings = in.readMap();
}
public DataFrameAnalyticsSource(DataFrameAnalyticsSource other) {
this.index = Arrays.copyOf(other.index, other.index.length);
this.queryProvider = new QueryProvider(other.queryProvider);
this.sourceFiltering = other.sourceFiltering == null ? null : new FetchSourceContext(
other.sourceFiltering.fetchSource(), other.sourceFiltering.includes(), other.sourceFiltering.excludes());
this.runtimeMappings = Collections.unmodifiableMap(new HashMap<>(other.runtimeMappings));
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeStringArray(index);
queryProvider.writeTo(out);
out.writeOptionalWriteable(sourceFiltering);
out.writeMap(runtimeMappings);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.array(INDEX.getPreferredName(), index);
builder.field(QUERY.getPreferredName(), queryProvider.getQuery());
if (sourceFiltering != null) {
builder.field(_SOURCE.getPreferredName(), sourceFiltering);
}
if (runtimeMappings.isEmpty() == false) {
builder.field(SearchSourceBuilder.RUNTIME_MAPPINGS_FIELD.getPreferredName(), runtimeMappings);
}
builder.endObject();
return builder;
}
@Override
public boolean equals(Object o) {
if (o == this) return true;
if (o == null || getClass() != o.getClass()) return false;
DataFrameAnalyticsSource other = (DataFrameAnalyticsSource) o;
return Arrays.equals(index, other.index)
&& Objects.equals(queryProvider, other.queryProvider)
&& Objects.equals(sourceFiltering, other.sourceFiltering)
&& Objects.equals(runtimeMappings, other.runtimeMappings);
}
@Override
public int hashCode() {
return Objects.hash(Arrays.asList(index), queryProvider, sourceFiltering, runtimeMappings);
}
public String[] getIndex() {
return index;
}
/**
* Get the fully parsed query from the semi-parsed stored {@code Map<String, Object>}
*
* @return Fully parsed query
*/
public QueryBuilder getParsedQuery() {
Exception exception = queryProvider.getParsingException();
if (exception != null) {
if (exception instanceof RuntimeException) {
throw (RuntimeException) exception;
} else {
throw new ElasticsearchException(queryProvider.getParsingException());
}
}
return queryProvider.getParsedQuery();
}
public FetchSourceContext getSourceFiltering() {
return sourceFiltering;
}
Exception getQueryParsingException() {
return queryProvider.getParsingException();
}
// visible for testing
QueryProvider getQueryProvider() {
return queryProvider;
}
/**
* Calls the parser and returns any gathered deprecations
*
* @param namedXContentRegistry XContent registry to transform the lazily parsed query
* @return The deprecations from parsing the query
*/
public List<String> getQueryDeprecations(NamedXContentRegistry namedXContentRegistry) {
List<String> deprecations = new ArrayList<>();
try {
XContentObjectTransformer.queryBuilderTransformer(namedXContentRegistry).fromMap(queryProvider.getQuery(),
deprecations);
} catch (Exception exception) {
// Certain thrown exceptions wrap up the real Illegal argument making it hard to determine cause for the user
if (exception.getCause() instanceof IllegalArgumentException) {
exception = (Exception) exception.getCause();
}
throw ExceptionsHelper.badRequestException(Messages.DATA_FRAME_ANALYTICS_BAD_QUERY_FORMAT, exception);
}
return deprecations;
}
// Visible for testing
Map<String, Object> getQuery() {
return queryProvider.getQuery();
}
public Map<String, Object> getRuntimeMappings() {
return runtimeMappings;
}
public boolean isFieldExcluded(String path) {
if (sourceFiltering == null) {
return false;
}
// First we check in the excludes as they are applied last
for (String exclude : sourceFiltering.excludes()) {
if (pathMatchesSourcePattern(path, exclude)) {
return true;
}
}
// Now we can check the includes
// Empty includes means no further exclusions
if (sourceFiltering.includes().length == 0) {
return false;
}
for (String include : sourceFiltering.includes()) {
if (pathMatchesSourcePattern(path, include)) {
return false;
}
}
return true;
}
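    // Illustrative behaviour: with includes = ["foo.*"] and excludes = ["foo.bar"],
    // isFieldExcluded("foo.bar") is true (excludes are applied last),
    // isFieldExcluded("foo.baz") is false (covered by an include), and
    // isFieldExcluded("other") is true (not covered by any include).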
private static boolean pathMatchesSourcePattern(String path, String sourcePattern) {
if (sourcePattern.equals(path)) {
return true;
}
if (Regex.isSimpleMatchPattern(sourcePattern)) {
return Regex.simpleMatch(sourcePattern, path);
}
// At this stage sourcePattern is a concrete field name and path is not equal to it.
// We should check if path is a nested field of pattern.
// Let us take "foo" as an example.
// Fields that are "foo.*" should also be matched.
return Regex.simpleMatch(sourcePattern + ".*", path);
}
}
| robin13/elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsSource.java | Java | apache-2.0 | 10,456 |
/*
* Copyright © 2012, United States Government, as represented by the
* Administrator of the National Aeronautics and Space Administration.
* All rights reserved.
*
* The NASA Tensegrity Robotics Toolkit (NTRT) v1 platform is licensed
* under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
/**
* @file tgSimView.cpp
* @brief Contains the definitions of members of class tgSimView
* @author Brian Mirletz, Ryan Adams
* $Id$
*/
// This module
#include "tgSimulation.h"
// This application
#include "tgModelVisitor.h"
#include "tgSimView.h"
// The C++ Standard Library
#include <cassert>
#include <iostream>
#include <stdexcept>
tgSimView::tgSimView(tgWorld& world,
double stepSize,
double renderRate) :
m_world(world),
m_pSimulation(NULL),
m_pModelVisitor(NULL),
m_stepSize(stepSize),
m_renderRate(renderRate),
m_renderTime(0.0),
m_initialized(false)
{
if (m_stepSize < 0.0)
{
throw std::invalid_argument("stepSize is not positive");
}
else if (renderRate < m_stepSize)
{
throw std::invalid_argument("renderRate is less than stepSize");
}
// Postcondition
assert(invariant());
assert(m_pSimulation == NULL);
assert(m_pModelVisitor == NULL);
assert(m_stepSize == stepSize);
assert(m_renderRate == renderRate);
assert(m_renderTime == 0.0);
assert(!m_initialized);
}
tgSimView::~tgSimView()
{
if (m_pSimulation != NULL)
{
// The tgSimView has been passed to a tgSimulation
teardown();
}
delete m_pModelVisitor;
}
void tgSimView::bindToSimulation(tgSimulation& simulation)
{
if (m_pSimulation != NULL)
{
throw
std::invalid_argument("The view already belongs to a simulation.");
}
else
{
m_pSimulation = &simulation;
tgWorld& world = simulation.getWorld();
bindToWorld(world);
}
// Postcondition
assert(invariant());
assert(m_pSimulation == &simulation);
}
void tgSimView::releaseFromSimulation()
{
// The destructor that calls this must not fail, so don't assert or throw
// on a precondition
m_pSimulation = NULL;
// The destructor that calls this must not fail, so don't assert a
// postcondition
}
void tgSimView::bindToWorld(tgWorld& world)
{
}
void tgSimView::setup()
{
assert(m_pSimulation != NULL);
// Just note that this function was called.
// tgSimViewGraphics needs to know for now.
m_initialized = true;
// Postcondition
assert(invariant());
assert(m_initialized);
}
void tgSimView::teardown()
{
// Just note that this function was called.
// tgSimViewGraphics needs to know for now.
m_initialized = false;
// Postcondition
assert(invariant());
assert(!m_initialized);
}
void tgSimView::run()
{
// This would normally run forever, but this is just for testing
run(10);
}
void tgSimView::run(int steps)
{
if (m_pSimulation != NULL)
{
// The tgSimView has been passed to a tgSimulation
std::cout << "SimView::run("<<steps<<")" << std::endl;
// This would normally run forever, but this is just for testing
m_renderTime = 0;
double totalTime = 0.0;
for (int i = 0; i < steps; i++) {
m_pSimulation->step(m_stepSize);
m_renderTime += m_stepSize;
totalTime += m_stepSize;
if (m_renderTime >= m_renderRate) {
render();
//std::cout << totalTime << std::endl;
m_renderTime = 0;
}
}
}
}
void tgSimView::render() const
{
if ((m_pSimulation != NULL) && (m_pModelVisitor != NULL))
{
// The tgSimView has been passed to a tgSimulation
m_pSimulation->onVisit(*m_pModelVisitor);
}
}
void tgSimView::render(const tgModelVisitor& r) const
{
if (m_pSimulation != NULL)
{
// The tgSimView has been passed to a tgSimulation
m_pSimulation->onVisit(r);
}
}
void tgSimView::reset()
{
if (m_pSimulation != NULL)
{
// The tgSimView has been passed to a tgSimulation
m_pSimulation->reset();
}
}
void tgSimView::setRenderRate(double renderRate)
{
m_renderRate = (renderRate > m_stepSize) ? renderRate : m_stepSize;
// Postcondition
assert(invariant());
}
void tgSimView::setStepSize(double stepSize)
{
if (stepSize <= 0.0)
{
throw std::invalid_argument("stepSize is not positive");
}
else
{
m_stepSize = stepSize;
// Assure that the render rate is no less than the new step size
setRenderRate(m_renderRate);
}
// Postcondition
assert(invariant());
assert((stepSize <= 0.0) || (m_stepSize == stepSize));
}
bool tgSimView::invariant() const
{
return
(m_stepSize >= 0.0) &&
(m_renderRate >= m_stepSize) &&
(m_renderTime >= 0.0);
}
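// Illustrative driver (a sketch; tgWorld/tgSimulation setup beyond this file
// is assumed):
//
//   tgWorld world;
//   tgSimView view(world, 0.001, 1.0 / 60.0); // 1 ms physics step, ~60 Hz render
//   tgSimulation simulation(view);            // calls bindToSimulation() on the view
//   view.run(1000);                           // steps physics, rendering every renderRate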
| MRNAS/NTRT | src/core/tgSimView.cpp | C++ | apache-2.0 | 5,374 |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT license.
#include <iostream>
#include <fstream>
#include <sstream>
#include <algorithm>
#include <cctype>
#include "BodyOnlyXmlParser.hh"
#include "MdsException.hh"
using namespace mdsd::details;
void
BodyOnlyXmlParser::ParseFile(std::string xmlFilePath)
{
m_xmlFilePath = std::move(xmlFilePath);
std::ifstream infile{m_xmlFilePath};
if (!infile) {
std::ostringstream strm;
strm << "Failed to open file '" << m_xmlFilePath << "'.";
throw MDSEXCEPTION(strm.str());
}
std::string line;
while(std::getline(infile, line)) {
ParseChunk(line);
}
if (!infile.eof()) {
std::ostringstream strm;
strm << "Failed to parse file '" << m_xmlFilePath << "': ";
if (infile.bad()) {
strm << "Corrupted stream.";
}
else if (infile.fail()) {
strm << "IO operation failed.";
}
else {
strm << "std::getline() returned 0 for unknown reason.";
}
throw MDSEXCEPTION(strm.str());
}
}
void
BodyOnlyXmlParser::OnCharacters(const std::string& chars)
{
bool isEmptyOrWhiteSpace = std::all_of(chars.cbegin(), chars.cend(), ::isspace);
if (!isEmptyOrWhiteSpace) {
m_body.append(chars);
}
}
| Azure/azure-linux-extensions | Diagnostic/mdsd/mdscommands/BodyOnlyXmlParser.cc | C++ | apache-2.0 | 1,350 |
/*
* Copyright (c) 2005-2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.wso2.msf4j.stockquote.example.exception;
import javax.ws.rs.core.Response;
import javax.ws.rs.ext.ExceptionMapper;
/**
* EntityNotFoundMapper.
*/
public class EntityNotFoundMapper implements ExceptionMapper<EntityNotFoundException> {
@Override
public Response toResponse(EntityNotFoundException ex) {
return Response.status(404).
entity(ex.getMessage() + " [from EntityNotFoundMapper]").
type("text/plain").
build();
}
}
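// Illustrative registration with an MSF4J runner (the exact bootstrap API is
// an assumption, not shown in this file):
//
//   new MicroservicesRunner()
//       .addExceptionMapper(new EntityNotFoundMapper())
//       .deploy(new StockQuoteService())
//       .start();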
| callkalpa/product-mss | samples/stockquote/deployable-jar/src/main/java/org/wso2/msf4j/stockquote/example/exception/EntityNotFoundMapper.java | Java | apache-2.0 | 1,196 |
/*
* Copyright 2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.swiftpm.internal;
import com.google.common.collect.ImmutableSet;
import javax.annotation.Nullable;
import java.io.File;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
public class DefaultTarget implements Serializable {
private final String name;
private final File path;
private final Collection<File> sourceFiles;
private final List<String> requiredTargets = new ArrayList<String>();
private final List<String> requiredProducts = new ArrayList<String>();
private File publicHeaderDir;
public DefaultTarget(String name, File path, Iterable<File> sourceFiles) {
this.name = name;
this.path = path;
this.sourceFiles = ImmutableSet.copyOf(sourceFiles);
}
public String getName() {
return name;
}
public File getPath() {
return path;
}
public Collection<File> getSourceFiles() {
return sourceFiles;
}
@Nullable
public File getPublicHeaderDir() {
return publicHeaderDir;
}
public void setPublicHeaderDir(File publicHeaderDir) {
this.publicHeaderDir = publicHeaderDir;
}
public Collection<String> getRequiredTargets() {
return requiredTargets;
}
public Collection<String> getRequiredProducts() {
return requiredProducts;
}
}
| gradle/gradle | subprojects/language-native/src/main/java/org/gradle/swiftpm/internal/DefaultTarget.java | Java | apache-2.0 | 2,004 |
/*
* Copyright 2016 The Closure Compiler Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @fileoverview Externs for Intersection Observer objects.
* @see https://wicg.github.io/IntersectionObserver/
* @externs
*/
// TODO(user): Once the Intersection Observer spec is adopted by W3C, add
// a w3c_ prefix to this file's name.
/**
* These contain the information provided from a change event.
* @see https://wicg.github.io/IntersectionObserver/#intersection-observer-entry
* @record
*/
function IntersectionObserverEntry() {}
/**
* The time the change was observed.
* @see https://wicg.github.io/IntersectionObserver/#dom-intersectionobserverentry-time
* @type {number}
* @const
*/
IntersectionObserverEntry.prototype.time;
/**
* The root intersection rectangle, if target belongs to the same unit of
* related similar-origin browsing contexts as the intersection root, null
* otherwise.
* @see https://wicg.github.io/IntersectionObserver/#dom-intersectionobserverentry-rootbounds
* @type {{top: number, right: number, bottom: number, left: number,
* height: number, width: number}}
* @const
*/
IntersectionObserverEntry.prototype.rootBounds;
/**
* The rectangle describing the element being observed.
* @see https://wicg.github.io/IntersectionObserver/#dom-intersectionobserverentry-boundingclientrect
* @type {!{top: number, right: number, bottom: number, left: number,
* height: number, width: number}}
* @const
*/
IntersectionObserverEntry.prototype.boundingClientRect;
/**
* The rectangle describing the intersection between the observed element and
* the viewport.
* @see https://wicg.github.io/IntersectionObserver/#dom-intersectionobserverentry-intersectionrect
* @type {!{top: number, right: number, bottom: number, left: number,
* height: number, width: number}}
* @const
*/
IntersectionObserverEntry.prototype.intersectionRect;
/**
* Ratio of intersectionRect area to boundingClientRect area.
* @see https://wicg.github.io/IntersectionObserver/#dom-intersectionobserverentry-intersectionratio
* @type {!number}
* @const
*/
IntersectionObserverEntry.prototype.intersectionRatio;
/**
* The Element whose intersection with the intersection root changed.
* @see https://wicg.github.io/IntersectionObserver/#dom-intersectionobserverentry-target
* @type {!Element}
* @const
*/
IntersectionObserverEntry.prototype.target;
/**
* Whether or not the target is intersecting with the root.
* @see https://wicg.github.io/IntersectionObserver/#dom-intersectionobserverentry-isintersecting
* @type {boolean}
* @const
*/
IntersectionObserverEntry.prototype.isIntersecting;
/**
* Callback for the IntersectionObserver.
* @see https://wicg.github.io/IntersectionObserver/#intersection-observer-callback
* @typedef {function(!Array<!IntersectionObserverEntry>,!IntersectionObserver)}
*/
var IntersectionObserverCallback;
/**
* Options for the IntersectionObserver.
* @see https://wicg.github.io/IntersectionObserver/#intersection-observer-init
* @typedef {{
* threshold: (!Array<number>|number|undefined),
* root: (!Element|undefined),
* rootMargin: (string|undefined)
* }}
*/
var IntersectionObserverInit;
/**
* This is the constructor for Intersection Observer objects.
* @see https://wicg.github.io/IntersectionObserver/#intersection-observer-interface
* @param {!IntersectionObserverCallback} handler The callback for the observer.
* @param {!IntersectionObserverInit=} opt_options The object defining the
* thresholds, etc.
* @constructor
*/
function IntersectionObserver(handler, opt_options) {};
/**
* The root Element to use for intersection, or null if the observer uses the
* implicit root.
* @see https://wicg.github.io/IntersectionObserver/#dom-intersectionobserver-root
* @type {?Element}
* @const
*/
IntersectionObserver.prototype.root;
/**
* Offsets applied to the intersection root’s bounding box, effectively growing
* or shrinking the box that is used to calculate intersections.
* @see https://wicg.github.io/IntersectionObserver/#dom-intersectionobserver-rootmargin
* @type {!string}
* @const
*/
IntersectionObserver.prototype.rootMargin;
/**
* A list of thresholds, sorted in increasing numeric order, where each
* threshold is a ratio of intersection area to bounding box area of an observed
* target.
* @see https://wicg.github.io/IntersectionObserver/#dom-intersectionobserver-thresholds
* @type {!Array.<!number>}
* @const
*/
IntersectionObserver.prototype.thresholds;
/**
* This is used to set which element to observe.
* @see https://wicg.github.io/IntersectionObserver/#dom-intersectionobserver-observe
* @param {!Element} element The element to observe.
* @return {undefined}
*/
IntersectionObserver.prototype.observe = function(element) {};
/**
* This is used to stop observing a given element.
* @see https://wicg.github.io/IntersectionObserver/#dom-intersectionobserver-unobserve
* @param {!Element} element The elmenent to stop observing.
* @return {undefined}
*/
IntersectionObserver.prototype.unobserve = function(element) {};
/**
* Disconnect.
* @see https://wicg.github.io/IntersectionObserver/#dom-intersectionobserver-disconnect
*/
IntersectionObserver.prototype.disconnect = function() {};
/**
* Take records.
* @see https://wicg.github.io/IntersectionObserver/#dom-intersectionobserver-takerecords
* @return {!Array.<!IntersectionObserverEntry>}
*/
IntersectionObserver.prototype.takeRecords = function() {};
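// Illustrative use of the API declared above (externs only declare symbols for
// the compiler; this snippet is not part of the externs themselves):
//
// var observer = new IntersectionObserver(function(entries, obs) {
//   entries.forEach(function(entry) {
//     if (entry.isIntersecting) {
//       console.log(entry.target.id + ' visible, ratio=' + entry.intersectionRatio);
//     }
//   });
// }, {rootMargin: '0px', threshold: [0, 0.5, 1]});
// observer.observe(document.querySelector('#lazy-image'));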
| MatrixFrog/closure-compiler | externs/browser/intersection_observer.js | JavaScript | apache-2.0 | 6,017 |
// PSI_ELEMENT: com.intellij.psi.PsiMethod
// OPTIONS: usages
// PLAIN_WHEN_NEEDED
public class JavaWithGroovyInvoke_0 {
public void <caret>invoke() {
}
public static class OtherJavaClass extends JavaWithGroovyInvoke_0 {
}
}
// CRI_IGNORE | smmribeiro/intellij-community | plugins/kotlin/idea/tests/testData/findUsages/java/findJavaMethodUsages/JavaWithGroovyInvoke.0.java | Java | apache-2.0 | 257 |
/*
* Copyright (c) WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.application.common.model.test;
import org.junit.Test;
import org.wso2.carbon.identity.application.common.model.Property;
import org.wso2.carbon.identity.application.common.model.ProvisioningConnectorConfig;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertFalse;
/**
* Testing the ProvisioningConnectorConfig class
*/
public class ProvisioningConnectorConfigTest {
@Test
public void shouldGenerateDifferentHashCodesForDifferentNames() {
ProvisioningConnectorConfig config1 = new ProvisioningConnectorConfig();
config1.setName("Name1");
config1.setProvisioningProperties(new Property[0]);
ProvisioningConnectorConfig config2 = new ProvisioningConnectorConfig();
config2.setName("Name2");
config2.setProvisioningProperties(new Property[0]);
assertNotEquals(config1.hashCode(), config2.hashCode());
}
@Test
public void shouldReturnFalseByEqualsForDifferentNames() {
ProvisioningConnectorConfig config1 = new ProvisioningConnectorConfig();
config1.setName("Name1");
config1.setProvisioningProperties(new Property[0]);
ProvisioningConnectorConfig config2 = new ProvisioningConnectorConfig();
config2.setName("Name2");
config2.setProvisioningProperties(new Property[0]);
assertFalse(config1.equals(config2));
}
}
| lakshani/carbon-identity | components/application-mgt/org.wso2.carbon.identity.application.common/src/test/java/org/wso2/carbon/identity/application/common/model/test/ProvisioningConnectorConfigTest.java | Java | apache-2.0 | 2,091 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.jca.test;
import org.apache.jackrabbit.jca.JCAConnectionRequestInfo;
import javax.jcr.SimpleCredentials;
import java.util.HashMap;
/**
* This case executes tests on the connection request info.
*/
public final class ConnectionRequestInfoTest
extends AbstractTestCase {
private SimpleCredentials creds1 = new SimpleCredentials("user", "password".toCharArray());
private SimpleCredentials creds2 = new SimpleCredentials("user", "password".toCharArray());
private SimpleCredentials creds3 = new SimpleCredentials("another_user", "password".toCharArray());
private JCAConnectionRequestInfo info1 = new JCAConnectionRequestInfo(creds1, "default");
private JCAConnectionRequestInfo info2 = new JCAConnectionRequestInfo(creds2, "default");
private JCAConnectionRequestInfo info3 = new JCAConnectionRequestInfo(creds3, "default");
/**
* Test the JCAConnectionRequestInfo equals() method.
*/
public void testEquals() throws Exception {
assertEquals("Object must be equal to itself", info1, info1);
assertEquals("Infos with the same auth data must be equal", info1, info2);
assertTrue("Infos with different auth data must not be equal", !info1.equals(info3));
}
/**
* Test the JCAConnectionRequestInfo hashCode() method.
*/
public void testHashCode() throws Exception {
assertEquals("Object must be equal to itself", info1.hashCode(), info1.hashCode());
assertEquals("Infos with the same auth data must have same hashCode", info1.hashCode(), info2.hashCode());
assertTrue("Infos with different auth data must not have same hashCode", info1.hashCode() != info3.hashCode());
}
/**
* Tests that JCAConnectionRequestInfo works as a HashMap key correctly.
*/
    public void testPutToHashMap() throws Exception {
        HashMap<JCAConnectionRequestInfo, Object> map = new HashMap<JCAConnectionRequestInfo, Object>();
        map.put(info1, new Object());
        assertTrue("Map must contain the info", map.containsKey(info2));
    }
}
| sdmcraft/jackrabbit | jackrabbit-jca/src/test/java/org/apache/jackrabbit/jca/test/ConnectionRequestInfoTest.java | Java | apache-2.0 | 2,835 |
package rolebinding
import (
apierrors "k8s.io/apimachinery/pkg/api/errors"
metainternal "k8s.io/apimachinery/pkg/apis/meta/internalversion"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/conversion"
"k8s.io/apimachinery/pkg/runtime"
apirequest "k8s.io/apiserver/pkg/endpoints/request"
"k8s.io/apiserver/pkg/registry/rest"
restclient "k8s.io/client-go/rest"
rbacinternalversion "k8s.io/kubernetes/pkg/client/clientset_generated/internalclientset/typed/rbac/internalversion"
authclient "github.com/openshift/origin/pkg/auth/client"
authorizationapi "github.com/openshift/origin/pkg/authorization/apis/authorization"
"github.com/openshift/origin/pkg/authorization/registry/util"
utilregistry "github.com/openshift/origin/pkg/util/registry"
)
type REST struct {
privilegedClient restclient.Interface
}
var _ rest.Lister = &REST{}
var _ rest.Getter = &REST{}
var _ rest.CreaterUpdater = &REST{}
var _ rest.GracefulDeleter = &REST{}
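// NewREST returns a NoWatchStorage that proxies Origin RoleBinding operations
// onto Kubernetes RBAC role bindings, acting through a client that
// impersonates the requesting user (see getImpersonatingClient below).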
func NewREST(client restclient.Interface) utilregistry.NoWatchStorage {
return utilregistry.WrapNoWatchStorageError(&REST{privilegedClient: client})
}
func (s *REST) New() runtime.Object {
return &authorizationapi.RoleBinding{}
}
func (s *REST) NewList() runtime.Object {
return &authorizationapi.RoleBindingList{}
}
func (s *REST) List(ctx apirequest.Context, options *metainternal.ListOptions) (runtime.Object, error) {
client, err := s.getImpersonatingClient(ctx)
if err != nil {
return nil, err
}
optv1 := metav1.ListOptions{}
if err := metainternal.Convert_internalversion_ListOptions_To_v1_ListOptions(options, &optv1, nil); err != nil {
return nil, err
}
bindings, err := client.List(optv1)
if err != nil {
return nil, err
}
ret := &authorizationapi.RoleBindingList{}
for _, curr := range bindings.Items {
role, err := util.RoleBindingFromRBAC(&curr)
if err != nil {
return nil, err
}
ret.Items = append(ret.Items, *role)
}
ret.ListMeta.ResourceVersion = bindings.ResourceVersion
return ret, nil
}
func (s *REST) Get(ctx apirequest.Context, name string, options *metav1.GetOptions) (runtime.Object, error) {
client, err := s.getImpersonatingClient(ctx)
if err != nil {
return nil, err
}
ret, err := client.Get(name, *options)
if err != nil {
return nil, err
}
binding, err := util.RoleBindingFromRBAC(ret)
if err != nil {
return nil, err
}
return binding, nil
}
func (s *REST) Delete(ctx apirequest.Context, name string, options *metav1.DeleteOptions) (runtime.Object, bool, error) {
client, err := s.getImpersonatingClient(ctx)
if err != nil {
return nil, false, err
}
if err := client.Delete(name, options); err != nil {
return nil, false, err
}
return &metav1.Status{Status: metav1.StatusSuccess}, true, nil
}
func (s *REST) Create(ctx apirequest.Context, obj runtime.Object, _ bool) (runtime.Object, error) {
client, err := s.getImpersonatingClient(ctx)
if err != nil {
return nil, err
}
rb := obj.(*authorizationapi.RoleBinding)
// Default the namespace if it is not specified so conversion does not error
// Normally this is done during the REST strategy but we avoid those here to keep the proxies simple
if ns, ok := apirequest.NamespaceFrom(ctx); ok && len(ns) > 0 && len(rb.Namespace) == 0 && len(rb.RoleRef.Namespace) > 0 {
deepcopiedObj := rb.DeepCopy()
deepcopiedObj.Namespace = ns
rb = deepcopiedObj
}
convertedObj, err := util.RoleBindingToRBAC(rb)
if err != nil {
return nil, err
}
ret, err := client.Create(convertedObj)
if err != nil {
return nil, err
}
binding, err := util.RoleBindingFromRBAC(ret)
if err != nil {
return nil, err
}
return binding, nil
}
func (s *REST) Update(ctx apirequest.Context, name string, objInfo rest.UpdatedObjectInfo) (runtime.Object, bool, error) {
client, err := s.getImpersonatingClient(ctx)
if err != nil {
return nil, false, err
}
old, err := client.Get(name, metav1.GetOptions{})
if err != nil {
return nil, false, err
}
oldRoleBinding, err := util.RoleBindingFromRBAC(old)
if err != nil {
return nil, false, err
}
obj, err := objInfo.UpdatedObject(ctx, oldRoleBinding)
if err != nil {
return nil, false, err
}
updatedRoleBinding, err := util.RoleBindingToRBAC(obj.(*authorizationapi.RoleBinding))
if err != nil {
return nil, false, err
}
ret, err := client.Update(updatedRoleBinding)
if err != nil {
return nil, false, err
}
role, err := util.RoleBindingFromRBAC(ret)
if err != nil {
return nil, false, err
}
return role, false, err
}
func (s *REST) getImpersonatingClient(ctx apirequest.Context) (rbacinternalversion.RoleBindingInterface, error) {
namespace, ok := apirequest.NamespaceFrom(ctx)
if !ok {
return nil, apierrors.NewBadRequest("namespace parameter required")
}
rbacClient, err := authclient.NewImpersonatingRBACFromContext(ctx, s.privilegedClient)
if err != nil {
return nil, err
}
return rbacClient.RoleBindings(namespace), nil
}
var cloner = conversion.NewCloner()
| raffaelespazzoli/origin | pkg/authorization/registry/rolebinding/proxy.go | GO | apache-2.0 | 4,978 |
/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is
* regenerated.
*/
'use strict';
/**
* Additional information of DPM Protected item.
*
*/
class DPMProtectedItemExtendedInfo {
/**
* Create a DPMProtectedItemExtendedInfo.
* @member {object} [protectableObjectLoadPath] Attribute to provide
* information on various DBs.
* @member {boolean} [protectedProperty] To check if backup item is disk
* protected.
* @member {boolean} [isPresentOnCloud] To check if backup item is cloud
* protected.
* @member {string} [lastBackupStatus] Last backup status information on
* backup item.
* @member {date} [lastRefreshedAt] Last refresh time on backup item.
* @member {date} [oldestRecoveryPoint] Oldest cloud recovery point time.
* @member {number} [recoveryPointCount] cloud recovery point count.
* @member {date} [onPremiseOldestRecoveryPoint] Oldest disk recovery point
* time.
* @member {date} [onPremiseLatestRecoveryPoint] latest disk recovery point
* time.
* @member {number} [onPremiseRecoveryPointCount] disk recovery point count.
* @member {boolean} [isCollocated] To check if backup item is collocated.
* @member {string} [protectionGroupName] Protection group name of the backup
* item.
* @member {string} [diskStorageUsedInBytes] Used Disk storage in bytes.
* @member {string} [totalDiskStorageSizeInBytes] total Disk storage in
* bytes.
*/
constructor() {
}
/**
* Defines the metadata of DPMProtectedItemExtendedInfo
*
* @returns {object} metadata of DPMProtectedItemExtendedInfo
*
*/
mapper() {
return {
required: false,
serializedName: 'DPMProtectedItemExtendedInfo',
type: {
name: 'Composite',
className: 'DPMProtectedItemExtendedInfo',
modelProperties: {
protectableObjectLoadPath: {
required: false,
serializedName: 'protectableObjectLoadPath',
type: {
name: 'Dictionary',
value: {
required: false,
serializedName: 'StringElementType',
type: {
name: 'String'
}
}
}
},
protectedProperty: {
required: false,
serializedName: 'protected',
type: {
name: 'Boolean'
}
},
isPresentOnCloud: {
required: false,
serializedName: 'isPresentOnCloud',
type: {
name: 'Boolean'
}
},
lastBackupStatus: {
required: false,
serializedName: 'lastBackupStatus',
type: {
name: 'String'
}
},
lastRefreshedAt: {
required: false,
serializedName: 'lastRefreshedAt',
type: {
name: 'DateTime'
}
},
oldestRecoveryPoint: {
required: false,
serializedName: 'oldestRecoveryPoint',
type: {
name: 'DateTime'
}
},
recoveryPointCount: {
required: false,
serializedName: 'recoveryPointCount',
type: {
name: 'Number'
}
},
onPremiseOldestRecoveryPoint: {
required: false,
serializedName: 'onPremiseOldestRecoveryPoint',
type: {
name: 'DateTime'
}
},
onPremiseLatestRecoveryPoint: {
required: false,
serializedName: 'onPremiseLatestRecoveryPoint',
type: {
name: 'DateTime'
}
},
onPremiseRecoveryPointCount: {
required: false,
serializedName: 'onPremiseRecoveryPointCount',
type: {
name: 'Number'
}
},
isCollocated: {
required: false,
serializedName: 'isCollocated',
type: {
name: 'Boolean'
}
},
protectionGroupName: {
required: false,
serializedName: 'protectionGroupName',
type: {
name: 'String'
}
},
diskStorageUsedInBytes: {
required: false,
serializedName: 'diskStorageUsedInBytes',
type: {
name: 'String'
}
},
totalDiskStorageSizeInBytes: {
required: false,
serializedName: 'totalDiskStorageSizeInBytes',
type: {
name: 'String'
}
}
}
}
};
}
}
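// Illustrative shape of a deserialized payload (property names follow the
// serializedName values in the mapper above; the values are made up):
//
// {
//   "protected": true,
//   "isPresentOnCloud": false,
//   "lastBackupStatus": "Healthy",
//   "recoveryPointCount": 12,
//   "protectionGroupName": "PG-SQL-01"
// }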
module.exports = DPMProtectedItemExtendedInfo;
| xingwu1/azure-sdk-for-node | lib/services/recoveryServicesBackupManagement/lib/models/dPMProtectedItemExtendedInfo.js | JavaScript | apache-2.0 | 5,048 |
//===-- FreeBSDSignals.cpp --------------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#include "FreeBSDSignals.h"
using namespace lldb_private;
FreeBSDSignals::FreeBSDSignals() : UnixSignals() { Reset(); }
void FreeBSDSignals::Reset() {
UnixSignals::Reset();
// SIGNO NAME SUPPRESS STOP NOTIFY DESCRIPTION
  // ====== ============  ======== ====== ====== ===================================================
AddSignal(32, "SIGTHR", false, false, false, "thread interrupt");
AddSignal(33, "SIGLIBRT", false, false, false,
"reserved by real-time library");
AddSignal(65, "SIGRTMIN", false, false, false, "real time signal 0");
AddSignal(66, "SIGRTMIN+1", false, false, false, "real time signal 1");
AddSignal(67, "SIGRTMIN+2", false, false, false, "real time signal 2");
AddSignal(68, "SIGRTMIN+3", false, false, false, "real time signal 3");
AddSignal(69, "SIGRTMIN+4", false, false, false, "real time signal 4");
AddSignal(70, "SIGRTMIN+5", false, false, false, "real time signal 5");
AddSignal(71, "SIGRTMIN+6", false, false, false, "real time signal 6");
AddSignal(72, "SIGRTMIN+7", false, false, false, "real time signal 7");
AddSignal(73, "SIGRTMIN+8", false, false, false, "real time signal 8");
AddSignal(74, "SIGRTMIN+9", false, false, false, "real time signal 9");
AddSignal(75, "SIGRTMIN+10", false, false, false, "real time signal 10");
AddSignal(76, "SIGRTMIN+11", false, false, false, "real time signal 11");
AddSignal(77, "SIGRTMIN+12", false, false, false, "real time signal 12");
AddSignal(78, "SIGRTMIN+13", false, false, false, "real time signal 13");
AddSignal(79, "SIGRTMIN+14", false, false, false, "real time signal 14");
AddSignal(80, "SIGRTMIN+15", false, false, false, "real time signal 15");
AddSignal(81, "SIGRTMIN+16", false, false, false, "real time signal 16");
AddSignal(82, "SIGRTMIN+17", false, false, false, "real time signal 17");
AddSignal(83, "SIGRTMIN+18", false, false, false, "real time signal 18");
AddSignal(84, "SIGRTMIN+19", false, false, false, "real time signal 19");
AddSignal(85, "SIGRTMIN+20", false, false, false, "real time signal 20");
AddSignal(86, "SIGRTMIN+21", false, false, false, "real time signal 21");
AddSignal(87, "SIGRTMIN+22", false, false, false, "real time signal 22");
AddSignal(88, "SIGRTMIN+23", false, false, false, "real time signal 23");
AddSignal(89, "SIGRTMIN+24", false, false, false, "real time signal 24");
AddSignal(90, "SIGRTMIN+25", false, false, false, "real time signal 25");
AddSignal(91, "SIGRTMIN+26", false, false, false, "real time signal 26");
AddSignal(92, "SIGRTMIN+27", false, false, false, "real time signal 27");
AddSignal(93, "SIGRTMIN+28", false, false, false, "real time signal 28");
AddSignal(94, "SIGRTMIN+29", false, false, false, "real time signal 29");
AddSignal(95, "SIGRTMIN+30", false, false, false, "real time signal 30");
AddSignal(96, "SIGRTMAX-30", false, false, false, "real time signal 31");
AddSignal(97, "SIGRTMAX-29", false, false, false, "real time signal 32");
AddSignal(98, "SIGRTMAX-28", false, false, false, "real time signal 33");
AddSignal(99, "SIGRTMAX-27", false, false, false, "real time signal 34");
AddSignal(100, "SIGRTMAX-26", false, false, false, "real time signal 35");
AddSignal(101, "SIGRTMAX-25", false, false, false, "real time signal 36");
AddSignal(102, "SIGRTMAX-24", false, false, false, "real time signal 37");
AddSignal(103, "SIGRTMAX-23", false, false, false, "real time signal 38");
AddSignal(104, "SIGRTMAX-22", false, false, false, "real time signal 39");
AddSignal(105, "SIGRTMAX-21", false, false, false, "real time signal 40");
AddSignal(106, "SIGRTMAX-20", false, false, false, "real time signal 41");
AddSignal(107, "SIGRTMAX-19", false, false, false, "real time signal 42");
AddSignal(108, "SIGRTMAX-18", false, false, false, "real time signal 43");
AddSignal(109, "SIGRTMAX-17", false, false, false, "real time signal 44");
AddSignal(110, "SIGRTMAX-16", false, false, false, "real time signal 45");
AddSignal(111, "SIGRTMAX-15", false, false, false, "real time signal 46");
AddSignal(112, "SIGRTMAX-14", false, false, false, "real time signal 47");
AddSignal(113, "SIGRTMAX-13", false, false, false, "real time signal 48");
AddSignal(114, "SIGRTMAX-12", false, false, false, "real time signal 49");
AddSignal(115, "SIGRTMAX-11", false, false, false, "real time signal 50");
AddSignal(116, "SIGRTMAX-10", false, false, false, "real time signal 51");
AddSignal(117, "SIGRTMAX-9", false, false, false, "real time signal 52");
AddSignal(118, "SIGRTMAX-8", false, false, false, "real time signal 53");
AddSignal(119, "SIGRTMAX-7", false, false, false, "real time signal 54");
AddSignal(120, "SIGRTMAX-6", false, false, false, "real time signal 55");
AddSignal(121, "SIGRTMAX-5", false, false, false, "real time signal 56");
AddSignal(122, "SIGRTMAX-4", false, false, false, "real time signal 57");
AddSignal(123, "SIGRTMAX-3", false, false, false, "real time signal 58");
AddSignal(124, "SIGRTMAX-2", false, false, false, "real time signal 59");
AddSignal(125, "SIGRTMAX-1", false, false, false, "real time signal 60");
AddSignal(126, "SIGRTMAX", false, false, false, "real time signal 61");
}
| apple/swift-lldb | source/Plugins/Process/Utility/FreeBSDSignals.cpp | C++ | apache-2.0 | 5,613 |
define([
"dojo/_base/declare", // declare
"dojo/dom-construct", // domConstruct.destroy domConstruct.place
"dojo/keys", // keys.ENTER
"dojo/_base/lang",
"dojo/on",
"dojo/sniff", // has("ie") has("mozilla") has("webkit")
"dojo/_base/window", // win.withGlobal
"dojo/window", // winUtils.scrollIntoView
"../_Plugin",
"../RichText",
"../range",
"../../_base/focus"
], function(declare, domConstruct, keys, lang, on, has, win, winUtils, _Plugin, RichText, rangeapi, baseFocus){
// module:
// dijit/_editor/plugins/EnterKeyHandling
return declare("dijit._editor.plugins.EnterKeyHandling", _Plugin, {
// summary:
// This plugin tries to make all browsers behave consistently with regard to
// how ENTER behaves in the editor window. It traps the ENTER key and alters
// the way DOM is constructed in certain cases to try to commonize the generated
// DOM and behaviors across browsers.
//
// description:
// This plugin has three modes:
//
// - blockNodeForEnter=BR
// - blockNodeForEnter=DIV
// - blockNodeForEnter=P
//
// In blockNodeForEnter=P, the ENTER key starts a new
// paragraph, and shift-ENTER starts a new line in the current paragraph.
// For example, the input:
//
// | first paragraph <shift-ENTER>
// | second line of first paragraph <ENTER>
// | second paragraph
//
// will generate:
//
// | <p>
// | first paragraph
// | <br/>
// | second line of first paragraph
// | </p>
// | <p>
// | second paragraph
// | </p>
//
// In BR and DIV mode, the ENTER key conceptually goes to a new line in the
// current paragraph, and users conceptually create a new paragraph by pressing ENTER twice.
// For example, if the user enters text into an editor like this:
//
// | one <ENTER>
// | two <ENTER>
// | three <ENTER>
// | <ENTER>
// | four <ENTER>
// | five <ENTER>
// | six <ENTER>
//
// It will appear on the screen as two 'paragraphs' of three lines each. Markupwise, this generates:
//
// BR:
// | one<br/>
// | two<br/>
// | three<br/>
// | <br/>
// | four<br/>
// | five<br/>
// | six<br/>
//
// DIV:
// | <div>one</div>
// | <div>two</div>
// | <div>three</div>
// | <div> </div>
// | <div>four</div>
// | <div>five</div>
// | <div>six</div>
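		//
		//		For example, this is one way to enable P mode when creating an editor
		//		(a hypothetical setup; the node id and plugin options are illustrative):
		//
		//		|	require(["dijit/Editor", "dijit/_editor/plugins/EnterKeyHandling"], function(Editor){
		//		|		new Editor({
		//		|			extraPlugins: [{name: "dijit/_editor/plugins/EnterKeyHandling", blockNodeForEnter: "P"}]
		//		|		}, "myEditorNode");
		//		|	});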
// blockNodeForEnter: String
// This property decides the behavior of Enter key. It can be either P,
// DIV, BR, or empty (which means disable this feature). Anything else
// will trigger errors. The default is 'BR'
//
// See class description for more details.
blockNodeForEnter: 'BR',
constructor: function(args){
if(args){
if("blockNodeForEnter" in args){
args.blockNodeForEnter = args.blockNodeForEnter.toUpperCase();
}
lang.mixin(this, args);
}
},
setEditor: function(editor){
// Overrides _Plugin.setEditor().
if(this.editor === editor){
return;
}
this.editor = editor;
if(this.blockNodeForEnter == 'BR'){
				// While Moz has a mode that mostly works, it's still a little different.
				// So, try to just have a common mode and be consistent. Which means
				// we need to enable customUndo, if not already enabled.
this.editor.customUndo = true;
editor.onLoadDeferred.then(lang.hitch(this, function(d){
this.own(on(editor.document, "keydown", lang.hitch(this, function(e){
if(e.keyCode == keys.ENTER){
						// Just do it manually. The handleEnterKey has a shift mode that
						// always acts like <br>, so just use it.
var ne = lang.mixin({}, e);
ne.shiftKey = true;
if(!this.handleEnterKey(ne)){
e.stopPropagation();
e.preventDefault();
}
}
})));
if(has("ie") >= 9 && has("ie") <= 10){
this.own(on(editor.document, "paste", lang.hitch(this, function(e){
setTimeout(lang.hitch(this, function(){
// Use the old range/selection code to kick IE 9 into updating
// its range by moving it back, then forward, one 'character'.
var r = this.editor.document.selection.createRange();
r.move('character', -1);
r.select();
r.move('character', 1);
r.select();
}), 0);
})));
}
return d;
}));
}else if(this.blockNodeForEnter){
// add enter key handler
var h = lang.hitch(this, "handleEnterKey");
editor.addKeyHandler(13, 0, 0, h); //enter
editor.addKeyHandler(13, 0, 1, h); //shift+enter
this.own(this.editor.on('KeyPressed', lang.hitch(this, 'onKeyPressed')));
}
},
onKeyPressed: function(){
// summary:
// Handler for after the user has pressed a key, and the display has been updated.
// Connected to RichText's onKeyPressed() method.
// tags:
// private
if(this._checkListLater){
if(win.withGlobal(this.editor.window, 'isCollapsed', baseFocus)){ // TODO: stop using withGlobal(), and baseFocus
var liparent = this.editor.selection.getAncestorElement('LI');
if(!liparent){
// circulate the undo detection code by calling RichText::execCommand directly
RichText.prototype.execCommand.call(this.editor, 'formatblock', this.blockNodeForEnter);
// set the innerHTML of the new block node
var block = this.editor.selection.getAncestorElement(this.blockNodeForEnter);
if(block){
block.innerHTML = this.bogusHtmlContent;
if(has("ie") <= 9){
// move to the start by moving backwards one char
var r = this.editor.document.selection.createRange();
r.move('character', -1);
r.select();
}
}else{
console.error('onKeyPressed: Cannot find the new block node'); // FIXME
}
}else{
if(has("mozilla")){
if(liparent.parentNode.parentNode.nodeName == 'LI'){
liparent = liparent.parentNode.parentNode;
}
}
var fc = liparent.firstChild;
if(fc && fc.nodeType == 1 && (fc.nodeName == 'UL' || fc.nodeName == 'OL')){
liparent.insertBefore(fc.ownerDocument.createTextNode('\xA0'), fc);
var newrange = rangeapi.create(this.editor.window);
newrange.setStart(liparent.firstChild, 0);
var selection = rangeapi.getSelection(this.editor.window, true);
selection.removeAllRanges();
selection.addRange(newrange);
}
}
}
this._checkListLater = false;
}
if(this._pressedEnterInBlock){
				// the newly created block is the original current P, so we check previousSibling below
if(this._pressedEnterInBlock.previousSibling){
this.removeTrailingBr(this._pressedEnterInBlock.previousSibling);
}
delete this._pressedEnterInBlock;
}
},
// bogusHtmlContent: [private] String
// HTML to stick into a new empty block
		bogusHtmlContent: ' ',	//   (a non-breaking space)
// blockNodes: [private] Regex
// Regex for testing if a given tag is a block level (display:block) tag
blockNodes: /^(?:P|H1|H2|H3|H4|H5|H6|LI)$/,
handleEnterKey: function(e){
// summary:
// Handler for enter key events when blockNodeForEnter is DIV or P.
// description:
// Manually handle enter key event to make the behavior consistent across
// all supported browsers. See class description for details.
// tags:
// private
var selection, range, newrange, startNode, endNode, brNode, doc = this.editor.document, br, rs, txt;
if(e.shiftKey){ // shift+enter always generates <br>
var parent = this.editor.selection.getParentElement();
var header = rangeapi.getAncestor(parent, this.blockNodes);
if(header){
if(header.tagName == 'LI'){
return true; // let browser handle
}
selection = rangeapi.getSelection(this.editor.window);
range = selection.getRangeAt(0);
if(!range.collapsed){
range.deleteContents();
selection = rangeapi.getSelection(this.editor.window);
range = selection.getRangeAt(0);
}
if(rangeapi.atBeginningOfContainer(header, range.startContainer, range.startOffset)){
br = doc.createElement('br');
newrange = rangeapi.create(this.editor.window);
header.insertBefore(br, header.firstChild);
newrange.setStartAfter(br);
selection.removeAllRanges();
selection.addRange(newrange);
}else if(rangeapi.atEndOfContainer(header, range.startContainer, range.startOffset)){
newrange = rangeapi.create(this.editor.window);
br = doc.createElement('br');
header.appendChild(br);
header.appendChild(doc.createTextNode('\xA0'));
newrange.setStart(header.lastChild, 0);
selection.removeAllRanges();
selection.addRange(newrange);
}else{
rs = range.startContainer;
if(rs && rs.nodeType == 3){
// Text node, we have to split it.
txt = rs.nodeValue;
startNode = doc.createTextNode(txt.substring(0, range.startOffset));
endNode = doc.createTextNode(txt.substring(range.startOffset));
brNode = doc.createElement("br");
if(endNode.nodeValue == "" && has("webkit")){
							endNode = doc.createTextNode('\xA0');
}
domConstruct.place(startNode, rs, "after");
domConstruct.place(brNode, startNode, "after");
domConstruct.place(endNode, brNode, "after");
domConstruct.destroy(rs);
newrange = rangeapi.create(this.editor.window);
newrange.setStart(endNode, 0);
selection.removeAllRanges();
selection.addRange(newrange);
return false;
}
return true; // let browser handle
}
}else{
selection = rangeapi.getSelection(this.editor.window);
if(selection.rangeCount){
range = selection.getRangeAt(0);
if(range && range.startContainer){
if(!range.collapsed){
range.deleteContents();
selection = rangeapi.getSelection(this.editor.window);
range = selection.getRangeAt(0);
}
rs = range.startContainer;
if(rs && rs.nodeType == 3){
// Text node, we have to split it.
var endEmpty = false;
var offset = range.startOffset;
if(rs.length < offset){
								// We are not splitting the right node; try to locate the correct one
								var ret = this._adjustNodeAndOffset(rs, offset);
rs = ret.node;
offset = ret.offset;
}
txt = rs.nodeValue;
startNode = doc.createTextNode(txt.substring(0, offset));
endNode = doc.createTextNode(txt.substring(offset));
brNode = doc.createElement("br");
if(!endNode.length){
endNode = doc.createTextNode('\xA0');
endEmpty = true;
}
if(startNode.length){
domConstruct.place(startNode, rs, "after");
}else{
startNode = rs;
}
domConstruct.place(brNode, startNode, "after");
domConstruct.place(endNode, brNode, "after");
domConstruct.destroy(rs);
newrange = rangeapi.create(this.editor.window);
newrange.setStart(endNode, 0);
newrange.setEnd(endNode, endNode.length);
selection.removeAllRanges();
selection.addRange(newrange);
if(endEmpty && !has("webkit")){
this.editor.selection.remove();
}else{
this.editor.selection.collapse(true);
}
}else{
var targetNode;
if(range.startOffset >= 0){
targetNode = rs.childNodes[range.startOffset];
}
							brNode = doc.createElement("br");
							endNode = doc.createTextNode('\xA0');
if(!targetNode){
rs.appendChild(brNode);
rs.appendChild(endNode);
}else{
domConstruct.place(brNode, targetNode, "before");
domConstruct.place(endNode, brNode, "after");
}
newrange = rangeapi.create(this.editor.window);
newrange.setStart(endNode, 0);
newrange.setEnd(endNode, endNode.length);
selection.removeAllRanges();
selection.addRange(newrange);
this.editor.selection.collapse(true);
}
}
}else{
// don't change this: do not call this.execCommand, as that may have other logic in subclass
RichText.prototype.execCommand.call(this.editor, 'inserthtml', '<br>');
}
}
return false;
}
var _letBrowserHandle = true;
// first remove selection
selection = rangeapi.getSelection(this.editor.window);
range = selection.getRangeAt(0);
if(!range.collapsed){
range.deleteContents();
selection = rangeapi.getSelection(this.editor.window);
range = selection.getRangeAt(0);
}
var block = rangeapi.getBlockAncestor(range.endContainer, null, this.editor.editNode);
var blockNode = block.blockNode;
// if this is under a LI or the parent of the blockNode is LI, just let browser to handle it
if((this._checkListLater = (blockNode && (blockNode.nodeName == 'LI' || blockNode.parentNode.nodeName == 'LI')))){
if(has("mozilla")){
// press enter in middle of P may leave a trailing <br/>, let's remove it later
this._pressedEnterInBlock = blockNode;
}
// if this li only contains spaces, set the content to empty so the browser will outdent this item
if(/^(\s| | |\xA0|<span\b[^>]*\bclass=['"]Apple-style-span['"][^>]*>(\s| | |\xA0)<\/span>)?(<br>)?$/.test(blockNode.innerHTML)){
// empty LI node
blockNode.innerHTML = '';
if(has("webkit")){ // WebKit tosses the range when innerHTML is reset
newrange = rangeapi.create(this.editor.window);
newrange.setStart(blockNode, 0);
selection.removeAllRanges();
selection.addRange(newrange);
}
this._checkListLater = false; // nothing to check since the browser handles outdent
}
return true;
}
// text node directly under body, let's wrap them in a node
if(!block.blockNode || block.blockNode === this.editor.editNode){
try{
RichText.prototype.execCommand.call(this.editor, 'formatblock', this.blockNodeForEnter);
}catch(e2){ /*squelch FF3 exception bug when editor content is a single BR*/
}
// get the newly created block node
// FIXME
block = {blockNode: this.editor.selection.getAncestorElement(this.blockNodeForEnter),
blockContainer: this.editor.editNode};
if(block.blockNode){
if(block.blockNode != this.editor.editNode &&
(!(block.blockNode.textContent || block.blockNode.innerHTML).replace(/^\s+|\s+$/g, "").length)){
this.removeTrailingBr(block.blockNode);
return false;
}
}else{ // we shouldn't be here if formatblock worked
block.blockNode = this.editor.editNode;
}
selection = rangeapi.getSelection(this.editor.window);
range = selection.getRangeAt(0);
}
var newblock = doc.createElement(this.blockNodeForEnter);
newblock.innerHTML = this.bogusHtmlContent;
this.removeTrailingBr(block.blockNode);
var endOffset = range.endOffset;
var node = range.endContainer;
if(node.length < endOffset){
						// We are not checking the right node; try to locate the correct one
var ret = this._adjustNodeAndOffset(node, endOffset);
node = ret.node;
endOffset = ret.offset;
}
if(rangeapi.atEndOfContainer(block.blockNode, node, endOffset)){
if(block.blockNode === block.blockContainer){
block.blockNode.appendChild(newblock);
}else{
domConstruct.place(newblock, block.blockNode, "after");
}
_letBrowserHandle = false;
// lets move caret to the newly created block
newrange = rangeapi.create(this.editor.window);
newrange.setStart(newblock, 0);
selection.removeAllRanges();
selection.addRange(newrange);
if(this.editor.height){
winUtils.scrollIntoView(newblock);
}
}else if(rangeapi.atBeginningOfContainer(block.blockNode,
range.startContainer, range.startOffset)){
domConstruct.place(newblock, block.blockNode, block.blockNode === block.blockContainer ? "first" : "before");
if(newblock.nextSibling && this.editor.height){
// position input caret - mostly WebKit needs this
newrange = rangeapi.create(this.editor.window);
newrange.setStart(newblock.nextSibling, 0);
selection.removeAllRanges();
selection.addRange(newrange);
// browser does not scroll the caret position into view, do it manually
winUtils.scrollIntoView(newblock.nextSibling);
}
_letBrowserHandle = false;
		}else{ // press enter in the middle of P/DIV/whatever
if(block.blockNode === block.blockContainer){
block.blockNode.appendChild(newblock);
}else{
domConstruct.place(newblock, block.blockNode, "after");
}
_letBrowserHandle = false;
// Clone any block level styles.
if(block.blockNode.style){
if(newblock.style){
if(block.blockNode.style.cssText){
newblock.style.cssText = block.blockNode.style.cssText;
}
}
}
// Okay, we probably have to split.
rs = range.startContainer;
var firstNodeMoved;
if(rs && rs.nodeType == 3){
// Text node, we have to split it.
var nodeToMove, tNode;
endOffset = range.endOffset;
if(rs.length < endOffset){
							// We are not splitting the right node; try to locate the correct one
ret = this._adjustNodeAndOffset(rs, endOffset);
rs = ret.node;
endOffset = ret.offset;
}
txt = rs.nodeValue;
startNode = doc.createTextNode(txt.substring(0, endOffset));
endNode = doc.createTextNode(txt.substring(endOffset, txt.length));
// Place the split, then remove original nodes.
domConstruct.place(startNode, rs, "before");
domConstruct.place(endNode, rs, "after");
domConstruct.destroy(rs);
// Okay, we split the text. Now we need to see if we're
// parented to the block element we're splitting and if
// not, we have to split all the way up. Ugh.
var parentC = startNode.parentNode;
while(parentC !== block.blockNode){
var tg = parentC.tagName;
var newTg = doc.createElement(tg);
// Clone over any 'style' data.
if(parentC.style){
if(newTg.style){
if(parentC.style.cssText){
newTg.style.cssText = parentC.style.cssText;
}
}
}
// If font also need to clone over any font data.
if(parentC.tagName === "FONT"){
if(parentC.color){
newTg.color = parentC.color;
}
if(parentC.face){
newTg.face = parentC.face;
}
if(parentC.size){ // this check was necessary on IE
newTg.size = parentC.size;
}
}
nodeToMove = endNode;
while(nodeToMove){
tNode = nodeToMove.nextSibling;
newTg.appendChild(nodeToMove);
nodeToMove = tNode;
}
domConstruct.place(newTg, parentC, "after");
startNode = parentC;
endNode = newTg;
parentC = parentC.parentNode;
}
// Lastly, move the split out tags to the new block.
// as they should now be split properly.
nodeToMove = endNode;
if(nodeToMove.nodeType == 1 || (nodeToMove.nodeType == 3 && nodeToMove.nodeValue)){
// Non-blank text and non-text nodes need to clear out that blank space
// before moving the contents.
newblock.innerHTML = "";
}
firstNodeMoved = nodeToMove;
while(nodeToMove){
tNode = nodeToMove.nextSibling;
newblock.appendChild(nodeToMove);
nodeToMove = tNode;
}
}
//lets move caret to the newly created block
newrange = rangeapi.create(this.editor.window);
var nodeForCursor;
var innerMostFirstNodeMoved = firstNodeMoved;
if(this.blockNodeForEnter !== 'BR'){
while(innerMostFirstNodeMoved){
nodeForCursor = innerMostFirstNodeMoved;
tNode = innerMostFirstNodeMoved.firstChild;
innerMostFirstNodeMoved = tNode;
}
if(nodeForCursor && nodeForCursor.parentNode){
newblock = nodeForCursor.parentNode;
newrange.setStart(newblock, 0);
selection.removeAllRanges();
selection.addRange(newrange);
if(this.editor.height){
winUtils.scrollIntoView(newblock);
}
if(has("mozilla")){
// press enter in middle of P may leave a trailing <br/>, let's remove it later
this._pressedEnterInBlock = block.blockNode;
}
}else{
_letBrowserHandle = true;
}
}else{
newrange.setStart(newblock, 0);
selection.removeAllRanges();
selection.addRange(newrange);
if(this.editor.height){
winUtils.scrollIntoView(newblock);
}
if(has("mozilla")){
// press enter in middle of P may leave a trailing <br/>, let's remove it later
this._pressedEnterInBlock = block.blockNode;
}
}
}
return _letBrowserHandle;
},
_adjustNodeAndOffset: function(/*DomNode*/node, /*Int*/offset){
// summary:
			//		In the case there are multiple text nodes in a row, the offset may not be within the given node.
			//		If the offset is larger than the node's length, walk forward through the text siblings until
			//		reaching the text node to which the offset refers.
// node:
// The node to check.
// offset:
// The position to find within the text node
// tags:
// private.
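			//		For example (hypothetical values): given two sibling text nodes "ab" and "cd"
			//		and offset 3, this returns the "cd" node with offset 1.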
while(node.length < offset && node.nextSibling && node.nextSibling.nodeType == 3){
				// Adjust the offset and node in the case of multiple text nodes in a row
offset = offset - node.length;
node = node.nextSibling;
}
return {"node": node, "offset": offset};
},
removeTrailingBr: function(container){
// summary:
// If last child of container is a `<br>`, then remove it.
// tags:
// private
var para = /P|DIV|LI/i.test(container.tagName) ?
container : this.editor.selection.getParentOfType(container, ['P', 'DIV', 'LI']);
if(!para){
return;
}
if(para.lastChild){
if((para.childNodes.length > 1 && para.lastChild.nodeType == 3 && /^[\s\xAD]*$/.test(para.lastChild.nodeValue)) ||
para.lastChild.tagName == 'BR'){
domConstruct.destroy(para.lastChild);
}
}
if(!para.childNodes.length){
para.innerHTML = this.bogusHtmlContent;
}
}
});
});
| denov/dojo-demo | src/main/webapp/js/dijit/_editor/plugins/EnterKeyHandling.js | JavaScript | apache-2.0 | 22,062 |
/**
* Copyright (C) 2012 Iordan Iordanov
* Copyright (C) 2010 Michael A. MacDonald
*
* This is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this software; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307,
* USA.
*/
package com.iiordanov.bVNC;
import java.io.IOException;
public enum COLORMODEL {
C24bit, C256, C64, C8, C4, C2;
public int bpp() {
switch (this) {
case C24bit:
return 4;
default:
return 1;
}
}
public int[] palette() {
switch (this) {
case C24bit:
return null;
case C256:
return ColorModel256.colors;
case C64:
return ColorModel64.colors;
case C8:
return ColorModel8.colors;
case C4:
return ColorModel64.colors;
case C2:
return ColorModel8.colors;
default:
return null;
}
}
    public String nameString() {
return super.toString();
}
public void setPixelFormat(RfbConnectable rfb) throws IOException {
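        // The writeSetPixelFormat calls below follow the RFB SetPixelFormat message layout:
        // (bitsPerPixel, depth, bigEndian, trueColour, redMax, greenMax, blueMax,
        //  redShift, greenShift, blueShift, greyscale). The parameter names here are
        // assumed from the protocol layout; the trailing boolean selects greyscale rendering.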
switch (this) {
case C24bit:
// 24-bit color
rfb.writeSetPixelFormat(32, 24, false, true, 255, 255, 255, 16, 8, 0, false);
break;
case C256:
rfb.writeSetPixelFormat(8, 8, false, true, 7, 7, 3, 0, 3, 6, false);
break;
case C64:
rfb.writeSetPixelFormat(8, 6, false, true, 3, 3, 3, 4, 2, 0, false);
break;
case C8:
rfb.writeSetPixelFormat(8, 3, false, true, 1, 1, 1, 2, 1, 0, false);
break;
case C4:
// Greyscale
rfb.writeSetPixelFormat(8, 6, false, true, 3, 3, 3, 4, 2, 0, true);
break;
case C2:
// B&W
rfb.writeSetPixelFormat(8, 3, false, true, 1, 1, 1, 2, 1, 0, true);
break;
default:
// Default is 24 bit color
rfb.writeSetPixelFormat(32, 24, false, true, 255, 255, 255, 16, 8, 0, false);
break;
}
}
    @Override
    public String toString() {
switch (this) {
case C24bit:
return "24-bit color (4 bpp)";
case C256:
return "256 colors (1 bpp)";
case C64:
return "64 colors (1 bpp)";
case C8:
return "8 colors (1 bpp)";
case C4:
return "Greyscale (1 bpp)";
case C2:
return "Black & White (1 bpp)";
default:
return "24-bit color (4 bpp)";
}
}
}
| x-hansong/aSpice | src/com/iiordanov/bVNC/COLORMODEL.java | Java | apache-2.0 | 3,262 |
/*
Copyright 2015 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package v1
import (
"fmt"
"reflect"
"k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/conversion"
"k8s.io/kubernetes/pkg/runtime"
"speter.net/go/exp/math/dec/inf"
)
const (
// Annotation key used to identify mirror pods.
mirrorAnnotationKey = "kubernetes.io/config.mirror"
// Value used to identify mirror pods from pre-v1.1 kubelet.
mirrorAnnotationValue_1_0 = "mirror"
)
func addConversionFuncs(scheme *runtime.Scheme) {
// Add non-generated conversion functions
err := scheme.AddConversionFuncs(
Convert_api_Pod_To_v1_Pod,
Convert_api_PodSpec_To_v1_PodSpec,
Convert_api_ReplicationControllerSpec_To_v1_ReplicationControllerSpec,
Convert_api_ServiceSpec_To_v1_ServiceSpec,
Convert_v1_Pod_To_api_Pod,
Convert_v1_PodSpec_To_api_PodSpec,
Convert_v1_ReplicationControllerSpec_To_api_ReplicationControllerSpec,
Convert_v1_ServiceSpec_To_api_ServiceSpec,
Convert_v1_ResourceList_To_api_ResourceList,
Convert_api_VolumeSource_To_v1_VolumeSource,
Convert_v1_VolumeSource_To_api_VolumeSource,
Convert_v1_SecurityContextConstraints_To_api_SecurityContextConstraints,
Convert_api_SecurityContextConstraints_To_v1_SecurityContextConstraints,
)
if err != nil {
// If one of the conversion functions is malformed, detect it immediately.
panic(err)
}
	// Add field label conversions for kinds that have no selectable fields other than ObjectMeta.
for _, kind := range []string{
"Endpoints",
"ResourceQuota",
"PersistentVolumeClaim",
"Service",
"ServiceAccount",
"ConfigMap",
	} {
		kind := kind // capture the range variable; the closure below is invoked after the loop ends
		err = api.Scheme.AddFieldLabelConversionFunc("v1", kind,
func(label, value string) (string, string, error) {
switch label {
case "metadata.namespace",
"metadata.name":
return label, value, nil
default:
return "", "", fmt.Errorf("field label %q not supported for %q", label, kind)
}
})
if err != nil {
// If one of the conversion functions is malformed, detect it immediately.
panic(err)
}
}
// Add field conversion funcs.
err = api.Scheme.AddFieldLabelConversionFunc("v1", "Pod",
func(label, value string) (string, string, error) {
switch label {
case "metadata.name",
"metadata.namespace",
"metadata.labels",
"metadata.annotations",
"status.phase",
"status.podIP",
"spec.nodeName",
"spec.restartPolicy":
return label, value, nil
// This is for backwards compatibility with old v1 clients which send spec.host
case "spec.host":
return "spec.nodeName", value, nil
default:
return "", "", fmt.Errorf("field label not supported: %s", label)
}
})
if err != nil {
// If one of the conversion functions is malformed, detect it immediately.
panic(err)
}
err = api.Scheme.AddFieldLabelConversionFunc("v1", "Node",
func(label, value string) (string, string, error) {
switch label {
case "metadata.name":
return label, value, nil
case "spec.unschedulable":
return label, value, nil
default:
return "", "", fmt.Errorf("field label not supported: %s", label)
}
})
if err != nil {
// If one of the conversion functions is malformed, detect it immediately.
panic(err)
}
err = api.Scheme.AddFieldLabelConversionFunc("v1", "ReplicationController",
func(label, value string) (string, string, error) {
switch label {
case "metadata.name",
"metadata.namespace",
"status.replicas":
return label, value, nil
default:
return "", "", fmt.Errorf("field label not supported: %s", label)
}
})
if err != nil {
// If one of the conversion functions is malformed, detect it immediately.
panic(err)
}
err = api.Scheme.AddFieldLabelConversionFunc("v1", "Event",
func(label, value string) (string, string, error) {
switch label {
case "involvedObject.kind",
"involvedObject.namespace",
"involvedObject.name",
"involvedObject.uid",
"involvedObject.apiVersion",
"involvedObject.resourceVersion",
"involvedObject.fieldPath",
"reason",
"source",
"type",
"metadata.namespace",
"metadata.name":
return label, value, nil
default:
return "", "", fmt.Errorf("field label not supported: %s", label)
}
})
if err != nil {
// If one of the conversion functions is malformed, detect it immediately.
panic(err)
}
err = api.Scheme.AddFieldLabelConversionFunc("v1", "Namespace",
func(label, value string) (string, string, error) {
switch label {
case "status.phase",
"metadata.name":
return label, value, nil
default:
return "", "", fmt.Errorf("field label not supported: %s", label)
}
})
if err != nil {
// If one of the conversion functions is malformed, detect it immediately.
panic(err)
}
err = api.Scheme.AddFieldLabelConversionFunc("v1", "PersistentVolume",
func(label, value string) (string, string, error) {
switch label {
case "metadata.name":
return label, value, nil
default:
return "", "", fmt.Errorf("field label not supported: %s", label)
}
})
if err != nil {
// If one of the conversion functions is malformed, detect it immediately.
panic(err)
}
err = api.Scheme.AddFieldLabelConversionFunc("v1", "Secret",
func(label, value string) (string, string, error) {
switch label {
case "type",
"metadata.namespace",
"metadata.name":
return label, value, nil
default:
return "", "", fmt.Errorf("field label not supported: %s", label)
}
})
if err != nil {
// If one of the conversion functions is malformed, detect it immediately.
panic(err)
}
}
func Convert_api_ReplicationControllerSpec_To_v1_ReplicationControllerSpec(in *api.ReplicationControllerSpec, out *ReplicationControllerSpec, s conversion.Scope) error {
out.Replicas = new(int32)
*out.Replicas = int32(in.Replicas)
if in.Selector != nil {
out.Selector = make(map[string]string)
for key, val := range in.Selector {
out.Selector[key] = val
}
} else {
out.Selector = nil
}
//if in.TemplateRef != nil {
// out.TemplateRef = new(ObjectReference)
// if err := Convert_api_ObjectReference_To_v1_ObjectReference(in.TemplateRef, out.TemplateRef, s); err != nil {
// return err
// }
//} else {
// out.TemplateRef = nil
//}
if in.Template != nil {
out.Template = new(PodTemplateSpec)
if err := Convert_api_PodTemplateSpec_To_v1_PodTemplateSpec(in.Template, out.Template, s); err != nil {
return err
}
} else {
out.Template = nil
}
return nil
}
func Convert_v1_ReplicationControllerSpec_To_api_ReplicationControllerSpec(in *ReplicationControllerSpec, out *api.ReplicationControllerSpec, s conversion.Scope) error {
out.Replicas = *in.Replicas
if in.Selector != nil {
out.Selector = make(map[string]string)
for key, val := range in.Selector {
out.Selector[key] = val
}
} else {
out.Selector = nil
}
//if in.TemplateRef != nil {
// out.TemplateRef = new(api.ObjectReference)
// if err := Convert_v1_ObjectReference_To_api_ObjectReference(in.TemplateRef, out.TemplateRef, s); err != nil {
// return err
// }
//} else {
// out.TemplateRef = nil
//}
if in.Template != nil {
out.Template = new(api.PodTemplateSpec)
if err := Convert_v1_PodTemplateSpec_To_api_PodTemplateSpec(in.Template, out.Template, s); err != nil {
return err
}
} else {
out.Template = nil
}
return nil
}
// The following two PodSpec conversions are done here to support ServiceAccount
// as an alias for ServiceAccountName.
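// For example (hypothetical manifests): a v1 PodSpec that sets only the
// deprecated serviceAccount: "builder" converts to an internal PodSpec with
// ServiceAccountName == "builder", and converting back to v1 populates both
// serviceAccountName and the deprecated serviceAccount alias.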
func Convert_api_PodSpec_To_v1_PodSpec(in *api.PodSpec, out *PodSpec, s conversion.Scope) error {
if in.Volumes != nil {
out.Volumes = make([]Volume, len(in.Volumes))
for i := range in.Volumes {
if err := Convert_api_Volume_To_v1_Volume(&in.Volumes[i], &out.Volumes[i], s); err != nil {
return err
}
}
} else {
out.Volumes = nil
}
if in.Containers != nil {
out.Containers = make([]Container, len(in.Containers))
for i := range in.Containers {
if err := Convert_api_Container_To_v1_Container(&in.Containers[i], &out.Containers[i], s); err != nil {
return err
}
}
} else {
out.Containers = nil
}
out.RestartPolicy = RestartPolicy(in.RestartPolicy)
if in.TerminationGracePeriodSeconds != nil {
out.TerminationGracePeriodSeconds = new(int64)
*out.TerminationGracePeriodSeconds = *in.TerminationGracePeriodSeconds
} else {
out.TerminationGracePeriodSeconds = nil
}
if in.ActiveDeadlineSeconds != nil {
out.ActiveDeadlineSeconds = new(int64)
*out.ActiveDeadlineSeconds = *in.ActiveDeadlineSeconds
} else {
out.ActiveDeadlineSeconds = nil
}
out.DNSPolicy = DNSPolicy(in.DNSPolicy)
if in.NodeSelector != nil {
out.NodeSelector = make(map[string]string)
for key, val := range in.NodeSelector {
out.NodeSelector[key] = val
}
} else {
out.NodeSelector = nil
}
out.ServiceAccountName = in.ServiceAccountName
// DeprecatedServiceAccount is an alias for ServiceAccountName.
out.DeprecatedServiceAccount = in.ServiceAccountName
out.NodeName = in.NodeName
if in.SecurityContext != nil {
out.SecurityContext = new(PodSecurityContext)
if err := Convert_api_PodSecurityContext_To_v1_PodSecurityContext(in.SecurityContext, out.SecurityContext, s); err != nil {
return err
}
// the host namespace fields have to be handled here for backward compatibility
// with v1.0.0
out.HostPID = in.SecurityContext.HostPID
out.HostNetwork = in.SecurityContext.HostNetwork
out.HostIPC = in.SecurityContext.HostIPC
}
if in.ImagePullSecrets != nil {
out.ImagePullSecrets = make([]LocalObjectReference, len(in.ImagePullSecrets))
for i := range in.ImagePullSecrets {
if err := Convert_api_LocalObjectReference_To_v1_LocalObjectReference(&in.ImagePullSecrets[i], &out.ImagePullSecrets[i], s); err != nil {
return err
}
}
} else {
out.ImagePullSecrets = nil
}
out.Hostname = in.Hostname
out.Subdomain = in.Subdomain
// carry conversion
out.DeprecatedHost = in.NodeName
return nil
}
func Convert_v1_PodSpec_To_api_PodSpec(in *PodSpec, out *api.PodSpec, s conversion.Scope) error {
SetDefaults_PodSpec(in)
if in.Volumes != nil {
out.Volumes = make([]api.Volume, len(in.Volumes))
for i := range in.Volumes {
if err := Convert_v1_Volume_To_api_Volume(&in.Volumes[i], &out.Volumes[i], s); err != nil {
return err
}
}
} else {
out.Volumes = nil
}
if in.Containers != nil {
out.Containers = make([]api.Container, len(in.Containers))
for i := range in.Containers {
if err := Convert_v1_Container_To_api_Container(&in.Containers[i], &out.Containers[i], s); err != nil {
return err
}
}
} else {
out.Containers = nil
}
out.RestartPolicy = api.RestartPolicy(in.RestartPolicy)
if in.TerminationGracePeriodSeconds != nil {
out.TerminationGracePeriodSeconds = new(int64)
*out.TerminationGracePeriodSeconds = *in.TerminationGracePeriodSeconds
} else {
out.TerminationGracePeriodSeconds = nil
}
if in.ActiveDeadlineSeconds != nil {
out.ActiveDeadlineSeconds = new(int64)
*out.ActiveDeadlineSeconds = *in.ActiveDeadlineSeconds
} else {
out.ActiveDeadlineSeconds = nil
}
out.DNSPolicy = api.DNSPolicy(in.DNSPolicy)
if in.NodeSelector != nil {
out.NodeSelector = make(map[string]string)
for key, val := range in.NodeSelector {
out.NodeSelector[key] = val
}
} else {
out.NodeSelector = nil
}
// We support DeprecatedServiceAccount as an alias for ServiceAccountName.
// If both are specified, ServiceAccountName (the new field) wins.
out.ServiceAccountName = in.ServiceAccountName
if in.ServiceAccountName == "" {
out.ServiceAccountName = in.DeprecatedServiceAccount
}
out.NodeName = in.NodeName
// carry conversion
if in.NodeName == "" {
out.NodeName = in.DeprecatedHost
}
if in.SecurityContext != nil {
out.SecurityContext = new(api.PodSecurityContext)
if err := Convert_v1_PodSecurityContext_To_api_PodSecurityContext(in.SecurityContext, out.SecurityContext, s); err != nil {
return err
}
}
// the host namespace fields have to be handled specially for backward compatibility
// with v1.0.0
if out.SecurityContext == nil {
out.SecurityContext = new(api.PodSecurityContext)
}
out.SecurityContext.HostNetwork = in.HostNetwork
out.SecurityContext.HostPID = in.HostPID
out.SecurityContext.HostIPC = in.HostIPC
if in.ImagePullSecrets != nil {
out.ImagePullSecrets = make([]api.LocalObjectReference, len(in.ImagePullSecrets))
for i := range in.ImagePullSecrets {
if err := Convert_v1_LocalObjectReference_To_api_LocalObjectReference(&in.ImagePullSecrets[i], &out.ImagePullSecrets[i], s); err != nil {
return err
}
}
} else {
out.ImagePullSecrets = nil
}
out.Hostname = in.Hostname
out.Subdomain = in.Subdomain
return nil
}
func Convert_api_Pod_To_v1_Pod(in *api.Pod, out *Pod, s conversion.Scope) error {
if err := autoConvert_api_Pod_To_v1_Pod(in, out, s); err != nil {
return err
}
// We need to reset certain fields for mirror pods from pre-v1.1 kubelet
// (#15960).
// TODO: Remove this code after we drop support for v1.0 kubelets.
if value, ok := in.Annotations[mirrorAnnotationKey]; ok && value == mirrorAnnotationValue_1_0 {
// Reset the TerminationGracePeriodSeconds.
out.Spec.TerminationGracePeriodSeconds = nil
// Reset the resource requests.
for i := range out.Spec.Containers {
out.Spec.Containers[i].Resources.Requests = nil
}
}
return nil
}
func Convert_v1_Pod_To_api_Pod(in *Pod, out *api.Pod, s conversion.Scope) error {
return autoConvert_v1_Pod_To_api_Pod(in, out, s)
}
func Convert_api_ServiceSpec_To_v1_ServiceSpec(in *api.ServiceSpec, out *ServiceSpec, s conversion.Scope) error {
if err := autoConvert_api_ServiceSpec_To_v1_ServiceSpec(in, out, s); err != nil {
return err
}
// Publish both externalIPs and deprecatedPublicIPs fields in v1.
for _, ip := range in.ExternalIPs {
out.DeprecatedPublicIPs = append(out.DeprecatedPublicIPs, ip)
}
// Carry conversion
out.DeprecatedPortalIP = in.ClusterIP
return nil
}
func Convert_v1_ServiceSpec_To_api_ServiceSpec(in *ServiceSpec, out *api.ServiceSpec, s conversion.Scope) error {
if err := autoConvert_v1_ServiceSpec_To_api_ServiceSpec(in, out, s); err != nil {
return err
}
// Prefer the legacy deprecatedPublicIPs field, if provided.
if len(in.DeprecatedPublicIPs) > 0 {
out.ExternalIPs = nil
for _, ip := range in.DeprecatedPublicIPs {
out.ExternalIPs = append(out.ExternalIPs, ip)
}
}
return nil
}
func Convert_api_PodSecurityContext_To_v1_PodSecurityContext(in *api.PodSecurityContext, out *PodSecurityContext, s conversion.Scope) error {
out.SupplementalGroups = in.SupplementalGroups
if in.SELinuxOptions != nil {
out.SELinuxOptions = new(SELinuxOptions)
if err := Convert_api_SELinuxOptions_To_v1_SELinuxOptions(in.SELinuxOptions, out.SELinuxOptions, s); err != nil {
return err
}
} else {
out.SELinuxOptions = nil
}
if in.RunAsUser != nil {
out.RunAsUser = new(int64)
*out.RunAsUser = *in.RunAsUser
} else {
out.RunAsUser = nil
}
if in.RunAsNonRoot != nil {
out.RunAsNonRoot = new(bool)
*out.RunAsNonRoot = *in.RunAsNonRoot
} else {
out.RunAsNonRoot = nil
}
if in.FSGroup != nil {
out.FSGroup = new(int64)
*out.FSGroup = *in.FSGroup
} else {
out.FSGroup = nil
}
return nil
}
func Convert_v1_PodSecurityContext_To_api_PodSecurityContext(in *PodSecurityContext, out *api.PodSecurityContext, s conversion.Scope) error {
out.SupplementalGroups = in.SupplementalGroups
if in.SELinuxOptions != nil {
out.SELinuxOptions = new(api.SELinuxOptions)
if err := Convert_v1_SELinuxOptions_To_api_SELinuxOptions(in.SELinuxOptions, out.SELinuxOptions, s); err != nil {
return err
}
} else {
out.SELinuxOptions = nil
}
if in.RunAsUser != nil {
out.RunAsUser = new(int64)
*out.RunAsUser = *in.RunAsUser
} else {
out.RunAsUser = nil
}
if in.RunAsNonRoot != nil {
out.RunAsNonRoot = new(bool)
*out.RunAsNonRoot = *in.RunAsNonRoot
} else {
out.RunAsNonRoot = nil
}
if in.FSGroup != nil {
out.FSGroup = new(int64)
*out.FSGroup = *in.FSGroup
} else {
out.FSGroup = nil
}
return nil
}
func Convert_v1_ResourceList_To_api_ResourceList(in *ResourceList, out *api.ResourceList, s conversion.Scope) error {
if *in == nil {
return nil
}
converted := make(api.ResourceList)
for key, val := range *in {
value := val.Copy()
// TODO(#18538): We round up resource values to milli scale to maintain API compatibility.
// In the future, we should instead reject values that need rounding.
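		// For example, a quantity of 0.0005 rounds up to 0.001, and 2.0001 rounds
		// up to 2.001, while values already at milli precision (e.g. 0.250) pass
		// through unchanged.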
const milliScale = 3
value.Amount.Round(value.Amount, milliScale, inf.RoundUp)
converted[api.ResourceName(key)] = *value
}
*out = converted
return nil
}
// Convert_api_VolumeSource_To_v1_VolumeSource converts our internal representation of VolumeSource
// to its v1 representation. Used for keeping backwards compatibility for the Metadata field.
func Convert_api_VolumeSource_To_v1_VolumeSource(in *api.VolumeSource, out *VolumeSource, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*api.VolumeSource))(in)
}
if err := s.DefaultConvert(in, out, conversion.IgnoreMissingFields); err != nil {
return err
}
if in.DownwardAPI != nil {
out.Metadata = new(MetadataVolumeSource)
if err := Convert_api_DownwardAPIVolumeSource_To_v1_MetadataVolumeSource(in.DownwardAPI, out.Metadata, s); err != nil {
return err
}
}
return nil
}
// downward -> metadata (api -> v1)
func Convert_api_DownwardAPIVolumeSource_To_v1_MetadataVolumeSource(in *api.DownwardAPIVolumeSource, out *MetadataVolumeSource, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*api.DownwardAPIVolumeSource))(in)
}
if in.Items != nil {
out.Items = make([]MetadataFile, len(in.Items))
for i := range in.Items {
if err := Convert_api_DownwardAPIVolumeFile_To_v1_MetadataFile(&in.Items[i], &out.Items[i], s); err != nil {
return err
}
}
}
return nil
}
func Convert_api_DownwardAPIVolumeFile_To_v1_MetadataFile(in *api.DownwardAPIVolumeFile, out *MetadataFile, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*api.DownwardAPIVolumeFile))(in)
}
out.Name = in.Path
if err := Convert_api_ObjectFieldSelector_To_v1_ObjectFieldSelector(&in.FieldRef, &out.FieldRef, s); err != nil {
return err
}
return nil
}
// Convert_v1_VolumeSource_To_api_VolumeSource converts the v1 representation of VolumeSource
// to our internal representation. Used for keeping backwards compatibility for the Metadata field.
func Convert_v1_VolumeSource_To_api_VolumeSource(in *VolumeSource, out *api.VolumeSource, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*VolumeSource))(in)
}
if err := s.DefaultConvert(in, out, conversion.IgnoreMissingFields); err != nil {
return err
}
if in.Metadata != nil {
out.DownwardAPI = new(api.DownwardAPIVolumeSource)
if err := Convert_v1_MetadataVolumeSource_To_api_DownwardAPIVolumeSource(in.Metadata, out.DownwardAPI, s); err != nil {
return err
}
}
return nil
}
// metadata -> downward (v1 -> api)
func Convert_v1_MetadataVolumeSource_To_api_DownwardAPIVolumeSource(in *MetadataVolumeSource, out *api.DownwardAPIVolumeSource, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*MetadataVolumeSource))(in)
}
if in.Items != nil {
out.Items = make([]api.DownwardAPIVolumeFile, len(in.Items))
for i := range in.Items {
if err := Convert_v1_MetadataFile_To_api_DownwardAPIVolumeFile(&in.Items[i], &out.Items[i], s); err != nil {
return err
}
}
}
return nil
}
func Convert_v1_MetadataFile_To_api_DownwardAPIVolumeFile(in *MetadataFile, out *api.DownwardAPIVolumeFile, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*MetadataFile))(in)
}
out.Path = in.Name
if err := Convert_v1_ObjectFieldSelector_To_api_ObjectFieldSelector(&in.FieldRef, &out.FieldRef, s); err != nil {
return err
}
return nil
}
func Convert_v1_SecurityContextConstraints_To_api_SecurityContextConstraints(in *SecurityContextConstraints, out *api.SecurityContextConstraints, s conversion.Scope) error {
return autoConvert_v1_SecurityContextConstraints_To_api_SecurityContextConstraints(in, out, s)
}
func Convert_api_SecurityContextConstraints_To_v1_SecurityContextConstraints(in *api.SecurityContextConstraints, out *SecurityContextConstraints, s conversion.Scope) error {
if err := autoConvert_api_SecurityContextConstraints_To_v1_SecurityContextConstraints(in, out, s); err != nil {
return err
}
if in.Volumes != nil {
for _, v := range in.Volumes {
// set the Allow* fields based on the existence in the volume slice
switch v {
case api.FSTypeHostPath, api.FSTypeAll:
out.AllowHostDirVolumePlugin = true
}
}
}
return nil
}
| danmcp/source-to-image | vendor/k8s.io/kubernetes/pkg/api/v1/conversion.go | GO | apache-2.0 | 21,451 |
/*
* Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.uci.ics.hyracks.hdfs.api;
import edu.uci.ics.hyracks.api.comm.IFrameWriter;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
/**
* Users need to implement this interface to use the HDFSReadOperatorDescriptor.
*
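 * <p>
 * A minimal sketch of an implementation (hypothetical; it parses Hadoop text
 * lines keyed by byte offsets, with the tuple-building details left as comments):
 *
 * <pre>
 * public class NoOpKeyValueParser implements IKeyValueParser&lt;LongWritable, Text&gt; {
 *     public void open(IFrameWriter writer) throws HyracksDataException {
 *         // allocate a frame and tuple appender here
 *     }
 *
 *     public void parse(LongWritable key, Text value, IFrameWriter writer, String fileString)
 *             throws HyracksDataException {
 *         // build a tuple from the value, append it, and flush the frame to the writer when full
 *     }
 *
 *     public void close(IFrameWriter writer) throws HyracksDataException {
 *         // flush any residual tuples before the operator closes
 *     }
 * }
 * </pre>
 *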
* @param <K>
* the key type
* @param <V>
* the value type
*/
public interface IKeyValueParser<K, V> {
/**
* Initialize the key value parser.
*
* @param writer
* The hyracks writer for outputting data.
* @throws HyracksDataException
*/
public void open(IFrameWriter writer) throws HyracksDataException;
    /**
     * Parse one key-value pair into tuples and push them to the writer.
     *
     * @param key
     *            the key of the input pair
     * @param value
     *            the value of the input pair
     * @param writer
     *            The hyracks writer for outputting data.
     * @param fileString
     *            the file the pair was read from
     * @throws HyracksDataException
     */
public void parse(K key, V value, IFrameWriter writer, String fileString) throws HyracksDataException;
/**
* Flush the residual tuples in the internal buffer to the writer.
* This method is called in the close() of HDFSReadOperatorDescriptor.
*
* @param writer
* The hyracks writer for outputting data.
* @throws HyracksDataException
*/
public void close(IFrameWriter writer) throws HyracksDataException;
}
| ilovesoup/hyracks | hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/api/IKeyValueParser.java | Java | apache-2.0 | 1,862 |
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2013.12.11 at 12:17:22 PM IST
//
package org.wso2.developerstudio.eclipse.security.project.model;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElements;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element ref="{}service" maxOccurs="unbounded"/>
* <choice maxOccurs="unbounded">
* <element ref="{}module" minOccurs="0"/>
* <element ref="{}parameter" minOccurs="0"/>
* </choice>
* </sequence>
* <attribute name="hashValue" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" />
* <attribute name="name" use="required" type="{http://www.w3.org/2001/XMLSchema}NCName" />
* <attribute name="successfullyAdded" use="required" type="{http://www.w3.org/2001/XMLSchema}boolean" />
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
"service",
"moduleOrParameter"
})
@XmlRootElement(name = "serviceGroup", namespace = "")
public class ServiceGroup {
@XmlElement(namespace = "", required = true)
protected List<Service> service;
@XmlElements({
@XmlElement(name = "module", namespace = "", type = Module.class),
@XmlElement(name = "parameter", namespace = "", type = Parameter.class)
})
protected List<Object> moduleOrParameter;
@XmlAttribute(name = "hashValue")
@XmlSchemaType(name = "anySimpleType")
protected String hashValue;
@XmlAttribute(name = "name", required = true)
@XmlJavaTypeAdapter(CollapsedStringAdapter.class)
@XmlSchemaType(name = "NCName")
protected String name;
@XmlAttribute(name = "successfullyAdded", required = true)
protected boolean successfullyAdded;
/**
* Gets the value of the service property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the service property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getService().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link Service }
*
*
*/
public List<Service> getService() {
if (service == null) {
service = new ArrayList<Service>();
}
return this.service;
}
/**
* Gets the value of the moduleOrParameter property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the moduleOrParameter property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getModuleOrParameter().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link Module }
* {@link Parameter }
*
*
*/
public List<Object> getModuleOrParameter() {
if (moduleOrParameter == null) {
moduleOrParameter = new ArrayList<Object>();
}
return this.moduleOrParameter;
}
/**
* Gets the value of the hashValue property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getHashValue() {
return hashValue;
}
/**
* Sets the value of the hashValue property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setHashValue(String value) {
this.hashValue = value;
}
/**
* Gets the value of the name property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getName() {
return name;
}
/**
* Sets the value of the name property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setName(String value) {
this.name = value;
}
/**
* Gets the value of the successfullyAdded property.
*
*/
public boolean isSuccessfullyAdded() {
return successfullyAdded;
}
/**
* Sets the value of the successfullyAdded property.
*
*/
public void setSuccessfullyAdded(boolean value) {
this.successfullyAdded = value;
}
}
| knadikari/developer-studio | common/org.wso2.developerstudio.eclipse.artifact.security/src/org/wso2/developerstudio/eclipse/security/project/model/ServiceGroup.java | Java | apache-2.0 | 5,773 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.store.avro;
import io.netty.buffer.DrillBuf;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.List;
import java.util.Map.Entry;
import java.util.concurrent.TimeUnit;
import java.security.PrivilegedExceptionAction;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Type;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.generic.GenericArray;
import org.apache.avro.generic.GenericContainer;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.mapred.FsInput;
import org.apache.avro.util.Utf8;
import org.apache.drill.common.exceptions.DrillRuntimeException;
import org.apache.drill.common.exceptions.ExecutionSetupException;
import org.apache.drill.common.expression.PathSegment;
import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.exec.expr.holders.BigIntHolder;
import org.apache.drill.exec.expr.holders.BitHolder;
import org.apache.drill.exec.expr.holders.Float4Holder;
import org.apache.drill.exec.expr.holders.Float8Holder;
import org.apache.drill.exec.expr.holders.IntHolder;
import org.apache.drill.exec.expr.holders.VarBinaryHolder;
import org.apache.drill.exec.expr.holders.VarCharHolder;
import org.apache.drill.exec.ops.FragmentContext;
import org.apache.drill.exec.ops.OperatorContext;
import org.apache.drill.exec.physical.impl.OutputMutator;
import org.apache.drill.exec.store.AbstractRecordReader;
import org.apache.drill.exec.store.RecordReader;
import org.apache.drill.exec.util.ImpersonationUtil;
import org.apache.drill.exec.vector.complex.impl.VectorContainerWriter;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import com.google.common.base.Charsets;
import com.google.common.base.Stopwatch;
import org.apache.hadoop.security.UserGroupInformation;
/**
* A RecordReader implementation for Avro data files.
*
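 * <p>
 * The reader honors the given byte range: setup() syncs the underlying Avro
 * DataFileReader to the first sync marker at or after {@code start}, and next()
 * stops emitting once the reader moves past {@code start + length}, so adjacent
 * splits of one file do not produce duplicate records.
 *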
* @see RecordReader
*/
public class AvroRecordReader extends AbstractRecordReader {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(AvroRecordReader.class);
private final Path hadoop;
private final long start;
private final long end;
private DrillBuf buffer;
private VectorContainerWriter writer;
private DataFileReader<GenericContainer> reader = null;
private OperatorContext operatorContext;
private FileSystem fs;
private final String opUserName;
private final String queryUserName;
private static final int DEFAULT_BATCH_SIZE = 1000;
public AvroRecordReader(final FragmentContext fragmentContext,
final String inputPath,
final long start,
final long length,
final FileSystem fileSystem,
final List<SchemaPath> projectedColumns,
final String userName) {
this(fragmentContext, inputPath, start, length, fileSystem, projectedColumns, userName, DEFAULT_BATCH_SIZE);
}
public AvroRecordReader(final FragmentContext fragmentContext,
final String inputPath,
final long start,
final long length,
final FileSystem fileSystem,
List<SchemaPath> projectedColumns,
final String userName,
final int defaultBatchSize) {
hadoop = new Path(inputPath);
this.start = start;
this.end = start + length;
buffer = fragmentContext.getManagedBuffer();
this.fs = fileSystem;
this.opUserName = userName;
this.queryUserName = fragmentContext.getQueryUserName();
setColumns(projectedColumns);
}
    private DataFileReader<GenericContainer> getReader(final Path hadoop, final FileSystem fs) throws ExecutionSetupException {
        try {
            final UserGroupInformation ugi = ImpersonationUtil.createProxyUgi(this.opUserName, this.queryUserName);
            return ugi.doAs(new PrivilegedExceptionAction<DataFileReader<GenericContainer>>() {
                @Override
                public DataFileReader<GenericContainer> run() throws Exception {
                    return new DataFileReader<>(new FsInput(hadoop, fs.getConf()), new GenericDatumReader<GenericContainer>());
                }
            });
} catch (IOException | InterruptedException e) {
throw new ExecutionSetupException(
String.format("Error in creating avro reader for file: %s", hadoop), e);
}
}
@Override
public void setup(final OperatorContext context, final OutputMutator output) throws ExecutionSetupException {
operatorContext = context;
writer = new VectorContainerWriter(output);
try {
reader = getReader(hadoop, fs);
logger.debug("Processing file : {}, start position : {}, end position : {} ", hadoop, start, end);
reader.sync(this.start);
} catch (IOException e) {
throw new ExecutionSetupException(e);
}
}
@Override
public int next() {
final Stopwatch watch = new Stopwatch().start();
if (reader == null) {
throw new IllegalStateException("Avro reader is not open.");
}
if (!reader.hasNext()) {
return 0;
}
int recordCount = 0;
writer.allocate();
writer.reset();
try {
// XXX - Implement batch size
for (GenericContainer container = null; reader.hasNext() && !reader.pastSync(end); recordCount++) {
writer.setPosition(recordCount);
container = reader.next(container);
processRecord(container, container.getSchema());
}
writer.setValueCount(recordCount);
} catch (IOException e) {
throw new DrillRuntimeException(e);
}
logger.debug("Read {} records in {} ms", recordCount, watch.elapsed(TimeUnit.MILLISECONDS));
return recordCount;
}
private void processRecord(final GenericContainer container, final Schema schema) {
final Schema.Type type = schema.getType();
switch (type) {
case RECORD:
process(container, schema, null, new MapOrListWriter(writer.rootAsMap()));
break;
default:
throw new DrillRuntimeException("Root object must be record type. Found: " + type);
}
}
private void process(final Object value, final Schema schema, final String fieldName, MapOrListWriter writer) {
if (value == null) {
return;
}
final Schema.Type type = schema.getType();
switch (type) {
case RECORD:
// list field of MapOrListWriter will be non null when we want to store array of maps/records.
MapOrListWriter _writer = writer;
for (final Schema.Field field : schema.getFields()) {
if (field.schema().getType() == Schema.Type.RECORD ||
(field.schema().getType() == Schema.Type.UNION &&
field.schema().getTypes().get(0).getType() == Schema.Type.NULL &&
field.schema().getTypes().get(1).getType() == Schema.Type.RECORD)) {
_writer = writer.map(field.name());
}
process(((GenericRecord) value).get(field.name()), field.schema(), field.name(), _writer);
}
break;
case ARRAY:
assert fieldName != null;
final GenericArray array = (GenericArray) value;
Schema elementSchema = array.getSchema().getElementType();
Type elementType = elementSchema.getType();
if (elementType == Schema.Type.RECORD || elementType == Schema.Type.MAP){
writer = writer.list(fieldName).listoftmap(fieldName);
} else {
writer = writer.list(fieldName);
}
writer.start();
for (final Object o : array) {
process(o, elementSchema, fieldName, writer);
}
writer.end();
break;
case UNION:
// currently supporting only nullable union (optional fields) like ["null", "some-type"].
if (schema.getTypes().get(0).getType() != Schema.Type.NULL) {
throw new UnsupportedOperationException("Avro union type must be of the format : [\"null\", \"some-type\"]");
}
process(value, schema.getTypes().get(1), fieldName, writer);
break;
case MAP:
@SuppressWarnings("unchecked")
final HashMap<Object, Object> map = (HashMap<Object, Object>) value;
Schema valueSchema = schema.getValueType();
writer = writer.map(fieldName);
writer.start();
for (Entry<Object, Object> entry : map.entrySet()) {
process(entry.getValue(), valueSchema, entry.getKey().toString(), writer);
}
writer.end();
break;
case FIXED:
throw new UnsupportedOperationException("Unimplemented type: " + type.toString());
case ENUM: // Enum symbols are strings
case NULL: // Treat null type as a primitive
default:
assert fieldName != null;
if (writer.isMapWriter()) {
SchemaPath path;
if (writer.map.getField().getPath().getRootSegment().getPath().equals("")) {
path = new SchemaPath(new PathSegment.NameSegment(fieldName));
} else {
path = writer.map.getField().getPath().getChild(fieldName);
}
if (!selected(path)) {
break;
}
}
processPrimitive(value, schema.getType(), fieldName, writer);
break;
}
}
private void processPrimitive(final Object value, final Schema.Type type, final String fieldName,
final MapOrListWriter writer) {
if (value == null) {
return;
}
switch (type) {
case STRING:
byte[] binary = null;
if (value instanceof Utf8) {
binary = ((Utf8) value).getBytes();
} else {
binary = value.toString().getBytes(Charsets.UTF_8);
}
final int length = binary.length;
final VarCharHolder vh = new VarCharHolder();
ensure(length);
buffer.setBytes(0, binary);
vh.buffer = buffer;
vh.start = 0;
vh.end = length;
writer.varChar(fieldName).write(vh);
break;
case INT:
final IntHolder ih = new IntHolder();
ih.value = (Integer) value;
writer.integer(fieldName).write(ih);
break;
case LONG:
final BigIntHolder bh = new BigIntHolder();
bh.value = (Long) value;
writer.bigInt(fieldName).write(bh);
break;
case FLOAT:
final Float4Holder fh = new Float4Holder();
fh.value = (Float) value;
writer.float4(fieldName).write(fh);
break;
case DOUBLE:
final Float8Holder f8h = new Float8Holder();
f8h.value = (Double) value;
writer.float8(fieldName).write(f8h);
break;
case BOOLEAN:
final BitHolder bit = new BitHolder();
bit.value = (Boolean) value ? 1 : 0;
writer.bit(fieldName).write(bit);
break;
case BYTES:
// XXX - Not sure if this is correct. Nothing prints from sqlline for byte fields.
final VarBinaryHolder vb = new VarBinaryHolder();
final ByteBuffer buf = (ByteBuffer) value;
final byte[] bytes = buf.array();
ensure(bytes.length);
buffer.setBytes(0, bytes);
vb.buffer = buffer;
vb.start = 0;
vb.end = bytes.length;
writer.binary(fieldName).write(vb);
break;
case NULL:
// Nothing to do for null type
break;
case ENUM:
final String symbol = value.toString();
      // The Charset overload cannot throw, unlike getBytes(String).
      final byte[] b = symbol.getBytes(Charsets.UTF_8);
final VarCharHolder vch = new VarCharHolder();
ensure(b.length);
buffer.setBytes(0, b);
vch.buffer = buffer;
vch.start = 0;
vch.end = b.length;
writer.varChar(fieldName).write(vch);
break;
default:
throw new DrillRuntimeException("Unhandled Avro type: " + type.toString());
}
}
private boolean selected(SchemaPath field) {
if (isStarQuery()) {
return true;
}
for (final SchemaPath sp : getColumns()) {
if (sp.contains(field)) {
return true;
}
}
return false;
}
private void ensure(final int length) {
buffer = buffer.reallocIfNeeded(length);
}
@Override
public void close() {
if (reader != null) {
try {
reader.close();
} catch (IOException e) {
logger.warn("Error closing Avro reader", e);
} finally {
reader = null;
}
}
}
}
| mehant/drill | exec/java-exec/src/main/java/org/apache/drill/exec/store/avro/AvroRecordReader.java | Java | apache-2.0 | 13,522 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.oozie.ambari.view.assets;
import com.google.gson.Gson;
import com.google.gson.JsonElement;
import com.google.gson.JsonParser;
import org.apache.ambari.view.ViewContext;
import org.apache.oozie.ambari.view.*;
import org.apache.oozie.ambari.view.assets.model.ActionAsset;
import org.apache.oozie.ambari.view.assets.model.ActionAssetDefinition;
import org.apache.oozie.ambari.view.assets.model.AssetDefintion;
import org.apache.oozie.ambari.view.exception.ErrorCode;
import org.apache.oozie.ambari.view.exception.WfmException;
import org.apache.oozie.ambari.view.exception.WfmWebException;
import org.apache.oozie.ambari.view.model.APIResult;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.ws.rs.*;
import javax.ws.rs.core.*;
import java.io.IOException;
import java.util.*;
import static org.apache.oozie.ambari.view.Constants.*;
public class AssetResource {
private final static Logger LOGGER = LoggerFactory
.getLogger(AssetResource.class);
private final AssetService assetService;
private final ViewContext viewContext;
private final HDFSFileUtils hdfsFileUtils;
private final OozieUtils oozieUtils = new OozieUtils();
private final OozieDelegate oozieDelegate;
public AssetResource(ViewContext viewContext) {
this.viewContext = viewContext;
this.assetService = new AssetService(viewContext);
hdfsFileUtils = new HDFSFileUtils(viewContext);
oozieDelegate = new OozieDelegate(viewContext);
}
@GET
public Response getAssets() {
try {
Collection<ActionAsset> assets = assetService.getAssets();
APIResult result = new APIResult();
result.setStatus(APIResult.Status.SUCCESS);
result.getPaging().setTotal(assets != null ? assets.size() : 0L);
result.setData(assets);
return Response.ok(result).build();
} catch (Exception ex) {
LOGGER.error(ex.getMessage(),ex);
throw new WfmWebException(ex);
}
}
@GET
@Path("/mine")
public Response getMyAssets() {
try {
Collection<ActionAsset> assets = assetService.getMyAssets();
APIResult result = new APIResult();
result.setStatus(APIResult.Status.SUCCESS);
result.getPaging().setTotal(assets != null ? assets.size() : 0L);
result.setData(assets);
return Response.ok(result).build();
} catch (Exception ex) {
LOGGER.error(ex.getMessage(),ex);
throw new WfmWebException(ex);
}
}
@POST
public Response saveAsset(@Context HttpHeaders headers,
@QueryParam("id") String id, @Context UriInfo ui, String body) {
try {
Gson gson = new Gson();
AssetDefintion assetDefinition = gson.fromJson(body, AssetDefintion.class);
Map<String, String> validateAsset = validateAsset(headers,
assetDefinition.getDefinition(), ui.getQueryParameters());
if (!STATUS_OK.equals(validateAsset.get(STATUS_KEY))) {
throw new WfmWebException(ErrorCode.ASSET_INVALID_FROM_OOZIE);
}
assetService.saveAsset(id, viewContext.getUsername(), assetDefinition);
APIResult result = new APIResult();
result.setStatus(APIResult.Status.SUCCESS);
return Response.ok(result).build();
} catch (WfmWebException ex) {
LOGGER.error(ex.getMessage(),ex);
throw ex;
} catch (Exception ex) {
LOGGER.error(ex.getMessage(),ex);
throw new WfmWebException(ex);
}
}
private List<String> getAsList(String string) {
ArrayList<String> li = new ArrayList<>(1);
li.add(string);
return li;
}
public Map<String, String> validateAsset(HttpHeaders headers,
String postBody, MultivaluedMap<String, String> queryParams) {
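    // Strategy: wrap the asset definition in a minimal workflow, stage it at a temporary
    // HDFS path, submit it to Oozie as a dry run, and treat an "id" in the JSON response
    // as proof that Oozie accepted the definition.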
String workflowXml = oozieUtils.generateWorkflowXml(postBody);
Map<String, String> result = new HashMap<>();
String tempWfPath = "/tmp" + "/tmpooziewfs/tempwf_" + Math.round(Math.random() * 100000) + ".xml";
try {
hdfsFileUtils.writeToFile(tempWfPath, workflowXml, true);
} catch (IOException ex) {
LOGGER.error(ex.getMessage(),ex);
throw new WfmWebException(ex, ErrorCode.FILE_ACCESS_UNKNOWN_ERROR);
}
queryParams.put("oozieparam.action", getAsList("dryrun"));
queryParams.put("oozieconfig.rerunOnFailure", getAsList("false"));
queryParams.put("oozieconfig.useSystemLibPath", getAsList("true"));
queryParams.put("resourceManager", getAsList("useDefault"));
String dryRunResp = oozieDelegate.submitWorkflowJobToOozie(headers,
tempWfPath, queryParams, JobType.WORKFLOW);
LOGGER.info(String.format("resp from validating asset=[%s]", dryRunResp));
try {
hdfsFileUtils.deleteFile(tempWfPath);
} catch (IOException ex) {
LOGGER.error(ex.getMessage(),ex);
throw new WfmWebException(ex, ErrorCode.FILE_ACCESS_UNKNOWN_ERROR);
}
if (dryRunResp != null && dryRunResp.trim().startsWith("{")) {
JsonElement jsonElement = new JsonParser().parse(dryRunResp);
JsonElement idElem = jsonElement.getAsJsonObject().get("id");
if (idElem != null) {
result.put(STATUS_KEY, STATUS_OK);
} else {
result.put(STATUS_KEY, STATUS_FAILED);
result.put(MESSAGE_KEY, dryRunResp);
}
} else {
result.put(STATUS_KEY, STATUS_FAILED);
result.put(MESSAGE_KEY, dryRunResp);
}
return result;
}
@GET
@Path("/assetNameAvailable")
public Response assetNameAvailable(@QueryParam("name") String name){
try {
boolean available = assetService.isAssetNameAvailable(name);
return Response.ok(available).build();
}catch (Exception ex){
LOGGER.error(ex.getMessage(),ex);
throw new WfmWebException(ex);
}
}
@GET
@Path("/{id}")
public Response getAssetDetail(@PathParam("id") String id) {
try {
AssetDefintion assetDefinition = assetService.getAssetDetail(id);
APIResult result = new APIResult();
result.setStatus(APIResult.Status.SUCCESS);
result.setData(assetDefinition);
return Response.ok(result).build();
} catch (Exception ex) {
LOGGER.error(ex.getMessage(),ex);
throw new WfmWebException(ex);
}
}
@GET
@Path("/definition/id}")
public Response getAssetDefinition(@PathParam("defnitionId") String id) {
try {
ActionAssetDefinition assetDefinition = assetService.getAssetDefinition(id);
APIResult result = new APIResult();
result.setStatus(APIResult.Status.SUCCESS);
result.setData(assetDefinition);
return Response.ok(result).build();
} catch (Exception ex) {
LOGGER.error(ex.getMessage(),ex);
throw new WfmWebException(ex);
}
}
@DELETE
@Path("/{id}")
public Response delete(@PathParam("id") String id) {
try {
ActionAsset asset = assetService.getAsset(id);
if (asset == null) {
throw new WfmWebException(ErrorCode.ASSET_NOT_EXIST);
}
if (!viewContext.getUsername().equals(asset.getOwner())){
throw new WfmWebException(ErrorCode.PERMISSION_ERROR);
}
assetService.deleteAsset(id);
APIResult result = new APIResult();
result.setStatus(APIResult.Status.SUCCESS);
return Response.ok(result).build();
} catch (WfmWebException ex) {
LOGGER.error(ex.getMessage(),ex);
throw ex;
} catch (Exception ex) {
LOGGER.error(ex.getMessage(),ex);
throw new WfmWebException(ex);
}
}
}
| arenadata/ambari | contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/assets/AssetResource.java | Java | apache-2.0 | 8,219 |
/**
* @copyright
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
* @endcopyright
*
* @file org_apache_subversion_javahl_types_Version.cpp
* @brief Implementation of the native methods in the Java class Version.
*/
#include "../include/org_apache_subversion_javahl_types_Version.h"
#include "JNIStackElement.h"
#include "svn_version.h"
JNIEXPORT jint JNICALL
Java_org_apache_subversion_javahl_types_Version_getMajor(JNIEnv *env,
jobject jthis)
{
JNIEntry(Version, getMajor);
return SVN_VER_MAJOR;
}
JNIEXPORT jint JNICALL
Java_org_apache_subversion_javahl_types_Version_getMinor(JNIEnv *env,
jobject jthis)
{
JNIEntry(Version, getMinor);
return SVN_VER_MINOR;
}
JNIEXPORT jint JNICALL
Java_org_apache_subversion_javahl_types_Version_getPatch(JNIEnv *env,
jobject jthis)
{
JNIEntry(Version, getPatch);
return SVN_VER_PATCH;
}
JNIEXPORT jstring JNICALL
Java_org_apache_subversion_javahl_types_Version_getTag(JNIEnv *env,
jobject jthis)
{
JNIEntry(Version, getTag);
jstring tag = JNIUtil::makeJString(SVN_VER_TAG);
if (JNIUtil::isJavaExceptionThrown())
return NULL;
return tag;
}
JNIEXPORT jstring JNICALL
Java_org_apache_subversion_javahl_types_Version_getNumberTag(JNIEnv *env,
jobject jthis)
{
JNIEntry(Version, getNumberTag);
jstring numtag = JNIUtil::makeJString(SVN_VER_NUMTAG);
if (JNIUtil::isJavaExceptionThrown())
return NULL;
return numtag;
}
| centic9/subversion-ppa | subversion/bindings/javahl/native/org_apache_subversion_javahl_types_Version.cpp | C++ | apache-2.0 | 2,343 |
/****************************************************************
* Licensed to the Apache Software Foundation (ASF) under one *
* or more contributor license agreements. See the NOTICE file *
* distributed with this work for additional information *
* regarding copyright ownership. The ASF licenses this file *
* to you under the Apache License, Version 2.0 (the *
* "License"); you may not use this file except in compliance *
* with the License. You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, *
* software distributed under the License is distributed on an *
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *
* KIND, either express or implied. See the License for the *
* specific language governing permissions and limitations *
* under the License. *
****************************************************************/
package org.apache.james.smtpserver.netty;
import org.apache.james.lifecycle.api.LifecycleUtil;
import org.apache.james.protocols.api.Encryption;
import org.apache.james.protocols.api.Protocol;
import org.apache.james.protocols.api.ProtocolSession.State;
import org.apache.james.protocols.netty.BasicChannelUpstreamHandler;
import org.apache.james.protocols.smtp.SMTPSession;
import org.apache.james.smtpserver.SMTPConstants;
import org.jboss.netty.channel.ChannelHandler.Sharable;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelUpstreamHandler;
import org.slf4j.Logger;
/**
* {@link ChannelUpstreamHandler} which is used by the SMTPServer
*/
@Sharable
public class SMTPChannelUpstreamHandler extends BasicChannelUpstreamHandler {
public SMTPChannelUpstreamHandler(Protocol protocol, Logger logger, Encryption encryption) {
super(protocol, encryption);
}
public SMTPChannelUpstreamHandler(Protocol protocol, Logger logger) {
super(protocol);
}
/**
* Cleanup temporary files
*
* @param ctx
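     *            the channel handler context whose attached SMTP session's
     *            transaction-scoped attachments are disposed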
*/
protected void cleanup(ChannelHandlerContext ctx) {
// Make sure we dispose everything on exit on session close
SMTPSession smtpSession = (SMTPSession) ctx.getAttachment();
if (smtpSession != null) {
LifecycleUtil.dispose(smtpSession.getAttachment(SMTPConstants.MAIL, State.Transaction));
LifecycleUtil.dispose(smtpSession.getAttachment(SMTPConstants.DATA_MIMEMESSAGE_STREAMSOURCE, State.Transaction));
}
super.cleanup(ctx);
}
}
| chibenwa/james | protocols/protocols-smtp/src/main/java/org/apache/james/smtpserver/netty/SMTPChannelUpstreamHandler.java | Java | apache-2.0 | 2,858 |
<?php
/**
* The foo test class
*
* @author mepeisen
*/
class FooTest extends PHPUnit_Framework_TestCase
{
    /**
     * tests the getFoo method of the generated classes
     */
public function testFoo()
{
include "folderA/MyClassA.php";
$o = new folderA\MyMavenTestClassA();
$this->assertEquals("foo", $o->getFoo());
include "folderB/MyClassB.php";
$o = new folderB\MyMavenTestClassB();
$this->assertEquals("foo", $o->getFoo());
}
} | Vaysman/maven-php-plugin | maven-plugins/it/src/test/resources/org/phpmaven/test/projects/mojos-phar/phar-with-dep1-folders/src/test/php/FooTest.php | PHP | apache-2.0 | 423 |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.credential.store.store.impl.db;
import org.apache.airavata.common.utils.DBUtil;
import org.apache.airavata.credential.store.credential.CommunityUser;
import org.apache.airavata.credential.store.store.CredentialStoreException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
/**
* Data access class for community_user table.
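 * <p>Typical usage, as an illustrative sketch ({@code token} and {@code connection} are
 * assumed to come from the surrounding credential-store code, and the checked
 * {@code CredentialStoreException} must be handled by the caller):</p>
 * <pre>{@code
 * CommunityUserDAO dao = new CommunityUserDAO();
 * dao.addCommunityUser(new CommunityUser("gateway", "user", "user@example.org"), token, connection);
 * CommunityUser user = dao.getCommunityUser("gateway", "user", connection);
 * }</pre>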
*/
public class CommunityUserDAO extends ParentDAO {
public CommunityUserDAO() {
super();
}
public void addCommunityUser(CommunityUser user, String token, Connection connection)
throws CredentialStoreException {
String sql = "INSERT INTO COMMUNITY_USER VALUES (?, ?, ?, ?)";
PreparedStatement preparedStatement = null;
try {
preparedStatement = connection.prepareStatement(sql);
preparedStatement.setString(1, user.getGatewayName());
preparedStatement.setString(2, user.getUserName());
preparedStatement.setString(3, token);
preparedStatement.setString(4, user.getUserEmail());
preparedStatement.executeUpdate();
connection.commit();
} catch (SQLException e) {
StringBuilder stringBuilder = new StringBuilder("Error persisting community user.");
stringBuilder.append("gateway - ").append(user.getGatewayName());
stringBuilder.append("community user name - ").append(user.getUserName());
stringBuilder.append("community user email - ").append(user.getUserEmail());
stringBuilder.append("token id - ").append(token);
log.error(stringBuilder.toString(), e);
throw new CredentialStoreException(stringBuilder.toString(), e);
} finally {
DBUtil.cleanup(preparedStatement);
}
}
public void deleteCommunityUser(CommunityUser user, Connection connection) throws CredentialStoreException {
String sql = "DELETE FROM COMMUNITY_USER WHERE GATEWAY_ID=? AND COMMUNITY_USER_NAME=?";
PreparedStatement preparedStatement = null;
try {
preparedStatement = connection.prepareStatement(sql);
preparedStatement.setString(1, user.getGatewayName());
preparedStatement.setString(2, user.getUserName());
preparedStatement.executeUpdate();
connection.commit();
} catch (SQLException e) {
StringBuilder stringBuilder = new StringBuilder("Error deleting community user.");
stringBuilder.append("gateway - ").append(user.getGatewayName());
stringBuilder.append("community user name - ").append(user.getUserName());
log.error(stringBuilder.toString(), e);
throw new CredentialStoreException(stringBuilder.toString(), e);
} finally {
DBUtil.cleanup(preparedStatement);
}
}
public void deleteCommunityUserByToken(CommunityUser user, String token, Connection connection)
throws CredentialStoreException {
String sql = "DELETE FROM COMMUNITY_USER WHERE GATEWAY_ID=? AND COMMUNITY_USER_NAME=? AND TOKEN_ID=?";
PreparedStatement preparedStatement = null;
try {
preparedStatement = connection.prepareStatement(sql);
preparedStatement.setString(1, user.getGatewayName());
preparedStatement.setString(2, user.getUserName());
preparedStatement.setString(3, token);
preparedStatement.executeUpdate();
connection.commit();
} catch (SQLException e) {
StringBuilder stringBuilder = new StringBuilder("Error deleting community user.");
stringBuilder.append("gateway - ").append(user.getGatewayName());
stringBuilder.append("community user name - ").append(user.getUserName());
log.error(stringBuilder.toString(), e);
throw new CredentialStoreException(stringBuilder.toString(), e);
} finally {
DBUtil.cleanup(preparedStatement);
}
}
public void updateCommunityUser(CommunityUser user) throws CredentialStoreException {
// TODO
}
public CommunityUser getCommunityUser(String gatewayName, String communityUserName, Connection connection)
throws CredentialStoreException {
String sql = "SELECT * FROM COMMUNITY_USER WHERE GATEWAY_ID=? AND COMMUNITY_USER_NAME=?";
PreparedStatement preparedStatement = null;
try {
preparedStatement = connection.prepareStatement(sql);
preparedStatement.setString(1, gatewayName);
preparedStatement.setString(2, communityUserName);
ResultSet resultSet = preparedStatement.executeQuery();
if (resultSet.next()) {
String email = resultSet.getString("COMMUNITY_USER_EMAIL"); // TODO fix typo
return new CommunityUser(gatewayName, communityUserName, email);
}
} catch (SQLException e) {
StringBuilder stringBuilder = new StringBuilder("Error retrieving community user.");
stringBuilder.append("gateway - ").append(gatewayName);
stringBuilder.append("community user name - ").append(communityUserName);
log.error(stringBuilder.toString(), e);
throw new CredentialStoreException(stringBuilder.toString(), e);
} finally {
DBUtil.cleanup(preparedStatement);
}
return null;
}
public CommunityUser getCommunityUserByToken(String gatewayName, String tokenId, Connection connection)
throws CredentialStoreException {
String sql = "SELECT * FROM COMMUNITY_USER WHERE GATEWAY_ID=? AND TOKEN_ID=?";
PreparedStatement preparedStatement = null;
try {
preparedStatement = connection.prepareStatement(sql);
preparedStatement.setString(1, gatewayName);
preparedStatement.setString(2, tokenId);
ResultSet resultSet = preparedStatement.executeQuery();
if (resultSet.next()) {
String communityUserName = resultSet.getString("COMMUNITY_USER_NAME");
String email = resultSet.getString("COMMUNITY_USER_EMAIL"); // TODO fix typo
return new CommunityUser(gatewayName, communityUserName, email);
}
} catch (SQLException e) {
StringBuilder stringBuilder = new StringBuilder("Error retrieving community user.");
stringBuilder.append("gateway - ").append(gatewayName);
stringBuilder.append("token- ").append(tokenId);
log.error(stringBuilder.toString(), e);
throw new CredentialStoreException(stringBuilder.toString(), e);
} finally {
DBUtil.cleanup(preparedStatement);
}
return null;
}
public List<CommunityUser> getCommunityUsers(String gatewayName, Connection connection)
throws CredentialStoreException {
List<CommunityUser> userList = new ArrayList<CommunityUser>();
String sql = "SELECT * FROM COMMUNITY_USER WHERE GATEWAY_ID=?";
PreparedStatement preparedStatement = null;
try {
preparedStatement = connection.prepareStatement(sql);
preparedStatement.setString(1, gatewayName);
ResultSet resultSet = preparedStatement.executeQuery();
while (resultSet.next()) {
String userName = resultSet.getString("COMMUNITY_USER_NAME");
String email = resultSet.getString("COMMUNITY_USER_EMAIL"); // TODO fix typo
userList.add(new CommunityUser(gatewayName, userName, email));
}
} catch (SQLException e) {
StringBuilder stringBuilder = new StringBuilder("Error retrieving community users for ");
stringBuilder.append("gateway - ").append(gatewayName);
log.error(stringBuilder.toString(), e);
throw new CredentialStoreException(stringBuilder.toString(), e);
} finally {
DBUtil.cleanup(preparedStatement);
}
return userList;
}
}
| hasinitg/airavata | modules/credential-store/credential-store-service/src/main/java/org/apache/airavata/credential/store/store/impl/db/CommunityUserDAO.java | Java | apache-2.0 | 9,093 |
'use strict';
(function (scope) {
/**
* Shape erased
*
* @class ShapeErased
* @extends ShapeCandidate
* @param {Object} [obj]
* @constructor
*/
function ShapeErased(obj) {
scope.ShapeCandidate.call(this, obj);
}
/**
* Inheritance property
*/
ShapeErased.prototype = new scope.ShapeCandidate();
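    // Pre-ES6 prototypal inheritance: an instance of the parent serves as the prototype;
    // Object.create(scope.ShapeCandidate.prototype) would be the modern equivalent.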
/**
* Constructor property
*/
ShapeErased.prototype.constructor = ShapeErased;
// Export
scope.ShapeErased = ShapeErased;
})(MyScript); | countshadow/MyScriptJS | src/output/shape/shapeErased.js | JavaScript | apache-2.0 | 532 |
/*
* Copyright (c) 2009 Mozilla Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package nu.validator.htmlparser.annotation;
public @interface Inline {
}
| googlearchive/caja | third_party/java/htmlparser/src/nu/validator/htmlparser/annotation/Inline.java | Java | apache-2.0 | 1,211 |
// -------------------------------------------------------------------------
// @FileName : NFCGameServerScriptModule.cpp
// @Author : LvSheng.Huang
// @Date : 2013-01-02
// @Module : NFCGameServerScriptModule
// @Desc :
// -------------------------------------------------------------------------
//#include "stdafx.h"
#include "NFCGameServerScriptModule.h"
#include "NFGameServerScriptPlugin.h"
bool NFCGameServerScriptModule::Init()
{
m_pEventProcessModule = dynamic_cast<NFIEventProcessModule*>(pPluginManager->FindModule("NFCEventProcessModule"));
m_pKernelModule = dynamic_cast<NFIKernelModule*>(pPluginManager->FindModule("NFCKernelModule"));
m_pLogicClassModule = dynamic_cast<NFILogicClassModule*>(pPluginManager->FindModule("NFCLogicClassModule"));
assert(NULL != m_pEventProcessModule);
assert(NULL != m_pKernelModule);
assert(NULL != m_pLogicClassModule);
return true;
}
bool NFCGameServerScriptModule::AfterInit()
{
return true;
}
bool NFCGameServerScriptModule::Shut()
{
return true;
}
bool NFCGameServerScriptModule::Execute(const float fLasFrametime, const float fStartedTime)
{
return true;
}
| MRunFoss/NoahGameFrame | NFServer/NFGameServerScriptPlugin/NFCGameServerScriptModule.cpp | C++ | apache-2.0 | 1,286 |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.intellij.lang.regexp;
import com.intellij.psi.PsiElement;
import org.intellij.lang.regexp.psi.*;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.EnumSet;
/**
* @author yole
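 *
 * <p>A minimal dialect host might look like the following sketch ({@code GlobHost} is a
 * hypothetical example, not part of these sources; methods with default implementations
 * are simply inherited):</p>
 * <pre>{@code
 * class GlobHost implements RegExpLanguageHost {
 *   public boolean characterNeedsEscaping(char c) { return false; }
 *   public boolean supportsPerl5EmbeddedComments() { return false; }
 *   public boolean supportsPossessiveQuantifiers() { return false; }
 *   public boolean supportsPythonConditionalRefs() { return false; }
 *   public boolean supportsNamedGroupSyntax(RegExpGroup group) { return false; }
 *   public boolean supportsNamedGroupRefSyntax(RegExpNamedGroupRef ref) { return false; }
 *   public boolean supportsExtendedHexCharacter(RegExpChar regExpChar) { return false; }
 *   public boolean isValidCategory(String category) { return false; }
 *   public String[][] getAllKnownProperties() { return new String[0][]; }
 *   public String getPropertyDescription(String name) { return null; }
 *   public String[][] getKnownCharacterClasses() { return new String[0][]; }
 * }
 * }</pre>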
*/
public interface RegExpLanguageHost {
EnumSet<RegExpGroup.Type> EMPTY_NAMED_GROUP_TYPES = EnumSet.noneOf(RegExpGroup.Type.class);
boolean characterNeedsEscaping(char c);
boolean supportsPerl5EmbeddedComments();
boolean supportsPossessiveQuantifiers();
boolean supportsPythonConditionalRefs();
boolean supportsNamedGroupSyntax(RegExpGroup group);
boolean supportsNamedGroupRefSyntax(RegExpNamedGroupRef ref);
@NotNull
default EnumSet<RegExpGroup.Type> getSupportedNamedGroupTypes(RegExpElement context) {
return EMPTY_NAMED_GROUP_TYPES;
}
boolean supportsExtendedHexCharacter(RegExpChar regExpChar);
default boolean isValidGroupName(String name, @NotNull RegExpGroup group) {
for (int i = 0, length = name.length(); i < length; i++) {
final char c = name.charAt(i);
if (!AsciiUtil.isLetterOrDigit(c) && c != '_') {
return false;
}
}
return true;
}
default boolean supportsSimpleClass(RegExpSimpleClass simpleClass) {
return true;
}
default boolean supportsNamedCharacters(RegExpNamedCharacter namedCharacter) {
return false;
}
default boolean isValidNamedCharacter(RegExpNamedCharacter namedCharacter) {
return supportsNamedCharacters(namedCharacter);
}
default boolean supportsBoundary(RegExpBoundary boundary) {
switch (boundary.getType()) {
case UNICODE_EXTENDED_GRAPHEME:
return false;
case LINE_START:
case LINE_END:
case WORD:
case NON_WORD:
case BEGIN:
case END:
case END_NO_LINE_TERM:
case PREVIOUS_MATCH:
default:
return true;
}
}
default boolean supportsLiteralBackspace(RegExpChar aChar) {
return true;
}
default boolean supportsInlineOptionFlag(char flag, PsiElement context) {
return true;
}
boolean isValidCategory(@NotNull String category);
@NotNull
String[][] getAllKnownProperties();
@Nullable
String getPropertyDescription(@Nullable final String name);
@NotNull
String[][] getKnownCharacterClasses();
/**
* @param number the number element to extract the value from
* @return the value, or null when the value is out of range
*/
@Nullable
default Number getQuantifierValue(@NotNull RegExpNumber number) {
return Double.parseDouble(number.getText());
}
default Lookbehind supportsLookbehind(@NotNull RegExpGroup lookbehindGroup) {
return Lookbehind.FULL; // to not break existing implementations, although rarely actually supported.
}
enum Lookbehind {
/** Lookbehind not supported. */
NOT_SUPPORTED,
/**
* Alternation inside lookbehind (a|b|c) branches must have same length,
* finite repetition with identical min, max values (a{3} or a{3,3}) allowed.
*/
FIXED_LENGTH_ALTERNATION,
/** Alternation (a|bc|def) branches inside look behind may have different length */
VARIABLE_LENGTH_ALTERNATION,
/** Finite repetition inside lookbehind with different minimum, maximum values allowed */
FINITE_REPETITION,
    /** Full regex syntax inside lookbehind, i.e. star (*) and plus (+) repetition and backreferences, allowed. */
FULL
}
}
| goodwinnk/intellij-community | RegExpSupport/src/org/intellij/lang/regexp/RegExpLanguageHost.java | Java | apache-2.0 | 4,005 |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.engine.impl.jobexecutor;
import java.io.Serializable;
import java.text.SimpleDateFormat;
import java.util.Date;
import org.activiti.engine.ActivitiException;
import org.activiti.engine.ActivitiIllegalArgumentException;
import org.activiti.engine.impl.context.Context;
import org.activiti.engine.impl.el.NoExecutionVariableScope;
import org.activiti.engine.impl.persistence.entity.ExecutionEntity;
import org.activiti.engine.impl.persistence.entity.TimerJobEntity;
import org.flowable.common.engine.api.delegate.Expression;
import org.flowable.common.engine.impl.calendar.BusinessCalendar;
import org.flowable.engine.impl.jobexecutor.TimerDeclarationType;
import org.flowable.variable.api.delegate.VariableScope;
import org.joda.time.DateTime;
/**
* @author Tom Baeyens
*/
public class TimerDeclarationImpl implements Serializable {
private static final long serialVersionUID = 1L;
protected Expression description;
protected TimerDeclarationType type;
protected Expression endDateExpression;
protected Expression calendarNameExpression;
protected String jobHandlerType;
protected String jobHandlerConfiguration;
protected String repeat;
protected boolean exclusive = TimerJobEntity.DEFAULT_EXCLUSIVE;
protected int retries = TimerJobEntity.DEFAULT_RETRIES;
protected boolean isInterruptingTimer; // For boundary timers
public TimerDeclarationImpl(Expression expression, TimerDeclarationType type, String jobHandlerType, Expression endDateExpression, Expression calendarNameExpression) {
this(expression, type, jobHandlerType);
this.endDateExpression = endDateExpression;
this.calendarNameExpression = calendarNameExpression;
}
public TimerDeclarationImpl(Expression expression, TimerDeclarationType type, String jobHandlerType) {
this.jobHandlerType = jobHandlerType;
this.description = expression;
this.type = type;
}
public Expression getDescription() {
return description;
}
public String getJobHandlerType() {
return jobHandlerType;
}
public String getJobHandlerConfiguration() {
return jobHandlerConfiguration;
}
public void setJobHandlerConfiguration(String jobHandlerConfiguration) {
this.jobHandlerConfiguration = jobHandlerConfiguration;
}
public String getRepeat() {
return repeat;
}
public void setRepeat(String repeat) {
this.repeat = repeat;
}
public boolean isExclusive() {
return exclusive;
}
public void setExclusive(boolean exclusive) {
this.exclusive = exclusive;
}
public int getRetries() {
return retries;
}
public void setRetries(int retries) {
this.retries = retries;
}
public void setJobHandlerType(String jobHandlerType) {
this.jobHandlerType = jobHandlerType;
}
public boolean isInterruptingTimer() {
return isInterruptingTimer;
}
public void setInterruptingTimer(boolean isInterruptingTimer) {
this.isInterruptingTimer = isInterruptingTimer;
}
public TimerJobEntity prepareTimerEntity(ExecutionEntity executionEntity) {
// ACT-1415: timer-declaration on start-event may contain expressions NOT
// evaluating variables but other context, evaluating should happen nevertheless
VariableScope scopeForExpression = executionEntity;
if (scopeForExpression == null) {
scopeForExpression = NoExecutionVariableScope.getSharedInstance();
}
String calendarNameValue = type.calendarName;
if (this.calendarNameExpression != null) {
calendarNameValue = (String) this.calendarNameExpression.getValue(scopeForExpression);
}
BusinessCalendar businessCalendar = Context
.getProcessEngineConfiguration()
.getBusinessCalendarManager()
.getBusinessCalendar(calendarNameValue);
if (description == null) {
// Prevent NPE from happening in the next line
throw new ActivitiIllegalArgumentException("Timer '" + executionEntity.getActivityId() + "' was not configured with a valid duration/time");
}
String endDateString = null;
String dueDateString = null;
Date duedate = null;
Date endDate = null;
if (endDateExpression != null && !(scopeForExpression instanceof NoExecutionVariableScope)) {
Object endDateValue = endDateExpression.getValue(scopeForExpression);
if (endDateValue instanceof String) {
endDateString = (String) endDateValue;
} else if (endDateValue instanceof Date) {
endDate = (Date) endDateValue;
} else if (endDateValue instanceof DateTime) {
// Joda DateTime support
        endDate = ((DateTime) endDateValue).toDate();
} else {
throw new ActivitiException("Timer '" + executionEntity.getActivityId() + "' was not configured with a valid duration/time, either hand in a java.util.Date or a String in format 'yyyy-MM-dd'T'hh:mm:ss'");
}
if (endDate == null) {
endDate = businessCalendar.resolveEndDate(endDateString);
}
}
Object dueDateValue = description.getValue(scopeForExpression);
if (dueDateValue instanceof String) {
dueDateString = (String) dueDateValue;
} else if (dueDateValue instanceof Date) {
duedate = (Date) dueDateValue;
} else if (dueDateValue instanceof DateTime) {
// Joda DateTime support
duedate = ((DateTime) dueDateValue).toDate();
} else if (dueDateValue != null) {
// dueDateValue==null is OK - but unexpected class type must throw an error.
throw new ActivitiException("Timer '" + executionEntity.getActivityId() + "' was not configured with a valid duration/time, either hand in a java.util.Date or a String in format 'yyyy-MM-dd'T'hh:mm:ss'");
}
if (duedate == null && dueDateString != null) {
duedate = businessCalendar.resolveDuedate(dueDateString);
}
TimerJobEntity timer = null;
// if dueDateValue is null -> this is OK - timer will be null and job not scheduled
if (duedate != null) {
timer = new TimerJobEntity(this);
timer.setDuedate(duedate);
timer.setEndDate(endDate);
if (executionEntity != null) {
timer.setExecution(executionEntity);
timer.setProcessDefinitionId(executionEntity.getProcessDefinitionId());
timer.setProcessInstanceId(executionEntity.getProcessInstanceId());
// Inherit tenant identifier (if applicable)
if (executionEntity.getTenantId() != null) {
timer.setTenantId(executionEntity.getTenantId());
}
}
if (type == TimerDeclarationType.CYCLE) {
// See ACT-1427: A boundary timer with a cancelActivity='true', doesn't need to repeat itself
boolean repeat = !isInterruptingTimer;
// ACT-1951: intermediate catching timer events shouldn't repeat according to spec
if (TimerCatchIntermediateEventJobHandler.TYPE.equals(jobHandlerType)) {
repeat = false;
if (endDate != null) {
            long endDateMillis = endDate.getTime();
            long dueDateMillis = duedate.getTime();
            long earliestMillis = Math.min(endDateMillis, dueDateMillis);
            timer.setDuedate(new Date(earliestMillis));
}
}
if (repeat) {
String prepared = prepareRepeat(dueDateString);
timer.setRepeat(prepared);
}
}
}
return timer;
}
private String prepareRepeat(String dueDate) {
if (dueDate.startsWith("R") && dueDate.split("/").length == 2) {
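      // e.g. "R3/PT10H" becomes "R3/<current time>/PT10H", anchoring the repeat cycle's start.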
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
return dueDate.replace("/", "/" + sdf.format(Context.getProcessEngineConfiguration().getClock().getCurrentTime()) + "/");
}
return dueDate;
}
}
| dbmalkovsky/flowable-engine | modules/flowable5-engine/src/main/java/org/activiti/engine/impl/jobexecutor/TimerDeclarationImpl.java | Java | apache-2.0 | 8,975 |
/*
* Copyright (c) 2005-2011 Grameen Foundation USA
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* See also http://www.apache.org/licenses/LICENSE-2.0.html for an
* explanation of the license and how it is applied.
*/
package org.mifos.customers.checklist.business;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.mifos.customers.checklist.exceptions.CheckListException;
import org.mifos.customers.checklist.persistence.CheckListPersistence;
import org.mifos.customers.checklist.util.helpers.CheckListConstants;
import org.mifos.customers.checklist.util.helpers.CheckListType;
import org.mifos.framework.business.AbstractBusinessObject;
import org.mifos.framework.exceptions.PersistenceException;
import org.mifos.framework.util.DateTimeService;
public abstract class CheckListBO extends AbstractBusinessObject {
private final Short checklistId;
private String checklistName;
private Short checklistStatus;
private Set<CheckListDetailEntity> checklistDetails;
private Short supportedLocales;
protected CheckListBO() {
this.checklistId = null;
checklistDetails = new LinkedHashSet<CheckListDetailEntity>();
}
protected CheckListBO(String checkListName, Short checkListStatus, List<String> details, Short localeId,
Short userId) throws CheckListException {
setCreateDetails(userId, new DateTimeService().getCurrentJavaDateTime());
this.checklistId = null;
if (details.size() > 0) {
setCheckListDetails(details, localeId);
} else {
throw new CheckListException(CheckListConstants.CHECKLIST_CREATION_EXCEPTION);
}
if (checkListName != null) {
this.checklistName = checkListName;
} else {
throw new CheckListException(CheckListConstants.CHECKLIST_CREATION_EXCEPTION);
}
this.checklistStatus = checkListStatus;
this.supportedLocales = localeId;
}
public Short getChecklistId() {
return checklistId;
}
public String getChecklistName() {
return this.checklistName;
}
@SuppressWarnings("unused")
// see .hbm.xml file
private void setChecklistName(String checklistName) {
this.checklistName = checklistName;
}
public Short getChecklistStatus() {
return this.checklistStatus;
}
@SuppressWarnings("unused")
// see .hbm.xml file
private void setChecklistStatus(Short checklistStatus) {
this.checklistStatus = checklistStatus;
}
public Set<CheckListDetailEntity> getChecklistDetails() {
return this.checklistDetails;
}
@SuppressWarnings("unused")
// see .hbm.xml file
private void setChecklistDetails(Set<CheckListDetailEntity> checklistDetailSet) {
this.checklistDetails = checklistDetailSet;
}
public Short getSupportedLocales() {
return this.supportedLocales;
}
@SuppressWarnings("unused")
// see .hbm.xml file
private void setSupportedLocales(Short supportedLocales) {
this.supportedLocales = supportedLocales;
}
public void addChecklistDetail(CheckListDetailEntity checkListDetailEntity) {
checklistDetails.add(checkListDetailEntity);
}
protected CheckListPersistence getCheckListPersistence() {
return new CheckListPersistence();
}
private void setCheckListDetails(List<String> details, Short locale) {
checklistDetails = new HashSet<CheckListDetailEntity>();
for (String detail : details) {
CheckListDetailEntity checkListDetailEntity = new CheckListDetailEntity(detail, Short.valueOf("1"), this,
locale);
checklistDetails.add(checkListDetailEntity);
}
}
public abstract CheckListType getCheckListType();
protected void update(String checkListName, Short checkListStatus, List<String> details, Short localeId,
Short userId) throws CheckListException {
setUpdateDetails(userId);
if (details == null || details.size() <= 0) {
throw new CheckListException(CheckListConstants.CHECKLIST_CREATION_EXCEPTION);
}
if (StringUtils.isBlank(checkListName)) {
throw new CheckListException(CheckListConstants.CHECKLIST_CREATION_EXCEPTION);
}
this.checklistName = checkListName;
getChecklistDetails().clear();
for (String detail : details) {
            CheckListDetailEntity checkListDetailEntity = new CheckListDetailEntity(detail, Short.valueOf("1"), this, localeId);
getChecklistDetails().add(checkListDetailEntity);
}
this.checklistStatus = checkListStatus;
this.supportedLocales = localeId;
}
protected void validateCheckListState(Short masterTypeId, Short stateId, boolean isCustomer)
throws CheckListException {
try {
Long records = getCheckListPersistence().isValidCheckListState(masterTypeId, stateId, isCustomer);
if (records.intValue() != 0) {
throw new CheckListException(CheckListConstants.EXCEPTION_STATE_ALREADY_EXIST);
}
} catch (PersistenceException pe) {
throw new CheckListException(pe);
}
}
}
| madhav123/gkmaster | appdomain/src/main/java/org/mifos/customers/checklist/business/CheckListBO.java | Java | apache-2.0 | 5,902 |
"use strict";
const HTMLElementImpl = require("./HTMLElement-impl").implementation;
const Document = require("../generated/Document");
const DocumentFragment = require("../generated/DocumentFragment");
const { cloningSteps, domSymbolTree } = require("../helpers/internal-constants");
const { clone } = require("../node");
class HTMLTemplateElementImpl extends HTMLElementImpl {
constructor(globalObject, args, privateData) {
super(globalObject, args, privateData);
const doc = this._appropriateTemplateContentsOwnerDocument(this._ownerDocument);
this._templateContents = DocumentFragment.createImpl(this._globalObject, [], {
ownerDocument: doc,
host: this
});
}
// https://html.spec.whatwg.org/multipage/scripting.html#appropriate-template-contents-owner-document
_appropriateTemplateContentsOwnerDocument(doc) {
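    // Lazily create one inert "template contents owner" Document per window document and
    // cache it, so every <template> in the same document shares that single inert owner.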
if (!doc._isInertTemplateDocument) {
if (doc._associatedInertTemplateDocument === undefined) {
const newDoc = Document.createImpl(this._globalObject, [], {
options: {
parsingMode: doc._parsingMode,
encoding: doc._encoding
}
});
newDoc._isInertTemplateDocument = true;
doc._associatedInertTemplateDocument = newDoc;
}
doc = doc._associatedInertTemplateDocument;
}
return doc;
}
// https://html.spec.whatwg.org/multipage/scripting.html#template-adopting-steps
_adoptingSteps() {
const doc = this._appropriateTemplateContentsOwnerDocument(this._ownerDocument);
doc._adoptNode(this._templateContents);
}
get content() {
return this._templateContents;
}
[cloningSteps](copy, node, document, cloneChildren) {
if (!cloneChildren) {
return;
}
for (const child of domSymbolTree.childrenIterator(node._templateContents)) {
const childCopy = clone(child, copy._templateContents._ownerDocument, true);
copy._templateContents.appendChild(childCopy);
}
}
}
module.exports = {
implementation: HTMLTemplateElementImpl
};
| GoogleCloudPlatform/prometheus-engine | third_party/prometheus_ui/base/web/ui/react-app/node_modules/jsdom/lib/jsdom/living/nodes/HTMLTemplateElement-impl.js | JavaScript | apache-2.0 | 2,038 |
/*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.template.soy.types;
import com.google.common.collect.ImmutableSet;
import com.google.template.soy.base.SoyBackendKind;
/**
* Type representing an object. Object types have a unique name,
* and can have zero or more member fields.
*
* <p>Object types are always referred to by their fully-qualified name; That
* is, there's no concept of packages or scopes in this type system (those
* concepts are already factored out before the type definition reaches this
* point.)
*
* <p> Important: Do not use outside of Soy code (treat as superpackage-private).
*
*/
public interface SoyObjectType extends SoyType {
/**
* Return the fully-qualified name of this object type.
*/
String getName();
/**
* Return the fully-qualified name of this type for a given output context.
*
* @param backend Which backend we're generating code for.
*/
String getNameForBackend(SoyBackendKind backend);
/**
* Return the data type of the field with the given name; If there's no such
* field, then return {@code null}.
*
* @param fieldName The name of the field.
* @return The field type, or null.
*/
SoyType getFieldType(String fieldName);
/**
* Return all the possible field names that can be referenced from this ObjectType.
*/
ImmutableSet<String> getFieldNames();
/**
* Return the expression used to access the value of the field, for a given output context.
*
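   * <p>For example, given {@code fieldContainerExpr = "opt.foo"} and {@code fieldName = "bar"},
   * a JavaScript backend would typically return {@code "opt.foo.bar"} (illustrative only).
   *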
* @param fieldContainerExpr An expression that evaluates to the container of the named field.
* This expression may have any operator precedence that binds more tightly than unary
* operators.
* @param fieldName Name of the field.
* @param backend Which backend we're generating code for.
* @return Expression used to access the field data.
*/
String getFieldAccessExpr(String fieldContainerExpr, String fieldName, SoyBackendKind backend);
/**
* In some cases, {@link #getFieldAccessExpr accessing a field} requires importing
* symbols into the generated code (example being protobuf extension fields which
* require importing the extension type). If this field requires imports, then this
* method will return the strings representing the symbol needed to import.
* Otherwise, returns the empty set.
*
* @param fieldName The name of the field being accessed.
* @param backend Which backend we're generating code for.
* @return String Symbols in the backend's output language.
*/
ImmutableSet<String> getFieldAccessImports(String fieldName, SoyBackendKind backend);
}
| atul-bhouraskar/closure-templates | java/src/com/google/template/soy/types/SoyObjectType.java | Java | apache-2.0 | 3,172 |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package edu.harvard.iq.dataverse.api.imports;
/**
*
* @author ellenk
*/
public interface ImportUtil {
    public enum ImportType { NEW, MIGRATION, HARVEST }
}
| quarian/dataverse | src/main/java/edu/harvard/iq/dataverse/api/imports/ImportUtil.java | Java | apache-2.0 | 352 |
require 'support/shared/integration/integration_helper'
describe "Recipe DSL methods" do
include IntegrationSupport
module Namer
extend self
attr_accessor :current_index
end
before(:all) { Namer.current_index = 1 }
before { Namer.current_index += 1 }
context "with resource 'base_thingy' declared as BaseThingy" do
before(:context) {
class BaseThingy < Chef::Resource
resource_name 'base_thingy'
default_action :create
class<<self
attr_accessor :created_name
attr_accessor :created_resource
attr_accessor :created_provider
end
def provider
Provider
end
class Provider < Chef::Provider
def load_current_resource
end
def action_create
BaseThingy.created_name = new_resource.name
BaseThingy.created_resource = new_resource.class
BaseThingy.created_provider = self.class
end
end
end
# Modules to put stuff in
module RecipeDSLSpecNamespace; end
module RecipeDSLSpecNamespace::Bar; end
}
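    # BaseThingy records the name, resource class and provider class of whatever the DSL
    # dispatched, so the examples below can assert on exactly which class handled each
    # resource declaration.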
before :each do
BaseThingy.created_resource = nil
BaseThingy.created_provider = nil
end
it "creates base_thingy when you call base_thingy in a recipe" do
recipe = converge {
base_thingy 'blah' do; end
}
expect(recipe.logged_warnings).to eq ''
expect(BaseThingy.created_name).to eq 'blah'
expect(BaseThingy.created_resource).to eq BaseThingy
end
it "errors out when you call base_thingy do ... end in a recipe" do
expect_converge {
base_thingy do; end
}.to raise_error(ArgumentError, 'You must supply a name when declaring a base_thingy resource')
end
it "emits a warning when you call base_thingy 'foo', 'bar' do ... end in a recipe" do
Chef::Config[:treat_deprecation_warnings_as_errors] = false
recipe = converge {
base_thingy 'foo', 'bar' do
end
}
expect(recipe.logged_warnings).to match(/Cannot create resource base_thingy with more than one argument. All arguments except the name \("foo"\) will be ignored. This will cause an error in Chef 13. Arguments: \["foo", "bar"\]/)
expect(BaseThingy.created_name).to eq 'foo'
expect(BaseThingy.created_resource).to eq BaseThingy
end
context "Deprecated automatic resource DSL" do
before do
Chef::Config[:treat_deprecation_warnings_as_errors] = false
end
context "with a resource 'backcompat_thingy' declared in Chef::Resource and Chef::Provider" do
before(:context) {
class Chef::Resource::BackcompatThingy < Chef::Resource
default_action :create
end
class Chef::Provider::BackcompatThingy < Chef::Provider
def load_current_resource
end
def action_create
BaseThingy.created_resource = new_resource.class
BaseThingy.created_provider = self.class
end
end
}
it "backcompat_thingy creates a Chef::Resource::BackcompatThingy" do
recipe = converge {
backcompat_thingy 'blah' do; end
}
expect(BaseThingy.created_resource).to eq Chef::Resource::BackcompatThingy
expect(BaseThingy.created_provider).to eq Chef::Provider::BackcompatThingy
end
context "and another resource 'backcompat_thingy' in BackcompatThingy with 'provides'" do
before(:context) {
class RecipeDSLSpecNamespace::BackcompatThingy < BaseThingy
provides :backcompat_thingy
resource_name :backcompat_thingy
end
}
it "backcompat_thingy creates a BackcompatThingy" do
recipe = converge {
backcompat_thingy 'blah' do; end
}
expect(recipe.logged_warnings).to match(/Class Chef::Provider::BackcompatThingy does not declare 'resource_name :backcompat_thingy'./)
expect(BaseThingy.created_resource).not_to be_nil
end
end
end
context "with a resource named RecipeDSLSpecNamespace::Bar::BarThingy" do
before(:context) {
class RecipeDSLSpecNamespace::Bar::BarThingy < BaseThingy
end
}
it "bar_thingy does not work" do
expect_converge {
bar_thingy 'blah' do; end
}.to raise_error(NoMethodError)
end
end
context "with a resource named Chef::Resource::NoNameThingy with resource_name nil" do
before(:context) {
class Chef::Resource::NoNameThingy < BaseThingy
resource_name nil
end
}
it "no_name_thingy does not work" do
expect_converge {
no_name_thingy 'blah' do; end
}.to raise_error(NoMethodError)
end
end
context "with a resource named AnotherNoNameThingy with resource_name :another_thingy_name" do
before(:context) {
class AnotherNoNameThingy < BaseThingy
resource_name :another_thingy_name
end
}
it "another_no_name_thingy does not work" do
expect_converge {
another_no_name_thingy 'blah' do; end
}.to raise_error(NoMethodError)
end
it "another_thingy_name works" do
recipe = converge {
another_thingy_name 'blah' do; end
}
expect(recipe.logged_warnings).to eq ''
expect(BaseThingy.created_resource).to eq(AnotherNoNameThingy)
end
end
context "with a resource named AnotherNoNameThingy2 with resource_name :another_thingy_name2; resource_name :another_thingy_name3" do
before(:context) {
class AnotherNoNameThingy2 < BaseThingy
resource_name :another_thingy_name2
resource_name :another_thingy_name3
end
}
it "another_no_name_thingy does not work" do
expect_converge {
another_no_name_thingy2 'blah' do; end
}.to raise_error(NoMethodError)
end
it "another_thingy_name2 does not work" do
expect_converge {
another_thingy_name2 'blah' do; end
}.to raise_error(NoMethodError)
end
it "yet_another_thingy_name3 works" do
recipe = converge {
another_thingy_name3 'blah' do; end
}
expect(recipe.logged_warnings).to eq ''
expect(BaseThingy.created_resource).to eq(AnotherNoNameThingy2)
end
end
context "provides overriding resource_name" do
context "with a resource named AnotherNoNameThingy3 with provides :another_no_name_thingy3, os: 'blarghle'" do
before(:context) {
class AnotherNoNameThingy3 < BaseThingy
resource_name :another_no_name_thingy_3
provides :another_no_name_thingy3, os: 'blarghle'
end
}
it "and os = linux, another_no_name_thingy3 does not work" do
expect_converge {
# TODO this is an ugly way to test, make Cheffish expose node attrs
run_context.node.automatic[:os] = 'linux'
another_no_name_thingy3 'blah' do; end
}.to raise_error(Chef::Exceptions::NoSuchResourceType)
end
it "and os = blarghle, another_no_name_thingy3 works" do
recipe = converge {
# TODO this is an ugly way to test, make Cheffish expose node attrs
run_context.node.automatic[:os] = 'blarghle'
another_no_name_thingy3 'blah' do; end
}
expect(recipe.logged_warnings).to eq ''
expect(BaseThingy.created_resource).to eq (AnotherNoNameThingy3)
end
end
context "with a resource named AnotherNoNameThingy4 with two provides" do
before(:context) {
class AnotherNoNameThingy4 < BaseThingy
resource_name :another_no_name_thingy_4
provides :another_no_name_thingy4, os: 'blarghle'
provides :another_no_name_thingy4, platform_family: 'foo'
end
}
it "and os = linux, another_no_name_thingy4 does not work" do
expect_converge {
# TODO this is an ugly way to test, make Cheffish expose node attrs
run_context.node.automatic[:os] = 'linux'
another_no_name_thingy4 'blah' do; end
}.to raise_error(Chef::Exceptions::NoSuchResourceType)
end
it "and os = blarghle, another_no_name_thingy4 works" do
recipe = converge {
# TODO this is an ugly way to test, make Cheffish expose node attrs
run_context.node.automatic[:os] = 'blarghle'
another_no_name_thingy4 'blah' do; end
}
expect(recipe.logged_warnings).to eq ''
expect(BaseThingy.created_resource).to eq (AnotherNoNameThingy4)
end
it "and platform_family = foo, another_no_name_thingy4 works" do
recipe = converge {
# TODO this is an ugly way to test, make Cheffish expose node attrs
run_context.node.automatic[:platform_family] = 'foo'
another_no_name_thingy4 'blah' do; end
}
expect(recipe.logged_warnings).to eq ''
expect(BaseThingy.created_resource).to eq (AnotherNoNameThingy4)
end
end
context "with a resource named AnotherNoNameThingy5, a different resource_name, and a provides with the original resource_name" do
before(:context) {
class AnotherNoNameThingy5 < BaseThingy
resource_name :another_thingy_name_for_another_no_name_thingy5
provides :another_no_name_thingy5, os: 'blarghle'
end
}
it "and os = linux, another_no_name_thingy5 does not work" do
expect_converge {
# this is an ugly way to test, make Cheffish expose node attrs
run_context.node.automatic[:os] = 'linux'
another_no_name_thingy5 'blah' do; end
}.to raise_error(Chef::Exceptions::NoSuchResourceType)
end
it "and os = blarghle, another_no_name_thingy5 works" do
recipe = converge {
# this is an ugly way to test, make Cheffish expose node attrs
run_context.node.automatic[:os] = 'blarghle'
another_no_name_thingy5 'blah' do; end
}
expect(recipe.logged_warnings).to eq ''
        expect(BaseThingy.created_resource).to eq(AnotherNoNameThingy5)
end
it "the new resource name can be used in a recipe" do
recipe = converge {
another_thingy_name_for_another_no_name_thingy5 'blah' do; end
}
expect(recipe.logged_warnings).to eq ''
        expect(BaseThingy.created_resource).to eq(AnotherNoNameThingy5)
end
end
context "with a resource named AnotherNoNameThingy6, a provides with the original resource name, and a different resource_name" do
before(:context) {
class AnotherNoNameThingy6 < BaseThingy
provides :another_no_name_thingy6, os: 'blarghle'
resource_name :another_thingy_name_for_another_no_name_thingy6
end
}
it "and os = linux, another_no_name_thingy6 does not work" do
expect_converge {
# this is an ugly way to test, make Cheffish expose node attrs
run_context.node.automatic[:os] = 'linux'
another_no_name_thingy6 'blah' do; end
}.to raise_error(Chef::Exceptions::NoSuchResourceType)
end
it "and os = blarghle, another_no_name_thingy6 works" do
recipe = converge {
# this is an ugly way to test, make Cheffish expose node attrs
run_context.node.automatic[:os] = 'blarghle'
another_no_name_thingy6 'blah' do; end
}
expect(recipe.logged_warnings).to eq ''
        expect(BaseThingy.created_resource).to eq(AnotherNoNameThingy6)
end
it "the new resource name can be used in a recipe" do
recipe = converge {
another_thingy_name_for_another_no_name_thingy6 'blah' do; end
}
expect(recipe.logged_warnings).to eq ''
        expect(BaseThingy.created_resource).to eq(AnotherNoNameThingy6)
end
end
context "with a resource named AnotherNoNameThingy7, a new resource_name, and provides with that new resource name" do
before(:context) {
class AnotherNoNameThingy7 < BaseThingy
resource_name :another_thingy_name_for_another_no_name_thingy7
provides :another_thingy_name_for_another_no_name_thingy7, os: 'blarghle'
end
}
it "and os = linux, another_thingy_name_for_another_no_name_thingy7 does not work" do
expect_converge {
# this is an ugly way to test, make Cheffish expose node attrs
run_context.node.automatic[:os] = 'linux'
another_thingy_name_for_another_no_name_thingy7 'blah' do; end
}.to raise_error(Chef::Exceptions::NoSuchResourceType)
end
it "and os = blarghle, another_thingy_name_for_another_no_name_thingy7 works" do
recipe = converge {
# this is an ugly way to test, make Cheffish expose node attrs
run_context.node.automatic[:os] = 'blarghle'
another_thingy_name_for_another_no_name_thingy7 'blah' do; end
}
expect(recipe.logged_warnings).to eq ''
        expect(BaseThingy.created_resource).to eq(AnotherNoNameThingy7)
end
it "the old resource name does not work" do
expect_converge {
# this is an ugly way to test, make Cheffish expose node attrs
run_context.node.automatic[:os] = 'linux'
        another_no_name_thingy7 'blah' do; end
}.to raise_error(NoMethodError)
end
end
# opposite order from the previous test (provides, then resource_name)
context "with a resource named AnotherNoNameThingy8, a provides with a new resource name, and resource_name with that new resource name" do
before(:context) {
class AnotherNoNameThingy8 < BaseThingy
provides :another_thingy_name_for_another_no_name_thingy8, os: 'blarghle'
resource_name :another_thingy_name_for_another_no_name_thingy8
end
}
it "and os = linux, another_thingy_name_for_another_no_name_thingy8 does not work" do
expect_converge {
# this is an ugly way to test, make Cheffish expose node attrs
run_context.node.automatic[:os] = 'linux'
another_thingy_name_for_another_no_name_thingy8 'blah' do; end
}.to raise_error(Chef::Exceptions::NoSuchResourceType)
end
it "and os = blarghle, another_thingy_name_for_another_no_name_thingy8 works" do
recipe = converge {
# this is an ugly way to test, make Cheffish expose node attrs
run_context.node.automatic[:os] = 'blarghle'
another_thingy_name_for_another_no_name_thingy8 'blah' do; end
}
expect(recipe.logged_warnings).to eq ''
        expect(BaseThingy.created_resource).to eq(AnotherNoNameThingy8)
end
it "the old resource name does not work" do
expect_converge {
# this is an ugly way to test, make Cheffish expose node attrs
run_context.node.automatic[:os] = 'linux'
        another_no_name_thingy8 'blah' do; end
}.to raise_error(NoMethodError)
end
end
end
end
context "provides" do
context "when MySupplier provides :hemlock" do
before(:context) {
class RecipeDSLSpecNamespace::MySupplier < BaseThingy
resource_name :hemlock
end
}
it "my_supplier does not work in a recipe" do
expect_converge {
my_supplier 'blah' do; end
}.to raise_error(NoMethodError)
end
it "hemlock works in a recipe" do
expect_recipe {
hemlock 'blah' do; end
}.to emit_no_warnings_or_errors
expect(BaseThingy.created_resource).to eq RecipeDSLSpecNamespace::MySupplier
end
end
context "when Thingy3 has resource_name :thingy3" do
before(:context) {
class RecipeDSLSpecNamespace::Thingy3 < BaseThingy
resource_name :thingy3
end
}
it "thingy3 works in a recipe" do
expect_recipe {
thingy3 'blah' do; end
}.to emit_no_warnings_or_errors
expect(BaseThingy.created_resource).to eq RecipeDSLSpecNamespace::Thingy3
end
context "and Thingy4 has resource_name :thingy3" do
before(:context) {
class RecipeDSLSpecNamespace::Thingy4 < BaseThingy
resource_name :thingy3
end
}
it "thingy3 works in a recipe and yields Thingy3 (the alphabetical one)" do
recipe = converge {
thingy3 'blah' do; end
}
expect(BaseThingy.created_resource).to eq RecipeDSLSpecNamespace::Thingy3
end
it "thingy4 does not work in a recipe" do
expect_converge {
thingy4 'blah' do; end
}.to raise_error(NoMethodError)
end
it "resource_matching_short_name returns Thingy4" do
expect(Chef::Resource.resource_matching_short_name(:thingy3)).to eq RecipeDSLSpecNamespace::Thingy3
end
end
end
context "when Thingy5 has resource_name :thingy5 and provides :thingy5reverse, :thingy5_2 and :thingy5_2reverse" do
before(:context) {
class RecipeDSLSpecNamespace::Thingy5 < BaseThingy
resource_name :thingy5
provides :thingy5reverse
provides :thingy5_2
provides :thingy5_2reverse
end
}
it "thingy5 works in a recipe" do
expect_recipe {
thingy5 'blah' do; end
}.to emit_no_warnings_or_errors
expect(BaseThingy.created_resource).to eq RecipeDSLSpecNamespace::Thingy5
end
context "and Thingy6 provides :thingy5" do
before(:context) {
class RecipeDSLSpecNamespace::Thingy6 < BaseThingy
resource_name :thingy6
provides :thingy5
end
}
it "thingy6 works in a recipe and yields Thingy6" do
recipe = converge {
thingy6 'blah' do; end
}
expect(BaseThingy.created_resource).to eq RecipeDSLSpecNamespace::Thingy6
end
it "thingy5 works in a recipe and yields Foo::Thingy5 (the alphabetical one)" do
recipe = converge {
thingy5 'blah' do; end
}
expect(BaseThingy.created_resource).to eq RecipeDSLSpecNamespace::Thingy5
end
it "resource_matching_short_name returns Thingy5" do
expect(Chef::Resource.resource_matching_short_name(:thingy5)).to eq RecipeDSLSpecNamespace::Thingy5
end
context "and AThingy5 provides :thingy5reverse" do
before(:context) {
class RecipeDSLSpecNamespace::AThingy5 < BaseThingy
resource_name :thingy5reverse
end
}
it "thingy5reverse works in a recipe and yields AThingy5 (the alphabetical one)" do
recipe = converge {
thingy5reverse 'blah' do; end
}
expect(BaseThingy.created_resource).to eq RecipeDSLSpecNamespace::AThingy5
end
end
context "and ZRecipeDSLSpecNamespace::Thingy5 provides :thingy5_2" do
before(:context) {
module ZRecipeDSLSpecNamespace
class Thingy5 < BaseThingy
resource_name :thingy5_2
end
end
}
it "thingy5_2 works in a recipe and yields the RecipeDSLSpaceNamespace one (the alphabetical one)" do
recipe = converge {
thingy5_2 'blah' do; end
}
expect(BaseThingy.created_resource).to eq RecipeDSLSpecNamespace::Thingy5
end
end
context "and ARecipeDSLSpecNamespace::Thingy5 provides :thingy5_2" do
before(:context) {
module ARecipeDSLSpecNamespace
class Thingy5 < BaseThingy
resource_name :thingy5_2reverse
end
end
}
it "thingy5_2reverse works in a recipe and yields the ARecipeDSLSpaceNamespace one (the alphabetical one)" do
recipe = converge {
thingy5_2reverse 'blah' do; end
}
expect(BaseThingy.created_resource).to eq ARecipeDSLSpecNamespace::Thingy5
end
end
end
context "when Thingy3 has resource_name :thingy3" do
before(:context) {
class RecipeDSLSpecNamespace::Thingy3 < BaseThingy
resource_name :thingy3
end
}
it "thingy3 works in a recipe" do
expect_recipe {
thingy3 'blah' do; end
}.to emit_no_warnings_or_errors
expect(BaseThingy.created_resource).to eq RecipeDSLSpecNamespace::Thingy3
end
context "and Thingy4 has resource_name :thingy3" do
before(:context) {
class RecipeDSLSpecNamespace::Thingy4 < BaseThingy
resource_name :thingy3
end
}
it "thingy3 works in a recipe and yields Thingy3 (the alphabetical one)" do
recipe = converge {
thingy3 'blah' do; end
}
expect(BaseThingy.created_resource).to eq RecipeDSLSpecNamespace::Thingy3
end
it "thingy4 does not work in a recipe" do
expect_converge {
thingy4 'blah' do; end
}.to raise_error(NoMethodError)
end
it "resource_matching_short_name returns Thingy4" do
expect(Chef::Resource.resource_matching_short_name(:thingy3)).to eq RecipeDSLSpecNamespace::Thingy3
end
end
context "and Thingy4 has resource_name :thingy3" do
before(:context) {
class RecipeDSLSpecNamespace::Thingy4 < BaseThingy
resource_name :thingy3
end
}
it "thingy3 works in a recipe and yields Thingy3 (the alphabetical one)" do
recipe = converge {
thingy3 'blah' do; end
}
expect(BaseThingy.created_resource).to eq RecipeDSLSpecNamespace::Thingy3
end
it "thingy4 does not work in a recipe" do
expect_converge {
thingy4 'blah' do; end
}.to raise_error(NoMethodError)
end
it "resource_matching_short_name returns Thingy4" do
expect(Chef::Resource.resource_matching_short_name(:thingy3)).to eq RecipeDSLSpecNamespace::Thingy3
end
end
end
end
context "when Thingy7 provides :thingy8" do
before(:context) {
class RecipeDSLSpecNamespace::Thingy7 < BaseThingy
resource_name :thingy7
provides :thingy8
end
}
context "and Thingy8 has resource_name :thingy8" do
before(:context) {
class RecipeDSLSpecNamespace::Thingy8 < BaseThingy
resource_name :thingy8
end
}
it "thingy7 works in a recipe and yields Thingy7" do
recipe = converge {
thingy7 'blah' do; end
}
expect(BaseThingy.created_resource).to eq RecipeDSLSpecNamespace::Thingy7
end
it "thingy8 works in a recipe and yields Thingy7 (alphabetical)" do
recipe = converge {
thingy8 'blah' do; end
}
expect(BaseThingy.created_resource).to eq RecipeDSLSpecNamespace::Thingy7
end
it "resource_matching_short_name returns Thingy8" do
expect(Chef::Resource.resource_matching_short_name(:thingy8)).to eq RecipeDSLSpecNamespace::Thingy8
end
end
end
context "when Thingy12 provides :thingy12, :twizzle and :twizzle2" do
before(:context) {
class RecipeDSLSpecNamespace::Thingy12 < BaseThingy
resource_name :thingy12
provides :twizzle
provides :twizzle2
end
}
it "thingy12 works in a recipe and yields Thingy12" do
expect_recipe {
thingy12 'blah' do; end
}.to emit_no_warnings_or_errors
expect(BaseThingy.created_resource).to eq RecipeDSLSpecNamespace::Thingy12
end
it "twizzle works in a recipe and yields Thingy12" do
expect_recipe {
twizzle 'blah' do; end
}.to emit_no_warnings_or_errors
expect(BaseThingy.created_resource).to eq RecipeDSLSpecNamespace::Thingy12
end
it "twizzle2 works in a recipe and yields Thingy12" do
expect_recipe {
twizzle2 'blah' do; end
}.to emit_no_warnings_or_errors
expect(BaseThingy.created_resource).to eq RecipeDSLSpecNamespace::Thingy12
end
end
context "with platform-specific resources 'my_super_thingy_foo' and 'my_super_thingy_bar'" do
before(:context) {
class MySuperThingyFoo < BaseThingy
resource_name :my_super_thingy_foo
provides :my_super_thingy, platform: 'foo'
end
class MySuperThingyBar < BaseThingy
resource_name :my_super_thingy_bar
provides :my_super_thingy, platform: 'bar'
end
}
it "A run with platform 'foo' uses MySuperThingyFoo" do
r = Cheffish::ChefRun.new(chef_config)
r.client.run_context.node.automatic['platform'] = 'foo'
r.compile_recipe {
my_super_thingy 'blah' do; end
}
r.converge
expect(r).to emit_no_warnings_or_errors
expect(BaseThingy.created_resource).to eq MySuperThingyFoo
end
it "A run with platform 'bar' uses MySuperThingyBar" do
r = Cheffish::ChefRun.new(chef_config)
r.client.run_context.node.automatic['platform'] = 'bar'
r.compile_recipe {
my_super_thingy 'blah' do; end
}
r.converge
expect(r).to emit_no_warnings_or_errors
expect(BaseThingy.created_resource).to eq MySuperThingyBar
end
it "A run with platform 'x' reports that my_super_thingy is not supported" do
r = Cheffish::ChefRun.new(chef_config)
r.client.run_context.node.automatic['platform'] = 'x'
expect {
r.compile_recipe {
my_super_thingy 'blah' do; end
}
}.to raise_error(Chef::Exceptions::NoSuchResourceType)
end
end
context "when Thingy9 provides :thingy9" do
before(:context) {
class RecipeDSLSpecNamespace::Thingy9 < BaseThingy
resource_name :thingy9
end
}
it "declaring a resource providing the same :thingy9 produces a warning" do
expect(Chef::Log).to receive(:warn).with("You declared a new resource RecipeDSLSpecNamespace::Thingy9AlternateProvider for resource thingy9, but it comes alphabetically after RecipeDSLSpecNamespace::Thingy9 and has the same filters ({}), so it will not be used. Use override: true if you want to use it for thingy9.")
class RecipeDSLSpecNamespace::Thingy9AlternateProvider < BaseThingy
resource_name :thingy9
end
end
end
context "when Thingy10 provides :thingy10" do
before(:context) {
class RecipeDSLSpecNamespace::Thingy10 < BaseThingy
resource_name :thingy10
end
}
it "declaring a resource providing the same :thingy10 with override: true does not produce a warning" do
expect(Chef::Log).not_to receive(:warn)
class RecipeDSLSpecNamespace::Thingy10AlternateProvider < BaseThingy
provides :thingy10, override: true
end
end
end
context "when Thingy11 provides :thingy11" do
before(:context) {
class RecipeDSLSpecNamespace::Thingy11 < BaseThingy
          resource_name :thingy11
end
}
it "declaring a resource providing the same :thingy11 with os: 'linux' does not produce a warning" do
expect(Chef::Log).not_to receive(:warn)
class RecipeDSLSpecNamespace::Thingy11AlternateProvider < BaseThingy
provides :thingy11, os: 'linux'
end
end
end
end
context "with a resource named 'B' with resource name :two_classes_one_dsl" do
let(:two_classes_one_dsl) { :"two_classes_one_dsl#{Namer.current_index}" }
let(:resource_class) {
result = Class.new(BaseThingy) do
def self.name
"B"
end
def self.to_s; name; end
def self.inspect; name.inspect; end
end
result.resource_name two_classes_one_dsl
result
}
before { resource_class } # pull on it so it gets defined before the recipe runs
context "and another resource named 'A' with resource_name :two_classes_one_dsl" do
let(:resource_class_a) {
result = Class.new(BaseThingy) do
def self.name
"A"
end
def self.to_s; name; end
def self.inspect; name.inspect; end
end
result.resource_name two_classes_one_dsl
result
}
before { resource_class_a } # pull on it so it gets defined before the recipe runs
it "two_classes_one_dsl resolves to A (alphabetically earliest)" do
two_classes_one_dsl = self.two_classes_one_dsl
recipe = converge {
instance_eval("#{two_classes_one_dsl} 'blah'")
}
expect(recipe.logged_warnings).to eq ''
expect(BaseThingy.created_resource).to eq resource_class_a
end
it "resource_matching_short_name returns B" do
expect(Chef::Resource.resource_matching_short_name(two_classes_one_dsl)).to eq resource_class_a
end
end
context "and another resource named 'Z' with resource_name :two_classes_one_dsl" do
let(:resource_class_z) {
result = Class.new(BaseThingy) do
def self.name
"Z"
end
def self.to_s; name; end
def self.inspect; name.inspect; end
end
result.resource_name two_classes_one_dsl
result
}
before { resource_class_z } # pull on it so it gets defined before the recipe runs
it "two_classes_one_dsl resolves to B (alphabetically earliest)" do
two_classes_one_dsl = self.two_classes_one_dsl
recipe = converge {
instance_eval("#{two_classes_one_dsl} 'blah'")
}
expect(recipe.logged_warnings).to eq ''
expect(BaseThingy.created_resource).to eq resource_class
end
it "resource_matching_short_name returns B" do
expect(Chef::Resource.resource_matching_short_name(two_classes_one_dsl)).to eq resource_class
end
context "and a priority array [ Z, B ]" do
before do
Chef.set_resource_priority_array(two_classes_one_dsl, [ resource_class_z, resource_class ])
end
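          # The priority array is consulted in order; entries that decline via
          # provides(...) { false } are skipped, as the nested contexts below show.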
it "two_classes_one_dsl resolves to Z (respects the priority array)" do
two_classes_one_dsl = self.two_classes_one_dsl
recipe = converge {
instance_eval("#{two_classes_one_dsl} 'blah'")
}
expect(recipe.logged_warnings).to eq ''
expect(BaseThingy.created_resource).to eq resource_class_z
end
it "resource_matching_short_name returns B" do
expect(Chef::Resource.resource_matching_short_name(two_classes_one_dsl)).to eq resource_class
end
context "when Z provides(:two_classes_one_dsl) { false }" do
before do
resource_class_z.provides(two_classes_one_dsl) { false }
end
it "two_classes_one_dsl resolves to B (picks the next thing in the priority array)" do
two_classes_one_dsl = self.two_classes_one_dsl
recipe = converge {
instance_eval("#{two_classes_one_dsl} 'blah'")
}
expect(recipe.logged_warnings).to eq ''
expect(BaseThingy.created_resource).to eq resource_class
end
it "resource_matching_short_name returns B" do
expect(Chef::Resource.resource_matching_short_name(two_classes_one_dsl)).to eq resource_class
end
end
end
context "and priority arrays [ B ] and [ Z ]" do
before do
Chef.set_resource_priority_array(two_classes_one_dsl, [ resource_class ])
Chef.set_resource_priority_array(two_classes_one_dsl, [ resource_class_z ])
end
it "two_classes_one_dsl resolves to Z (respects the most recent priority array)" do
two_classes_one_dsl = self.two_classes_one_dsl
recipe = converge {
instance_eval("#{two_classes_one_dsl} 'blah'")
}
expect(recipe.logged_warnings).to eq ''
expect(BaseThingy.created_resource).to eq resource_class_z
end
it "resource_matching_short_name returns B" do
expect(Chef::Resource.resource_matching_short_name(two_classes_one_dsl)).to eq resource_class
end
context "when Z provides(:two_classes_one_dsl) { false }" do
before do
resource_class_z.provides(two_classes_one_dsl) { false }
end
it "two_classes_one_dsl resolves to B (picks the first match from the other priority array)" do
two_classes_one_dsl = self.two_classes_one_dsl
recipe = converge {
instance_eval("#{two_classes_one_dsl} 'blah'")
}
expect(recipe.logged_warnings).to eq ''
expect(BaseThingy.created_resource).to eq resource_class
end
it "resource_matching_short_name returns B" do
expect(Chef::Resource.resource_matching_short_name(two_classes_one_dsl)).to eq resource_class
end
end
end
context "and a priority array [ Z ]" do
before do
Chef.set_resource_priority_array(two_classes_one_dsl, [ resource_class_z ])
end
context "when Z provides(:two_classes_one_dsl) { false }" do
before do
resource_class_z.provides(two_classes_one_dsl) { false }
end
it "two_classes_one_dsl resolves to B (picks the first match outside the priority array)" do
two_classes_one_dsl = self.two_classes_one_dsl
recipe = converge {
instance_eval("#{two_classes_one_dsl} 'blah'")
}
expect(recipe.logged_warnings).to eq ''
expect(BaseThingy.created_resource).to eq resource_class
end
it "resource_matching_short_name returns B" do
expect(Chef::Resource.resource_matching_short_name(two_classes_one_dsl)).to eq resource_class
end
end
end
end
context "and a provider named 'B' which provides :two_classes_one_dsl" do
before do
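          # Make #provider return nil so the resource does not name its own
          # provider class and Chef must resolve one dynamically.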
resource_class.send(:define_method, :provider) { nil }
end
let(:provider_class) {
result = Class.new(BaseThingy::Provider) do
def self.name
"B"
end
def self.to_s; name; end
def self.inspect; name.inspect; end
end
result.provides two_classes_one_dsl
result
}
before { provider_class } # pull on it so it gets defined before the recipe runs
context "and another provider named 'A'" do
let(:provider_class_a) {
result = Class.new(BaseThingy::Provider) do
def self.name
"A"
end
def self.to_s; name; end
def self.inspect; name.inspect; end
end
result
}
context "which provides :two_classes_one_dsl" do
before { provider_class_a.provides two_classes_one_dsl }
it "two_classes_one_dsl resolves to A (alphabetically earliest)" do
two_classes_one_dsl = self.two_classes_one_dsl
recipe = converge {
instance_eval("#{two_classes_one_dsl} 'blah'")
}
expect(recipe.logged_warnings).to eq ''
expect(BaseThingy.created_provider).to eq provider_class_a
end
end
context "which provides(:two_classes_one_dsl) { false }" do
before { provider_class_a.provides(two_classes_one_dsl) { false } }
it "two_classes_one_dsl resolves to B (since A declined)" do
two_classes_one_dsl = self.two_classes_one_dsl
recipe = converge {
instance_eval("#{two_classes_one_dsl} 'blah'")
}
expect(recipe.logged_warnings).to eq ''
expect(BaseThingy.created_provider).to eq provider_class
end
end
end
context "and another provider named 'Z'" do
let(:provider_class_z) {
result = Class.new(BaseThingy::Provider) do
def self.name
"Z"
end
def self.to_s; name; end
def self.inspect; name.inspect; end
end
result
}
before { provider_class_z } # pull on it so it gets defined before the recipe runs
context "which provides :two_classes_one_dsl" do
before { provider_class_z.provides two_classes_one_dsl }
it "two_classes_one_dsl resolves to B (alphabetically earliest)" do
two_classes_one_dsl = self.two_classes_one_dsl
recipe = converge {
instance_eval("#{two_classes_one_dsl} 'blah'")
}
expect(recipe.logged_warnings).to eq ''
expect(BaseThingy.created_provider).to eq provider_class
end
context "with a priority array [ Z, B ]" do
before { Chef.set_provider_priority_array two_classes_one_dsl, [ provider_class_z, provider_class ] }
it "two_classes_one_dsl resolves to Z (respects the priority map)" do
two_classes_one_dsl = self.two_classes_one_dsl
recipe = converge {
instance_eval("#{two_classes_one_dsl} 'blah'")
}
expect(recipe.logged_warnings).to eq ''
expect(BaseThingy.created_provider).to eq provider_class_z
end
end
end
context "which provides(:two_classes_one_dsl) { false }" do
before { provider_class_z.provides(two_classes_one_dsl) { false } }
context "with a priority array [ Z, B ]" do
before { Chef.set_provider_priority_array two_classes_one_dsl, [ provider_class_z, provider_class ] }
it "two_classes_one_dsl resolves to B (the next one in the priority map)" do
two_classes_one_dsl = self.two_classes_one_dsl
recipe = converge {
instance_eval("#{two_classes_one_dsl} 'blah'")
}
expect(recipe.logged_warnings).to eq ''
expect(BaseThingy.created_provider).to eq provider_class
end
end
context "with priority arrays [ B ] and [ Z ]" do
before { Chef.set_provider_priority_array two_classes_one_dsl, [ provider_class_z ] }
before { Chef.set_provider_priority_array two_classes_one_dsl, [ provider_class ] }
it "two_classes_one_dsl resolves to B (the one in the next priority map)" do
two_classes_one_dsl = self.two_classes_one_dsl
recipe = converge {
instance_eval("#{two_classes_one_dsl} 'blah'")
}
expect(recipe.logged_warnings).to eq ''
expect(BaseThingy.created_provider).to eq provider_class
end
end
end
end
end
context "and another resource Blarghle with provides :two_classes_one_dsl, os: 'blarghle'" do
let(:resource_class_blarghle) {
result = Class.new(BaseThingy) do
def self.name
"Blarghle"
end
def self.to_s; name; end
def self.inspect; name.inspect; end
end
result.resource_name two_classes_one_dsl
result.provides two_classes_one_dsl, os: 'blarghle'
result
}
before { resource_class_blarghle } # pull on it so it gets defined before the recipe runs
it "on os = blarghle, two_classes_one_dsl resolves to Blarghle" do
two_classes_one_dsl = self.two_classes_one_dsl
recipe = converge {
# this is an ugly way to test, make Cheffish expose node attrs
run_context.node.automatic[:os] = 'blarghle'
instance_eval("#{two_classes_one_dsl} 'blah' do; end")
}
expect(recipe.logged_warnings).to eq ''
expect(BaseThingy.created_resource).to eq resource_class_blarghle
end
it "on os = linux, two_classes_one_dsl resolves to B" do
two_classes_one_dsl = self.two_classes_one_dsl
recipe = converge {
# this is an ugly way to test, make Cheffish expose node attrs
run_context.node.automatic[:os] = 'linux'
instance_eval("#{two_classes_one_dsl} 'blah' do; end")
}
expect(recipe.logged_warnings).to eq ''
expect(BaseThingy.created_resource).to eq resource_class
end
end
end
context "with a resource MyResource" do
let(:resource_class) { Class.new(BaseThingy) do
def self.called_provides
@called_provides
end
def to_s
"MyResource"
end
end }
let(:my_resource) { :"my_resource#{Namer.current_index}" }
let(:blarghle_blarghle_little_star) { :"blarghle_blarghle_little_star#{Namer.current_index}" }
context "with resource_name :my_resource" do
before {
resource_class.resource_name my_resource
}
context "with provides? returning true to my_resource" do
before {
my_resource = self.my_resource
resource_class.define_singleton_method(:provides?) do |node, resource_name|
@called_provides = true
resource_name == my_resource
end
}
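        # provides? is consulted directly during DSL resolution; answering true
        # for a name never declared with provides triggers the warning
        # exercised in the sibling context below.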
it "my_resource returns the resource and calls provides?, but does not emit a warning" do
dsl_name = self.my_resource
recipe = converge {
instance_eval("#{dsl_name} 'foo'")
}
expect(recipe.logged_warnings).to eq ''
expect(BaseThingy.created_resource).to eq resource_class
expect(resource_class.called_provides).to be_truthy
end
end
context "with provides? returning true to blarghle_blarghle_little_star and not resource_name" do
before do
blarghle_blarghle_little_star = self.blarghle_blarghle_little_star
resource_class.define_singleton_method(:provides?) do |node, resource_name|
@called_provides = true
resource_name == blarghle_blarghle_little_star
end
end
it "my_resource does not return the resource" do
dsl_name = self.my_resource
expect_converge {
instance_eval("#{dsl_name} 'foo'")
}.to raise_error(Chef::Exceptions::NoSuchResourceType)
expect(resource_class.called_provides).to be_truthy
end
it "blarghle_blarghle_little_star 'foo' returns the resource and emits a warning" do
Chef::Config[:treat_deprecation_warnings_as_errors] = false
dsl_name = self.blarghle_blarghle_little_star
recipe = converge {
instance_eval("#{dsl_name} 'foo'")
}
expect(recipe.logged_warnings).to include "WARN: #{resource_class}.provides? returned true when asked if it provides DSL #{dsl_name}, but provides :#{dsl_name} was never called!"
expect(BaseThingy.created_resource).to eq resource_class
expect(resource_class.called_provides).to be_truthy
end
end
context "and a provider" do
let(:provider_class) do
Class.new(BaseThingy::Provider) do
def self.name
"MyProvider"
end
def self.to_s; name; end
def self.inspect; name.inspect; end
def self.called_provides
@called_provides
end
end
end
before do
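          # As above, clear the resource's own provider so dynamic provider
          # resolution is exercised.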
resource_class.send(:define_method, :provider) { nil }
end
context "that provides :my_resource" do
before do
provider_class.provides my_resource
end
context "with supports? returning true" do
before do
            provider_class.define_singleton_method(:supports?) { |resource, action| true }
end
it "my_resource runs the provider and does not emit a warning" do
my_resource = self.my_resource
recipe = converge {
instance_eval("#{my_resource} 'foo'")
}
expect(recipe.logged_warnings).to eq ''
expect(BaseThingy.created_provider).to eq provider_class
end
context "and another provider supporting :my_resource with supports? false" do
            let(:provider_class2) do
              my_resource = self.my_resource
              Class.new(BaseThingy::Provider) do
def self.name
"MyProvider2"
end
def self.to_s; name; end
def self.inspect; name.inspect; end
def self.called_provides
@called_provides
end
provides my_resource
def self.supports?(resource, action)
false
end
end
            end
            before { provider_class2 } # pull on it so the declining provider is defined before the recipe runs
it "my_resource runs the first provider" do
my_resource = self.my_resource
recipe = converge {
instance_eval("#{my_resource} 'foo'")
}
expect(recipe.logged_warnings).to eq ''
expect(BaseThingy.created_provider).to eq provider_class
end
end
end
context "with supports? returning false" do
before do
            provider_class.define_singleton_method(:supports?) { |resource, action| false }
end
# TODO no warning? ick
it "my_resource runs the provider anyway" do
my_resource = self.my_resource
recipe = converge {
instance_eval("#{my_resource} 'foo'")
}
expect(recipe.logged_warnings).to eq ''
expect(BaseThingy.created_provider).to eq provider_class
end
context "and another provider supporting :my_resource with supports? true" do
let(:provider_class2) do
my_resource = self.my_resource
Class.new(BaseThingy::Provider) do
def self.name
"MyProvider2"
end
def self.to_s; name; end
def self.inspect; name.inspect; end
def self.called_provides
@called_provides
end
provides my_resource
def self.supports?(resource, action)
true
end
end
end
before { provider_class2 } # make sure the provider class shows up
it "my_resource runs the other provider" do
my_resource = self.my_resource
recipe = converge {
instance_eval("#{my_resource} 'foo'")
}
expect(recipe.logged_warnings).to eq ''
expect(BaseThingy.created_provider).to eq provider_class2
end
end
end
end
context "with provides? returning true" do
before {
my_resource = self.my_resource
provider_class.define_singleton_method(:provides?) do |node, resource|
@called_provides = true
resource.declared_type == my_resource
end
}
context "that provides :my_resource" do
before {
provider_class.provides my_resource
}
it "my_resource calls the provider (and calls provides?), but does not emit a warning" do
my_resource = self.my_resource
recipe = converge {
instance_eval("#{my_resource} 'foo'")
}
expect(recipe.logged_warnings).to eq ''
expect(BaseThingy.created_provider).to eq provider_class
expect(provider_class.called_provides).to be_truthy
end
end
context "that does not call provides :my_resource" do
it "my_resource calls the provider (and calls provides?), and emits a warning" do
Chef::Config[:treat_deprecation_warnings_as_errors] = false
my_resource = self.my_resource
recipe = converge {
instance_eval("#{my_resource} 'foo'")
}
expect(recipe.logged_warnings).to include("WARN: #{provider_class}.provides? returned true when asked if it provides DSL #{my_resource}, but provides :#{my_resource} was never called!")
expect(BaseThingy.created_provider).to eq provider_class
expect(provider_class.called_provides).to be_truthy
end
end
end
context "with provides? returning false to my_resource" do
before {
my_resource = self.my_resource
provider_class.define_singleton_method(:provides?) do |node, resource|
@called_provides = true
false
end
}
context "that provides :my_resource" do
before {
provider_class.provides my_resource
}
it "my_resource fails to find a provider (and calls provides)" do
my_resource = self.my_resource
expect_converge {
instance_eval("#{my_resource} 'foo'")
}.to raise_error(Chef::Exceptions::ProviderNotFound)
expect(provider_class.called_provides).to be_truthy
end
end
context "that does not provide :my_resource" do
it "my_resource fails to find a provider (and calls provides)" do
my_resource = self.my_resource
expect_converge {
instance_eval("#{my_resource} 'foo'")
}.to raise_error(Chef::Exceptions::ProviderNotFound)
expect(provider_class.called_provides).to be_truthy
end
end
end
end
end
end
end
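  # The dynamically named classes above embed Namer.current_index; these hooks
  # reset it once and bump it per example so DSL names stay unique.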
before(:all) { Namer.current_index = 0 }
before { Namer.current_index += 1 }
context "with an LWRP that declares actions" do
let(:resource_class) {
Class.new(Chef::Resource::LWRPBase) do
provides :"recipe_dsl_spec#{Namer.current_index}"
actions :create
end
}
let(:resource) {
resource_class.new("blah", run_context)
}
it "The actions are part of actions along with :nothing" do
expect(resource_class.actions).to eq [ :nothing, :create ]
end
it "The actions are part of allowed_actions along with :nothing" do
expect(resource.allowed_actions).to eq [ :nothing, :create ]
end
context "and a subclass that declares more actions" do
let(:subresource_class) {
Class.new(Chef::Resource::LWRPBase) do
provides :"recipe_dsl_spec_sub#{Namer.current_index}"
actions :delete
end
}
let(:subresource) {
subresource_class.new("subblah", run_context)
}
it "The parent class actions are not part of actions" do
expect(subresource_class.actions).to eq [ :nothing, :delete ]
end
it "The parent class actions are not part of allowed_actions" do
expect(subresource.allowed_actions).to eq [ :nothing, :delete ]
end
it "The parent class actions do not change" do
expect(resource_class.actions).to eq [ :nothing, :create ]
expect(resource.allowed_actions).to eq [ :nothing, :create ]
end
end
end
context "with a dynamically defined resource and regular provider" do
before(:context) do
Class.new(Chef::Resource) do
resource_name :lw_resource_with_hw_provider_test_case
default_action :create
attr_accessor :created_provider
end
class Chef::Provider::LwResourceWithHwProviderTestCase < Chef::Provider
def load_current_resource
end
def action_create
new_resource.created_provider = self.class
end
end
end
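    # e.g. the DSL name :lw_resource_with_hw_provider_test_case resolves to
    # Chef::Provider::LwResourceWithHwProviderTestCase.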
it "looks up the provider in Chef::Provider converting the resource name from snake case to camel case" do
resource = nil
recipe = converge {
resource = lw_resource_with_hw_provider_test_case 'blah' do; end
}
expect(resource.created_provider).to eq(Chef::Provider::LwResourceWithHwProviderTestCase)
end
end
end
| andrewpsp/chef | spec/integration/recipes/recipe_dsl_spec.rb | Ruby | apache-2.0 | 55,483 |
#
# Author:: John Keiser (<jkeiser@opscode.com>)
# Copyright:: Copyright (c) 2013 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'support/shared/integration/integration_helper'
require 'chef/knife/upload'
require 'chef/knife/diff'
require 'chef/knife/raw'
describe 'knife upload' do
extend IntegrationSupport
include KnifeSupport
context 'without versioned cookbooks' do
when_the_chef_server "has one of each thing" do
client 'x', {}
cookbook 'x', '1.0.0', { 'metadata.rb' => 'version "1.0.0"' }
data_bag 'x', { 'y' => {} }
environment 'x', {}
node 'x', {}
role 'x', {}
user 'x', {}
when_the_repository 'has only top-level directories' do
directory 'clients'
directory 'cookbooks'
directory 'data_bags'
directory 'environments'
directory 'nodes'
directory 'roles'
directory 'users'
it 'knife upload does nothing' do
knife('upload /').should_succeed ''
knife('diff --name-status /').should_succeed <<EOM
D\t/clients/chef-validator.json
D\t/clients/chef-webui.json
D\t/clients/x.json
D\t/cookbooks/x
D\t/data_bags/x
D\t/environments/_default.json
D\t/environments/x.json
D\t/nodes/x.json
D\t/roles/x.json
D\t/users/admin.json
D\t/users/x.json
EOM
end
it 'knife upload --purge deletes everything' do
knife('upload --purge /').should_succeed(<<EOM, :stderr => "WARNING: /environments/_default.json cannot be deleted (default environment cannot be modified).\n")
Deleted extra entry /clients/chef-validator.json (purge is on)
Deleted extra entry /clients/chef-webui.json (purge is on)
Deleted extra entry /clients/x.json (purge is on)
Deleted extra entry /cookbooks/x (purge is on)
Deleted extra entry /data_bags/x (purge is on)
Deleted extra entry /environments/x.json (purge is on)
Deleted extra entry /nodes/x.json (purge is on)
Deleted extra entry /roles/x.json (purge is on)
Deleted extra entry /users/admin.json (purge is on)
Deleted extra entry /users/x.json (purge is on)
EOM
knife('diff --name-status /').should_succeed <<EOM
D\t/environments/_default.json
EOM
end
end
when_the_repository 'has an identical copy of each thing' do
file 'clients/chef-validator.json', { 'validator' => true, 'public_key' => ChefZero::PUBLIC_KEY }
file 'clients/chef-webui.json', { 'admin' => true, 'public_key' => ChefZero::PUBLIC_KEY }
file 'clients/x.json', { 'public_key' => ChefZero::PUBLIC_KEY }
file 'cookbooks/x/metadata.rb', 'version "1.0.0"'
file 'data_bags/x/y.json', {}
file 'environments/_default.json', { "description" => "The default Chef environment" }
file 'environments/x.json', {}
file 'nodes/x.json', {}
file 'roles/x.json', {}
file 'users/admin.json', { 'admin' => true, 'public_key' => ChefZero::PUBLIC_KEY }
file 'users/x.json', { 'public_key' => ChefZero::PUBLIC_KEY }
it 'knife upload makes no changes' do
knife('upload /cookbooks/x').should_succeed ''
knife('diff --name-status /').should_succeed ''
end
it 'knife upload --purge makes no changes' do
knife('upload --purge /').should_succeed ''
knife('diff --name-status /').should_succeed ''
end
context 'except the role file' do
file 'roles/x.json', { 'description' => 'blarghle' }
it 'knife upload changes the role' do
knife('upload /').should_succeed "Updated /roles/x.json\n"
knife('diff --name-status /').should_succeed ''
end
it 'knife upload --no-diff does not change the role' do
knife('upload --no-diff /').should_succeed ''
knife('diff --name-status /').should_succeed "M\t/roles/x.json\n"
end
end
context 'except the role file is textually different, but not ACTUALLY different' do
file 'roles/x.json', <<EOM
{
"chef_type": "role",
"default_attributes": {
},
"env_run_lists": {
},
"json_class": "Chef::Role",
"name": "x",
"description": "",
"override_attributes": {
},
"run_list": [
]
}
EOM
it 'knife upload / does not change anything' do
knife('upload /').should_succeed ''
knife('diff --name-status /').should_succeed ''
end
end
context 'as well as one extra copy of each thing' do
file 'clients/y.json', { 'public_key' => ChefZero::PUBLIC_KEY }
file 'cookbooks/x/blah.rb', ''
file 'cookbooks/y/metadata.rb', 'version "1.0.0"'
file 'data_bags/x/z.json', {}
file 'data_bags/y/zz.json', {}
file 'environments/y.json', {}
file 'nodes/y.json', {}
file 'roles/y.json', {}
file 'users/y.json', { 'public_key' => ChefZero::PUBLIC_KEY }
it 'knife upload adds the new files' do
knife('upload /').should_succeed <<EOM
Created /clients/y.json
Updated /cookbooks/x
Created /cookbooks/y
Created /data_bags/x/z.json
Created /data_bags/y
Created /data_bags/y/zz.json
Created /environments/y.json
Created /nodes/y.json
Created /roles/y.json
Created /users/y.json
EOM
knife('diff --name-status /').should_succeed ''
end
it 'knife upload --no-diff adds the new files' do
knife('upload --no-diff /').should_succeed <<EOM
Created /clients/y.json
Updated /cookbooks/x
Created /cookbooks/y
Created /data_bags/x/z.json
Created /data_bags/y
Created /data_bags/y/zz.json
Created /environments/y.json
Created /nodes/y.json
Created /roles/y.json
Created /users/y.json
EOM
knife('diff --name-status /').should_succeed ''
end
end
end
when_the_repository 'is empty' do
it 'knife upload does nothing' do
knife('upload /').should_succeed ''
knife('diff --name-status /').should_succeed <<EOM
D\t/clients
D\t/cookbooks
D\t/data_bags
D\t/environments
D\t/nodes
D\t/roles
D\t/users
EOM
end
it 'knife upload --purge deletes nothing' do
knife('upload --purge /').should_fail <<EOM
ERROR: /clients cannot be deleted.
ERROR: /cookbooks cannot be deleted.
ERROR: /data_bags cannot be deleted.
ERROR: /environments cannot be deleted.
ERROR: /nodes cannot be deleted.
ERROR: /roles cannot be deleted.
ERROR: /users cannot be deleted.
EOM
knife('diff --name-status /').should_succeed <<EOM
D\t/clients
D\t/cookbooks
D\t/data_bags
D\t/environments
D\t/nodes
D\t/roles
D\t/users
EOM
end
context 'when current directory is top level' do
cwd '.'
it 'knife upload with no parameters reports an error' do
knife('upload').should_fail "FATAL: Must specify at least one argument. If you want to upload everything in this directory, type \"knife upload .\"\n", :stdout => /USAGE/
end
end
end
end
when_the_chef_server 'is empty' do
when_the_repository 'has a data bag item' do
file 'data_bags/x/y.json', { 'foo' => 'bar' }
        it 'knife upload of the data bag uploads only the values in the data bag item and nothing else' do
knife('upload /data_bags/x/y.json').should_succeed <<EOM
Created /data_bags/x
Created /data_bags/x/y.json
EOM
          knife('diff --name-status /data_bags').should_succeed ''
JSON.parse(knife('raw /data/x/y').stdout, :create_additions => false).keys.sort.should == [ 'foo', 'id' ]
end
end
when_the_repository 'has a data bag item with keys chef_type and data_bag' do
file 'data_bags/x/y.json', { 'chef_type' => 'aaa', 'data_bag' => 'bbb' }
it 'upload preserves chef_type and data_bag' do
knife('upload /data_bags/x/y.json').should_succeed <<EOM
Created /data_bags/x
Created /data_bags/x/y.json
EOM
knife('diff --name-status /data_bags').should_succeed ''
result = JSON.parse(knife('raw /data/x/y').stdout, :create_additions => false)
result.keys.sort.should == [ 'chef_type', 'data_bag', 'id' ]
result['chef_type'].should == 'aaa'
result['data_bag'].should == 'bbb'
end
end
# Test upload of an item when the other end doesn't even have the container
when_the_repository 'has two data bag items' do
file 'data_bags/x/y.json', {}
file 'data_bags/x/z.json', {}
it 'knife upload of one data bag item itself succeeds' do
knife('upload /data_bags/x/y.json').should_succeed <<EOM
Created /data_bags/x
Created /data_bags/x/y.json
EOM
knife('diff --name-status /data_bags').should_succeed <<EOM
A\t/data_bags/x/z.json
EOM
end
end
end
when_the_chef_server 'has three data bag items' do
data_bag 'x', { 'deleted' => {}, 'modified' => {}, 'unmodified' => {} }
when_the_repository 'has a modified, unmodified, added and deleted data bag item' do
file 'data_bags/x/added.json', {}
file 'data_bags/x/modified.json', { 'foo' => 'bar' }
file 'data_bags/x/unmodified.json', {}
it 'knife upload of the modified file succeeds' do
knife('upload /data_bags/x/modified.json').should_succeed <<EOM
Updated /data_bags/x/modified.json
EOM
knife('diff --name-status /data_bags').should_succeed <<EOM
D\t/data_bags/x/deleted.json
A\t/data_bags/x/added.json
EOM
end
it 'knife upload of the unmodified file does nothing' do
knife('upload /data_bags/x/unmodified.json').should_succeed ''
knife('diff --name-status /data_bags').should_succeed <<EOM
D\t/data_bags/x/deleted.json
M\t/data_bags/x/modified.json
A\t/data_bags/x/added.json
EOM
end
it 'knife upload of the added file succeeds' do
knife('upload /data_bags/x/added.json').should_succeed <<EOM
Created /data_bags/x/added.json
EOM
knife('diff --name-status /data_bags').should_succeed <<EOM
D\t/data_bags/x/deleted.json
M\t/data_bags/x/modified.json
EOM
end
it 'knife upload of the deleted file does nothing' do
knife('upload /data_bags/x/deleted.json').should_succeed ''
knife('diff --name-status /data_bags').should_succeed <<EOM
D\t/data_bags/x/deleted.json
M\t/data_bags/x/modified.json
A\t/data_bags/x/added.json
EOM
end
it 'knife upload --purge of the deleted file deletes it' do
knife('upload --purge /data_bags/x/deleted.json').should_succeed <<EOM
Deleted extra entry /data_bags/x/deleted.json (purge is on)
EOM
knife('diff --name-status /data_bags').should_succeed <<EOM
M\t/data_bags/x/modified.json
A\t/data_bags/x/added.json
EOM
end
it 'knife upload of the entire data bag uploads everything' do
knife('upload /data_bags/x').should_succeed <<EOM
Created /data_bags/x/added.json
Updated /data_bags/x/modified.json
EOM
knife('diff --name-status /data_bags').should_succeed <<EOM
D\t/data_bags/x/deleted.json
EOM
end
it 'knife upload --purge of the entire data bag uploads everything' do
knife('upload --purge /data_bags/x').should_succeed <<EOM
Created /data_bags/x/added.json
Updated /data_bags/x/modified.json
Deleted extra entry /data_bags/x/deleted.json (purge is on)
EOM
knife('diff --name-status /data_bags').should_succeed ''
end
context 'when cwd is the /data_bags directory' do
cwd 'data_bags'
it 'knife upload fails' do
knife('upload').should_fail "FATAL: Must specify at least one argument. If you want to upload everything in this directory, type \"knife upload .\"\n", :stdout => /USAGE/
end
it 'knife upload --purge . uploads everything' do
knife('upload --purge .').should_succeed <<EOM
Created x/added.json
Updated x/modified.json
Deleted extra entry x/deleted.json (purge is on)
EOM
knife('diff --name-status /data_bags').should_succeed ''
end
it 'knife upload --purge * uploads everything' do
knife('upload --purge *').should_succeed <<EOM
Created x/added.json
Updated x/modified.json
Deleted extra entry x/deleted.json (purge is on)
EOM
knife('diff --name-status /data_bags').should_succeed ''
end
end
end
end
# Cookbook upload is a funny thing ... direct cookbook upload works, but
# upload of a file is designed not to work at present. Make sure that is the
# case.
when_the_chef_server 'has a cookbook' do
cookbook 'x', '1.0.0', { 'metadata.rb' => 'version "1.0.0"', 'z.rb' => '' }
when_the_repository 'has a modified, extra and missing file for the cookbook' do
file 'cookbooks/x/metadata.rb', 'version "1.0.0"'
file 'cookbooks/x/y.rb', 'hi'
it 'knife upload of any individual file fails' do
knife('upload /cookbooks/x/metadata.rb').should_fail "ERROR: /cookbooks/x/metadata.rb cannot be updated.\n"
knife('upload /cookbooks/x/y.rb').should_fail "ERROR: /cookbooks/x cannot have a child created under it.\n"
knife('upload --purge /cookbooks/x/z.rb').should_fail "ERROR: /cookbooks/x/z.rb cannot be deleted.\n"
end
# TODO this is a bit of an inconsistency: if we didn't specify --purge,
# technically we shouldn't have deleted missing files. But ... cookbooks
# are a special case.
it 'knife upload of the cookbook itself succeeds' do
knife('upload /cookbooks/x').should_succeed <<EOM
Updated /cookbooks/x
EOM
knife('diff --name-status /cookbooks').should_succeed ''
end
it 'knife upload --purge of the cookbook itself succeeds' do
          knife('upload --purge /cookbooks/x').should_succeed <<EOM
Updated /cookbooks/x
EOM
knife('diff --name-status /cookbooks').should_succeed ''
end
end
when_the_repository 'has a missing file for the cookbook' do
file 'cookbooks/x/metadata.rb', 'version "1.0.0"'
it 'knife upload of the cookbook succeeds' do
knife('upload /cookbooks/x').should_succeed <<EOM
Updated /cookbooks/x
EOM
knife('diff --name-status /cookbooks').should_succeed ''
end
end
when_the_repository 'has an extra file for the cookbook' do
file 'cookbooks/x/metadata.rb', 'version "1.0.0"'
file 'cookbooks/x/z.rb', ''
file 'cookbooks/x/blah.rb', ''
it 'knife upload of the cookbook succeeds' do
knife('upload /cookbooks/x').should_succeed <<EOM
Updated /cookbooks/x
EOM
knife('diff --name-status /cookbooks').should_succeed ''
end
end
when_the_repository 'has a different file in the cookbook' do
file 'cookbooks/x/metadata.rb', 'version "1.0.0"'
it 'knife upload --freeze freezes the cookbook' do
knife('upload --freeze /cookbooks/x').should_succeed <<EOM
Updated /cookbooks/x
EOM
# Modify a file and attempt to upload
file 'cookbooks/x/metadata.rb', 'version "1.0.0" # This is different'
knife('upload /cookbooks/x').should_fail "ERROR: /cookbooks failed to write: Cookbook x is frozen\n"
end
end
end
when_the_chef_server 'has a frozen cookbook' do
cookbook 'frozencook', '1.0.0', {
'metadata.rb' => 'version "1.0.0"'
}, :frozen => true
when_the_repository 'has an update to said cookbook' do
file 'cookbooks/frozencook/metadata.rb', 'version "1.0.0" # This is different'
it 'knife upload fails to upload the frozen cookbook' do
knife('upload /cookbooks/frozencook').should_fail "ERROR: /cookbooks failed to write: Cookbook frozencook is frozen\n"
end
it 'knife upload --force uploads the frozen cookbook' do
knife('upload --force /cookbooks/frozencook').should_succeed <<EOM
Updated /cookbooks/frozencook
EOM
end
end
end
when_the_repository 'has a cookbook' do
file 'cookbooks/x/metadata.rb', 'version "1.0.0"'
file 'cookbooks/x/onlyin1.0.0.rb', 'old_text'
when_the_chef_server 'has a later version for the cookbook' do
cookbook 'x', '1.0.0', { 'metadata.rb' => 'version "1.0.0"', 'onlyin1.0.0.rb' => '' }
cookbook 'x', '1.0.1', { 'metadata.rb' => 'version "1.0.1"', 'onlyin1.0.1.rb' => 'hi' }
it 'knife upload /cookbooks/x uploads the local version' do
knife('diff --name-status /cookbooks').should_succeed <<EOM
M\t/cookbooks/x/metadata.rb
D\t/cookbooks/x/onlyin1.0.1.rb
A\t/cookbooks/x/onlyin1.0.0.rb
EOM
knife('upload --purge /cookbooks/x').should_succeed <<EOM
Updated /cookbooks/x
EOM
knife('diff --name-status /cookbooks').should_succeed <<EOM
M\t/cookbooks/x/metadata.rb
D\t/cookbooks/x/onlyin1.0.1.rb
A\t/cookbooks/x/onlyin1.0.0.rb
EOM
end
end
when_the_chef_server 'has an earlier version for the cookbook' do
cookbook 'x', '1.0.0', { 'metadata.rb' => 'version "1.0.0"', 'onlyin1.0.0.rb' => ''}
cookbook 'x', '0.9.9', { 'metadata.rb' => 'version "0.9.9"', 'onlyin0.9.9.rb' => 'hi' }
it 'knife upload /cookbooks/x uploads the local version' do
knife('upload --purge /cookbooks/x').should_succeed <<EOM
Updated /cookbooks/x
EOM
knife('diff --name-status /cookbooks').should_succeed ''
end
end
when_the_chef_server 'has a later version for the cookbook, and no current version' do
cookbook 'x', '1.0.1', { 'metadata.rb' => 'version "1.0.1"', 'onlyin1.0.1.rb' => 'hi' }
it 'knife upload /cookbooks/x uploads the local version' do
knife('diff --name-status /cookbooks').should_succeed <<EOM
M\t/cookbooks/x/metadata.rb
D\t/cookbooks/x/onlyin1.0.1.rb
A\t/cookbooks/x/onlyin1.0.0.rb
EOM
knife('upload --purge /cookbooks/x').should_succeed <<EOM
Updated /cookbooks/x
EOM
knife('diff --name-status /cookbooks').should_succeed <<EOM
M\t/cookbooks/x/metadata.rb
D\t/cookbooks/x/onlyin1.0.1.rb
A\t/cookbooks/x/onlyin1.0.0.rb
EOM
end
end
when_the_chef_server 'has an earlier version for the cookbook, and no current version' do
cookbook 'x', '0.9.9', { 'metadata.rb' => 'version "0.9.9"', 'onlyin0.9.9.rb' => 'hi' }
it 'knife upload /cookbooks/x uploads the new version' do
knife('upload --purge /cookbooks/x').should_succeed <<EOM
Updated /cookbooks/x
EOM
knife('diff --name-status /cookbooks').should_succeed ''
end
end
end
when_the_chef_server 'has an environment' do
environment 'x', {}
when_the_repository 'has an environment with bad JSON' do
file 'environments/x.json', '{'
it 'knife upload tries and fails' do
knife('upload /environments/x.json').should_fail "WARN: Parse error reading #{path_to('environments/x.json')} as JSON: A JSON text must at least contain two octets!\nERROR: /environments/x.json failed to write: Parse error reading JSON: A JSON text must at least contain two octets!\n"
knife('diff --name-status /environments/x.json').should_succeed "M\t/environments/x.json\n", :stderr => "WARN: Parse error reading #{path_to('environments/x.json')} as JSON: A JSON text must at least contain two octets!\n"
end
end
when_the_repository 'has the same environment with the wrong name in the file' do
file 'environments/x.json', { 'name' => 'y' }
it 'knife upload fails' do
knife('upload /environments/x.json').should_fail "ERROR: /environments/x.json failed to write: Name must be 'x' (is 'y')\n"
knife('diff --name-status /environments/x.json').should_succeed "M\t/environments/x.json\n"
end
end
when_the_repository 'has the same environment with no name in the file' do
file 'environments/x.json', { 'description' => 'hi' }
it 'knife upload succeeds' do
knife('upload /environments/x.json').should_succeed "Updated /environments/x.json\n"
knife('diff --name-status /environments/x.json').should_succeed ''
end
end
end
when_the_chef_server 'is empty' do
when_the_repository 'has an environment with bad JSON' do
file 'environments/x.json', '{'
it 'knife upload tries and fails' do
knife('upload /environments/x.json').should_fail "ERROR: /environments failed to create_child: Parse error reading JSON creating child 'x.json': A JSON text must at least contain two octets!\n"
knife('diff --name-status /environments/x.json').should_succeed "A\t/environments/x.json\n"
end
end
when_the_repository 'has an environment with the wrong name in the file' do
file 'environments/x.json', { 'name' => 'y' }
it 'knife upload fails' do
knife('upload /environments/x.json').should_fail "ERROR: /environments failed to create_child: Error creating 'x.json': Name must be 'x' (is 'y')\n"
knife('diff --name-status /environments/x.json').should_succeed "A\t/environments/x.json\n"
end
end
when_the_repository 'has an environment with no name in the file' do
file 'environments/x.json', { 'description' => 'hi' }
it 'knife upload succeeds' do
knife('upload /environments/x.json').should_succeed "Created /environments/x.json\n"
knife('diff --name-status /environments/x.json').should_succeed ''
end
end
when_the_repository 'has a data bag with no id in the file' do
file 'data_bags/bag/x.json', { 'foo' => 'bar' }
it 'knife upload succeeds' do
knife('upload /data_bags/bag/x.json').should_succeed "Created /data_bags/bag\nCreated /data_bags/bag/x.json\n"
knife('diff --name-status /data_bags/bag/x.json').should_succeed ''
end
end
end
end # without versioned cookbooks
with_versioned_cookbooks do
when_the_chef_server "has one of each thing" do
client 'x', {}
cookbook 'x', '1.0.0', { 'metadata.rb' => 'version "1.0.0"' }
data_bag 'x', { 'y' => {} }
environment 'x', {}
node 'x', {}
role 'x', {}
user 'x', {}
when_the_repository 'has only top-level directories' do
directory 'clients'
directory 'cookbooks'
directory 'data_bags'
directory 'environments'
directory 'nodes'
directory 'roles'
directory 'users'
it 'knife upload does nothing' do
knife('upload /').should_succeed ''
knife('diff --name-status /').should_succeed <<EOM
D\t/clients/chef-validator.json
D\t/clients/chef-webui.json
D\t/clients/x.json
D\t/cookbooks/x-1.0.0
D\t/data_bags/x
D\t/environments/_default.json
D\t/environments/x.json
D\t/nodes/x.json
D\t/roles/x.json
D\t/users/admin.json
D\t/users/x.json
EOM
end
it 'knife upload --purge deletes everything' do
knife('upload --purge /').should_succeed(<<EOM, :stderr => "WARNING: /environments/_default.json cannot be deleted (default environment cannot be modified).\n")
Deleted extra entry /clients/chef-validator.json (purge is on)
Deleted extra entry /clients/chef-webui.json (purge is on)
Deleted extra entry /clients/x.json (purge is on)
Deleted extra entry /cookbooks/x-1.0.0 (purge is on)
Deleted extra entry /data_bags/x (purge is on)
Deleted extra entry /environments/x.json (purge is on)
Deleted extra entry /nodes/x.json (purge is on)
Deleted extra entry /roles/x.json (purge is on)
Deleted extra entry /users/admin.json (purge is on)
Deleted extra entry /users/x.json (purge is on)
EOM
knife('diff --name-status /').should_succeed <<EOM
D\t/environments/_default.json
EOM
end
end
when_the_repository 'has an identical copy of each thing' do
file 'clients/chef-validator.json', { 'validator' => true, 'public_key' => ChefZero::PUBLIC_KEY }
file 'clients/chef-webui.json', { 'admin' => true, 'public_key' => ChefZero::PUBLIC_KEY }
file 'clients/x.json', { 'public_key' => ChefZero::PUBLIC_KEY }
file 'cookbooks/x-1.0.0/metadata.rb', 'version "1.0.0"'
file 'data_bags/x/y.json', {}
file 'environments/_default.json', { 'description' => 'The default Chef environment' }
file 'environments/x.json', {}
file 'nodes/x.json', {}
file 'roles/x.json', {}
file 'users/admin.json', { 'admin' => true, 'public_key' => ChefZero::PUBLIC_KEY }
file 'users/x.json', { 'public_key' => ChefZero::PUBLIC_KEY }
it 'knife upload makes no changes' do
knife('upload /cookbooks/x-1.0.0').should_succeed ''
knife('diff --name-status /').should_succeed ''
end
it 'knife upload --purge makes no changes' do
knife('upload --purge /').should_succeed ''
knife('diff --name-status /').should_succeed ''
end
context 'except the role file' do
file 'roles/x.json', { 'description' => 'blarghle' }
it 'knife upload changes the role' do
knife('upload /').should_succeed "Updated /roles/x.json\n"
knife('diff --name-status /').should_succeed ''
end
end
context 'except the role file is textually different, but not ACTUALLY different' do
file 'roles/x.json', <<EOM
{
"chef_type": "role",
"default_attributes": {
},
"env_run_lists": {
},
"json_class": "Chef::Role",
"name": "x",
"description": "",
"override_attributes": {
},
"run_list": [
]
}
EOM
it 'knife upload / does not change anything' do
knife('upload /').should_succeed ''
knife('diff --name-status /').should_succeed ''
end
end
context 'as well as one extra copy of each thing' do
file 'clients/y.json', { 'public_key' => ChefZero::PUBLIC_KEY }
file 'cookbooks/x-1.0.0/blah.rb', ''
file 'cookbooks/x-2.0.0/metadata.rb', 'version "2.0.0"'
file 'cookbooks/y-1.0.0/metadata.rb', 'version "1.0.0"'
file 'data_bags/x/z.json', {}
file 'data_bags/y/zz.json', {}
file 'environments/y.json', {}
file 'nodes/y.json', {}
file 'roles/y.json', {}
file 'users/y.json', { 'public_key' => ChefZero::PUBLIC_KEY }
it 'knife upload adds the new files' do
knife('upload /').should_succeed <<EOM
Created /clients/y.json
Updated /cookbooks/x-1.0.0
Created /cookbooks/x-2.0.0
Created /cookbooks/y-1.0.0
Created /data_bags/x/z.json
Created /data_bags/y
Created /data_bags/y/zz.json
Created /environments/y.json
Created /nodes/y.json
Created /roles/y.json
Created /users/y.json
EOM
knife('diff --name-status /').should_succeed ''
end
end
end
when_the_repository 'is empty' do
it 'knife upload does nothing' do
knife('upload /').should_succeed ''
knife('diff --name-status /').should_succeed <<EOM
D\t/clients
D\t/cookbooks
D\t/data_bags
D\t/environments
D\t/nodes
D\t/roles
D\t/users
EOM
end
it 'knife upload --purge deletes nothing' do
knife('upload --purge /').should_fail <<EOM
ERROR: /clients cannot be deleted.
ERROR: /cookbooks cannot be deleted.
ERROR: /data_bags cannot be deleted.
ERROR: /environments cannot be deleted.
ERROR: /nodes cannot be deleted.
ERROR: /roles cannot be deleted.
ERROR: /users cannot be deleted.
EOM
knife('diff --name-status /').should_succeed <<EOM
D\t/clients
D\t/cookbooks
D\t/data_bags
D\t/environments
D\t/nodes
D\t/roles
D\t/users
EOM
end
context 'when current directory is top level' do
cwd '.'
it 'knife upload with no parameters reports an error' do
knife('upload').should_fail "FATAL: Must specify at least one argument. If you want to upload everything in this directory, type \"knife upload .\"\n", :stdout => /USAGE/
end
end
end
end
# Test upload of an item when the other end doesn't even have the container
when_the_chef_server 'is empty' do
when_the_repository 'has two data bag items' do
file 'data_bags/x/y.json', {}
file 'data_bags/x/z.json', {}
it 'knife upload of one data bag item itself succeeds' do
knife('upload /data_bags/x/y.json').should_succeed <<EOM
Created /data_bags/x
Created /data_bags/x/y.json
EOM
knife('diff --name-status /data_bags').should_succeed <<EOM
A\t/data_bags/x/z.json
EOM
end
end
end
when_the_chef_server 'has three data bag items' do
data_bag 'x', { 'deleted' => {}, 'modified' => {}, 'unmodified' => {} }
when_the_repository 'has a modified, unmodified, added and deleted data bag item' do
file 'data_bags/x/added.json', {}
file 'data_bags/x/modified.json', { 'foo' => 'bar' }
file 'data_bags/x/unmodified.json', {}
it 'knife upload of the modified file succeeds' do
knife('upload /data_bags/x/modified.json').should_succeed <<EOM
Updated /data_bags/x/modified.json
EOM
knife('diff --name-status /data_bags').should_succeed <<EOM
D\t/data_bags/x/deleted.json
A\t/data_bags/x/added.json
EOM
end
it 'knife upload of the unmodified file does nothing' do
knife('upload /data_bags/x/unmodified.json').should_succeed ''
knife('diff --name-status /data_bags').should_succeed <<EOM
D\t/data_bags/x/deleted.json
M\t/data_bags/x/modified.json
A\t/data_bags/x/added.json
EOM
end
it 'knife upload of the added file succeeds' do
knife('upload /data_bags/x/added.json').should_succeed <<EOM
Created /data_bags/x/added.json
EOM
knife('diff --name-status /data_bags').should_succeed <<EOM
D\t/data_bags/x/deleted.json
M\t/data_bags/x/modified.json
EOM
end
it 'knife upload of the deleted file does nothing' do
knife('upload /data_bags/x/deleted.json').should_succeed ''
knife('diff --name-status /data_bags').should_succeed <<EOM
D\t/data_bags/x/deleted.json
M\t/data_bags/x/modified.json
A\t/data_bags/x/added.json
EOM
end
it 'knife upload --purge of the deleted file deletes it' do
knife('upload --purge /data_bags/x/deleted.json').should_succeed <<EOM
Deleted extra entry /data_bags/x/deleted.json (purge is on)
EOM
knife('diff --name-status /data_bags').should_succeed <<EOM
M\t/data_bags/x/modified.json
A\t/data_bags/x/added.json
EOM
end
it 'knife upload of the entire data bag uploads everything' do
knife('upload /data_bags/x').should_succeed <<EOM
Created /data_bags/x/added.json
Updated /data_bags/x/modified.json
EOM
knife('diff --name-status /data_bags').should_succeed <<EOM
D\t/data_bags/x/deleted.json
EOM
end
it 'knife upload --purge of the entire data bag uploads everything' do
knife('upload --purge /data_bags/x').should_succeed <<EOM
Created /data_bags/x/added.json
Updated /data_bags/x/modified.json
Deleted extra entry /data_bags/x/deleted.json (purge is on)
EOM
knife('diff --name-status /data_bags').should_succeed ''
end
context 'when cwd is the /data_bags directory' do
cwd 'data_bags'
it 'knife upload fails' do
knife('upload').should_fail "FATAL: Must specify at least one argument. If you want to upload everything in this directory, type \"knife upload .\"\n", :stdout => /USAGE/
end
it 'knife upload --purge . uploads everything' do
knife('upload --purge .').should_succeed <<EOM
Created x/added.json
Updated x/modified.json
Deleted extra entry x/deleted.json (purge is on)
EOM
knife('diff --name-status /data_bags').should_succeed ''
end
it 'knife upload --purge * uploads everything' do
knife('upload --purge *').should_succeed <<EOM
Created x/added.json
Updated x/modified.json
Deleted extra entry x/deleted.json (purge is on)
EOM
knife('diff --name-status /data_bags').should_succeed ''
end
end
end
end
# Cookbook upload is a funny thing ... direct cookbook upload works, but
# upload of a file is designed not to work at present. Make sure that is the
# case.
when_the_chef_server 'has a cookbook' do
cookbook 'x', '1.0.0', { 'metadata.rb' => 'version "1.0.0"', 'z.rb' => '' }
when_the_repository 'has a modified, extra and missing file for the cookbook' do
file 'cookbooks/x-1.0.0/metadata.rb', 'version "1.0.0"'
file 'cookbooks/x-1.0.0/y.rb', 'hi'
it 'knife upload of any individual file fails' do
knife('upload /cookbooks/x-1.0.0/metadata.rb').should_fail "ERROR: /cookbooks/x-1.0.0/metadata.rb cannot be updated.\n"
knife('upload /cookbooks/x-1.0.0/y.rb').should_fail "ERROR: /cookbooks/x-1.0.0 cannot have a child created under it.\n"
knife('upload --purge /cookbooks/x-1.0.0/z.rb').should_fail "ERROR: /cookbooks/x-1.0.0/z.rb cannot be deleted.\n"
end
# TODO this is a bit of an inconsistency: if we didn't specify --purge,
# technically we shouldn't have deleted missing files. But ... cookbooks
# are a special case.
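        # For contrast (informal note, mirroring the data bag examples above rather
        # than adding a test): non-cookbook uploads honor the flag strictly.
        #   knife('upload /data_bags/x')          # leaves server-side extras alone
        #   knife('upload --purge /data_bags/x')  # deletes them ("purge is on")
        # Uploading a cookbook, by contrast, always syncs its entire file manifest.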
it 'knife upload of the cookbook itself succeeds' do
knife('upload /cookbooks/x-1.0.0').should_succeed <<EOM
Updated /cookbooks/x-1.0.0
EOM
knife('diff --name-status /cookbooks').should_succeed ''
end
it 'knife upload --purge of the cookbook itself succeeds' do
          knife('upload --purge /cookbooks/x-1.0.0').should_succeed <<EOM
Updated /cookbooks/x-1.0.0
EOM
knife('diff --name-status /cookbooks').should_succeed ''
end
end
when_the_repository 'has a missing file for the cookbook' do
file 'cookbooks/x-1.0.0/metadata.rb', 'version "1.0.0"'
it 'knife upload of the cookbook succeeds' do
knife('upload /cookbooks/x-1.0.0').should_succeed <<EOM
Updated /cookbooks/x-1.0.0
EOM
knife('diff --name-status /cookbooks').should_succeed ''
end
end
when_the_repository 'has an extra file for the cookbook' do
file 'cookbooks/x-1.0.0/metadata.rb', 'version "1.0.0"'
file 'cookbooks/x-1.0.0/z.rb', ''
file 'cookbooks/x-1.0.0/blah.rb', ''
it 'knife upload of the cookbook succeeds' do
knife('upload /cookbooks/x-1.0.0').should_succeed <<EOM
Updated /cookbooks/x-1.0.0
EOM
knife('diff --name-status /cookbooks').should_succeed ''
end
end
end
when_the_repository 'has a cookbook' do
file 'cookbooks/x-1.0.0/metadata.rb', 'version "1.0.0"'
file 'cookbooks/x-1.0.0/onlyin1.0.0.rb', 'old_text'
when_the_chef_server 'has a later version for the cookbook' do
cookbook 'x', '1.0.0', { 'metadata.rb' => 'version "1.0.0"', 'onlyin1.0.0.rb' => '' }
cookbook 'x', '1.0.1', { 'metadata.rb' => 'version "1.0.1"', 'onlyin1.0.1.rb' => 'hi' }
it 'knife upload /cookbooks uploads the local version' do
knife('diff --name-status /cookbooks').should_succeed <<EOM
M\t/cookbooks/x-1.0.0/onlyin1.0.0.rb
D\t/cookbooks/x-1.0.1
EOM
knife('upload --purge /cookbooks').should_succeed <<EOM
Updated /cookbooks/x-1.0.0
Deleted extra entry /cookbooks/x-1.0.1 (purge is on)
EOM
knife('diff --name-status /cookbooks').should_succeed ''
end
end
when_the_chef_server 'has an earlier version for the cookbook' do
cookbook 'x', '1.0.0', { 'metadata.rb' => 'version "1.0.0"', 'onlyin1.0.0.rb' => ''}
cookbook 'x', '0.9.9', { 'metadata.rb' => 'version "0.9.9"', 'onlyin0.9.9.rb' => 'hi' }
it 'knife upload /cookbooks uploads the local version' do
knife('upload --purge /cookbooks').should_succeed <<EOM
Updated /cookbooks/x-1.0.0
Deleted extra entry /cookbooks/x-0.9.9 (purge is on)
EOM
knife('diff --name-status /cookbooks').should_succeed ''
end
end
when_the_chef_server 'has a later version for the cookbook, and no current version' do
cookbook 'x', '1.0.1', { 'metadata.rb' => 'version "1.0.1"', 'onlyin1.0.1.rb' => 'hi' }
it 'knife upload /cookbooks/x uploads the local version' do
knife('diff --name-status /cookbooks').should_succeed <<EOM
D\t/cookbooks/x-1.0.1
A\t/cookbooks/x-1.0.0
EOM
knife('upload --purge /cookbooks').should_succeed <<EOM
Created /cookbooks/x-1.0.0
Deleted extra entry /cookbooks/x-1.0.1 (purge is on)
EOM
knife('diff --name-status /cookbooks').should_succeed ''
end
end
when_the_chef_server 'has an earlier version for the cookbook, and no current version' do
cookbook 'x', '0.9.9', { 'metadata.rb' => 'version "0.9.9"', 'onlyin0.9.9.rb' => 'hi' }
it 'knife upload /cookbooks/x uploads the new version' do
knife('upload --purge /cookbooks').should_succeed <<EOM
Created /cookbooks/x-1.0.0
Deleted extra entry /cookbooks/x-0.9.9 (purge is on)
EOM
knife('diff --name-status /cookbooks').should_succeed ''
end
end
end
when_the_chef_server 'has an environment' do
environment 'x', {}
when_the_repository 'has an environment with bad JSON' do
file 'environments/x.json', '{'
it 'knife upload tries and fails' do
knife('upload /environments/x.json').should_fail "WARN: Parse error reading #{path_to('environments/x.json')} as JSON: A JSON text must at least contain two octets!\nERROR: /environments/x.json failed to write: Parse error reading JSON: A JSON text must at least contain two octets!\n"
knife('diff --name-status /environments/x.json').should_succeed "M\t/environments/x.json\n", :stderr => "WARN: Parse error reading #{path_to('environments/x.json')} as JSON: A JSON text must at least contain two octets!\n"
end
end
when_the_repository 'has the same environment with the wrong name in the file' do
file 'environments/x.json', { 'name' => 'y' }
it 'knife upload fails' do
knife('upload /environments/x.json').should_fail "ERROR: /environments/x.json failed to write: Name must be 'x' (is 'y')\n"
knife('diff --name-status /environments/x.json').should_succeed "M\t/environments/x.json\n"
end
end
when_the_repository 'has the same environment with no name in the file' do
file 'environments/x.json', { 'description' => 'hi' }
it 'knife upload succeeds' do
knife('upload /environments/x.json').should_succeed "Updated /environments/x.json\n"
knife('diff --name-status /environments/x.json').should_succeed ''
end
end
end
when_the_chef_server 'is empty' do
when_the_repository 'has an environment with bad JSON' do
file 'environments/x.json', '{'
it 'knife upload tries and fails' do
knife('upload /environments/x.json').should_fail "ERROR: /environments failed to create_child: Parse error reading JSON creating child 'x.json': A JSON text must at least contain two octets!\n"
knife('diff --name-status /environments/x.json').should_succeed "A\t/environments/x.json\n"
end
end
when_the_repository 'has an environment with the wrong name in the file' do
file 'environments/x.json', { 'name' => 'y' }
it 'knife upload fails' do
knife('upload /environments/x.json').should_fail "ERROR: /environments failed to create_child: Error creating 'x.json': Name must be 'x' (is 'y')\n"
knife('diff --name-status /environments/x.json').should_succeed "A\t/environments/x.json\n"
end
end
when_the_repository 'has an environment with no name in the file' do
file 'environments/x.json', { 'description' => 'hi' }
it 'knife upload succeeds' do
knife('upload /environments/x.json').should_succeed "Created /environments/x.json\n"
knife('diff --name-status /environments/x.json').should_succeed ''
end
end
when_the_repository 'has a data bag with no id in the file' do
file 'data_bags/bag/x.json', { 'foo' => 'bar' }
it 'knife upload succeeds' do
knife('upload /data_bags/bag/x.json').should_succeed "Created /data_bags/bag\nCreated /data_bags/bag/x.json\n"
knife('diff --name-status /data_bags/bag/x.json').should_succeed ''
end
end
end
end # with versioned cookbooks
end
| luna1x/chef-server | vendor/ruby/1.9.1/gems/chef-11.6.2/spec/integration/knife/upload_spec.rb | Ruby | apache-2.0 | 41,092 |
/*
* Copyright 2002-2015 Drew Noakes
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* More information about this project is available at:
*
* https://drewnoakes.com/code/exif/
* https://github.com/drewnoakes/metadata-extractor
*/
package com.drew.metadata.exif.makernotes;
import com.drew.lang.annotations.NotNull;
import com.drew.lang.annotations.Nullable;
import com.drew.metadata.TagDescriptor;
import static com.drew.metadata.exif.makernotes.PentaxMakernoteDirectory.*;
/**
* Provides human-readable string representations of tag values stored in a {@link PentaxMakernoteDirectory}.
* <p>
* Some information about this makernote taken from here:
* http://www.ozhiker.com/electronics/pjmt/jpeg_info/pentax_mn.html
*
* @author Drew Noakes https://drewnoakes.com
*/
public class PentaxMakernoteDescriptor extends TagDescriptor<PentaxMakernoteDirectory>
{
public PentaxMakernoteDescriptor(@NotNull PentaxMakernoteDirectory directory)
{
super(directory);
}
@Override
@Nullable
public String getDescription(int tagType)
{
switch (tagType) {
case TAG_CAPTURE_MODE:
return getCaptureModeDescription();
case TAG_QUALITY_LEVEL:
return getQualityLevelDescription();
case TAG_FOCUS_MODE:
return getFocusModeDescription();
case TAG_FLASH_MODE:
return getFlashModeDescription();
case TAG_WHITE_BALANCE:
return getWhiteBalanceDescription();
case TAG_DIGITAL_ZOOM:
return getDigitalZoomDescription();
case TAG_SHARPNESS:
return getSharpnessDescription();
case TAG_CONTRAST:
return getContrastDescription();
case TAG_SATURATION:
return getSaturationDescription();
case TAG_ISO_SPEED:
return getIsoSpeedDescription();
case TAG_COLOUR:
return getColourDescription();
default:
return super.getDescription(tagType);
}
}
@Nullable
public String getColourDescription()
{
return getIndexedDescription(TAG_COLOUR, 1, "Normal", "Black & White", "Sepia");
}
@Nullable
public String getIsoSpeedDescription()
{
Integer value = _directory.getInteger(TAG_ISO_SPEED);
if (value == null)
return null;
switch (value) {
// TODO there must be other values which aren't catered for here
case 10: return "ISO 100";
case 16: return "ISO 200";
case 100: return "ISO 100";
case 200: return "ISO 200";
default: return "Unknown (" + value + ")";
}
}
@Nullable
public String getSaturationDescription()
{
return getIndexedDescription(TAG_SATURATION, "Normal", "Low", "High");
}
@Nullable
public String getContrastDescription()
{
return getIndexedDescription(TAG_CONTRAST, "Normal", "Low", "High");
}
@Nullable
public String getSharpnessDescription()
{
return getIndexedDescription(TAG_SHARPNESS, "Normal", "Soft", "Hard");
}
@Nullable
public String getDigitalZoomDescription()
{
Float value = _directory.getFloatObject(TAG_DIGITAL_ZOOM);
if (value == null)
return null;
if (value == 0)
return "Off";
return Float.toString(value);
}
@Nullable
public String getWhiteBalanceDescription()
{
return getIndexedDescription(TAG_WHITE_BALANCE,
"Auto", "Daylight", "Shade", "Tungsten", "Fluorescent", "Manual");
}
@Nullable
public String getFlashModeDescription()
{
return getIndexedDescription(TAG_FLASH_MODE,
1, "Auto", "Flash On", null, "Flash Off", null, "Red-eye Reduction");
}
@Nullable
public String getFocusModeDescription()
{
return getIndexedDescription(TAG_FOCUS_MODE, 2, "Custom", "Auto");
}
@Nullable
public String getQualityLevelDescription()
{
return getIndexedDescription(TAG_QUALITY_LEVEL, "Good", "Better", "Best");
}
@Nullable
public String getCaptureModeDescription()
{
return getIndexedDescription(TAG_CAPTURE_MODE,
"Auto", "Night-scene", "Manual", null, "Multiple");
}
}
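// Usage sketch (illustrative, not part of this class; the file name is hypothetical):
//
//   Metadata metadata = ImageMetadataReader.readMetadata(new File("photo.jpg"));
//   PentaxMakernoteDirectory directory =
//       metadata.getFirstDirectoryOfType(PentaxMakernoteDirectory.class);
//   if (directory != null) {
//       String mode = new PentaxMakernoteDescriptor(directory).getCaptureModeDescription();
//   }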
| wswenyue/metadata-extractor | Source/com/drew/metadata/exif/makernotes/PentaxMakernoteDescriptor.java | Java | apache-2.0 | 4,966 |
/*
* Hibernate Validator, declare and validate application constraints
*
* License: Apache License, Version 2.0
* See the license.txt file in the root directory or <http://www.apache.org/licenses/LICENSE-2.0>.
*/
package org.hibernate.validator.test.internal.engine.methodvalidation.service;
import java.util.List;
import java.util.Map;
import javax.validation.Valid;
import javax.validation.constraints.Min;
import javax.validation.constraints.NotNull;
import org.joda.time.DateMidnight;
import org.hibernate.validator.constraints.NotEmpty;
import org.hibernate.validator.test.internal.engine.methodvalidation.model.Customer;
/**
* @author Gunnar Morling
*/
public interface CustomerRepository extends RepositoryBase<Customer> {
@Valid
Customer findCustomerByName(@NotNull String name);
void persistCustomer(@NotNull @Valid Customer customer);
void cascadingMapParameter(@Valid Map<String, Customer> customer);
void cascadingIterableParameter(@Valid List<Customer> customer);
void cascadingArrayParameter(@Valid Customer... customer);
void findCustomerByAgeAndName(@Min(5) Integer age, @NotNull String name);
void cascadingParameter(@NotNull @Valid Customer param1, @NotNull @Valid Customer param2);
@Override
void foo(Long id);
@Override
void bar(Customer customer);
void boz();
@Min(10)
int baz();
@Valid
Customer cascadingReturnValue();
@Valid
List<Customer> cascadingIterableReturnValue();
@Valid
Map<String, Customer> cascadingMapReturnValue();
@Valid
Customer[] cascadingArrayReturnValue();
@Override
Customer overriddenMethodWithCascadingReturnValue();
void parameterConstraintInGroup(@NotNull(groups = { ValidationGroup.class }) String name);
@Override
@Min(10)
int overriddenMethodWithReturnValueConstraint();
int getFoo(int i);
int getFoo(@NotEmpty String s);
@ConsistentDateParameters
void methodWithCrossParameterConstraint(@NotNull DateMidnight start, @NotNull DateMidnight end);
public interface ValidationGroup {
}
}
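// Validation sketch (illustrative, not part of the fixture; `repository` stands in
// for any implementation instance). The constraints declared above are exercised
// through the Bean Validation 1.1 executable API:
//
//   ExecutableValidator executableValidator = Validation.buildDefaultValidatorFactory()
//           .getValidator().forExecutables();
//   Method method = CustomerRepository.class.getMethod("findCustomerByName", String.class);
//   Set<ConstraintViolation<CustomerRepository>> violations =
//           executableValidator.validateParameters(repository, method, new Object[]{ null });
//   // one violation: the @NotNull name parameter was null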
| mxrenkin/hibernate-validator | engine/src/test/java/org/hibernate/validator/test/internal/engine/methodvalidation/service/CustomerRepository.java | Java | apache-2.0 | 2,001 |
package org.zstack.network.service.lb;
import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Configurable;
import org.springframework.transaction.annotation.Transactional;
import org.zstack.core.Platform;
import org.zstack.core.cloudbus.CloudBus;
import org.zstack.core.db.DatabaseFacade;
import org.zstack.core.db.SimpleQuery;
import org.zstack.core.db.SimpleQuery.Op;
import org.zstack.core.db.UpdateQuery;
import org.zstack.core.errorcode.ErrorFacade;
import org.zstack.core.thread.ChainTask;
import org.zstack.core.thread.SyncTaskChain;
import org.zstack.core.thread.ThreadFacade;
import org.zstack.core.workflow.FlowChainBuilder;
import org.zstack.core.workflow.ShareFlow;
import org.zstack.header.core.Completion;
import org.zstack.header.core.NoErrorCompletion;
import org.zstack.header.core.workflow.*;
import org.zstack.header.errorcode.ErrorCode;
import org.zstack.header.errorcode.OperationFailureException;
import org.zstack.header.exception.CloudRuntimeException;
import org.zstack.header.message.APIMessage;
import org.zstack.header.message.Message;
import org.zstack.header.network.l3.L3NetworkVO;
import org.zstack.header.network.service.NetworkServiceL3NetworkRefVO;
import org.zstack.header.vm.*;
import org.zstack.identity.AccountManager;
import org.zstack.network.service.vip.*;
import org.zstack.tag.TagManager;
import org.zstack.utils.CollectionUtils;
import org.zstack.utils.DebugUtils;
import org.zstack.utils.Utils;
import org.zstack.utils.function.Function;
import org.zstack.utils.logging.CLogger;
import static org.zstack.core.Platform.operr;
import javax.persistence.TypedQuery;
import java.util.*;
import java.util.stream.Collectors;
import static java.util.Arrays.asList;
/**
* Created by frank on 8/8/2015.
*/
@Configurable(preConstruction = true, autowire = Autowire.BY_TYPE)
public class LoadBalancerBase {
private static final CLogger logger = Utils.getLogger(LoadBalancerBase.class);
@Autowired
private CloudBus bus;
@Autowired
private DatabaseFacade dbf;
@Autowired
private LoadBalancerManager lbMgr;
@Autowired
private ThreadFacade thdf;
@Autowired
private ErrorFacade errf;
@Autowired
private AccountManager acntMgr;
@Autowired
private TagManager tagMgr;
private LoadBalancerVO self;
private String getSyncId() {
return String.format("operate-lb-%s", self.getUuid());
}
protected LoadBalancerInventory getInventory() {
return LoadBalancerInventory.valueOf(self);
}
private LoadBalancerInventory reloadAndGetInventory() {
self = dbf.reload(self);
return getInventory();
}
public LoadBalancerBase(LoadBalancerVO self) {
this.self = self;
}
void handleMessage(Message msg) {
if (msg instanceof APIMessage) {
handleApiMessage((APIMessage) msg);
} else {
handleLocalMessage(msg);
}
}
private void handleLocalMessage(Message msg) {
if (msg instanceof LoadBalancerActiveVmNicMsg) {
handle((LoadBalancerActiveVmNicMsg) msg);
} else if (msg instanceof LoadBalancerDeactiveVmNicMsg) {
handle((LoadBalancerDeactiveVmNicMsg) msg);
} else if (msg instanceof LoadBalancerRemoveVmNicMsg) {
handle((LoadBalancerRemoveVmNicMsg) msg);
} else if (msg instanceof RefreshLoadBalancerMsg) {
handle((RefreshLoadBalancerMsg) msg);
} else if (msg instanceof DeleteLoadBalancerMsg) {
handle((DeleteLoadBalancerMsg) msg);
} else if (msg instanceof DeleteLoadBalancerOnlyMsg) {
handle((DeleteLoadBalancerOnlyMsg) msg);
} else {
bus.dealWithUnknownMessage(msg);
}
}
private void handle(DeleteLoadBalancerOnlyMsg msg) {
DeleteLoadBalancerOnlyReply reply = new DeleteLoadBalancerOnlyReply();
thdf.chainSubmit(new ChainTask(msg) {
@Override
public String getSyncSignature() {
return getSyncId();
}
@Override
public void run(SyncTaskChain chain) {
if (self.getProviderType() == null) {
// not initialized yet
dbf.remove(self);
bus.reply(msg, reply);
chain.next();
return;
}
LoadBalancerBackend bkd = getBackend();
bkd.destroyLoadBalancer(makeStruct(), new Completion(msg, chain) {
@Override
public void success() {
dbf.remove(self);
bus.reply(msg, reply);
chain.next();
}
@Override
public void fail(ErrorCode errorCode) {
reply.setError(errorCode);
bus.reply(msg, reply);
chain.next();
}
});
}
@Override
public String getName() {
return "delete-load-balancer-only";
}
});
}
private void handle(final DeleteLoadBalancerMsg msg) {
final DeleteLoadBalancerReply reply = new DeleteLoadBalancerReply();
thdf.chainSubmit(new ChainTask(msg) {
@Override
public String getSyncSignature() {
return getSyncId();
}
@Override
public void run(final SyncTaskChain chain) {
delete(new Completion(msg, chain) {
@Override
public void success() {
                        bus.reply(msg, reply);
chain.next();
}
@Override
public void fail(ErrorCode errorCode) {
reply.setError(errorCode);
                        bus.reply(msg, reply);
chain.next();
}
});
}
@Override
public String getName() {
return "delete-lb";
}
});
}
private void handle(final RefreshLoadBalancerMsg msg) {
final RefreshLoadBalancerReply reply = new RefreshLoadBalancerReply();
thdf.chainSubmit(new ChainTask(msg) {
@Override
public String getSyncSignature() {
return getSyncId();
}
@Override
public void run(final SyncTaskChain chain) {
refresh(new Completion(msg, chain) {
@Override
public void success() {
reply.setInventory(getInventory());
bus.reply(msg, reply);
chain.next();
}
@Override
public void fail(ErrorCode errorCode) {
reply.setError(errorCode);
bus.reply(msg, reply);
chain.next();
}
});
}
@Override
public String getName() {
return "refresh-lb";
}
});
}
private void refresh(final Completion completion) {
LoadBalancerBackend bkd = getBackend();
bkd.refresh(makeStruct(), completion);
}
private void handle(final LoadBalancerRemoveVmNicMsg msg) {
thdf.chainSubmit(new ChainTask(msg) {
@Override
public String getSyncSignature() {
return getSyncId();
}
@Override
public void run(final SyncTaskChain chain) {
final LoadBalancerRemoveVmNicReply reply = new LoadBalancerRemoveVmNicReply();
removeNics(msg.getListenerUuid(), msg.getVmNicUuids(), new Completion(msg, chain) {
@Override
public void success() {
bus.reply(msg, reply);
chain.next();
}
@Override
public void fail(ErrorCode errorCode) {
reply.setError(errorCode);
bus.reply(msg, reply);
chain.next();
}
});
}
@Override
public String getName() {
return "remove-nic-from-lb";
}
});
}
private void checkIfNicIsAdded(List<String> nicUuids) {
List<String> allNicUuids = new ArrayList<String>();
for (LoadBalancerListenerVO l : self.getListeners()) {
allNicUuids.addAll(CollectionUtils.transformToList(l.getVmNicRefs(), new Function<String, LoadBalancerListenerVmNicRefVO>() {
@Override
public String call(LoadBalancerListenerVmNicRefVO arg) {
return arg.getVmNicUuid();
}
}));
}
for (String nicUuid : nicUuids) {
if (!allNicUuids.contains(nicUuid)) {
throw new CloudRuntimeException(String.format("the load balancer[uuid: %s] doesn't have a vm nic[uuid: %s] added", self.getUuid(), nicUuid));
}
}
}
private void handle(final LoadBalancerDeactiveVmNicMsg msg) {
checkIfNicIsAdded(msg.getVmNicUuids());
LoadBalancerListenerVO l = CollectionUtils.find(self.getListeners(), new Function<LoadBalancerListenerVO, LoadBalancerListenerVO>() {
@Override
public LoadBalancerListenerVO call(LoadBalancerListenerVO arg) {
return arg.getUuid().equals(msg.getListenerUuid()) ? arg : null;
}
});
final List<LoadBalancerListenerVmNicRefVO> refs = CollectionUtils.transformToList(l.getVmNicRefs(), new Function<LoadBalancerListenerVmNicRefVO, LoadBalancerListenerVmNicRefVO>() {
@Override
public LoadBalancerListenerVmNicRefVO call(LoadBalancerListenerVmNicRefVO arg) {
return msg.getVmNicUuids().contains(arg.getVmNicUuid()) ? arg : null;
}
});
final LoadBalancerDeactiveVmNicReply reply = new LoadBalancerDeactiveVmNicReply();
FlowChain chain = FlowChainBuilder.newShareFlowChain();
chain.setName(String.format("deactive-vm-nics-on-lb-%s", self.getUuid()));
chain.then(new ShareFlow() {
@Override
public void setup() {
flow(new Flow() {
String __name__ = "set-nics-to-inactive-in-db";
@Override
public void run(FlowTrigger trigger, Map data) {
for (LoadBalancerListenerVmNicRefVO ref : refs) {
ref.setStatus(LoadBalancerVmNicStatus.Inactive);
dbf.update(ref);
}
trigger.next();
}
@Override
public void rollback(FlowRollback trigger, Map data) {
for (LoadBalancerListenerVmNicRefVO ref : refs) {
ref.setStatus(LoadBalancerVmNicStatus.Active);
dbf.update(ref);
}
trigger.rollback();
}
});
flow(new NoRollbackFlow() {
String __name__ = "deactive-nics-on-backend";
@Override
public void run(final FlowTrigger trigger, Map data) {
SimpleQuery<VmNicVO> q = dbf.createQuery(VmNicVO.class);
q.add(VmNicVO_.uuid, Op.IN, CollectionUtils.transformToList(refs, new Function<String, LoadBalancerListenerVmNicRefVO>() {
@Override
public String call(LoadBalancerListenerVmNicRefVO arg) {
return arg.getVmNicUuid();
}
}));
List<VmNicVO> nicvos = q.list();
LoadBalancerBackend bkd = getBackend();
bkd.removeVmNics(makeStruct(), VmNicInventory.valueOf(nicvos), new Completion(trigger) {
@Override
public void success() {
trigger.next();
}
@Override
public void fail(ErrorCode errorCode) {
trigger.fail(errorCode);
}
});
}
});
done(new FlowDoneHandler(msg) {
@Override
public void handle(Map data) {
bus.reply(msg, reply);
}
});
error(new FlowErrorHandler(msg) {
@Override
public void handle(ErrorCode errCode, Map data) {
reply.setError(errCode);
bus.reply(msg, reply);
}
});
}
}).start();
}
private void activeVmNic(final LoadBalancerActiveVmNicMsg msg, final NoErrorCompletion completion) {
checkIfNicIsAdded(msg.getVmNicUuids());
LoadBalancerListenerVO l = CollectionUtils.find(self.getListeners(), new Function<LoadBalancerListenerVO, LoadBalancerListenerVO>() {
@Override
public LoadBalancerListenerVO call(LoadBalancerListenerVO arg) {
return arg.getUuid().equals(msg.getListenerUuid()) ? arg : null;
}
});
final List<LoadBalancerListenerVmNicRefVO> refs = CollectionUtils.transformToList(l.getVmNicRefs(), new Function<LoadBalancerListenerVmNicRefVO, LoadBalancerListenerVmNicRefVO>() {
@Override
public LoadBalancerListenerVmNicRefVO call(LoadBalancerListenerVmNicRefVO arg) {
return msg.getVmNicUuids().contains(arg.getVmNicUuid()) ? arg : null;
}
});
final LoadBalancerActiveVmNicReply reply = new LoadBalancerActiveVmNicReply();
FlowChain chain = FlowChainBuilder.newShareFlowChain();
chain.setName(String.format("active-vm-nics-on-lb-%s", self.getUuid()));
chain.then(new ShareFlow() {
@Override
public void setup() {
flow(new Flow() {
String __name__ = "set-nics-to-active-in-db";
@Override
public void run(FlowTrigger trigger, Map data) {
for (LoadBalancerListenerVmNicRefVO ref : refs) {
ref.setStatus(LoadBalancerVmNicStatus.Active);
dbf.update(ref);
}
trigger.next();
}
@Override
public void rollback(FlowRollback trigger, Map data) {
for (LoadBalancerListenerVmNicRefVO ref : refs) {
ref.setStatus(LoadBalancerVmNicStatus.Inactive);
dbf.update(ref);
}
trigger.rollback();
}
});
flow(new NoRollbackFlow() {
String __name__ = "active-nics-on-backend";
@Override
public void run(final FlowTrigger trigger, Map data) {
SimpleQuery<VmNicVO> q = dbf.createQuery(VmNicVO.class);
q.add(VmNicVO_.uuid, Op.IN, CollectionUtils.transformToList(refs, new Function<String, LoadBalancerListenerVmNicRefVO>() {
@Override
public String call(LoadBalancerListenerVmNicRefVO arg) {
return arg.getVmNicUuid();
}
}));
List<VmNicVO> nicvos = q.list();
LoadBalancerBackend bkd = getBackend();
bkd.addVmNics(makeStruct(), VmNicInventory.valueOf(nicvos), new Completion(trigger) {
@Override
public void success() {
trigger.next();
}
@Override
public void fail(ErrorCode errorCode) {
trigger.fail(errorCode);
}
});
}
});
done(new FlowDoneHandler(msg) {
@Override
public void handle(Map data) {
bus.reply(msg, reply);
completion.done();
}
});
error(new FlowErrorHandler(msg) {
@Override
public void handle(ErrorCode errCode, Map data) {
reply.setError(errCode);
bus.reply(msg, reply);
completion.done();
}
});
}
}).start();
}
private void handle(final LoadBalancerActiveVmNicMsg msg) {
thdf.chainSubmit(new ChainTask(msg) {
@Override
public String getSyncSignature() {
return getSyncId();
}
@Override
public void run(final SyncTaskChain chain) {
activeVmNic(msg, new NoErrorCompletion(msg, chain) {
@Override
public void done() {
chain.next();
}
});
}
@Override
public String getName() {
return "deactive-nic";
}
});
}
private void handleApiMessage(APIMessage msg) {
if (msg instanceof APICreateLoadBalancerListenerMsg) {
handle((APICreateLoadBalancerListenerMsg) msg);
} else if (msg instanceof APIAddVmNicToLoadBalancerMsg) {
handle((APIAddVmNicToLoadBalancerMsg) msg);
} else if (msg instanceof APIRemoveVmNicFromLoadBalancerMsg) {
handle((APIRemoveVmNicFromLoadBalancerMsg) msg);
} else if (msg instanceof APIDeleteLoadBalancerListenerMsg) {
handle((APIDeleteLoadBalancerListenerMsg) msg);
} else if (msg instanceof APIDeleteLoadBalancerMsg) {
handle((APIDeleteLoadBalancerMsg) msg);
} else if (msg instanceof APIRefreshLoadBalancerMsg) {
handle((APIRefreshLoadBalancerMsg) msg);
} else if (msg instanceof APIGetCandidateVmNicsForLoadBalancerMsg) {
handle((APIGetCandidateVmNicsForLoadBalancerMsg) msg);
} else {
bus.dealWithUnknownMessage(msg);
}
}
@Transactional(readOnly = true)
private void handle(APIGetCandidateVmNicsForLoadBalancerMsg msg) {
APIGetCandidateVmNicsForLoadBalancerReply reply = new APIGetCandidateVmNicsForLoadBalancerReply();
String sql = "select vip.peerL3NetworkUuid from VipVO vip where vip.uuid = :uuid";
TypedQuery<String> q = dbf.getEntityManager().createQuery(sql, String.class);
q.setParameter("uuid", self.getVipUuid());
List<String> ret = q.getResultList();
String peerL3Uuid = ret.isEmpty() ? null : ret.get(0);
if (peerL3Uuid != null) {
// the load balancer has been bound to a private L3 network
sql = "select nic from VmNicVO nic, VmInstanceVO vm where nic.l3NetworkUuid = :l3Uuid and nic.uuid not in (select ref.vmNicUuid from LoadBalancerListenerVmNicRefVO ref" +
" where ref.listenerUuid = :luuid) and nic.vmInstanceUuid = vm.uuid and vm.type = :vmType and vm.state in (:vmStates)";
TypedQuery<VmNicVO> pq = dbf.getEntityManager().createQuery(sql, VmNicVO.class);
pq.setParameter("l3Uuid", peerL3Uuid);
pq.setParameter("luuid", msg.getListenerUuid());
pq.setParameter("vmType", VmInstanceConstant.USER_VM_TYPE);
pq.setParameter("vmStates", asList(VmInstanceState.Running, VmInstanceState.Stopped));
List<VmNicVO> nics = pq.getResultList();
reply.setInventories(VmNicInventory.valueOf(nics));
bus.reply(msg, reply);
return;
}
// the load balancer has not been bound to any private L3 network
sql = "select l3.uuid from L3NetworkVO l3, NetworkServiceL3NetworkRefVO ref where l3.uuid = ref.l3NetworkUuid" +
" and ref.networkServiceType = :type";
q = dbf.getEntityManager().createQuery(sql, String.class);
q.setParameter("type", LoadBalancerConstants.LB_NETWORK_SERVICE_TYPE_STRING);
List<String> l3Uuids = q.getResultList();
if (l3Uuids.isEmpty()) {
reply.setInventories(new ArrayList<>());
bus.reply(msg, reply);
return;
}
sql = "select nic from VmNicVO nic, VmInstanceVO vm where nic.l3NetworkUuid in (select l3.uuid from L3NetworkVO l3, NetworkServiceL3NetworkRefVO ref where l3.uuid = ref.l3NetworkUuid" +
" and ref.networkServiceType = :type) and nic.vmInstanceUuid = vm.uuid and vm.type = :vmType and vm.state in (:vmStates)";
TypedQuery<VmNicVO> nq = dbf.getEntityManager().createQuery(sql, VmNicVO.class);
nq.setParameter("type", LoadBalancerConstants.LB_NETWORK_SERVICE_TYPE_STRING);
nq.setParameter("vmType", VmInstanceConstant.USER_VM_TYPE);
nq.setParameter("vmStates", asList(VmInstanceState.Running, VmInstanceState.Stopped));
List<VmNicVO> nics = nq.getResultList();
reply.setInventories(VmNicInventory.valueOf(nics));
bus.reply(msg, reply);
}
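    // Query-shape note (illustrative summary, not executed code): when the VIP has a
    // peer L3 network bound, candidates are limited to nics on that network; otherwise
    // the fallback spans every L3 network advertising the load balancer service.
    // Roughly, in SQL terms:
    //
    //   SELECT nic.* FROM VmNicVO nic JOIN VmInstanceVO vm ON nic.vmInstanceUuid = vm.uuid
    //   WHERE nic.l3NetworkUuid IN (<VIP peer L3 | LB-enabled L3 uuids>)
    //     AND vm.type = 'UserVm' AND vm.state IN ('Running', 'Stopped');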
private void handle(final APIRefreshLoadBalancerMsg msg) {
final APIRefreshLoadBalancerEvent evt = new APIRefreshLoadBalancerEvent(msg.getId());
thdf.chainSubmit(new ChainTask(msg) {
@Override
public String getSyncSignature() {
return getSyncId();
}
@Override
public void run(final SyncTaskChain chain) {
refresh(new Completion(msg, chain) {
@Override
public void success() {
evt.setInventory(getInventory());
bus.publish(evt);
chain.next();
}
@Override
public void fail(ErrorCode errorCode) {
evt.setError(errorCode);
bus.publish(evt);
chain.next();
}
});
}
@Override
public String getName() {
return "refresh-lb";
}
});
}
private void handle(final APIDeleteLoadBalancerMsg msg) {
final APIDeleteLoadBalancerEvent evt = new APIDeleteLoadBalancerEvent(msg.getId());
thdf.chainSubmit(new ChainTask(msg) {
@Override
public String getSyncSignature() {
return getSyncId();
}
@Override
public void run(final SyncTaskChain chain) {
delete(new Completion(msg, chain) {
@Override
public void success() {
bus.publish(evt);
chain.next();
}
@Override
public void fail(ErrorCode errorCode) {
evt.setError(errorCode);
bus.publish(evt);
chain.next();
}
});
}
@Override
public String getName() {
return "delete-lb";
}
});
}
private void delete(final Completion completion) {
FlowChain chain = FlowChainBuilder.newShareFlowChain();
chain.setName(String.format("delete-lb-%s", self.getUuid()));
chain.then(new ShareFlow() {
@Override
public void setup() {
flow(new NoRollbackFlow() {
String __name__ = "delete-lb";
@Override
public void run(final FlowTrigger trigger, Map data) {
                        if (self.getProviderType() == null) {
                            // not initialized yet
                            trigger.next();
                            return;
                        }
LoadBalancerBackend bkd = getBackend();
bkd.destroyLoadBalancer(makeStruct(), new Completion(trigger) {
@Override
public void success() {
trigger.next();
}
@Override
public void fail(ErrorCode errorCode) {
trigger.fail(errorCode);
}
});
}
});
flow(new NoRollbackFlow() {
String __name__ = "release-vip";
@Override
public void run(FlowTrigger trigger, Map data) {
new Vip(self.getVipUuid()).release(new Completion(trigger) {
@Override
public void success() {
trigger.next();
}
@Override
public void fail(ErrorCode errorCode) {
trigger.fail(errorCode);
}
});
}
});
done(new FlowDoneHandler(completion) {
@Override
public void handle(Map data) {
dbf.remove(self);
completion.success();
}
});
error(new FlowErrorHandler(completion) {
@Override
public void handle(ErrorCode errCode, Map data) {
completion.fail(errCode);
}
});
}
}).start();
}
private void handle(final APIDeleteLoadBalancerListenerMsg msg) {
thdf.chainSubmit(new ChainTask(msg) {
@Override
public String getSyncSignature() {
return getSyncId();
}
@Override
public void run(final SyncTaskChain chain) {
deleteListener(msg, new NoErrorCompletion(msg, chain) {
@Override
public void done() {
chain.next();
}
});
}
@Override
public String getName() {
return "delete-listener";
}
});
}
private LoadBalancerStruct removeListenerStruct(LoadBalancerListenerInventory listener) {
LoadBalancerStruct s = makeStruct();
for (LoadBalancerListenerInventory l : s.getListeners()) {
if (l.getUuid().equals(listener.getUuid())) {
l.setVmNicRefs(new ArrayList<>());
}
}
return s;
}
private void deleteListener(APIDeleteLoadBalancerListenerMsg msg, final NoErrorCompletion completion) {
final APIDeleteLoadBalancerListenerEvent evt = new APIDeleteLoadBalancerListenerEvent(msg.getId());
final LoadBalancerListenerVO vo = dbf.findByUuid(msg.getUuid(), LoadBalancerListenerVO.class);
if (vo == null) {
evt.setInventory(getInventory());
bus.publish(evt);
completion.done();
return;
}
if (!needAction()) {
dbf.remove(vo);
evt.setInventory(reloadAndGetInventory());
bus.publish(evt);
completion.done();
return;
}
LoadBalancerListenerInventory listener = LoadBalancerListenerInventory.valueOf(vo);
LoadBalancerBackend bkd = getBackend();
bkd.removeListener(removeListenerStruct(listener), listener, new Completion(msg, completion) {
@Override
public void success() {
dbf.remove(vo);
evt.setInventory(reloadAndGetInventory());
bus.publish(evt);
completion.done();
}
@Override
public void fail(ErrorCode errorCode) {
evt.setError(errorCode);
bus.publish(evt);
completion.done();
}
});
}
private void handle(final APIRemoveVmNicFromLoadBalancerMsg msg) {
thdf.chainSubmit(new ChainTask(msg) {
@Override
public String getSyncSignature() {
return getSyncId();
}
@Override
public void run(final SyncTaskChain chain) {
removeNic(msg, new NoErrorCompletion(msg, chain) {
@Override
public void done() {
chain.next();
}
});
}
@Override
public String getName() {
return "remove-nic";
}
});
}
private LoadBalancerStruct removeNicStruct(String listenerUuid, List<String> nicUuids) {
LoadBalancerStruct s = makeStruct();
Optional<LoadBalancerListenerInventory> opt = s.getListeners().stream().filter(it -> it.getUuid().equals(listenerUuid)).findAny();
DebugUtils.Assert(opt.isPresent(), String.format("cannot find listener[uuid:%s]", listenerUuid));
LoadBalancerListenerInventory l = opt.get();
l.getVmNicRefs().removeIf(loadBalancerListenerVmNicRefInventory -> nicUuids.contains(loadBalancerListenerVmNicRefInventory.getVmNicUuid()));
return s;
}
private void removeNics(String listenerUuid, final List<String> vmNicUuids, final Completion completion) {
SimpleQuery<VmNicVO> q = dbf.createQuery(VmNicVO.class);
q.add(VmNicVO_.uuid, Op.IN, vmNicUuids);
List<VmNicVO> vos = q.list();
List<VmNicInventory> nics = VmNicInventory.valueOf(vos);
LoadBalancerBackend bkd = getBackend();
bkd.removeVmNics(removeNicStruct(listenerUuid, vmNicUuids), nics, new Completion(completion) {
@Override
public void success() {
UpdateQuery.New(LoadBalancerListenerVmNicRefVO.class)
.condAnd(LoadBalancerListenerVmNicRefVO_.vmNicUuid, Op.IN, vmNicUuids)
.condAnd(LoadBalancerListenerVmNicRefVO_.listenerUuid, Op.EQ, listenerUuid)
.delete();
completion.success();
}
@Override
public void fail(ErrorCode errorCode) {
completion.fail(errorCode);
}
});
}
private void removeNic(APIRemoveVmNicFromLoadBalancerMsg msg, final NoErrorCompletion completion) {
final APIRemoveVmNicFromLoadBalancerEvent evt = new APIRemoveVmNicFromLoadBalancerEvent(msg.getId());
removeNics(msg.getListenerUuid(), msg.getVmNicUuids(), new Completion(msg, completion) {
@Override
public void success() {
evt.setInventory(reloadAndGetInventory());
bus.publish(evt);
completion.done();
}
@Override
public void fail(ErrorCode errorCode) {
evt.setError(errorCode);
bus.publish(evt);
completion.done();
}
});
}
@Transactional(readOnly = true)
private String findProviderTypeByVmNicUuid(String nicUuid) {
String sql = "select l3 from L3NetworkVO l3, VmNicVO nic where nic.l3NetworkUuid = l3.uuid and nic.uuid = :uuid";
TypedQuery<L3NetworkVO> q = dbf.getEntityManager().createQuery(sql, L3NetworkVO.class);
q.setParameter("uuid", nicUuid);
L3NetworkVO l3 = q.getSingleResult();
for (NetworkServiceL3NetworkRefVO ref : l3.getNetworkServices()) {
if (LoadBalancerConstants.LB_NETWORK_SERVICE_TYPE_STRING.equals(ref.getNetworkServiceType())) {
sql = "select p.type from NetworkServiceProviderVO p where p.uuid = :uuid";
TypedQuery<String> nq = dbf.getEntityManager().createQuery(sql, String.class);
nq.setParameter("uuid", ref.getNetworkServiceProviderUuid());
return nq.getSingleResult();
}
}
return null;
}
private void handle(final APIAddVmNicToLoadBalancerMsg msg) {
thdf.chainSubmit(new ChainTask(msg) {
@Override
public String getSyncSignature() {
return getSyncId();
}
@Override
public void run(final SyncTaskChain chain) {
addVmNicToListener(msg, new NoErrorCompletion(chain) {
@Override
public void done() {
chain.next();
}
});
}
@Override
public String getName() {
return getSyncSignature();
}
});
}
private void addVmNicToListener(final APIAddVmNicToLoadBalancerMsg msg, final NoErrorCompletion completion) {
final APIAddVmNicToLoadBalancerEvent evt = new APIAddVmNicToLoadBalancerEvent(msg.getId());
final String providerType = findProviderTypeByVmNicUuid(msg.getVmNicUuids().get(0));
if (providerType == null) {
throw new OperationFailureException(operr("the L3 network of vm nic[uuid:%s] doesn't have load balancer service enabled", msg.getVmNicUuids().get(0)));
}
SimpleQuery<VmNicVO> q = dbf.createQuery(VmNicVO.class);
q.add(VmNicVO_.uuid, Op.IN, msg.getVmNicUuids());
List<VmNicVO> nicVOs = q.list();
final List<VmNicInventory> nics = VmNicInventory.valueOf(nicVOs);
FlowChain chain = FlowChainBuilder.newShareFlowChain();
chain.setName(String.format("add-vm-nic-to-lb-listener-%s", msg.getListenerUuid()));
chain.then(new ShareFlow() {
List<LoadBalancerListenerVmNicRefVO> refs = new ArrayList<LoadBalancerListenerVmNicRefVO>();
boolean init = false;
@Override
public void setup() {
flow(new Flow() {
String __name__ = "check-provider-type";
@Override
public void run(FlowTrigger trigger, Map data) {
if (self.getProviderType() == null) {
self.setProviderType(providerType);
self = dbf.updateAndRefresh(self);
init = true;
} else {
if (!providerType.equals(self.getProviderType())) {
throw new OperationFailureException(operr("service provider type mismatching. The load balancer[uuid:%s] is provided by the service provider[type:%s]," +
" but the L3 network of vm nic[uuid:%s] is enabled with the service provider[type: %s]", self.getUuid(), self.getProviderType(),
msg.getVmNicUuids().get(0), providerType));
}
}
trigger.next();
}
@Override
public void rollback(FlowRollback trigger, Map data) {
if (init) {
self = dbf.reload(self);
self.setProviderType(null);
dbf.update(self);
}
trigger.rollback();
}
});
flow(new Flow() {
String __name__ = "write-nic-to-db";
boolean s = false;
@Override
public void run(FlowTrigger trigger, Map data) {
for (String nicUuid : msg.getVmNicUuids()) {
LoadBalancerListenerVmNicRefVO ref = new LoadBalancerListenerVmNicRefVO();
ref.setListenerUuid(msg.getListenerUuid());
ref.setVmNicUuid(nicUuid);
ref.setStatus(LoadBalancerVmNicStatus.Pending);
refs.add(ref);
}
dbf.persistCollection(refs);
s = true;
trigger.next();
}
@Override
public void rollback(FlowRollback trigger, Map data) {
if (s) {
dbf.removeCollection(refs, LoadBalancerListenerVmNicRefVO.class);
}
trigger.rollback();
}
});
flow(new NoRollbackFlow() {
String __name__ = "add-nic-to-lb";
@Override
public void run(final FlowTrigger trigger, Map data) {
LoadBalancerBackend bkd = getBackend();
LoadBalancerStruct s = makeStruct();
s.setInit(init);
bkd.addVmNics(s, nics, new Completion(trigger) {
@Override
public void success() {
trigger.next();
}
@Override
public void fail(ErrorCode errorCode) {
trigger.fail(errorCode);
}
});
}
});
done(new FlowDoneHandler(msg, completion) {
@Override
public void handle(Map data) {
for (LoadBalancerListenerVmNicRefVO ref : refs) {
ref.setStatus(LoadBalancerVmNicStatus.Active);
}
dbf.updateCollection(refs);
evt.setInventory(LoadBalancerListenerInventory.valueOf(dbf.findByUuid(msg.getListenerUuid(), LoadBalancerListenerVO.class)));
bus.publish(evt);
completion.done();
}
});
error(new FlowErrorHandler(msg, completion) {
@Override
public void handle(ErrorCode errCode, Map data) {
evt.setError(errCode);
bus.publish(evt);
completion.done();
}
});
}
}).start();
}
private boolean needAction() {
if (self.getProviderType() == null) {
return false;
}
LoadBalancerListenerVmNicRefVO activeNic = CollectionUtils.find(self.getListeners(), new Function<LoadBalancerListenerVmNicRefVO, LoadBalancerListenerVO>() {
@Override
public LoadBalancerListenerVmNicRefVO call(LoadBalancerListenerVO arg) {
for (LoadBalancerListenerVmNicRefVO ref : arg.getVmNicRefs()) {
if (ref.getStatus() == LoadBalancerVmNicStatus.Active || ref.getStatus() == LoadBalancerVmNicStatus.Pending) {
return ref;
}
}
return null;
}
});
        return activeNic != null;
}
private LoadBalancerBackend getBackend() {
DebugUtils.Assert(self.getProviderType() != null, "providerType cannot be null");
return lbMgr.getBackend(self.getProviderType());
}
private LoadBalancerStruct makeStruct() {
LoadBalancerStruct struct = new LoadBalancerStruct();
struct.setLb(reloadAndGetInventory());
List<String> activeNicUuids = new ArrayList<String>();
for (LoadBalancerListenerVO l : self.getListeners()) {
activeNicUuids.addAll(CollectionUtils.transformToList(l.getVmNicRefs(), new Function<String, LoadBalancerListenerVmNicRefVO>() {
@Override
public String call(LoadBalancerListenerVmNicRefVO arg) {
return arg.getStatus() == LoadBalancerVmNicStatus.Active || arg.getStatus() == LoadBalancerVmNicStatus.Pending ? arg.getVmNicUuid() : null;
}
}));
}
if (activeNicUuids.isEmpty()) {
struct.setVmNics(new HashMap<String, VmNicInventory>());
} else {
SimpleQuery<VmNicVO> nq = dbf.createQuery(VmNicVO.class);
nq.add(VmNicVO_.uuid, Op.IN, activeNicUuids);
List<VmNicVO> nicvos = nq.list();
Map<String, VmNicInventory> m = new HashMap<String, VmNicInventory>();
for (VmNicVO n : nicvos) {
m.put(n.getUuid(), VmNicInventory.valueOf(n));
}
struct.setVmNics(m);
}
struct.setListeners(LoadBalancerListenerInventory.valueOf(self.getListeners()));
return struct;
}
private void handle(final APICreateLoadBalancerListenerMsg msg) {
thdf.chainSubmit(new ChainTask(msg) {
@Override
public String getSyncSignature() {
return getSyncId();
}
@Override
public void run(final SyncTaskChain chain) {
createListener(msg, new NoErrorCompletion(chain) {
@Override
public void done() {
chain.next();
}
});
}
@Override
public String getName() {
return "create-listener";
}
});
}
private void createListener(final APICreateLoadBalancerListenerMsg msg, final NoErrorCompletion completion) {
final APICreateLoadBalancerListenerEvent evt = new APICreateLoadBalancerListenerEvent(msg.getId());
LoadBalancerListenerVO vo = new LoadBalancerListenerVO();
vo.setLoadBalancerUuid(self.getUuid());
vo.setUuid(msg.getResourceUuid() == null ? Platform.getUuid() : msg.getResourceUuid());
        vo.setDescription(msg.getDescription());
vo.setName(msg.getName());
vo.setInstancePort(msg.getInstancePort());
vo.setLoadBalancerPort(msg.getLoadBalancerPort());
vo.setProtocol(msg.getProtocol());
vo = dbf.persistAndRefresh(vo);
acntMgr.createAccountResourceRef(msg.getSession().getAccountUuid(), vo.getUuid(), LoadBalancerListenerVO.class);
tagMgr.createNonInherentSystemTags(msg.getSystemTags(), vo.getUuid(), LoadBalancerListenerVO.class.getSimpleName());
evt.setInventory(LoadBalancerListenerInventory.valueOf(vo));
bus.publish(evt);
completion.done();
}
}
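// Concurrency note (commentary, not zstack API): every handler above funnels its work
// through thdf.chainSubmit with the same sync signature, "operate-lb-<uuid>", so
// operations on one load balancer run strictly one at a time while different load
// balancers proceed in parallel. A rough JDK-only analogue of that queueing discipline:
//
//   ExecutorService perLbChain = Executors.newSingleThreadExecutor(); // one per lb uuid
//   perLbChain.submit(() -> { /* refresh */ });
//   perLbChain.submit(() -> { /* delete */ });  // starts only after refresh completes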
| winger007/zstack | plugin/loadBalancer/src/main/java/org/zstack/network/service/lb/LoadBalancerBase.java | Java | apache-2.0 | 44,011 |
class WorkspaceSearchController < ApplicationController
before_filter :require_full_search
def show
workspace = Workspace.find(params[:workspace_id])
authorize! :show, workspace
present WorkspaceSearch.new(current_user, params)
end
end
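# Request sketch (route shape assumed from Rails conventions, not verified here):
#   GET /workspaces/42/search?query=sales
# authorizes :show on workspace 42, then presents the WorkspaceSearch results.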
| nvoron23/chorus | app/controllers/workspace_search_controller.rb | Ruby | apache-2.0 | 255 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.iceberg;
import org.apache.iceberg.PartitionField;
import org.apache.iceberg.PartitionSpec;
import org.apache.iceberg.Schema;
import java.util.List;
import java.util.function.Consumer;
import java.util.regex.MatchResult;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static java.lang.Integer.parseInt;
import static java.lang.String.format;
public final class PartitionFields
{
private static final String NAME = "[a-z_][a-z0-9_]*";
private static final String FUNCTION_NAME = "\\((" + NAME + ")\\)";
private static final String FUNCTION_NAME_INT = "\\((" + NAME + "), *(\\d+)\\)";
private static final Pattern IDENTITY_PATTERN = Pattern.compile(NAME);
private static final Pattern YEAR_PATTERN = Pattern.compile("year" + FUNCTION_NAME);
private static final Pattern MONTH_PATTERN = Pattern.compile("month" + FUNCTION_NAME);
private static final Pattern DAY_PATTERN = Pattern.compile("day" + FUNCTION_NAME);
private static final Pattern HOUR_PATTERN = Pattern.compile("hour" + FUNCTION_NAME);
private static final Pattern BUCKET_PATTERN = Pattern.compile("bucket" + FUNCTION_NAME_INT);
private static final Pattern TRUNCATE_PATTERN = Pattern.compile("truncate" + FUNCTION_NAME_INT);
private static final Pattern ICEBERG_BUCKET_PATTERN = Pattern.compile("bucket\\[(\\d+)]");
private static final Pattern ICEBERG_TRUNCATE_PATTERN = Pattern.compile("truncate\\[(\\d+)]");
private PartitionFields() {}
public static PartitionSpec parsePartitionFields(Schema schema, List<String> fields)
{
PartitionSpec.Builder builder = PartitionSpec.builderFor(schema);
for (String field : fields) {
parsePartitionField(builder, field);
}
return builder.build();
}
public static void parsePartitionField(PartitionSpec.Builder builder, String field)
{
@SuppressWarnings("PointlessBooleanExpression")
boolean matched = false ||
tryMatch(field, IDENTITY_PATTERN, match -> builder.identity(match.group())) ||
tryMatch(field, YEAR_PATTERN, match -> builder.year(match.group(1))) ||
tryMatch(field, MONTH_PATTERN, match -> builder.month(match.group(1))) ||
tryMatch(field, DAY_PATTERN, match -> builder.day(match.group(1))) ||
tryMatch(field, HOUR_PATTERN, match -> builder.hour(match.group(1))) ||
tryMatch(field, BUCKET_PATTERN, match -> builder.bucket(match.group(1), parseInt(match.group(2)))) ||
tryMatch(field, TRUNCATE_PATTERN, match -> builder.truncate(match.group(1), parseInt(match.group(2))));
if (!matched) {
throw new IllegalArgumentException("Invalid partition field declaration: " + field);
}
}
private static boolean tryMatch(CharSequence value, Pattern pattern, Consumer<MatchResult> match)
{
Matcher matcher = pattern.matcher(value);
if (matcher.matches()) {
match.accept(matcher.toMatchResult());
return true;
}
return false;
}
public static List<String> toPartitionFields(PartitionSpec spec)
{
return spec.fields().stream()
.map(field -> toPartitionField(spec, field))
.collect(toImmutableList());
}
private static String toPartitionField(PartitionSpec spec, PartitionField field)
{
String name = spec.schema().findColumnName(field.sourceId());
String transform = field.transform().toString();
switch (transform) {
case "identity":
return name;
case "year":
case "month":
case "day":
case "hour":
return format("%s(%s)", transform, name);
}
Matcher matcher = ICEBERG_BUCKET_PATTERN.matcher(transform);
if (matcher.matches()) {
return format("bucket(%s, %s)", name, matcher.group(1));
}
matcher = ICEBERG_TRUNCATE_PATTERN.matcher(transform);
if (matcher.matches()) {
return format("truncate(%s, %s)", name, matcher.group(1));
}
throw new UnsupportedOperationException("Unsupported partition transform: " + field);
}
}
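// Usage sketch (schema is assumed; Types and ImmutableList come from Iceberg and Guava):
//
//   Schema schema = new Schema(
//           Types.NestedField.required(1, "ts", Types.TimestampType.withZone()),
//           Types.NestedField.required(2, "user_id", Types.LongType.get()));
//   PartitionSpec spec = PartitionFields.parsePartitionFields(
//           schema, ImmutableList.of("day(ts)", "bucket(user_id, 16)"));
//   // toPartitionFields(spec) round-trips to ["day(ts)", "bucket(user_id, 16)"]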
| mvp/presto | presto-iceberg/src/main/java/com/facebook/presto/iceberg/PartitionFields.java | Java | apache-2.0 | 4,926 |
/*
* Copyright 2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.play.internal.run;
import org.gradle.api.tasks.compile.BaseForkOptions;
import java.io.File;
public interface PlayRunSpec {
BaseForkOptions getForkOptions();
Iterable<File> getClasspath();
Iterable<File> getChangingClasspath();
File getApplicationJar();
File getAssetsJar();
Iterable<File> getAssetsDirs();
File getProjectPath();
int getHttpPort();
}
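// Consumer sketch (illustrative only; javaExecSpec is a hypothetical JavaExecSpec):
// a runner typically copies the fork options onto the spawned JVM and then serves
// the application on the configured port.
//
//   BaseForkOptions opts = spec.getForkOptions();
//   javaExecSpec.setMinHeapSize(opts.getMemoryInitialSize());
//   javaExecSpec.setMaxHeapSize(opts.getMemoryMaximumSize());
//   javaExecSpec.jvmArgs(opts.getJvmArgs());
//   // ... then wait for the app to answer on spec.getHttpPort()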
| gstevey/gradle | subprojects/platform-play/src/main/java/org/gradle/play/internal/run/PlayRunSpec.java | Java | apache-2.0 | 1,031 |
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
from boxsdk import Client
from boxsdk.exception import BoxAPIException
from boxsdk.object.collaboration import CollaborationRole
from auth import authenticate
def run_user_example(client):
# 'me' is a handy value to get info on the current authenticated user.
me = client.user(user_id='me').get(fields=['login'])
print('The email of the user is: {0}'.format(me['login']))
def run_folder_examples(client):
root_folder = client.folder(folder_id='0').get()
print('The root folder is owned by: {0}'.format(root_folder.owned_by['login']))
items = root_folder.get_items(limit=100, offset=0)
print('This is the first 100 items in the root folder:')
for item in items:
print(" " + item.name)
def run_collab_examples(client):
root_folder = client.folder(folder_id='0')
collab_folder = root_folder.create_subfolder('collab folder')
try:
print('Folder {0} created'.format(collab_folder.get()['name']))
collaboration = collab_folder.add_collaborator('someone@example.com', CollaborationRole.VIEWER)
print('Created a collaboration')
try:
modified_collaboration = collaboration.update_info(role=CollaborationRole.EDITOR)
print('Modified a collaboration: {0}'.format(modified_collaboration.role))
finally:
collaboration.delete()
print('Deleted a collaboration')
finally:
# Clean up
print('Delete folder collab folder succeeded: {0}'.format(collab_folder.delete()))
def rename_folder(client):
root_folder = client.folder(folder_id='0')
foo = root_folder.create_subfolder('foo')
try:
print('Folder {0} created'.format(foo.get()['name']))
bar = foo.rename('bar')
print('Renamed to {0}'.format(bar.get()['name']))
finally:
print('Delete folder bar succeeded: {0}'.format(foo.delete()))
def get_folder_shared_link(client):
root_folder = client.folder(folder_id='0')
collab_folder = root_folder.create_subfolder('shared link folder')
try:
print('Folder {0} created'.format(collab_folder.get().name))
shared_link = collab_folder.get_shared_link()
print('Got shared link:' + shared_link)
finally:
print('Delete folder collab folder succeeded: {0}'.format(collab_folder.delete()))
def upload_file(client):
root_folder = client.folder(folder_id='0')
file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file.txt')
a_file = root_folder.upload(file_path, file_name='i-am-a-file.txt')
try:
print('{0} uploaded: '.format(a_file.get()['name']))
finally:
print('Delete i-am-a-file.txt succeeded: {0}'.format(a_file.delete()))
def upload_accelerator(client):
root_folder = client.folder(folder_id='0')
file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file.txt')
a_file = root_folder.upload(file_path, file_name='i-am-a-file.txt', upload_using_accelerator=True)
try:
print('{0} uploaded via Accelerator: '.format(a_file.get()['name']))
file_v2_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file_v2.txt')
a_file = a_file.update_contents(file_v2_path, upload_using_accelerator=True)
print('{0} updated via Accelerator: '.format(a_file.get()['name']))
finally:
print('Delete i-am-a-file.txt succeeded: {0}'.format(a_file.delete()))
def rename_file(client):
root_folder = client.folder(folder_id='0')
file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file.txt')
foo = root_folder.upload(file_path, file_name='foo.txt')
try:
print('{0} uploaded '.format(foo.get()['name']))
bar = foo.rename('bar.txt')
print('Rename succeeded: {0}'.format(bool(bar)))
finally:
foo.delete()
def update_file(client):
root_folder = client.folder(folder_id='0')
file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file.txt')
file_v1 = root_folder.upload(file_path, file_name='file_v1.txt')
try:
# print 'File content after upload: {}'.format(file_v1.content())
file_v2_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file_v2.txt')
file_v2 = file_v1.update_contents(file_v2_path)
# print 'File content after update: {}'.format(file_v2.content())
finally:
file_v1.delete()
def search_files(client):
search_results = client.search(
'i-am-a-file.txt',
limit=2,
offset=0,
ancestor_folders=[client.folder(folder_id='0')],
file_extensions=['txt'],
)
    matched = False
    for item in search_results:
        matched = True
        item_with_name = item.get(fields=['name'])
        print('matching item: ' + item_with_name.id)
    if not matched:
        print('no matching items')
def copy_item(client):
root_folder = client.folder(folder_id='0')
file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file.txt')
a_file = root_folder.upload(file_path, file_name='a file.txt')
try:
subfolder1 = root_folder.create_subfolder('copy_sub')
try:
a_file.copy(subfolder1)
print(subfolder1.get_items(limit=10, offset=0))
subfolder2 = root_folder.create_subfolder('copy_sub2')
try:
subfolder1.copy(subfolder2)
print(subfolder2.get_items(limit=10, offset=0))
finally:
subfolder2.delete()
finally:
subfolder1.delete()
finally:
a_file.delete()
def move_item(client):
root_folder = client.folder(folder_id='0')
file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file.txt')
a_file = root_folder.upload(file_path, file_name='a file.txt')
try:
subfolder1 = root_folder.create_subfolder('move_sub')
try:
a_file.move(subfolder1)
print(subfolder1.get_items(limit=10, offset=0))
subfolder2 = root_folder.create_subfolder('move_sub2')
try:
subfolder1.move(subfolder2)
print(subfolder2.get_items(limit=10, offset=0))
finally:
subfolder2.delete()
finally:
try:
subfolder1.delete()
except BoxAPIException:
pass
finally:
try:
a_file.delete()
except BoxAPIException:
pass
def get_events(client):
print(client.events().get_events(limit=100, stream_position='now'))
def get_latest_stream_position(client):
print(client.events().get_latest_stream_position())
def long_poll(client):
print(client.events().long_poll())
def _delete_leftover_group(existing_groups, group_name):
"""
delete group if it already exists
"""
existing_group = next((g for g in existing_groups if g.name == group_name), None)
if existing_group:
existing_group.delete()
def run_groups_example(client):
"""
Shows how to interact with 'Groups' in the Box API. How to:
- Get info about all the Groups to which the current user belongs
- Create a Group
- Rename a Group
- Add a member to the group
- Remove a member from a group
- Delete a Group
"""
try:
# First delete group if it already exists
original_groups = client.groups()
_delete_leftover_group(original_groups, 'box_sdk_demo_group')
_delete_leftover_group(original_groups, 'renamed_box_sdk_demo_group')
new_group = client.create_group('box_sdk_demo_group')
except BoxAPIException as ex:
if ex.status != 403:
raise
        print('The authenticated user does not have permissions to manage groups. Skipping this part of the demo.')
return
print('New group:', new_group.name, new_group.id)
new_group = new_group.update_info({'name': 'renamed_box_sdk_demo_group'})
print("Group's new name:", new_group.name)
me_dict = client.user().get(fields=['login'])
me = client.user(user_id=me_dict['id'])
group_membership = new_group.add_member(me, 'member')
members = list(new_group.membership())
print('The group has a membership of: ', len(members))
print('The id of that membership: ', group_membership.object_id)
group_membership.delete()
print('After deleting that membership, the group has a membership of: ', len(list(new_group.membership())))
new_group.delete()
groups_after_deleting_demo = client.groups()
has_been_deleted = not any(g.name == 'renamed_box_sdk_demo_group' for g in groups_after_deleting_demo)
print('The new group has been deleted: ', has_been_deleted)
def run_metadata_example(client):
root_folder = client.folder(folder_id='0')
file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file.txt')
foo = root_folder.upload(file_path, file_name='foo.txt')
print('{0} uploaded '.format(foo.get()['name']))
try:
metadata = foo.metadata()
metadata.create({'foo': 'bar'})
print('Created metadata: {0}'.format(metadata.get()))
update = metadata.start_update()
update.update('/foo', 'baz', 'bar')
print('Updated metadata: {0}'.format(metadata.update(update)))
finally:
foo.delete()
def run_examples(oauth):
client = Client(oauth)
run_user_example(client)
run_folder_examples(client)
run_collab_examples(client)
rename_folder(client)
get_folder_shared_link(client)
upload_file(client)
rename_file(client)
update_file(client)
search_files(client)
copy_item(client)
move_item(client)
get_events(client)
get_latest_stream_position(client)
# long_poll(client)
# Enterprise accounts only
run_groups_example(client)
run_metadata_example(client)
# Premium Apps only
upload_accelerator(client)
def main():
    # Note: you must set your client ID and client secret in demo/auth.py for this demo to work.
oauth = authenticate()
run_examples(oauth)
os._exit(0)
if __name__ == '__main__':
main()
| lkabongoVC/box-python-sdk | demo/example.py | Python | apache-2.0 | 10,222 |
class Foo<R> {
public interface Factory<U> {
U make();
}
interface ASink<R, K extends ASink<R, K>> {
public void combine(K other);
}
static <R, S extends ASink<R, S>> R reduce(Factory<S> factory) {
return null;
}
public void foo() {
reduce(Moo::new);
reduce<error descr="'reduce(Foo.Factory<Foo.ASink>)' in 'Foo' cannot be applied to '(<method reference>)'">(AMoo::new)</error>;
reduce(AAMoo::new);
reduce(AAAMoo::new);
}
private class Moo implements ASink<R, Moo> {
@Override
public void combine(Moo other) {
}
}
private class AMoo {
}
private class AAMoo implements ASink<AAMoo, AAMoo> {
@Override
public void combine(AAMoo other) {
}
}
private class AAAMoo implements ASink<R, AAAMoo> {
private AAAMoo() {
}
@Override
public void combine(AAAMoo other) {
}
}
} | android-ia/platform_tools_idea | java/java-tests/testData/codeInsight/daemonCodeAnalyzer/lambda/methodRef/ConstructorAssignability.java | Java | apache-2.0 | 978 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.processing.loading.converter;
import org.apache.carbondata.core.datastore.row.CarbonRow;
import org.apache.carbondata.processing.loading.exception.CarbonDataLoadingException;
/**
* This interface converts/transforms the column field.
*/
public interface FieldConverter {
  /**
   * Converts the column field and updates the data in place at the same location/index in the row.
   * If the record cannot be converted, it is reported to the bad-record log holder rather than
   * a status being returned.
   * @param row the row whose field is converted in place
   * @param logHolder collects bad-record information when conversion fails
   * @throws CarbonDataLoadingException
   */
void convert(CarbonRow row, BadRecordLogHolder logHolder) throws CarbonDataLoadingException;
  /**
   * Converts a literal value to the Carbon internal value.
   */
Object convert(Object value, BadRecordLogHolder logHolder) throws RuntimeException;
  /**
   * Clears all dictionary caches that were acquired.
   */
void clear();
}
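/**
 * Minimal illustrative sketch (added for clarity, not part of the original
 * file): a pass-through converter that leaves values untouched. The class
 * name and behavior are assumptions for the example only.
 */
class NoOpFieldConverter implements FieldConverter {

  @Override
  public void convert(CarbonRow row, BadRecordLogHolder logHolder) throws CarbonDataLoadingException {
    // The value already matches the internal format in this sketch, so the row is left as-is.
  }

  @Override
  public Object convert(Object value, BadRecordLogHolder logHolder) throws RuntimeException {
    return value; // the literal value is returned unchanged in this sketch
  }

  @Override
  public void clear() {
    // no dictionary caches were acquired, so there is nothing to release
  }
}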
| jatin9896/incubator-carbondata | processing/src/main/java/org/apache/carbondata/processing/loading/converter/FieldConverter.java | Java | apache-2.0 | 1,743 |
/**
* Copyright (c) 2007-2014 Kaazing Corporation. All rights reserved.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.kaazing.gateway.transport.wsr;
import static java.util.Collections.singleton;
import java.util.Collection;
import org.kaazing.gateway.transport.dispatch.ProtocolDispatcher;
class RtmpProtocolDispatcher implements ProtocolDispatcher {
private static final String RTMP_PROTOCOL = "rtmp/1.0";
private static final Collection<byte[]> RTMP_DISCRIMINATORS = singleton(new byte[] { 0x03 });
@Override
public int compareTo(ProtocolDispatcher pd) {
return protocolDispatchComparator.compare(this, pd);
}
@Override
public String getProtocolName() {
return RTMP_PROTOCOL;
}
@Override
public Collection<byte[]> getDiscriminators() {
return RTMP_DISCRIMINATORS;
}
}
| EArdeleanu/gateway | transport/wsr/src/main/java/org/kaazing/gateway/transport/wsr/RtmpProtocolDispatcher.java | Java | apache-2.0 | 1,627 |
"""
GeoJSON example using addItem
Python 2/3
ArcREST version 3.5.0
"""
from __future__ import print_function
import arcrest
if __name__ == "__main__":
username = ""
password = ""
geojsonFile = r""
sh = arcrest.AGOLTokenSecurityHandler(username, password)
admin = arcrest.manageorg.Administration(securityHandler=sh)
user = admin.content.users.user()
ip = arcrest.manageorg.ItemParameter()
ip.title = "MyGeoJSONTestFile"
ip.type = "GeoJson"
ip.tags = "Geo1,Geo2"
ip.description = "Publishing a geojson file"
addedItem = user.addItem(itemParameters=ip, filePath=geojsonFile)
itemId = addedItem.id
pp = arcrest.manageorg.PublishGeoJSONParameter()
pp.name = "Geojsonrocks"
pp.hasStaticData = True
    print(user.publishItem(fileType="geojson", publishParameters=pp, itemId=itemId, wait=True))
//
// immer: immutable data structures for C++
// Copyright (C) 2016, 2017, 2018 Juan Pedro Bolivar Puente
//
// This software is distributed under the Boost Software License, Version 1.0.
// See accompanying file LICENSE or copy at http://boost.org/LICENSE_1_0.txt
//
#include <immer/set.hpp>
template <typename T,
typename Hash = std::hash<T>,
typename Eq = std::equal_to<T>>
using test_set_t = immer::set<T, Hash, Eq, immer::default_memory_policy, 3u>;
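// Descriptive comment (added; based on immer::set's template signature): the
// trailing 3u overrides the default branching-bits parameter B, giving
// 2^3 = 8-way nodes instead of the usual 32-way, which exercises deeper
// tries in the shared generic tests included below.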
#define SET_T test_set_t
#include "generic.ipp"
| wiltonlazary/arangodb | 3rdParty/immer/v0.7.0/test/set/B3.cpp | C++ | apache-2.0 | 529 |
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.rules.macros;
import static org.hamcrest.MatcherAssert.assertThat;
import com.facebook.buck.core.cell.CellPathResolver;
import com.facebook.buck.core.cell.TestCellPathResolver;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.BuildTargetFactory;
import com.facebook.buck.core.rules.ActionGraphBuilder;
import com.facebook.buck.core.rules.resolver.impl.TestActionGraphBuilder;
import com.facebook.buck.io.filesystem.impl.FakeProjectFilesystem;
import com.facebook.buck.rules.args.Arg;
import com.facebook.buck.rules.args.CompositeArg;
import com.facebook.buck.rules.args.SanitizedArg;
import com.facebook.buck.rules.args.SourcePathArg;
import com.facebook.buck.rules.args.StringArg;
import com.facebook.buck.rules.args.WriteToFileArg;
import com.facebook.buck.shell.Genrule;
import com.facebook.buck.shell.GenruleBuilder;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import java.util.Optional;
import org.hamcrest.Matchers;
import org.junit.Test;
public class StringWithMacrosConverterTest {
private static final BuildTarget TARGET = BuildTargetFactory.newInstance("//:rule");
private static final CellPathResolver CELL_ROOTS =
TestCellPathResolver.get(new FakeProjectFilesystem());
private static final ImmutableList<MacroExpander<? extends Macro, ?>> MACRO_EXPANDERS =
ImmutableList.of(LocationMacroExpander.INSTANCE);
@Test
public void noMacros() {
ActionGraphBuilder graphBuilder = new TestActionGraphBuilder();
StringWithMacrosConverter converter =
StringWithMacrosConverter.of(
TARGET, CELL_ROOTS.getCellNameResolver(), graphBuilder, MACRO_EXPANDERS);
assertThat(
converter.convert(StringWithMacrosUtils.format("something")),
Matchers.equalTo(StringArg.of("something")));
}
@Test
public void macro() {
ActionGraphBuilder graphBuilder = new TestActionGraphBuilder();
Genrule genrule =
GenruleBuilder.newGenruleBuilder(BuildTargetFactory.newInstance("//:dep"))
.setOut("out")
.build(graphBuilder);
StringWithMacrosConverter converter =
StringWithMacrosConverter.of(
TARGET, CELL_ROOTS.getCellNameResolver(), graphBuilder, MACRO_EXPANDERS);
assertThat(
converter.convert(
StringWithMacrosUtils.format("%s", LocationMacro.of(genrule.getBuildTarget()))),
Matchers.equalTo(
SourcePathArg.of(Preconditions.checkNotNull(genrule.getSourcePathToOutput()))));
}
@Test
public void macroAndString() {
ActionGraphBuilder graphBuilder = new TestActionGraphBuilder();
Genrule genrule =
GenruleBuilder.newGenruleBuilder(BuildTargetFactory.newInstance("//:dep"))
.setOut("out")
.build(graphBuilder);
StringWithMacrosConverter converter =
StringWithMacrosConverter.of(
TARGET, CELL_ROOTS.getCellNameResolver(), graphBuilder, MACRO_EXPANDERS);
assertThat(
converter.convert(
StringWithMacrosUtils.format("--foo=%s", LocationMacro.of(genrule.getBuildTarget()))),
Matchers.equalTo(
CompositeArg.of(
ImmutableList.of(
StringArg.of("--foo="),
SourcePathArg.of(
Preconditions.checkNotNull(genrule.getSourcePathToOutput()))))));
}
@Test
public void sanitization() {
ActionGraphBuilder graphBuilder = new TestActionGraphBuilder();
StringWithMacrosConverter converter =
StringWithMacrosConverter.of(
TARGET,
CELL_ROOTS.getCellNameResolver(),
graphBuilder,
MACRO_EXPANDERS,
Optional.of(s -> "something else"));
assertThat(
converter.convert(StringWithMacrosUtils.format("something")),
Matchers.equalTo(SanitizedArg.create(s -> "something else", "something")));
}
@Test
public void outputToFileMacro() {
ActionGraphBuilder graphBuilder = new TestActionGraphBuilder();
Genrule genrule =
GenruleBuilder.newGenruleBuilder(BuildTargetFactory.newInstance("//:dep"))
.setOut("out")
.build(graphBuilder);
StringWithMacrosConverter converter =
StringWithMacrosConverter.of(
TARGET, CELL_ROOTS.getCellNameResolver(), graphBuilder, MACRO_EXPANDERS);
Arg result =
converter.convert(
StringWithMacrosUtils.format(
"%s", MacroContainer.of(LocationMacro.of(genrule.getBuildTarget()), true)));
assertThat(result, Matchers.instanceOf(WriteToFileArg.class));
}
}
| facebook/buck | test/com/facebook/buck/rules/macros/StringWithMacrosConverterTest.java | Java | apache-2.0 | 5,254 |
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.cxx;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.rules.BuildRule;
import com.facebook.buck.core.rules.BuildRuleResolver;
import com.facebook.buck.core.rules.common.BuildableSupport;
import com.facebook.buck.core.util.immutables.BuckStyleValueWithBuilder;
import com.facebook.buck.rules.args.Arg;
import com.facebook.buck.rules.coercer.FrameworkPath;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Multimap;
import java.util.Optional;
/** The components that get contributed to a top-level run of the C++ preprocessor. */
@BuckStyleValueWithBuilder
public abstract class CxxPreprocessorInput {
private static final CxxPreprocessorInput INSTANCE =
ImmutableCxxPreprocessorInput.builder().build();
public abstract Multimap<CxxSource.Type, Arg> getPreprocessorFlags();
public abstract ImmutableList<CxxHeaders> getIncludes();
// Framework paths.
public abstract ImmutableSet<FrameworkPath> getFrameworks();
// The build rules which produce headers found in the includes below.
protected abstract ImmutableSet<BuildTarget> getRules();
public Iterable<BuildRule> getDeps(BuildRuleResolver ruleResolver) {
ImmutableList.Builder<BuildRule> builder = ImmutableList.builder();
for (CxxHeaders cxxHeaders : getIncludes()) {
cxxHeaders.getDeps(ruleResolver).forEachOrdered(builder::add);
}
builder.addAll(ruleResolver.getAllRules(getRules()));
for (FrameworkPath frameworkPath : getFrameworks()) {
if (frameworkPath.getSourcePath().isPresent()) {
Optional<BuildRule> frameworkRule =
ruleResolver.getRule(frameworkPath.getSourcePath().get());
if (frameworkRule.isPresent()) {
builder.add(frameworkRule.get());
}
}
}
for (Arg arg : getPreprocessorFlags().values()) {
builder.addAll(BuildableSupport.getDepsCollection(arg, ruleResolver));
}
return builder.build();
}
public static CxxPreprocessorInput concat(Iterable<CxxPreprocessorInput> inputs) {
CxxPreprocessorInput.Builder builder = CxxPreprocessorInput.builder();
for (CxxPreprocessorInput input : inputs) {
builder.putAllPreprocessorFlags(input.getPreprocessorFlags());
builder.addAllIncludes(input.getIncludes());
builder.addAllFrameworks(input.getFrameworks());
builder.addAllRules(input.getRules());
}
return builder.build();
}
public static CxxPreprocessorInput of() {
return INSTANCE;
}
public static Builder builder() {
return new Builder();
}
public static class Builder extends ImmutableCxxPreprocessorInput.Builder {
@Override
public CxxPreprocessorInput build() {
CxxPreprocessorInput cxxPreprocessorInput = super.build();
if (cxxPreprocessorInput.equals(INSTANCE)) {
return INSTANCE;
}
return cxxPreprocessorInput;
}
}
}
| facebook/buck | src/com/facebook/buck/cxx/CxxPreprocessorInput.java | Java | apache-2.0 | 3,593 |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.embedding;
import com.intellij.lang.ASTNode;
import com.intellij.lang.LighterLazyParseableNode;
import com.intellij.lang.ParserDefinition;
import com.intellij.lang.PsiBuilder;
import com.intellij.lang.impl.DelegateMarker;
import com.intellij.lang.impl.PsiBuilderAdapter;
import com.intellij.lang.impl.PsiBuilderImpl;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.psi.TokenType;
import com.intellij.psi.tree.IElementType;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.List;
/**
* A delegate PsiBuilder that hides or substitutes some tokens (namely, the ones provided by {@link MasqueradingLexer})
 * from the parser, while still inserting them into the production tree in their original appearance.
* @see MasqueradingLexer
*/
public class MasqueradingPsiBuilderAdapter extends PsiBuilderAdapter {
private final static Logger LOG = Logger.getInstance(MasqueradingPsiBuilderAdapter.class);
private List<MyShiftedToken> myShrunkSequence;
private CharSequence myShrunkCharSequence;
private int myLexPosition;
private final PsiBuilderImpl myBuilderDelegate;
private final MasqueradingLexer myLexer;
public MasqueradingPsiBuilderAdapter(@NotNull final Project project,
@NotNull final ParserDefinition parserDefinition,
@NotNull final MasqueradingLexer lexer,
@NotNull final ASTNode chameleon,
@NotNull final CharSequence text) {
this(new PsiBuilderImpl(project, parserDefinition, lexer, chameleon, text));
}
public MasqueradingPsiBuilderAdapter(@NotNull final Project project,
@NotNull final ParserDefinition parserDefinition,
@NotNull final MasqueradingLexer lexer,
@NotNull final LighterLazyParseableNode chameleon,
@NotNull final CharSequence text) {
this(new PsiBuilderImpl(project, parserDefinition, lexer, chameleon, text));
}
private MasqueradingPsiBuilderAdapter(PsiBuilderImpl builder) {
super(builder);
LOG.assertTrue(myDelegate instanceof PsiBuilderImpl);
myBuilderDelegate = ((PsiBuilderImpl)myDelegate);
LOG.assertTrue(myBuilderDelegate.getLexer() instanceof MasqueradingLexer);
myLexer = ((MasqueradingLexer)myBuilderDelegate.getLexer());
initShrunkSequence();
}
@Override
public CharSequence getOriginalText() {
return myShrunkCharSequence;
}
@Override
public void advanceLexer() {
myLexPosition++;
skipWhitespace();
synchronizePositions(false);
}
/**
* @param exact if true then positions should be equal;
* else delegate should be behind, not including exactly all foreign (skipped) or whitespace tokens
*/
private void synchronizePositions(boolean exact) {
final PsiBuilder delegate = getDelegate();
if (myLexPosition >= myShrunkSequence.size() || delegate.eof()) {
myLexPosition = myShrunkSequence.size();
while (!delegate.eof()) {
delegate.advanceLexer();
}
return;
}
if (delegate.getCurrentOffset() > myShrunkSequence.get(myLexPosition).realStart) {
LOG.error("delegate is ahead of my builder!");
return;
}
final int keepUpPosition = getKeepUpPosition(exact);
while (!delegate.eof()) {
final int delegatePosition = delegate.getCurrentOffset();
if (delegatePosition < keepUpPosition) {
delegate.advanceLexer();
}
else {
break;
}
}
}
private int getKeepUpPosition(boolean exact) {
if (exact) {
return myShrunkSequence.get(myLexPosition).realStart;
}
int lexPosition = myLexPosition;
while (lexPosition > 0 && (myShrunkSequence.get(lexPosition - 1).shrunkStart == myShrunkSequence.get(lexPosition).shrunkStart
|| isWhiteSpaceOnPos(lexPosition - 1))) {
lexPosition--;
}
if (lexPosition == 0) {
return myShrunkSequence.get(lexPosition).realStart;
}
return myShrunkSequence.get(lexPosition - 1).realStart + 1;
}
@Override
public IElementType lookAhead(int steps) {
if (eof()) { // ensure we skip over whitespace if it's needed
return null;
}
int cur = myLexPosition;
while (steps > 0) {
++cur;
while (cur < myShrunkSequence.size() && isWhiteSpaceOnPos(cur)) {
cur++;
}
steps--;
}
return cur < myShrunkSequence.size() ? myShrunkSequence.get(cur).elementType : null;
}
@Override
public IElementType rawLookup(int steps) {
int cur = myLexPosition + steps;
return cur >= 0 && cur < myShrunkSequence.size() ? myShrunkSequence.get(cur).elementType : null;
}
@Override
public int rawTokenTypeStart(int steps) {
int cur = myLexPosition + steps;
if (cur < 0) return -1;
if (cur >= myShrunkSequence.size()) return getOriginalText().length();
return myShrunkSequence.get(cur).shrunkStart;
}
@Override
public int rawTokenIndex() {
return myLexPosition;
}
@Override
public int getCurrentOffset() {
return myLexPosition < myShrunkSequence.size() ? myShrunkSequence.get(myLexPosition).shrunkStart : myShrunkCharSequence.length();
}
@Nullable
@Override
public IElementType getTokenType() {
if (allIsEmpty()) {
return TokenType.DUMMY_HOLDER;
}
skipWhitespace();
return myLexPosition < myShrunkSequence.size() ? myShrunkSequence.get(myLexPosition).elementType : null;
}
@Nullable
@Override
public String getTokenText() {
if (allIsEmpty()) {
return getDelegate().getOriginalText().toString();
}
skipWhitespace();
if (myLexPosition >= myShrunkSequence.size()) {
return null;
}
final MyShiftedToken token = myShrunkSequence.get(myLexPosition);
return myShrunkCharSequence.subSequence(token.shrunkStart, token.shrunkEnd).toString();
}
@Override
public boolean eof() {
boolean isEof = myLexPosition >= myShrunkSequence.size();
if (!isEof) {
return false;
}
synchronizePositions(true);
return true;
}
@Override
public Marker mark() {
// In the case of the topmost node all should be inserted
if (myLexPosition != 0) {
synchronizePositions(true);
}
final Marker mark = super.mark();
return new MyMarker(mark, myLexPosition);
}
private boolean allIsEmpty() {
return myShrunkSequence.isEmpty() && getDelegate().getOriginalText().length() != 0;
}
private void skipWhitespace() {
while (myLexPosition < myShrunkSequence.size() && isWhiteSpaceOnPos(myLexPosition)) {
myLexPosition++;
}
}
private boolean isWhiteSpaceOnPos(int pos) {
return myBuilderDelegate.whitespaceOrComment(myShrunkSequence.get(pos).elementType);
}
protected void initShrunkSequence() {
initTokenListAndCharSequence(myLexer);
myLexPosition = 0;
}
private void initTokenListAndCharSequence(MasqueradingLexer lexer) {
lexer.start(getDelegate().getOriginalText());
myShrunkSequence = new ArrayList<MyShiftedToken>();
StringBuilder charSequenceBuilder = new StringBuilder();
int realPos = 0;
int shrunkPos = 0;
while (lexer.getTokenType() != null) {
final IElementType masqueTokenType = lexer.getMasqueTokenType();
final String masqueTokenText = lexer.getMasqueTokenText();
final int realLength = lexer.getTokenEnd() - lexer.getTokenStart();
if (masqueTokenType != null) {
assert masqueTokenText != null;
final int masqueLength = masqueTokenText.length();
myShrunkSequence.add(new MyShiftedToken(masqueTokenType,
realPos, realPos + realLength,
shrunkPos, shrunkPos + masqueLength));
charSequenceBuilder.append(masqueTokenText);
shrunkPos += masqueLength;
}
realPos += realLength;
lexer.advance();
}
myShrunkCharSequence = charSequenceBuilder.toString();
}
@SuppressWarnings({"StringConcatenationInsideStringBufferAppend", "UnusedDeclaration"})
private void logPos() {
StringBuilder sb = new StringBuilder();
sb.append("\nmyLexPosition=" + myLexPosition + "/" + myShrunkSequence.size());
if (myLexPosition < myShrunkSequence.size()) {
final MyShiftedToken token = myShrunkSequence.get(myLexPosition);
sb.append("\nshrunk:" + token.shrunkStart + "," + token.shrunkEnd);
sb.append("\nreal:" + token.realStart + "," + token.realEnd);
sb.append("\nTT:" + getTokenText());
}
sb.append("\ndelegate:");
sb.append("eof=" + myDelegate.eof());
if (!myDelegate.eof()) {
//noinspection ConstantConditions
sb.append("\nposition:" + myDelegate.getCurrentOffset() + "," + (myDelegate.getCurrentOffset() + myDelegate.getTokenText().length()));
sb.append("\nTT:" + myDelegate.getTokenText());
}
LOG.info(sb.toString());
}
private static class MyShiftedToken {
public final IElementType elementType;
public final int realStart;
public final int realEnd;
public final int shrunkStart;
public final int shrunkEnd;
public MyShiftedToken(IElementType elementType, int realStart, int realEnd, int shrunkStart, int shrunkEnd) {
this.elementType = elementType;
this.realStart = realStart;
this.realEnd = realEnd;
this.shrunkStart = shrunkStart;
this.shrunkEnd = shrunkEnd;
}
@Override
public String toString() {
return "MSTk: [" + realStart + ", " + realEnd + "] -> [" + shrunkStart + ", " + shrunkEnd + "]: " + elementType.toString();
}
}
private class MyMarker extends DelegateMarker {
private final int myBuilderPosition;
public MyMarker(Marker delegate, int builderPosition) {
super(delegate);
myBuilderPosition = builderPosition;
}
@Override
public void rollbackTo() {
super.rollbackTo();
myLexPosition = myBuilderPosition;
}
@Override
public void doneBefore(IElementType type, Marker before) {
super.doneBefore(type, getDelegateOrThis(before));
}
@Override
public void doneBefore(IElementType type, Marker before, String errorMessage) {
super.doneBefore(type, getDelegateOrThis(before), errorMessage);
}
@NotNull
private Marker getDelegateOrThis(@NotNull Marker marker) {
if (marker instanceof DelegateMarker) {
return ((DelegateMarker)marker).getDelegate();
}
else {
return marker;
}
}
}
}
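// Illustrative note (assumption, added for clarity): given a lexer that
// masquerades a template fragment away, e.g. presenting only "ab" to the
// parser for the real text "a<%skip%>b", this adapter parses against the
// shrunk "ab" positions while tracking the real offsets, so the hidden text
// still ends up in the resulting tree.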
| akosyakov/intellij-community | xml/xml-psi-impl/src/com/intellij/embedding/MasqueradingPsiBuilderAdapter.java | Java | apache-2.0 | 11,304 |
package storage
import (
"fmt"
)
// ErrOldVersion is returned when a newer version of TUF metadata is already available
type ErrOldVersion struct{}
// Error implements error
func (err ErrOldVersion) Error() string {
	return "Error updating metadata. A newer version is already available"
}
// ErrNotFound is returned when TUF metadata isn't found for a specific record
type ErrNotFound struct{}
// Error implements error
func (err ErrNotFound) Error() string {
	return "No record found"
}
// ErrKeyExists is returned when a key already exists
type ErrKeyExists struct {
gun string
role string
}
// Error implements error
func (err ErrKeyExists) Error() string {
return fmt.Sprintf("Error, timestamp key already exists for %s:%s", err.gun, err.role)
}
// ErrNoKey is returned when no timestamp key is found
type ErrNoKey struct {
gun string
}
// Error implements error
func (err ErrNoKey) Error() string {
return fmt.Sprintf("Error, no timestamp key found for %s", err.gun)
}
// ErrBadQuery is used when the parameters provided cannot be appropriately
// coerced.
type ErrBadQuery struct {
msg string
}
func (err ErrBadQuery) Error() string {
return fmt.Sprintf("did not recognize parameters: %s", err.msg)
}
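// Illustrative usage sketch (added comment, not from the original source):
// callers are expected to branch on the concrete error type, e.g.
//
//	if _, ok := err.(ErrNotFound); ok {
//	    // no TUF record for this GUN/role; treat as a cache miss
//	}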
| jfrazelle/notary | server/storage/errors.go | GO | apache-2.0 | 1,372 |
/*
* Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.nio.serialization;
/**
 * Allows a class to act as if it were another class when it is deserialized using
 * {@link DataSerializable}.
*/
public interface TypedDataSerializable extends DataSerializable {
/**
*
* @return The class type that this serializable wants to act as.
*/
Class getClassType();
}
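/**
 * Illustrative sketch (added for clarity, not part of the original file):
 * {@code CustomerV1} masquerades as {@code CustomerV2} during serialization,
 * so readers always materialize the newer class. Both class names are
 * assumptions for the example.
 */
class CustomerV2 implements DataSerializable {
    String name;

    @Override
    public void writeData(com.hazelcast.nio.ObjectDataOutput out) throws java.io.IOException {
        out.writeUTF(name);
    }

    @Override
    public void readData(com.hazelcast.nio.ObjectDataInput in) throws java.io.IOException {
        name = in.readUTF();
    }
}

class CustomerV1 extends CustomerV2 implements TypedDataSerializable {
    @Override
    public Class getClassType() {
        return CustomerV2.class; // act as CustomerV2 on the wire
    }
}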
| emre-aydin/hazelcast | hazelcast/src/main/java/com/hazelcast/nio/serialization/TypedDataSerializable.java | Java | apache-2.0 | 1,000 |
/**
* View attribute injection library for Android which generates the obtainStyledAttributes() and
* TypedArray boilerplate code for you at compile time.
* <p>
* No more handing to deal with context.obtainStyledAttributes(...) or manually retrieving values
* from the resulting {@link android.content.res.TypedArray TypedArray} instance. Just annotate your
* field or method with {@link io.sweers.barber.StyledAttr @StyledAttr}.
*/
package io.sweers.barber; | lord19871207/barber | api/src/main/java/io/sweers/barber/package-info.java | Java | apache-2.0 | 465 |
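// Illustrative usage sketch (assumed API, inferred from the description above;
// consult the project README for the exact signatures):
//
//   public class CustomView extends View {
//       @StyledAttr(R.styleable.CustomView_stripeColor)
//       int stripeColor;
//
//       public CustomView(Context context, AttributeSet attrs) {
//           super(context, attrs);
//           Barber.style(this, attrs, R.styleable.CustomView);
//       }
//   }
//
// Barber.style(...) and the attribute names here are assumptions for
// illustration only.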
/*******************************************************************************
* Copyright 2015 Ivan Shubin http://galenframework.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.galenframework.components.report;
import static java.lang.String.format;
import java.io.PrintStream;
import java.io.PrintWriter;
public class FakeException extends RuntimeException {
    private static final long serialVersionUID = -4840622707009032748L;
public FakeException(String string) {
super(string);
}
@Override
public StackTraceElement[] getStackTrace() {
return new StackTraceElement[]{
new StackTraceElement("net.mindengine.someclass.SomeClass", "method1", "SomeClass.java", 4),
new StackTraceElement("net.mindengine.someclass.SomeClass2", "method2", "SomeClass2.java", 5),
new StackTraceElement("net.mindengine.someclass.SomeClass3", "method3", "SomeClass3.java", 6)
};
}
@Override
public void printStackTrace(PrintStream ps) {
ps.println(getClass().getName() + ": " + getMessage());
for (StackTraceElement element : getStackTrace()) {
ps.println(format("\tat %s.%s(%s:%d)", element.getClassName(), element.getMethodName(), element.getFileName(), element.getLineNumber()));
}
}
@Override
public void printStackTrace(PrintWriter s) {
s.println(getClass().getName() + ": " + getMessage());
for (StackTraceElement element : getStackTrace()) {
s.println(format("\tat %s.%s(%s:%d)", element.getClassName(), element.getMethodName(), element.getFileName(), element.getLineNumber()));
}
}
}
| thhiep/galen | galen-core/src/test/java/com/galenframework/components/report/FakeException.java | Java | apache-2.0 | 2,298 |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.slm;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.license.XPackLicenseState;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.XPackField;
import org.elasticsearch.xpack.core.XPackSettings;
import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction;
import org.elasticsearch.xpack.core.action.XPackInfoFeatureTransportAction;
public class SLMInfoTransportAction extends XPackInfoFeatureTransportAction {
private final boolean enabled;
private final XPackLicenseState licenseState;
@Inject
public SLMInfoTransportAction(TransportService transportService, ActionFilters actionFilters,
Settings settings, XPackLicenseState licenseState) {
super(XPackInfoFeatureAction.SNAPSHOT_LIFECYCLE.name(), transportService, actionFilters);
this.enabled = XPackSettings.SNAPSHOT_LIFECYCLE_ENABLED.get(settings);
this.licenseState = licenseState;
}
@Override
public String name() {
return XPackField.SNAPSHOT_LIFECYCLE;
}
@Override
public boolean available() {
return licenseState.isIndexLifecycleAllowed();
}
@Override
public boolean enabled() {
return enabled;
}
}
| HonzaKral/elasticsearch | x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/SLMInfoTransportAction.java | Java | apache-2.0 | 1,649 |
/*
* Copyright 2016 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import static com.google.common.truth.Truth.assertThat;
import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableList;
import com.google.javascript.jscomp.PolymerBehaviorExtractor.BehaviorDefinition;
import com.google.javascript.rhino.Node;
/**
* Unit tests for {@link PolymerBehaviorExtractor}.
*/
public class PolymerBehaviorExtractorTest extends CompilerTypeTestCase {
private PolymerBehaviorExtractor extractor;
private Node behaviorArray;
@Override
protected void setUp() {
super.setUp();
behaviorArray = null;
}
public void testArrayBehavior() {
parseAndInitializeExtractor(
LINE_JOINER.join(
"/** @polymerBehavior */",
"var FunBehavior = {",
" properties: {",
" isFun: Boolean",
" },",
" /** @param {string} funAmount */",
" doSomethingFun: function(funAmount) { alert('Something ' + funAmount + ' fun!'); },",
" /** @override */",
" created: function() {}",
"};",
"/** @polymerBehavior */",
"var RadBehavior = {",
" properties: {",
" howRad: Number",
" },",
" /** @param {number} radAmount */",
" doSomethingRad: function(radAmount) { alert('Something ' + radAmount + ' rad!'); },",
" /** @override */",
" ready: function() {}",
"};",
"/** @polymerBehavior */",
"var SuperCoolBehaviors = [FunBehavior, RadBehavior];",
"/** @polymerBehavior */",
"var BoringBehavior = {",
" properties: {",
" boringString: String",
" },",
" /** @param {boolean} boredYet */",
" doSomething: function(boredYet) { alert(boredYet + ' ' + this.boringString); },",
"};",
"var A = Polymer({",
" is: 'x-element',",
" behaviors: [ SuperCoolBehaviors, BoringBehavior ],",
"});"));
ImmutableList<BehaviorDefinition> defs = extractor.extractBehaviors(behaviorArray);
assertThat(defs).hasSize(3);
// TODO(jlklein): Actually verify the properties of the BehaviorDefinitions.
}
public void testInlineLiteralBehavior() {
parseAndInitializeExtractor(
LINE_JOINER.join(
"/** @polymerBehavior */",
"var FunBehavior = {",
" properties: {",
" isFun: Boolean",
" },",
" /** @param {string} funAmount */",
" doSomethingFun: function(funAmount) { alert('Something ' + funAmount + ' fun!'); },",
" /** @override */",
" created: function() {}",
"};",
"/** @polymerBehavior */",
"var SuperCoolBehaviors = [FunBehavior, {",
" properties: {",
" howRad: Number",
" },",
" /** @param {number} radAmount */",
" doSomethingRad: function(radAmount) { alert('Something ' + radAmount + ' rad!'); },",
" /** @override */",
" ready: function() {}",
"}];",
"var A = Polymer({",
" is: 'x-element',",
" behaviors: [ SuperCoolBehaviors ],",
"});"));
ImmutableList<BehaviorDefinition> defs = extractor.extractBehaviors(behaviorArray);
assertThat(defs).hasSize(2);
// TODO(jlklein): Actually verify the properties of the BehaviorDefinitions.
}
// TODO(jlklein): Test more use cases: names to avoid copying, global vs. non-global, etc.
private void parseAndInitializeExtractor(String code) {
Node root = compiler.parseTestCode(code);
GlobalNamespace globalNamespace = new GlobalNamespace(compiler, root);
extractor = new PolymerBehaviorExtractor(compiler, globalNamespace);
NodeUtil.visitPostOrder(root, new NodeUtil.Visitor() {
@Override
public void visit(Node node) {
if (isBehaviorArrayDeclaration(node)) {
behaviorArray = node;
}
}
}, Predicates.<Node>alwaysTrue());
assertNotNull(behaviorArray);
}
private boolean isBehaviorArrayDeclaration(Node node) {
return node.isArrayLit()
&& node.getParent().isStringKey() && node.getParent().getString().equals("behaviors");
}
}
| selkhateeb/closure-compiler | test/com/google/javascript/jscomp/PolymerBehaviorExtractorTest.java | Java | apache-2.0 | 5,025 |
// legal JS, if nonsensical, which also triggers the issue
const {
date,
} = (inspectedElement: any) => 0;
date.toISOString();
// Working flow code
const {
date2,
} = (inspectedElement: any).props;
date2.toISOString();
// It could also be an async function
const { constructor } = async () => {};
| Microsoft/TypeScript | tests/cases/compiler/destructuringControlFlowNoCrash.ts | TypeScript | apache-2.0 | 323 |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.psi.impl.blockEvaluator;
import com.jetbrains.python.psi.PyExpression;
import org.jetbrains.annotations.NotNull;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author Ilya.Kazakevich
*/
@SuppressWarnings("PackageVisibleField") // Package-only class
class PyEvaluationResult {
@NotNull
final Map<String, Object> myNamespace = new HashMap<>();
@NotNull
final Map<String, List<PyExpression>> myDeclarations = new HashMap<>();
@NotNull
List<PyExpression> getDeclarations(@NotNull final String name) {
final List<PyExpression> expressions = myDeclarations.get(name);
return (expressions != null) ? expressions : Collections.<PyExpression>emptyList();
}
}
| hurricup/intellij-community | python/src/com/jetbrains/python/psi/impl/blockEvaluator/PyEvaluationResult.java | Java | apache-2.0 | 1,369 |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.Formatting.Rules;
using Microsoft.CodeAnalysis.Text;
namespace Microsoft.CodeAnalysis.Formatting
{
/// <summary>
/// this collector gathers formatting operations that are based on a node
/// </summary>
internal class NodeOperations
{
        public static readonly NodeOperations Empty = new NodeOperations();
public Task<List<IndentBlockOperation>> IndentBlockOperationTask { get; private set; }
public Task<List<SuppressOperation>> SuppressOperationTask { get; private set; }
public Task<List<AlignTokensOperation>> AlignmentOperationTask { get; private set; }
public Task<List<AnchorIndentationOperation>> AnchorIndentationOperationsTask { get; private set; }
public NodeOperations(Task<List<IndentBlockOperation>> indentBlockOperationTask, Task<List<SuppressOperation>> suppressOperationTask, Task<List<AnchorIndentationOperation>> anchorIndentationOperationsTask, Task<List<AlignTokensOperation>> alignmentOperationTask)
{
this.IndentBlockOperationTask = indentBlockOperationTask;
this.SuppressOperationTask = suppressOperationTask;
this.AlignmentOperationTask = alignmentOperationTask;
this.AnchorIndentationOperationsTask = anchorIndentationOperationsTask;
}
private NodeOperations()
{
this.IndentBlockOperationTask = Task.FromResult(new List<IndentBlockOperation>());
this.SuppressOperationTask = Task.FromResult(new List<SuppressOperation>());
this.AlignmentOperationTask = Task.FromResult(new List<AlignTokensOperation>());
this.AnchorIndentationOperationsTask = Task.FromResult(new List<AnchorIndentationOperation>());
}
}
}
| DavidKarlas/roslyn | src/Workspaces/Core/Portable/Formatting/Engine/NodeOperations.cs | C# | apache-2.0 | 2,013 |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#nullable disable
namespace Microsoft.CodeAnalysis.Operations
{
/// <summary>
/// Represents a <see cref="IOperation"/> visitor that visits only the single IOperation
/// passed into its Visit method.
/// </summary>
public abstract partial class OperationVisitor
{
// Make public after review: https://github.com/dotnet/roslyn/issues/21281
internal virtual void VisitFixed(IFixedOperation operation) =>
// https://github.com/dotnet/roslyn/issues/21281
//DefaultVisit(operation);
VisitNoneOperation(operation);
}
/// <summary>
/// Represents a <see cref="IOperation"/> visitor that visits only the single IOperation
/// passed into its Visit method with an additional argument of the type specified by the
/// <typeparamref name="TArgument"/> parameter and produces a value of the type specified by
/// the <typeparamref name="TResult"/> parameter.
/// </summary>
/// <typeparam name="TArgument">
/// The type of the additional argument passed to this visitor's Visit method.
/// </typeparam>
/// <typeparam name="TResult">
/// The type of the return value of this visitor's Visit method.
/// </typeparam>
public abstract partial class OperationVisitor<TArgument, TResult>
{
// Make public after review: https://github.com/dotnet/roslyn/issues/21281
internal virtual TResult VisitFixed(IFixedOperation operation, TArgument argument) =>
// https://github.com/dotnet/roslyn/issues/21281
//return DefaultVisit(operation, argument);
VisitNoneOperation(operation, argument);
}
}
| brettfo/roslyn | src/Compilers/Core/Portable/Operations/OperationVisitor.cs | C# | apache-2.0 | 1,874 |
module Clever
module APIOperations
# Represents a list of results for a paged request.
class ResultsList
include Enumerable
# Create a results list from a PageList
# @api private
# @return [ResultsList]
def initialize(pagelist)
@pages = pagelist
end
# Iterate over results list
# @api public
# @return [nil]
# @example
# results = Clever::District.find # returns a ResultsList
# results.each do |district|
# puts district.name
# end
def each
@pages.each do |page|
page.each do |elem|
yield elem
end
end
end
end
end
end
| mchavarriagam/clever-ruby | lib/clever-ruby/api_operations/results_list.rb | Ruby | apache-2.0 | 699 |
({
L_MENU_GRID: "Valikkoruudukko",
L_MENU_ITEM_DISABLED: "%1 ei ole k\u00e4ytett\u00e4viss\u00e4",
L_MENU_ITEM_SUBMENU: "%1 (alivalikko)",
L_MENU_SUBMENU: "alivalikko",
L_MENU_CHECK: "valinta"
})
| iharkhukhrakou/XPagesExtensionLibrary | extlib/lwp/product/runtime/eclipse/plugins/com.ibm.xsp.extlib.domino/resources/web/dwa/common/nls/fi/menu.js | JavaScript | apache-2.0 | 196 |
package antlr;
/* ANTLR Translator Generator
* Project led by Terence Parr at http://www.jGuru.com
* Software rights: http://www.antlr.org/RIGHTS.html
*
* $Id: CppCharFormatter.java,v 1.1 2003/06/04 20:54:22 greg Exp $
*/
// C++ code generator by Pete Wells: pete@yamuna.demon.co.uk
class CppCharFormatter implements CharFormatter {
	/** Given a character value, return a string representing the character
	 * that can be embedded inside a string literal or character literal.
	 * This works for Java/C/C++ code-generation and languages with compatible
	 * special-character escaping.
	 * Code-generators for languages should override this method.
	 * @param c The character of interest.
	 * @param forCharLiteral true to escape for char literal, false for string literal
	 */
public String escapeChar(int c, boolean forCharLiteral) {
switch (c) {
case '\n' : return "\\n";
case '\t' : return "\\t";
case '\r' : return "\\r";
case '\\' : return "\\\\";
case '\'' : return forCharLiteral ? "\\'" : "'";
case '"' : return forCharLiteral ? "\"" : "\\\"";
default :
if ( c<' '||c>126 ) {
if (c > 255) {
return "\\u" + Integer.toString(c,16);
}
else {
return "\\" + Integer.toString(c,8);
}
}
else {
return String.valueOf((char)c);
}
}
}
/** Converts a String into a representation that can be use as a literal
* when surrounded by double-quotes.
* @param s The String to be changed into a literal
*/
public String escapeString(String s)
{
		StringBuilder retval = new StringBuilder();
		for (int i = 0; i < s.length(); i++)
		{
			retval.append(escapeChar(s.charAt(i), false));
		}
		return retval.toString();
}
/** Given a character value, return a string representing the character
* literal that can be recognized by the target language compiler.
* This works for languages that use single-quotes for character literals.
* Code-generators for languages should override this method.
* @param c The character of interest.
*/
public String literalChar(int c) {
return "static_cast<unsigned char>('" + escapeChar(c, true) + "')";
}
/** Converts a String into a string literal
* This works for languages that use double-quotes for string literals.
* Code-generators for languages should override this method.
* @param s The String to be changed into a literal
*/
public String literalString(String s)
{
return "\"" + escapeString(s) + "\"";
}
}
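/*
 * Illustrative examples of the formatting rules above (added comment):
 *   escapeChar('\n', ...)    returns the two characters \n
 *   escapeChar('\'', true)   returns \'  (quotes are escaped only in char literals)
 *   escapeChar('"', false)   returns \"  (quotes are escaped only in string literals)
 *   literalChar('a')         returns static_cast<unsigned char>('a')
 */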
| HebaKhaled/bposs | src/pt_antlr/antlr/CppCharFormatter.java | Java | apache-2.0 | 2,509 |
/**
* @@@ START COPYRIGHT @@@
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
* @@@ END COPYRIGHT @@@
**/
package org.trafodion.wms;
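/**
 * Tag interface; presumably used as a JUnit {@code @Category} marker for
 * small (fast, single-process) tests, following the common test-categorization
 * convention. (Descriptive comment added; an assumption based on the name.)
 */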
public interface SmallTests {
}
| apache/incubator-trafodion | wms/src/test/java/org/trafodion/wms/SmallTests.java | Java | apache-2.0 | 909 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.cache.hibernate;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Map;
import java.util.Set;
import java.util.function.Supplier;
import javax.cache.Cache;
import javax.cache.expiry.ExpiryPolicy;
import javax.cache.processor.EntryProcessor;
import javax.cache.processor.EntryProcessorResult;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.cache.CacheEntry;
import org.apache.ignite.cache.CacheMetrics;
import org.apache.ignite.cache.CachePeekMode;
import org.apache.ignite.cache.affinity.Affinity;
import org.apache.ignite.cluster.ClusterGroup;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
import org.apache.ignite.internal.processors.cache.IgniteInternalCache;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearTxLocal;
import org.apache.ignite.lang.IgniteBiPredicate;
import org.apache.ignite.mxbean.CacheMetricsMXBean;
import org.apache.ignite.transactions.Transaction;
import org.apache.ignite.transactions.TransactionConcurrency;
import org.apache.ignite.transactions.TransactionIsolation;
import org.jetbrains.annotations.Nullable;
/**
* Hibernate cache proxy used to substitute hibernate keys with ignite keys.
*/
public class HibernateCacheProxy implements IgniteInternalCache<Object, Object> {
/** Delegate is lazily loaded which allows for creation of caches after the SPI is bootstrapped */
private final Supplier<IgniteInternalCache<Object, Object>> delegate;
/** Transformer. */
private final HibernateKeyTransformer keyTransformer;
/** */
private String cacheName;
/**
* @param cacheName Cache name. Should match delegate.get().name(). Needed for lazy loading.
* @param delegate Delegate.
* @param keyTransformer Key keyTransformer.
*/
HibernateCacheProxy(
String cacheName,
Supplier<IgniteInternalCache<Object, Object>> delegate,
HibernateKeyTransformer keyTransformer
) {
assert cacheName != null;
assert delegate != null;
assert keyTransformer != null;
this.cacheName = cacheName;
this.delegate = delegate;
this.keyTransformer = keyTransformer;
}
/**
* @return HibernateKeyTransformer
*/
public HibernateKeyTransformer keyTransformer() {
return keyTransformer;
}
/** {@inheritDoc} */
@Override public String name() {
return cacheName;
}
/** {@inheritDoc} */
@Override public boolean skipStore() {
return delegate.get().skipStore();
}
/** {@inheritDoc} */
@Override public IgniteInternalCache setSkipStore(boolean skipStore) {
return delegate.get().setSkipStore(skipStore);
}
/** {@inheritDoc} */
@Override public boolean isEmpty() {
return delegate.get().isEmpty();
}
/** {@inheritDoc} */
@Override public boolean containsKey(Object key) {
return delegate.get().containsKey(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Boolean> containsKeyAsync(Object key) {
return delegate.get().containsKeyAsync(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public boolean containsKeys(Collection keys) {
return delegate.get().containsKey(transform(keys));
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Boolean> containsKeysAsync(Collection keys) {
return delegate.get().containsKeysAsync(transform(keys));
}
/** {@inheritDoc} */
@Nullable @Override public Object localPeek(
Object key,
CachePeekMode[] peekModes
) throws IgniteCheckedException {
return delegate.get().localPeek(keyTransformer.transform(key), peekModes);
}
/** {@inheritDoc} */
@Override public Iterable<Cache.Entry<Object, Object>> localEntries(
CachePeekMode[] peekModes
) throws IgniteCheckedException {
return delegate.get().localEntries(peekModes);
}
/** {@inheritDoc} */
@Nullable @Override public Object get(Object key) throws IgniteCheckedException {
return delegate.get().get(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Nullable @Override public CacheEntry getEntry(Object key) throws IgniteCheckedException {
return delegate.get().getEntry(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture getAsync(Object key) {
return delegate.get().getAsync(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<CacheEntry<Object, Object>> getEntryAsync(Object key) {
return delegate.get().getEntryAsync(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public Map getAll(@Nullable Collection keys) throws IgniteCheckedException {
return delegate.get().getAll(transform(keys));
}
/** {@inheritDoc} */
@Override public Collection<CacheEntry<Object, Object>> getEntries(
@Nullable Collection keys) throws IgniteCheckedException {
return delegate.get().getEntries(transform(keys));
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Map<Object, Object>> getAllAsync(@Nullable Collection keys) {
return delegate.get().getAllAsync(transform(keys));
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Collection<CacheEntry<Object, Object>>> getEntriesAsync(
@Nullable Collection keys
) {
return delegate.get().getEntriesAsync(transform(keys));
}
/** {@inheritDoc} */
@Nullable @Override public Object getAndPut(Object key, Object val) throws IgniteCheckedException {
return delegate.get().getAndPut(keyTransformer.transform(key), val);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture getAndPutAsync(Object key, Object val) {
return delegate.get().getAndPutAsync(keyTransformer.transform(key), val);
}
/** {@inheritDoc} */
@Override public boolean put(Object key, Object val) throws IgniteCheckedException {
return delegate.get().put(keyTransformer.transform(key), val);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Boolean> putAsync(Object key, Object val) {
return delegate.get().putAsync(keyTransformer.transform(key), val);
}
/** {@inheritDoc} */
@Nullable @Override public Object getAndPutIfAbsent(Object key, Object val) throws IgniteCheckedException {
return delegate.get().getAndPutIfAbsent(keyTransformer.transform(key), val);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture getAndPutIfAbsentAsync(Object key, Object val) {
return delegate.get().getAndPutIfAbsentAsync(keyTransformer.transform(key), val);
}
/** {@inheritDoc} */
@Override public boolean putIfAbsent(Object key, Object val) throws IgniteCheckedException {
return delegate.get().putIfAbsent(keyTransformer.transform(key), val);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Boolean> putIfAbsentAsync(Object key, Object val) {
return delegate.get().putIfAbsentAsync(keyTransformer.transform(key), val);
}
/** {@inheritDoc} */
@Nullable @Override public Object getAndReplace(Object key, Object val) throws IgniteCheckedException {
return delegate.get().getAndReplace(keyTransformer.transform(key), val);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture getAndReplaceAsync(Object key, Object val) {
return delegate.get().getAndReplaceAsync(keyTransformer.transform(key), val);
}
/** {@inheritDoc} */
@Override public boolean replace(Object key, Object val) throws IgniteCheckedException {
return delegate.get().replace(keyTransformer.transform(key), val);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Boolean> replaceAsync(Object key, Object val) {
return delegate.get().replaceAsync(keyTransformer.transform(key), val);
}
/** {@inheritDoc} */
@Override public boolean replace(Object key, Object oldVal, Object newVal) throws IgniteCheckedException {
return delegate.get().replace(keyTransformer.transform(key), oldVal, newVal);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Boolean> replaceAsync(Object key, Object oldVal, Object newVal) {
return delegate.get().replaceAsync(keyTransformer.transform(key), oldVal, newVal);
}
/** {@inheritDoc} */
@Override public void putAll(@Nullable Map m) throws IgniteCheckedException {
delegate.get().putAll(transform(m));
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<?> putAllAsync(@Nullable Map m) {
return delegate.get().putAllAsync(transform(m));
}
/** {@inheritDoc} */
@Override public Set keySet() {
return delegate.get().keySet();
}
/** {@inheritDoc} */
@Override public Set<Cache.Entry<Object, Object>> entrySet() {
return delegate.get().entrySet();
}
/** {@inheritDoc} */
@Override public Transaction txStart(
TransactionConcurrency concurrency,
TransactionIsolation isolation
) {
return delegate.get().txStart(concurrency, isolation);
}
/** {@inheritDoc} */
@Override public GridNearTxLocal txStartEx(
TransactionConcurrency concurrency,
TransactionIsolation isolation
) {
return delegate.get().txStartEx(concurrency, isolation);
}
/** {@inheritDoc} */
@Override public Transaction txStart(
TransactionConcurrency concurrency,
TransactionIsolation isolation,
long timeout,
int txSize
) {
return delegate.get().txStart(concurrency, isolation, timeout, txSize);
}
/** {@inheritDoc} */
@Nullable @Override public GridNearTxLocal tx() {
return delegate.get().tx();
}
/** {@inheritDoc} */
@Override public boolean evict(Object key) {
return delegate.get().evict(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public void evictAll(@Nullable Collection keys) {
delegate.get().evictAll(transform(keys));
}
/** {@inheritDoc} */
@Override public void clearLocally(boolean srv, boolean near, boolean readers) {
delegate.get().clearLocally(srv, near, readers);
}
/** {@inheritDoc} */
@Override public boolean clearLocally(Object key) {
return delegate.get().clearLocally(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public void clearLocallyAll(Set keys, boolean srv, boolean near, boolean readers) {
delegate.get().clearLocallyAll((Set<?>)transform(keys), srv, near, readers);
}
/** {@inheritDoc} */
@Override public void clear(Object key) throws IgniteCheckedException {
delegate.get().clear(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public void clearAll(Set keys) throws IgniteCheckedException {
delegate.get().clearAll((Set<?>)transform(keys));
}
/** {@inheritDoc} */
@Override public void clear() throws IgniteCheckedException {
delegate.get().clear();
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<?> clearAsync() {
return delegate.get().clearAsync();
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<?> clearAsync(Object key) {
return delegate.get().clearAsync(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<?> clearAllAsync(Set keys) {
return delegate.get().clearAllAsync((Set<?>)transform(keys));
}
/** {@inheritDoc} */
@Nullable @Override public Object getAndRemove(Object key) throws IgniteCheckedException {
return delegate.get().getAndRemove(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture getAndRemoveAsync(Object key) {
return delegate.get().getAndRemoveAsync(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public boolean remove(Object key) throws IgniteCheckedException {
return delegate.get().remove(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Boolean> removeAsync(Object key) {
return delegate.get().removeAsync(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public boolean remove(Object key, Object val) throws IgniteCheckedException {
return delegate.get().remove(keyTransformer.transform(key), val);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Boolean> removeAsync(Object key, Object val) {
return delegate.get().removeAsync(keyTransformer.transform(key), val);
}
/** {@inheritDoc} */
@Override public void removeAll(@Nullable Collection keys) throws IgniteCheckedException {
delegate.get().removeAll(transform(keys));
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<?> removeAllAsync(@Nullable Collection keys) {
return delegate.get().removeAllAsync(transform(keys));
}
/** {@inheritDoc} */
@Override public void removeAll() throws IgniteCheckedException {
delegate.get().removeAll();
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<?> removeAllAsync() {
return delegate.get().removeAllAsync();
}
/** {@inheritDoc} */
@Override public boolean lock(Object key, long timeout) throws IgniteCheckedException {
return delegate.get().lock(keyTransformer.transform(key), timeout);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Boolean> lockAsync(Object key, long timeout) {
return delegate.get().lockAsync(keyTransformer.transform(key), timeout);
}
/** {@inheritDoc} */
@Override public boolean lockAll(@Nullable Collection keys, long timeout) throws IgniteCheckedException {
return delegate.get().lockAll(transform(keys), timeout);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Boolean> lockAllAsync(@Nullable Collection keys, long timeout) {
return delegate.get().lockAllAsync(transform(keys), timeout);
}
/** {@inheritDoc} */
@Override public void unlock(Object key) throws IgniteCheckedException {
delegate.get().unlock(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public void unlockAll(@Nullable Collection keys) throws IgniteCheckedException {
delegate.get().unlockAll(transform(keys));
}
/** {@inheritDoc} */
@Override public boolean isLocked(Object key) {
return delegate.get().isLocked(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public boolean isLockedByThread(Object key) {
return delegate.get().isLockedByThread(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public int size() {
return delegate.get().size();
}
/** {@inheritDoc} */
@Override public long sizeLong() {
return delegate.get().sizeLong();
}
/** {@inheritDoc} */
@Override public int localSize(CachePeekMode[] peekModes) throws IgniteCheckedException {
return delegate.get().localSize(peekModes);
}
/** {@inheritDoc} */
@Override public long localSizeLong(CachePeekMode[] peekModes) throws IgniteCheckedException {
return delegate.get().localSizeLong(peekModes);
}
/** {@inheritDoc} */
@Override public long localSizeLong(int partition, CachePeekMode[] peekModes) throws IgniteCheckedException {
return delegate.get().localSizeLong(partition, peekModes);
}
/** {@inheritDoc} */
@Override public int size(CachePeekMode[] peekModes) throws IgniteCheckedException {
return delegate.get().size(peekModes);
}
/** {@inheritDoc} */
@Override public long sizeLong(CachePeekMode[] peekModes) throws IgniteCheckedException {
return delegate.get().sizeLong(peekModes);
}
/** {@inheritDoc} */
@Override public long sizeLong(int partition, CachePeekMode[] peekModes) throws IgniteCheckedException {
return delegate.get().sizeLong(partition, peekModes);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Integer> sizeAsync(CachePeekMode[] peekModes) {
return delegate.get().sizeAsync(peekModes);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Long> sizeLongAsync(CachePeekMode[] peekModes) {
return delegate.get().sizeLongAsync(peekModes);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Long> sizeLongAsync(int partition, CachePeekMode[] peekModes) {
return delegate.get().sizeLongAsync(partition, peekModes);
}
/** {@inheritDoc} */
@Override public int nearSize() {
return delegate.get().nearSize();
}
/** {@inheritDoc} */
@Override public int primarySize() {
return delegate.get().primarySize();
}
/** {@inheritDoc} */
@Override public long primarySizeLong() {
return delegate.get().primarySizeLong();
}
/** {@inheritDoc} */
@Override public CacheConfiguration configuration() {
return delegate.get().configuration();
}
/** {@inheritDoc} */
@Override public Affinity affinity() {
return delegate.get().affinity();
}
/** {@inheritDoc} */
@Override public CacheMetrics clusterMetrics() {
return delegate.get().clusterMetrics();
}
/** {@inheritDoc} */
@Override public CacheMetrics clusterMetrics(ClusterGroup grp) {
return delegate.get().clusterMetrics(grp);
}
/** {@inheritDoc} */
@Override public CacheMetrics localMetrics() {
return delegate.get().localMetrics();
}
/** {@inheritDoc} */
@Override public CacheMetricsMXBean clusterMxBean() {
return delegate.get().clusterMxBean();
}
/** {@inheritDoc} */
@Override public CacheMetricsMXBean localMxBean() {
return delegate.get().localMxBean();
}
/** {@inheritDoc} */
@Override public long offHeapEntriesCount() {
return delegate.get().offHeapEntriesCount();
}
/** {@inheritDoc} */
@Override public long offHeapAllocatedSize() {
return delegate.get().offHeapAllocatedSize();
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<?> rebalance() {
return delegate.get().rebalance();
}
/** {@inheritDoc} */
@Nullable @Override public Object getForcePrimary(Object key) throws IgniteCheckedException {
return delegate.get().getForcePrimary(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture getForcePrimaryAsync(Object key) {
return delegate.get().getForcePrimaryAsync(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public Map getAllOutTx(Set keys) throws IgniteCheckedException {
return delegate.get().getAllOutTx((Set<?>)transform(keys));
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Map<Object, Object>> getAllOutTxAsync(Set keys) {
return delegate.get().getAllOutTxAsync((Set<?>)transform(keys));
}
/** {@inheritDoc} */
@Nullable @Override public ExpiryPolicy expiry() {
return delegate.get().expiry();
}
/** {@inheritDoc} */
@Override public IgniteInternalCache withExpiryPolicy(ExpiryPolicy plc) {
return delegate.get().withExpiryPolicy(plc);
}
/** {@inheritDoc} */
@Override public IgniteInternalCache withNoRetries() {
return delegate.get().withNoRetries();
}
/** {@inheritDoc} */
@Override public <K1, V1> IgniteInternalCache<K1, V1> withAllowAtomicOpsInTx() {
return delegate.get().withAllowAtomicOpsInTx();
}
/** {@inheritDoc} */
@Override public GridCacheContext context() {
return delegate.get().context();
}
/** {@inheritDoc} */
@Override public void localLoadCache(
@Nullable IgniteBiPredicate p,
@Nullable Object... args
) throws IgniteCheckedException {
delegate.get().localLoadCache(p, args);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<?> localLoadCacheAsync(
@Nullable IgniteBiPredicate p,
@Nullable Object... args
) {
return delegate.get().localLoadCacheAsync(p, args);
}
/** {@inheritDoc} */
@Override public Collection<Integer> lostPartitions() {
return delegate.get().lostPartitions();
}
/** {@inheritDoc} */
@Override public void preloadPartition(int part) throws IgniteCheckedException {
delegate.get().preloadPartition(part);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<?> preloadPartitionAsync(int part) throws IgniteCheckedException {
return delegate.get().preloadPartitionAsync(part);
}
/** {@inheritDoc} */
@Override public boolean localPreloadPartition(int part) throws IgniteCheckedException {
return delegate.get().localPreloadPartition(part);
}
/** {@inheritDoc} */
@Nullable @Override public EntryProcessorResult invoke(
@Nullable AffinityTopologyVersion topVer,
Object key,
EntryProcessor entryProcessor,
Object... args
) throws IgniteCheckedException {
return delegate.get().invoke(topVer, key, entryProcessor, args);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Map> invokeAllAsync(Map map, Object... args) {
return delegate.get().invokeAllAsync(map, args);
}
/** {@inheritDoc} */
@Override public Map invokeAll(Map map, Object... args) throws IgniteCheckedException {
return delegate.get().invokeAll(map, args);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Map> invokeAllAsync(Set keys, EntryProcessor entryProcessor, Object... args) {
return delegate.get().invokeAllAsync((Set<?>)transform(keys), entryProcessor, args);
}
/** {@inheritDoc} */
@Override public Map invokeAll(Set keys, EntryProcessor entryProcessor, Object... args) throws IgniteCheckedException {
return delegate.get().invokeAll((Set<?>)transform(keys), entryProcessor, args);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<EntryProcessorResult> invokeAsync(
Object key,
EntryProcessor entryProcessor,
Object... args
) {
return delegate.get().invokeAsync(keyTransformer.transform(key), entryProcessor, args);
}
/** {@inheritDoc} */
@Nullable @Override public EntryProcessorResult invoke(
Object key,
EntryProcessor entryProcessor,
Object... args
) throws IgniteCheckedException {
return delegate.get().invoke(keyTransformer.transform(key), entryProcessor, args);
}
/** {@inheritDoc} */
@Override public Iterator<Cache.Entry<Object, Object>> scanIterator(
boolean keepBinary,
@Nullable IgniteBiPredicate p
) throws IgniteCheckedException {
return delegate.get().scanIterator(keepBinary, p);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<?> removeAllConflictAsync(Map drMap) throws IgniteCheckedException {
return delegate.get().removeAllConflictAsync(drMap);
}
/** {@inheritDoc} */
@Override public void removeAllConflict(Map drMap) throws IgniteCheckedException {
        delegate.get().removeAllConflict(drMap);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<?> putAllConflictAsync(Map drMap) throws IgniteCheckedException {
return delegate.get().putAllConflictAsync(drMap);
}
/** {@inheritDoc} */
@Override public void putAllConflict(Map drMap) throws IgniteCheckedException {
delegate.get().putAllConflict(drMap);
}
/** {@inheritDoc} */
@Override public IgniteInternalCache keepBinary() {
return delegate.get().keepBinary();
}
/** {@inheritDoc} */
@Override public IgniteInternalCache cache() {
return delegate.get().cache();
}
/** {@inheritDoc} */
@Override public Iterator iterator() {
return delegate.get().iterator();
}
/**
* @param keys Keys.
*/
private Collection<Object> transform(Collection<Object> keys) {
Collection<Object> res = new LinkedList<>();
for (Object o : keys)
res.add(keyTransformer.transform(o));
return res;
}
/**
* @param map Map.
*/
private Map<Object, Object> transform(Map<Object, Object> map) {
Map<Object, Object> res = new HashMap<>();
Set<Map.Entry<Object, Object>> ents = map.entrySet();
for (Map.Entry<Object, Object> e : ents)
res.put(keyTransformer.transform(e.getKey()), e.getValue());
return res;
}
}
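/*
 * Usage sketch (an assumption for illustration — the real wiring lives in the
 * Hibernate region factory, not in this file):
 *
 *   HibernateKeyTransformer transformer = ...; // maps Hibernate keys to Ignite keys
 *   HibernateCacheProxy proxy = new HibernateCacheProxy(
 *       "entity-region",                        // must match delegate.get().name()
 *       () -> ignite.cachex("entity-region"),   // resolved lazily, after SPI bootstrap
 *       transformer);
 *
 *   proxy.put(hibernateKey, value); // the key is transformed before delegation
 *
 * The Supplier-based delegate is the design choice that allows the region
 * cache to be created after the SPI is bootstrapped (see the field comment
 * at the top of the class).
 */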
| chandresh-pancholi/ignite | modules/hibernate-core/src/main/java/org/apache/ignite/cache/hibernate/HibernateCacheProxy.java | Java | apache-2.0 | 26,223 |
module td.output
{
/**
* List of states the parser of [[PrettyPrintPlugin]] can be in.
*/
enum PrettyPrintState {
/**
* Default state of the parser. Empty lines will be removed and indention will be adjusted.
*/
Default,
/**
* Comment state, the parser waits for a comment closing tag.
*/
Comment,
/**
* Pre state, the parser waits for the closing tag of the current pre block.
*/
Pre
}
/**
* A plugin that pretty prints the generated html.
*
     * This not only makes the generated html source code more readable; by removing blank
     * lines and unnecessary whitespace, the size of the documentation is reduced without
     * visual impact.
     *
     * At the time of writing, the docs of TypeDoc took 97.8 MB without and 66.4 MB with this
     * plugin enabled, so it reduced the size to 68% of the original output.
*/
export class PrettyPrintPlugin extends RendererPlugin
{
/**
* Map of all tags that will be ignored.
*/
static IGNORED_TAGS:any = {
area: true,
base: true,
br: true,
wbr: true,
col: true,
command: true,
embed: true,
hr: true,
img: true,
input: true,
link: true,
meta: true,
param: true,
source: true
};
/**
     * Map of all tags that prevent this plugin from modifying the following code.
*/
static PRE_TAGS:any = {
pre: true,
code: true,
textarea: true,
script: true,
style: true
};
/**
* Create a new PrettyPrintPlugin instance.
*
* @param renderer The renderer this plugin should be attached to.
*/
constructor(renderer:Renderer) {
super(renderer);
renderer.on(Renderer.EVENT_END_PAGE, this.onRendererEndPage, this, -1024);
}
/**
* Triggered after a document has been rendered, just before it is written to disc.
*
* @param event
*/
onRendererEndPage(event:OutputPageEvent) {
var match, line, lineState, lineDepth, tagName, preName;
var tagExp = /<\s*(\w+)[^>]*>|<\/\s*(\w+)[^>]*>|<!--|-->/g;
var emptyLineExp = /^[\s]*$/;
var minLineDepth = 1;
var state = PrettyPrintState.Default;
var stack = [];
var lines = event.contents.split(/\r\n?|\n/);
var index = 0;
var count = lines.length;
while (index < count) {
line = lines[index];
if (emptyLineExp.test(line)) {
if (state == PrettyPrintState.Default) {
lines.splice(index, 1);
count -= 1;
continue;
}
} else {
lineState = state;
lineDepth = stack.length;
while (match = tagExp.exec(line)) {
if (state == PrettyPrintState.Comment) {
if (match[0] == '-->') {
state = PrettyPrintState.Default;
}
} else if (state == PrettyPrintState.Pre) {
if (match[2] && match[2].toLowerCase() == preName) {
state = PrettyPrintState.Default;
}
} else {
if (match[0] == '<!--') {
state = PrettyPrintState.Comment;
} else if (match[1]) {
tagName = match[1].toLowerCase();
if (tagName in PrettyPrintPlugin.IGNORED_TAGS) continue;
if (tagName in PrettyPrintPlugin.PRE_TAGS) {
state = PrettyPrintState.Pre;
preName = tagName;
} else {
if (tagName == 'body') minLineDepth = 2;
stack.push(tagName);
}
} else if (match[2]) {
tagName = match[2].toLowerCase();
if (tagName in PrettyPrintPlugin.IGNORED_TAGS) continue;
var n = stack.lastIndexOf(tagName);
if (n != -1) {
stack.length = n;
}
}
}
}
if (lineState == PrettyPrintState.Default) {
lineDepth = Math.min(lineDepth, stack.length);
line = line.replace(/^\s+/, '').replace(/\s+$/, '');
if (lineDepth > minLineDepth) {
line = Array(lineDepth - minLineDepth + 1).join('\t') + line;
}
lines[index] = line;
}
}
index++;
}
event.contents = lines.join('\n');
}
}
/**
* Register this plugin.
*/
Renderer.registerPlugin('prettyPrint', PrettyPrintPlugin);
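    /*
     * Effect sketch (illustrative input, not executed): given page contents
     *
     *   <body>
     *   <div>
     *
     *        <p>text</p>
     *   </div>
     *   </body>
     *
     * the blank line is removed and the normal lines are trimmed and
     * re-indented with tabs according to tag depth, while anything inside
     * PRE_TAGS (pre, code, textarea, script, style) and HTML comments is
     * passed through untouched.
     */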
} | innerverse/typedoc | src/td/output/plugins/PrettyPrintPlugin.ts | TypeScript | apache-2.0 | 5,741 |
/**
* @@@ START COPYRIGHT @@@
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
* @@@ END COPYRIGHT @@@
*/
package org.trafodion.dcs.master.listener;
import java.sql.SQLException;
import java.io.*;
import java.nio.*;
import java.nio.channels.*;
import java.nio.channels.spi.*;
import java.net.*;
import java.util.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
class ConnectionContext {
private static final Log LOG = LogFactory.getLog(ConnectionContext.class);
String datasource = "";
String catalog = "";
String schema = "";
String location = "";
String userRole = "";
String connectOptions = "";
short accessMode;
short autoCommit;
int queryTimeoutSec;
int idleTimeoutSec;
int loginTimeoutSec;
short txnIsolationLevel;
short rowSetSize;
int diagnosticFlag;
int processId;
String computerName = "";
String windowText = "";
VersionList clientVersionList = null;
UserDesc user = null;
int ctxACP;
int ctxDataLang;
int ctxErrorLang;
short ctxCtrlInferNXHAR;
short cpuToUse;
short cpuToUseEnd;
int srvrType;
short retryCount;
int optionFlags1;
int optionFlags2;
String vproc;
String client;
ConnectionContext(){
clientVersionList = new VersionList();
user = new UserDesc();
}
void extractFromByteBuffer(ByteBuffer buf) throws java.io.UnsupportedEncodingException {
        datasource = Util.extractString(buf);
        catalog = Util.extractString(buf);
        schema = Util.extractString(buf);
        location = Util.extractString(buf);
        userRole = Util.extractString(buf);
        accessMode = buf.getShort();
        autoCommit = buf.getShort();
        queryTimeoutSec = buf.getInt();
        idleTimeoutSec = buf.getInt();
        loginTimeoutSec = buf.getInt();
        txnIsolationLevel = buf.getShort();
        rowSetSize = buf.getShort();
        diagnosticFlag = buf.getInt();
        processId = buf.getInt();
        computerName = Util.extractString(buf);
        windowText = Util.extractString(buf);
        ctxACP = buf.getInt();
        ctxDataLang = buf.getInt();
        ctxErrorLang = buf.getInt();
        ctxCtrlInferNXHAR = buf.getShort();
        cpuToUse = buf.getShort();
        cpuToUseEnd = buf.getShort();
        connectOptions = Util.extractString(buf);
        clientVersionList.extractFromByteBuffer(buf);
        user.extractFromByteBuffer(buf);
        srvrType = buf.getInt();
        retryCount = buf.getShort();
        optionFlags1 = buf.getInt();
        optionFlags2 = buf.getInt();
        vproc = Util.extractString(buf);
        client = Util.extractString(buf);
}
}
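/*
 * Usage sketch (an assumption — the actual call site is elsewhere in this
 * listener package): once the connect message has been read from the socket
 * into a ByteBuffer positioned at the context fields, the listener can do:
 *
 *   ConnectionContext ctx = new ConnectionContext();
 *   ctx.extractFromByteBuffer(buf);
 *
 * Field order in extractFromByteBuffer() is the wire order and must match
 * the client-side serialization exactly; each Util.extractString(buf)
 * consumes one encoded string from the buffer.
 */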
| apache/incubator-trafodion | dcs/src/main/java/org/trafodion/dcs/master/listener/ConnectionContext.java | Java | apache-2.0 | 3,102 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('event_mapper', '0005_user_is_confirmed'),
]
operations = [
migrations.AlterField(
model_name='event',
name='date_time',
field=models.DateTimeField(help_text=b'Date and time when the event happened.', verbose_name=b'Date and Time'),
preserve_default=True,
),
migrations.AlterField(
model_name='event',
name='victim',
field=models.ForeignKey(default=0, verbose_name=b'Victim', to='event_mapper.Victim', help_text=b'The victim of the event.'),
preserve_default=True,
),
]
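# Model-state sketch (an assumption for illustration — the authoritative
# declarations live in event_mapper/models.py): after this migration the
# altered Event fields correspond to
#
#     date_time = models.DateTimeField(
#         verbose_name='Date and Time',
#         help_text='Date and time when the event happened.')
#     victim = models.ForeignKey(
#         'event_mapper.Victim', default=0, verbose_name='Victim',
#         help_text='The victim of the event.')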
| MariaSolovyeva/watchkeeper | django_project/event_mapper/migrations/0006_auto_20150505_0922.py | Python | bsd-2-clause | 789 |
package vektah.rust;
import com.intellij.openapi.fileTypes.LanguageFileType;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import vektah.rust.i18n.RustBundle;
import javax.swing.*;
public class RustFileType extends LanguageFileType {
public static final RustFileType INSTANCE = new RustFileType();
private RustFileType() {
super(RustLanguage.INSTANCE);
}
@NotNull
@Override
public String getName() {
return RustBundle.message("file.type.name.rust");
}
@NotNull
@Override
public String getDescription() {
return RustBundle.message("file.type.description.rust");
}
@NotNull
@Override
public String getDefaultExtension() {
return "rs";
}
@Nullable
@Override
public Icon getIcon() {
return RustIcons.ICON_RUST_16;
}
}
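/*
 * Registration note (an assumption — not shown in this file): IntelliJ
 * plugins typically register a LanguageFileType like this one in plugin.xml
 * (historically via a FileTypeFactory extension point), after which lookups
 * resolve to the singleton, e.g.:
 *
 *   FileType ft = FileTypeManager.getInstance().getFileTypeByFileName("main.rs");
 *   // ft == RustFileType.INSTANCE once the plugin is loaded
 */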
| tempbottle/idea-rust | src/java/main/vektah/rust/RustFileType.java | Java | bsd-2-clause | 790 |
cask 'picka' do
version '1.0.0'
sha256 '981209f1bd432d99ce082429cbb182b17194063b6b0eb8ae9fa22a0dbe37bca8'
url 'https://getpicka.com/downloads/Picka.zip'
appcast 'https://getpicka.com/appcast-trial.xml'
name 'Picka'
homepage 'https://getpicka.com/'
app 'Picka.app'
end
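# Install sketch (standard Homebrew usage, not part of the cask definition):
#   brew install --cask picka   # current Homebrew
#   brew cask install picka     # legacy syntax on older Homebrew versions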
| jawshooah/homebrew-cask | Casks/picka.rb | Ruby | bsd-2-clause | 284 |
cask 'watchguard-mobile-vpn-with-ssl' do
version '12.5.3,615421'
sha256 'b8a4f9ce908f19df6122fdf24445fdb233d812f2f6b5f08261ca2e4cca0c3784'
url "http://cdn.watchguard.com/SoftwareCenter/Files/MUVPN_SSL/#{version.before_comma.dots_to_underscores}/WG-MVPN-SSL_#{version.before_comma.dots_to_underscores}.dmg"
name 'WatchGuard Mobile VPN with SSL'
homepage 'https://www.watchguard.com/'
pkg "WatchGuard Mobile VPN with SSL Installer V#{version.after_comma}.mpkg"
uninstall pkgutil: 'com.watchguard.*'
end
| sscotth/homebrew-cask | Casks/watchguard-mobile-vpn-with-ssl.rb | Ruby | bsd-2-clause | 518 |
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'chrome://resources/cr_elements/cr_button/cr_button.m.js';
import './advanced_settings_dialog.js';
import './print_preview_shared_css.js';
import './settings_section.js';
import {CrButtonElement} from 'chrome://resources/cr_elements/cr_button/cr_button.m.js';
import {PolymerElement} from 'chrome://resources/polymer/v3_0/polymer/polymer_bundled.min.js';
import {Destination} from '../data/destination.js';
import {Settings} from '../data/model.js';
import {getTemplate} from './advanced_options_settings.html.js';
interface PrintPreviewAdvancedOptionsSettingsElement {
$: {
button: CrButtonElement,
}
}
class PrintPreviewAdvancedOptionsSettingsElement extends PolymerElement {
static get is() {
return 'print-preview-advanced-options-settings';
}
static get template() {
return getTemplate();
}
static get properties() {
return {
disabled: Boolean,
destination: Object,
settings: Object,
showAdvancedDialog_: {
type: Boolean,
value: false,
},
};
}
disabled: boolean;
destination: Destination;
settings: Settings;
private showAdvancedDialog_: boolean;
private onButtonClick_() {
this.showAdvancedDialog_ = true;
}
private onDialogClose_() {
this.showAdvancedDialog_ = false;
this.$.button.focus();
}
}
customElements.define(
PrintPreviewAdvancedOptionsSettingsElement.is,
PrintPreviewAdvancedOptionsSettingsElement);
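/*
 * Usage sketch (an assumption — the real bindings live in the parent
 * element's HTML template): the element is stamped and data-bound from
 * Polymer markup such as
 *
 *   <print-preview-advanced-options-settings
 *       settings="[[settings]]"
 *       destination="[[destination]]"
 *       disabled="[[controlsDisabled_]]">
 *   </print-preview-advanced-options-settings>
 *
 * Clicking the button sets showAdvancedDialog_ to true, which shows the
 * advanced-settings dialog; onDialogClose_() restores focus to the button.
 */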
| chromium/chromium | chrome/browser/resources/print_preview/ui/advanced_options_settings.ts | TypeScript | bsd-3-clause | 1,620 |
<?php
/**
* PSR1_Sniffs_Methods_CamelCapsMethodNameSniff.
*
* PHP version 5
*
* @category PHP
* @package PHP_CodeSniffer
* @author Greg Sherwood <gsherwood@squiz.net>
* @copyright 2006-2012 Squiz Pty Ltd (ABN 77 084 670 600)
* @license https://github.com/squizlabs/PHP_CodeSniffer/blob/master/licence.txt BSD Licence
* @link http://pear.php.net/package/PHP_CodeSniffer
*/
if (class_exists('PHP_CodeSniffer_Standards_AbstractScopeSniff', true) === false) {
throw new PHP_CodeSniffer_Exception('Class PHP_CodeSniffer_Standards_AbstractScopeSniff not found');
}
/**
* PSR1_Sniffs_Methods_CamelCapsMethodNameSniff.
*
* Ensures method names are defined using camel case.
*
* @category PHP
* @package PHP_CodeSniffer
* @author Greg Sherwood <gsherwood@squiz.net>
* @copyright 2006-2012 Squiz Pty Ltd (ABN 77 084 670 600)
* @license https://github.com/squizlabs/PHP_CodeSniffer/blob/master/licence.txt BSD Licence
* @version Release: @package_version@
* @link http://pear.php.net/package/PHP_CodeSniffer
*/
class PSR1_Sniffs_Methods_CamelCapsMethodNameSniff extends PHP_CodeSniffer_Standards_AbstractScopeSniff
{
/**
* Constructs a PSR1_Sniffs_Methods_CamelCapsMethodNameSniff.
*/
public function __construct()
{
parent::__construct(array(T_CLASS, T_INTERFACE, T_TRAIT), array(T_FUNCTION), true);
}//end __construct()
/**
* Processes the tokens within the scope.
*
* @param PHP_CodeSniffer_File $phpcsFile The file being processed.
* @param int $stackPtr The position where this token was
* found.
* @param int $currScope The position of the current scope.
*
* @return void
*/
protected function processTokenWithinScope(PHP_CodeSniffer_File $phpcsFile, $stackPtr, $currScope)
{
$methodName = $phpcsFile->getDeclarationName($stackPtr);
if ($methodName === null) {
// Ignore closures.
return;
}
$testName = ltrim($methodName, '_');
if (PHP_CodeSniffer::isCamelCaps($testName, false, true, false) === false) {
$error = 'Method name "%s" is not in camel caps format';
$className = $phpcsFile->getDeclarationName($currScope);
$errorData = array($className.'::'.$methodName);
$phpcsFile->addError($error, $stackPtr, 'NotCamelCaps', $errorData);
}
}//end processTokenWithinScope()
/**
* Processes the tokens outside the scope.
*
* @param PHP_CodeSniffer_File $phpcsFile The file being processed.
* @param int $stackPtr The position where this token was
* found.
*
* @return void
*/
protected function processTokenOutsideScope(PHP_CodeSniffer_File $phpcsFile, $stackPtr)
{
}//end processTokenOutsideScope()
}//end class
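/*
    Illustrative examples (hypothetical method declarations, derived from the
    checks above):

        public function getValue() {}   // OK: camel caps
        public function _render() {}    // OK: leading underscores are stripped first
        public function get_value() {}  // error: NotCamelCaps

    Closures are skipped because getDeclarationName() returns null for them.
*/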
?>
| scaryml1000/ZendSkeleton | vendor/squizlabs/php_codesniffer/CodeSniffer/Standards/PSR1/Sniffs/Methods/CamelCapsMethodNameSniff.php | PHP | bsd-3-clause | 2,984 |
// Copyright 2010, Shuo Chen. All rights reserved.
// http://code.google.com/p/evproto/
//
// Use of this source code is governed by a BSD-style license
// that can be found in the License file.
// Author: Shuo Chen (chenshuo at chenshuo dot com)
//
#include "evproto/evproto.h"
#include <gflags/gflags.h>
#include <glog/logging.h>
#include <google/protobuf/message.h>
#include <event2/event.h>
#include <event2/thread.h>
#if !defined(LIBEVENT_VERSION_NUMBER) || LIBEVENT_VERSION_NUMBER < 0x02000400
#error "This version of Libevent is not supported; Get 2.0.4-alpha or later."
#endif
namespace evproto
{
namespace internal
{
void eventLogToGlog(int severity, const char *msg)
{
switch (severity) {
case _EVENT_LOG_DEBUG:
VLOG(1) << msg;
break;
case _EVENT_LOG_MSG:
LOG(INFO) << msg;
break;
case _EVENT_LOG_WARN:
LOG(WARNING) << msg;
break;
case _EVENT_LOG_ERR:
LOG(ERROR) << msg;
break;
default:
LOG(ERROR) << msg;
break;
}
}
void protobufLogHandler(google::protobuf::LogLevel level, const char* filename, int line,
const std::string& message)
{
google::LogMessage(filename, line, level).stream() << message;
}
void eventFatal(int err)
{
LOG(FATAL) << "libevent2 fatal " << err;
}
} // namespace internal
// TODO: pass back modified argc and argv.
void initialize(int argc, char* argv[])
{
google::InitGoogleLogging(argv[0]);
::event_set_log_callback(internal::eventLogToGlog);
google::protobuf::SetLogHandler(internal::protobufLogHandler);
#if EVTHREAD_USE_WINDOWS_THREADS_IMPLEMENTED
CHECK_EQ(::evthread_use_windows_threads(), 0);
#elif EVTHREAD_USE_PTHREADS_IMPLEMENTED
CHECK_EQ(::evthread_use_pthreads(), 0);
#endif
#ifndef NDEBUG
// ::evthread_enable_lock_debuging();
// ::event_enable_debug_mode();
#endif
CHECK_EQ(LIBEVENT_VERSION_NUMBER, ::event_get_version_number())
<< "libevent2 version number mismatch";
google::ParseCommandLineFlags(&argc, &argv, true);
LOG(INFO) << argv[0] << " initialized";
}
}
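// Usage sketch (an assumption — a typical entry point, not code from this
// library):
//
//   int main(int argc, char* argv[])
//   {
//     evproto::initialize(argc, argv); // routes libevent2 + protobuf logs into glog
//     // ... create an event_base, register handlers, run the dispatch loop ...
//   }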
| cetium/evproto | evproto/evproto.cc | C++ | bsd-3-clause | 2,066 |
// (C) Copyright Joel de Guzman 2003.
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
// Modified by Troy D. Straszheim and Jakob van Santen, 2009-03-26
// Pulled in to ecto in 2010-11 by Troy D. Straszheim
// Willow Garage BSD License not applicable
#ifndef ICETRAY_PYTHON_STD_MAP_INDEXING_SUITE_HPP_INCLUDED
# define ICETRAY_PYTHON_STD_MAP_INDEXING_SUITE_HPP_INCLUDED
# include <ecto/python.hpp>
# include <boost/python/suite/indexing/indexing_suite.hpp>
# include <boost/python/iterator.hpp>
# include <boost/python/call_method.hpp>
# include <boost/python/tuple.hpp>
# include <boost/iterator/transform_iterator.hpp>
namespace bp = boost::python;
namespace boost { namespace python {
// Forward declaration
template <class Container, bool NoProxy, class DerivedPolicies>
class std_map_indexing_suite;
namespace detail
{
template <class Container, bool NoProxy>
class final_std_map_derived_policies
: public std_map_indexing_suite<Container,
NoProxy, final_std_map_derived_policies<Container, NoProxy> > {};
}
  // The std_map_indexing_suite class is a predefined indexing_suite derived
  // class for wrapping std::map (and std::map like) classes. It provides
  // all the policies required by the indexing_suite (see indexing_suite).
// Example usage:
//
// class X {...};
//
// ...
//
// class_<std::map<std::string, X> >("XMap")
  //          .def(std_map_indexing_suite<std::map<std::string, X> >())
// ;
//
// By default indexed elements are returned by proxy. This can be
// disabled by supplying *true* in the NoProxy template parameter.
//
template <
class Container,
bool NoProxy = false,
class DerivedPolicies
= detail::final_std_map_derived_policies<Container, NoProxy> >
class std_map_indexing_suite
: public indexing_suite<
Container
, DerivedPolicies
, NoProxy
, true
, typename Container::value_type::second_type
, typename Container::key_type
, typename Container::key_type
>
{
public:
typedef typename Container::value_type value_type;
typedef typename Container::value_type::second_type data_type;
typedef typename Container::key_type key_type;
typedef typename Container::key_type index_type;
typedef typename Container::size_type size_type;
typedef typename Container::difference_type difference_type;
typedef typename Container::const_iterator const_iterator;
// __getitem__ for std::pair
// FIXME: horrible (20x) performance regression vs. (pair.key(),pair.data())
static object pair_getitem(value_type const& x, int i) {
if (i==0 || i==-2) return object(x.first);
else if (i==1 || i==-1) return object(x.second);
else {
PyErr_SetString(PyExc_IndexError,"Index out of range.");
throw_error_already_set();
return object(); // None
}
}
// __iter__ for std::pair
// here we cheat by making a tuple and returning its iterator
// FIXME: replace this with a pure C++ iterator
// how to handle the different return types of first and second?
static PyObject* pair_iter(value_type const& x) {
object tuple = bp::make_tuple(x.first,x.second);
return incref(tuple.attr("__iter__")().ptr());
}
// __len__ std::pair = 2
static int pair_len(value_type const& x) { return 2; }
// return a list of keys
static bp::list keys(Container const& x)
{
bp::list t;
for(typename Container::const_iterator it = x.begin(); it != x.end(); it++)
t.append(it->first);
return t;
}
// return a list of values
static bp::list values(Container const& x)
{
bp::list t;
for(typename Container::const_iterator it = x.begin(); it != x.end(); it++)
t.append(it->second);
return t;
}
// return a list of (key,value) tuples
static bp::list items(Container const& x)
{
bp::list t;
for(typename Container::const_iterator it = x.begin(); it != x.end(); it++)
t.append(bp::make_tuple(it->first, it->second));
return t;
}
#if 0
// return a shallow copy of the map
// FIXME: is this actually a shallow copy, or did i duplicate the pairs?
static Container copy(Container const& x)
{
Container newmap;
for(const_iterator it = x.begin();it != x.end();it++) newmap.insert(*it);
return newmap;
}
#endif
// get with default value
static object dict_get(Container const& x, index_type const& k, object const& default_val = object())
{
const_iterator it = x.find(k);
if (it != x.end()) return object(it->second);
else return default_val;
}
// preserve default value info
BOOST_PYTHON_FUNCTION_OVERLOADS(dict_get_overloads, dict_get, 2, 3);
// pop map[key], or throw an error if it doesn't exist
static object dict_pop(Container & x, index_type const& k)
{
const_iterator it = x.find(k);
object result;
if (it != x.end()) {
result = object(it->second);
x.erase(it->first);
return result;
}
else {
PyErr_SetString(PyExc_KeyError,"Key not found.");
throw_error_already_set();
return object(); // None
};
}
// pop map[key], or return default_val if it doesn't exist
static object dict_pop_default(Container & x, index_type const& k, object const& default_val)
{
const_iterator it = x.find(k);
object result;
if (it != x.end()) {
result = object(it->second);
x.erase(it->first);
return result;
}
else return default_val;
}
// pop a tuple, or throw an error if empty
static object dict_pop_item(Container & x)
{
const_iterator it = x.begin();
object result;
if (it != x.end()) {
result = boost::python::make_tuple(it->first,it->second);
x.erase(it->first);
return result;
}
else {
PyErr_SetString(PyExc_KeyError,"No more items to pop");
throw_error_already_set();
return object(); // None
};
}
// create a new map with given keys, initialialized to value
static object dict_fromkeys(object const& keys, object const& value)
{
object newmap = object(typename Container::storage_type());
int numkeys = extract<int>(keys.attr("__len__")());
for(int i=0;i<numkeys;i++) { // 'cuz python is more fun in C++...
newmap.attr("__setitem__")
(keys.attr("__getitem__")(i),value);
}
return newmap;
}
// spice up the constructors a bit
template <typename PyClassT>
struct init_factory {
typedef typename PyClassT::metadata::holder Holder;
typedef bp::objects::instance<Holder> instance_t;
// connect the PyObject to a wrapped C++ instance
// borrowed from boost/python/object/make_holder.hpp
static void make_holder(PyObject *p)
{
void* memory = Holder::allocate(p, offsetof(instance_t, storage), sizeof(Holder));
try {
// this only works for blank () constructors
(new (memory) Holder(p))->install(p);
}
catch(...) {
Holder::deallocate(p, memory);
throw;
}
}
static void from_dict(PyObject *p, bp::dict const& dict)
{
make_holder(p);
object newmap = object(bp::handle<>(borrowed(p)));
newmap.attr("update")(dict);
}
static void from_list(PyObject *p, bp::list const& list)
{
make_holder(p);
object newmap = object(bp::handle<>(borrowed(p)));
newmap.attr("update")(bp::dict(list));
}
};
// copy keys and values from dictlike object (anything with keys())
static void dict_update(object & x, object const& dictlike)
{
object key;
object keys = dictlike.attr("keys")();
int numkeys = extract<int>(keys.attr("__len__")());
for(int i=0;i<numkeys;i++) {
key = keys.attr("__getitem__")(i);
x.attr("__setitem__")(key,dictlike.attr("__getitem__")(key));
}
}
// set up operators to sample the key, value, or a tuple from a std::pair
struct iterkeys
{
typedef key_type result_type;
result_type operator()(value_type const& x) const
{
return x.first;
}
};
struct itervalues
{
typedef data_type result_type;
result_type operator()(value_type const& x) const
{
return x.second;
}
};
struct iteritems {
typedef tuple result_type;
result_type operator()(value_type const& x) const
{
return boost::python::make_tuple(x.first,x.second);
}
};
template <typename Transform>
struct make_transform_impl
{
typedef boost::transform_iterator<Transform, const_iterator> iterator;
static iterator begin(const Container& m)
{
return boost::make_transform_iterator(m.begin(), Transform());
}
static iterator end(const Container& m)
{
return boost::make_transform_iterator(m.end(), Transform());
}
static bp::object range()
{
return bp::range(&begin, &end);
}
};
template <typename Transform>
static bp::object
make_transform()
{
return make_transform_impl<Transform>::range();
}
static object
print_elem(typename Container::value_type const& e)
{
return "(%s, %s)" % python::make_tuple(e.first, e.second);
}
static
typename mpl::if_<
is_class<data_type>
, data_type&
, data_type
>::type
get_data(typename Container::value_type& e)
{
return e.second;
}
static typename Container::key_type
get_key(typename Container::value_type& e)
{
return e.first;
}
static data_type&
get_item(Container& container, index_type i_)
{
typename Container::iterator i = container.find(i_);
if (i == container.end())
{
PyErr_SetString(PyExc_KeyError, "Invalid key");
throw_error_already_set();
}
return i->second;
}
static void
set_item(Container& container, index_type i, data_type const& v)
{
container[i] = v;
}
static void
delete_item(Container& container, index_type i)
{
container.erase(i);
}
static size_t
size(Container& container)
{
return container.size();
}
static bool
contains(Container& container, key_type const& key)
{
return container.find(key) != container.end();
}
static bool
compare_index(Container& container, index_type a, index_type b)
{
return container.key_comp()(a, b);
}
static index_type
convert_index(Container& container, PyObject* i_)
{
extract<key_type const&> i(i_);
if (i.check())
{
return i();
}
else
{
extract<key_type> i(i_);
if (i.check())
return i();
}
PyErr_SetString(PyExc_TypeError, "Invalid index type");
throw_error_already_set();
return index_type();
}
template <class Class>
static void
extension_def(Class& cl)
{
// Wrap the map's element (value_type)
std::string elem_name = "std_map_indexing_suite_";
std::string cl_name;
object class_name(cl.attr("__name__"));
extract<std::string> class_name_extractor(class_name);
cl_name = class_name_extractor();
elem_name += cl_name;
elem_name += "_entry";
typedef typename mpl::if_<
is_class<data_type>
, return_internal_reference<>
, default_call_policies
>::type get_data_return_policy;
class_<value_type>(elem_name.c_str())
.def("__repr__", &DerivedPolicies::print_elem)
.def("data", &DerivedPolicies::get_data, get_data_return_policy(),
"K.data() -> the value associated with this pair.\n")
.def("key", &DerivedPolicies::get_key,
"K.key() -> the key associated with this pair.\n")
.def("__getitem__",&pair_getitem)
.def("__iter__",&pair_iter)
.def("__len__",&pair_len)
.def("first",&DerivedPolicies::get_key,
"K.first() -> the first item in this pair.\n")
.def("second",&DerivedPolicies::get_data, get_data_return_policy(),
"K.second() -> the second item in this pair.\n")
;
// add convenience methods to the map
cl
// declare constructors in descending order of arity
.def("__init__", init_factory<Class>::from_list,
"Initialize with keys and values from a Python dictionary: {'key':'value'}\n")
.def("__init__", init_factory<Class>::from_dict,
"Initialize with keys and values as tuples in a Python list: [('key','value')]\n")
.def(init<>()) // restore default constructor
.def("keys", &keys, "D.keys() -> list of D's keys\n")
.def("has_key", &contains, "D.has_key(k) -> True if D has a key k, else False\n") // don't re-invent the wheel
.def("values", &values, "D.values() -> list of D's values\n")
.def("items", &items, "D.items() -> list of D's (key, value) pairs, as 2-tuples\n")
.def("clear", &Container::clear, "D.clear() -> None. Remove all items from D.\n")
//.def("copy", ©, "D.copy() -> a shallow copy of D\n")
.def("get", dict_get, dict_get_overloads(args("default_val"),
"D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None.\n"))
.def("pop", &dict_pop )
.def("pop", &dict_pop_default,
"D.pop(k[,d]) -> v, remove specified key and return the corresponding value\nIf key is not found, d is returned if given, otherwise KeyError is raised\n")
.def("popitem", &dict_pop_item,
"D.popitem() -> (k, v), remove and return some (key, value) pair as a\n2-tuple; but raise KeyError if D is empty\n")
.def("fromkeys", &dict_fromkeys,
(cl_name+".fromkeys(S,v) -> New "+cl_name+" with keys from S and values equal to v.\n").c_str())
.staticmethod("fromkeys")
.def("update", &dict_update,
"D.update(E) -> None. Update D from E: for k in E: D[k] = E[k]\n")
.def("iteritems",
make_transform<iteritems>(),
"D.iteritems() -> an iterator over the (key, value) items of D\n")
.def("iterkeys",
make_transform<iterkeys>(),
"D.iterkeys() -> an iterator over the keys of D\n")
.def("itervalues",
make_transform<itervalues>(),
"D.itervalues() -> an iterator over the values of D\n")
;
}
};
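    // Python-side usage sketch (illustrative; assumes a map was exported as
    // "XMap" with this suite, as in the class comment above):
    //
    //     m = XMap({'a': X()})            # construct from a Python dict
    //     m.keys(); m.values(); m.items()
    //     m.get('b')                      # None instead of KeyError
    //     for k, v in m.iteritems():      # lazy iteration over pairs
    //         pass
    //     m.update({'c': X()})            # copy keys/values from any dict-like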
}} // namespace boost::python
#endif // ICETRAY_PYTHON_STD_MAP_INDEXING_SUITE_HPP_INCLUDED
| stonier/ecto | include/ecto/python/std_map_indexing_suite.hpp | C++ | bsd-3-clause | 17,030 |
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.components.autofill_assistant.user_data;
import static org.chromium.components.autofill_assistant.AssistantAccessibilityUtils.setAccessibility;
import android.content.Context;
import android.text.TextUtils;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import androidx.annotation.DrawableRes;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import org.chromium.components.autofill_assistant.R;
import org.chromium.components.autofill_assistant.user_data.AssistantCollectUserDataModel.LoginChoiceModel;
import java.util.List;
/**
* The login details section of the Autofill Assistant payment request.
*/
public class AssistantLoginSection extends AssistantCollectUserDataSection<LoginChoiceModel> {
AssistantLoginSection(Context context, ViewGroup parent) {
super(context, parent, R.layout.autofill_assistant_login, R.layout.autofill_assistant_login,
context.getResources().getDimensionPixelSize(
org.chromium.components.autofill_assistant.R.dimen
.autofill_assistant_payment_request_title_padding),
/*titleAddButton=*/null, /*listAddButton=*/null);
}
@Override
protected void createOrEditItem(@NonNull LoginChoiceModel oldItem) {
assert oldItem != null;
assert oldItem.mOption.getInfoPopup() != null;
oldItem.mOption.getInfoPopup().show(mContext);
}
@Override
protected void updateFullView(View fullView, LoginChoiceModel model) {
updateSummaryView(fullView, model);
}
@Override
protected void updateSummaryView(View summaryView, LoginChoiceModel model) {
AssistantLoginChoice option = model.mOption;
TextView labelView = summaryView.findViewById(R.id.label);
labelView.setText(option.getLabel());
TextView sublabelView = summaryView.findViewById(R.id.sublabel);
if (TextUtils.isEmpty(option.getSublabel())) {
sublabelView.setVisibility(View.GONE);
} else {
sublabelView.setText(option.getSublabel());
setAccessibility(sublabelView, option.getSublabelAccessibilityHint());
}
}
@Override
protected boolean canEditOption(LoginChoiceModel model) {
return model.mOption.getInfoPopup() != null;
}
@Override
protected @DrawableRes int getEditButtonDrawable(LoginChoiceModel model) {
return R.drawable.btn_info;
}
@Override
protected String getEditButtonContentDescription(LoginChoiceModel model) {
if (model.mOption.getEditButtonContentDescription() != null) {
return model.mOption.getEditButtonContentDescription();
} else {
return mContext.getString(R.string.learn_more);
}
}
@Override
protected boolean areEqual(
@Nullable LoginChoiceModel modelA, @Nullable LoginChoiceModel modelB) {
if (modelA == null || modelB == null) {
return modelA == modelB;
}
// Native ensures that each login choice has a unique identifier.
return TextUtils.equals(modelA.mOption.getIdentifier(), modelB.mOption.getIdentifier());
}
/**
* The login options have changed externally. This will rebuild the UI with the new/changed
* set of login options, while keeping the selected item if possible.
*/
void onLoginsChanged(List<LoginChoiceModel> options) {
int indexToSelect = -1;
if (mSelectedOption != null) {
for (int i = 0; i < getItems().size(); i++) {
if (areEqual(mSelectedOption, getItems().get(i))) {
indexToSelect = i;
break;
}
}
}
setItems(options, indexToSelect);
}
}
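/*
 * Behavior notes (derived from the code above): the "edit" affordance of a
 * login choice is an info button — createOrEditItem() only shows the
 * option's InfoPopup and never opens an editor — and onLoginsChanged()
 * rebuilds the item list while re-selecting the previously chosen option
 * by its unique identifier when it is still present.
 */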
| chromium/chromium | components/autofill_assistant/android/java/src/org/chromium/components/autofill_assistant/user_data/AssistantLoginSection.java | Java | bsd-3-clause | 4,011 |
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Template support for writing HTML documents.
// Documents that include Template: true in their
// metadata are executed as input to text/template.
//
// This file defines functions for those templates to invoke.
// The template uses the function "code" to inject program
// source into the output by extracting code from files and
// injecting them as HTML-escaped <pre> blocks.
//
// The syntax is simple: 1, 2, or 3 space-separated arguments:
//
// Whole file:
// {{code "foo.go"}}
// One line (here the signature of main):
// {{code "foo.go" `/^func.main/`}}
// Block of text, determined by start and end (here the body of main):
// {{code "foo.go" `/^func.main/` `/^}/`}}
//
// Patterns can be `/regular expression/`, a decimal number, or "$"
// to signify the end of the file. In multi-line matches,
// lines that end with the four characters
// OMIT
// are omitted from the output, making it easy to provide marker
// lines in the input that will not appear in the output but are easy
// to identify by pattern.
package main
import (
"bytes"
"fmt"
"log"
"regexp"
"strings"
"text/template"
)
// Functions in this file panic on error, but the panic is recovered
// to an error by 'code'.
var templateFuncs = template.FuncMap{
"code": code,
}
// contents reads and returns the content of the named file
// (from the virtual file system, so for example /doc refers to $GOROOT/doc).
func contents(name string) string {
file, err := ReadFile(fs, name)
if err != nil {
log.Panic(err)
}
return string(file)
}
// format returns a textual representation of the arg, formatted according to its nature.
func format(arg interface{}) string {
switch arg := arg.(type) {
case int:
return fmt.Sprintf("%d", arg)
case string:
if len(arg) > 2 && arg[0] == '/' && arg[len(arg)-1] == '/' {
return fmt.Sprintf("%#q", arg)
}
return fmt.Sprintf("%q", arg)
default:
log.Panicf("unrecognized argument: %v type %T", arg, arg)
}
return ""
}
func code(file string, arg ...interface{}) (s string, err error) {
defer func() {
if r := recover(); r != nil {
err = fmt.Errorf("%v", r)
}
}()
text := contents(file)
var command string
switch len(arg) {
case 0:
// text is already whole file.
command = fmt.Sprintf("code %q", file)
case 1:
command = fmt.Sprintf("code %q %s", file, format(arg[0]))
text = oneLine(file, text, arg[0])
case 2:
command = fmt.Sprintf("code %q %s %s", file, format(arg[0]), format(arg[1]))
text = multipleLines(file, text, arg[0], arg[1])
default:
return "", fmt.Errorf("incorrect code invocation: code %q %q", file, arg)
}
// Trim spaces from output.
text = strings.Trim(text, "\n")
// Replace tabs by spaces, which work better in HTML.
text = strings.Replace(text, "\t", " ", -1)
var buf bytes.Buffer
// HTML-escape text and syntax-color comments like elsewhere.
FormatText(&buf, []byte(text), -1, true, "", nil)
// Include the command as a comment.
text = fmt.Sprintf("<pre><!--{{%s}}\n-->%s</pre>", command, buf.Bytes())
return text, nil
}
// parseArg returns the integer or string value of the argument and tells which it is.
func parseArg(arg interface{}, file string, max int) (ival int, sval string, isInt bool) {
switch n := arg.(type) {
case int:
if n <= 0 || n > max {
log.Panicf("%q:%d is out of range", file, n)
}
return n, "", true
case string:
return 0, n, false
}
log.Panicf("unrecognized argument %v type %T", arg, arg)
return
}
// oneLine returns the single line generated by a two-argument code invocation.
func oneLine(file, text string, arg interface{}) string {
lines := strings.SplitAfter(contents(file), "\n")
line, pattern, isInt := parseArg(arg, file, len(lines))
if isInt {
return lines[line-1]
}
return lines[match(file, 0, lines, pattern)-1]
}
// multipleLines returns the text generated by a three-argument code invocation.
func multipleLines(file, text string, arg1, arg2 interface{}) string {
lines := strings.SplitAfter(contents(file), "\n")
line1, pattern1, isInt1 := parseArg(arg1, file, len(lines))
line2, pattern2, isInt2 := parseArg(arg2, file, len(lines))
if !isInt1 {
line1 = match(file, 0, lines, pattern1)
}
if !isInt2 {
line2 = match(file, line1, lines, pattern2)
} else if line2 < line1 {
log.Panicf("lines out of order for %q: %d %d", text, line1, line2)
}
for k := line1 - 1; k < line2; k++ {
if strings.HasSuffix(lines[k], "OMIT\n") {
lines[k] = ""
}
}
return strings.Join(lines[line1-1:line2], "")
}
// match identifies the input line that matches the pattern in a code invocation.
// If start>0, match lines starting there rather than at the beginning.
// The return value is 1-indexed.
func match(file string, start int, lines []string, pattern string) int {
// $ matches the end of the file.
if pattern == "$" {
if len(lines) == 0 {
log.Panicf("%q: empty file", file)
}
return len(lines)
}
// /regexp/ matches the line that matches the regexp.
if len(pattern) > 2 && pattern[0] == '/' && pattern[len(pattern)-1] == '/' {
re, err := regexp.Compile(pattern[1 : len(pattern)-1])
if err != nil {
log.Panic(err)
}
for i := start; i < len(lines); i++ {
if re.MatchString(lines[i]) {
return i + 1
}
}
log.Panicf("%s: no match for %#q", file, pattern)
}
log.Panicf("unrecognized pattern: %q", pattern)
return 0
}
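// Example invocations (illustrative; the file name is hypothetical):
//
//	{{code "progs/hello.go"}}                     whole file
//	{{code "progs/hello.go" 3}}                   line 3 only
//	{{code "progs/hello.go" `/^func main/` `$`}}  from main's signature to EOF
//
// Patterns follow match() above: a decimal line number, `/regexp/`, or "$"
// for end of file; in multi-line output, lines ending in "OMIT" are dropped
// by multipleLines().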
| oopos/go | src/cmd/godoc/template.go | GO | bsd-3-clause | 5,495 |
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.android_webview;
import android.annotation.SuppressLint;
import android.content.Context;
import android.media.AudioManager;
import android.net.Uri;
import android.os.Handler;
import android.os.Message;
import android.provider.MediaStore;
import android.text.TextUtils;
import android.util.Log;
import android.view.KeyEvent;
import android.view.View;
import android.webkit.URLUtil;
import android.widget.FrameLayout;
import org.chromium.android_webview.common.AwFeatures;
import org.chromium.base.Callback;
import org.chromium.base.ContentUriUtils;
import org.chromium.base.ThreadUtils;
import org.chromium.base.task.AsyncTask;
import org.chromium.content_public.browser.InvalidateTypes;
import org.chromium.content_public.common.ContentUrlConstants;
import org.chromium.content_public.common.ResourceRequestBody;
import org.chromium.url.GURL;
/**
* Adapts the AwWebContentsDelegate interface to the AwContentsClient interface.
* This class also serves a secondary function of routing certain callbacks from the content layer
* to specific listener interfaces.
*/
class AwWebContentsDelegateAdapter extends AwWebContentsDelegate {
private static final String TAG = "AwWebContentsDelegateAdapter";
private final AwContents mAwContents;
private final AwContentsClient mContentsClient;
private final AwSettings mAwSettings;
private final Context mContext;
private View mContainerView;
private FrameLayout mCustomView;
private boolean mDidSynthesizePageLoad;
public AwWebContentsDelegateAdapter(AwContents awContents, AwContentsClient contentsClient,
AwSettings settings, Context context, View containerView) {
mAwContents = awContents;
mContentsClient = contentsClient;
mAwSettings = settings;
mContext = context;
mDidSynthesizePageLoad = false;
setContainerView(containerView);
}
public void setContainerView(View containerView) {
mContainerView = containerView;
mContainerView.setClickable(true);
}
@Override
public void handleKeyboardEvent(KeyEvent event) {
if (event.getAction() == KeyEvent.ACTION_DOWN) {
int direction;
switch (event.getKeyCode()) {
case KeyEvent.KEYCODE_DPAD_DOWN:
direction = View.FOCUS_DOWN;
break;
case KeyEvent.KEYCODE_DPAD_UP:
direction = View.FOCUS_UP;
break;
case KeyEvent.KEYCODE_DPAD_LEFT:
direction = View.FOCUS_LEFT;
break;
case KeyEvent.KEYCODE_DPAD_RIGHT:
direction = View.FOCUS_RIGHT;
break;
default:
direction = 0;
break;
}
if (direction != 0 && tryToMoveFocus(direction)) return;
}
handleMediaKey(event);
mContentsClient.onUnhandledKeyEvent(event);
}
/**
* Redispatches unhandled media keys. This allows bluetooth headphones with play/pause or
* other buttons to function correctly.
*/
private void handleMediaKey(KeyEvent e) {
switch (e.getKeyCode()) {
case KeyEvent.KEYCODE_MUTE:
case KeyEvent.KEYCODE_HEADSETHOOK:
case KeyEvent.KEYCODE_MEDIA_PLAY:
case KeyEvent.KEYCODE_MEDIA_PAUSE:
case KeyEvent.KEYCODE_MEDIA_PLAY_PAUSE:
case KeyEvent.KEYCODE_MEDIA_STOP:
case KeyEvent.KEYCODE_MEDIA_NEXT:
case KeyEvent.KEYCODE_MEDIA_PREVIOUS:
case KeyEvent.KEYCODE_MEDIA_REWIND:
case KeyEvent.KEYCODE_MEDIA_RECORD:
case KeyEvent.KEYCODE_MEDIA_FAST_FORWARD:
case KeyEvent.KEYCODE_MEDIA_CLOSE:
case KeyEvent.KEYCODE_MEDIA_EJECT:
case KeyEvent.KEYCODE_MEDIA_AUDIO_TRACK:
AudioManager am = (AudioManager) mContext.getSystemService(Context.AUDIO_SERVICE);
am.dispatchMediaKeyEvent(e);
break;
default:
break;
}
}
@Override
public boolean takeFocus(boolean reverse) {
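        // In an RTL layout "forward" is visually to the left, so flip the
        // horizontal search direction before falling back to
        // FOCUS_BACKWARD/FOCUS_FORWARD below.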
int direction =
(reverse == (mContainerView.getLayoutDirection() == View.LAYOUT_DIRECTION_RTL))
? View.FOCUS_RIGHT : View.FOCUS_LEFT;
if (tryToMoveFocus(direction)) return true;
direction = reverse ? View.FOCUS_BACKWARD : View.FOCUS_FORWARD;
return tryToMoveFocus(direction);
}
private boolean tryToMoveFocus(int direction) {
View focus = mContainerView.focusSearch(direction);
return focus != null && focus != mContainerView && focus.requestFocus();
}
@Override
public boolean addMessageToConsole(int level, String message, int lineNumber,
String sourceId) {
@AwConsoleMessage.MessageLevel
int messageLevel = AwConsoleMessage.MESSAGE_LEVEL_DEBUG;
switch(level) {
case LOG_LEVEL_TIP:
messageLevel = AwConsoleMessage.MESSAGE_LEVEL_TIP;
break;
case LOG_LEVEL_LOG:
messageLevel = AwConsoleMessage.MESSAGE_LEVEL_LOG;
break;
case LOG_LEVEL_WARNING:
messageLevel = AwConsoleMessage.MESSAGE_LEVEL_WARNING;
break;
case LOG_LEVEL_ERROR:
messageLevel = AwConsoleMessage.MESSAGE_LEVEL_ERROR;
break;
default:
Log.w(TAG, "Unknown message level, defaulting to DEBUG");
break;
}
boolean result = mContentsClient.onConsoleMessage(
new AwConsoleMessage(message, sourceId, lineNumber, messageLevel));
return result;
}
@Override
public void onUpdateUrl(GURL url) {
// TODO: implement
}
@Override
public void openNewTab(GURL url, String extraHeaders, ResourceRequestBody postData,
int disposition, boolean isRendererInitiated) {
// This is only called in chrome layers.
assert false;
}
@Override
public void closeContents() {
mContentsClient.onCloseWindow();
}
@Override
@SuppressLint("HandlerLeak")
public void showRepostFormWarningDialog() {
// TODO(mkosiba) We should be using something akin to the JsResultReceiver as the
// callback parameter (instead of WebContents) and implement a way of converting
// that to a pair of messages.
final int msgContinuePendingReload = 1;
final int msgCancelPendingReload = 2;
// TODO(sgurun) Remember the URL to cancel the reload behavior
// if it is different than the most recent NavigationController entry.
final Handler handler = new Handler(ThreadUtils.getUiThreadLooper()) {
@Override
public void handleMessage(Message msg) {
if (mAwContents.getNavigationController() == null) return;
switch(msg.what) {
case msgContinuePendingReload: {
mAwContents.getNavigationController().continuePendingReload();
break;
}
case msgCancelPendingReload: {
mAwContents.getNavigationController().cancelPendingReload();
break;
}
default:
throw new IllegalStateException(
"WebContentsDelegateAdapter: unhandled message " + msg.what);
}
}
};
Message resend = handler.obtainMessage(msgContinuePendingReload);
Message dontResend = handler.obtainMessage(msgCancelPendingReload);
mContentsClient.getCallbackHelper().postOnFormResubmission(dontResend, resend);
}
@Override
public void runFileChooser(final int processId, final int renderId, final int modeFlags,
String acceptTypes, String title, String defaultFilename, boolean capture) {
int correctedModeFlags = FileModeConversionHelper.convertFileChooserMode(modeFlags);
AwContentsClient.FileChooserParamsImpl params = new AwContentsClient.FileChooserParamsImpl(
correctedModeFlags, acceptTypes, title, defaultFilename, capture);
mContentsClient.showFileChooser(new Callback<String[]>() {
boolean mCompleted;
@Override
public void onResult(String[] results) {
if (mCompleted) {
throw new IllegalStateException("Duplicate showFileChooser result");
}
mCompleted = true;
if (results == null) {
AwWebContentsDelegateJni.get().filesSelectedInChooser(
processId, renderId, correctedModeFlags, null, null);
return;
}
GetDisplayNameTask task = new GetDisplayNameTask(
mContext, processId, renderId, correctedModeFlags, results);
task.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
}
}, params);
}
@Override
public boolean addNewContents(boolean isDialog, boolean isUserGesture) {
return mContentsClient.onCreateWindow(isDialog, isUserGesture);
}
@Override
public void activateContents() {
mContentsClient.onRequestFocus();
}
@Override
public void navigationStateChanged(int flags) {
// If this is a popup whose document has been accessed by script, hint
// the client to show the last committed url through synthesizing a page
// load, as it may be unsafe to show the pending entry.
boolean shouldSynthesizePageLoad = ((flags & InvalidateTypes.URL) != 0)
&& mAwContents.isPopupWindow() && mAwContents.hasAccessedInitialDocument();
if (AwFeatureList.isEnabled(
AwFeatures.WEBVIEW_SYNTHESIZE_PAGE_LOAD_ONLY_ON_INITIAL_MAIN_DOCUMENT_ACCESS)) {
// Since we want to synthesize the page load only once for when the
// NavigationStateChange call is triggered by the first initial main
// document access, the flag must match InvalidateTypes.URL (the flag
// fired by NavigationControllerImpl::DidAccessInitialMainDocument())
// and we must check whether a page load has previously been
// synthesized here.
shouldSynthesizePageLoad &= (flags == InvalidateTypes.URL) && !mDidSynthesizePageLoad;
}
if (shouldSynthesizePageLoad) {
String url = mAwContents.getLastCommittedUrl();
url = TextUtils.isEmpty(url) ? ContentUrlConstants.ABOUT_BLANK_DISPLAY_URL : url;
mContentsClient.getCallbackHelper().postSynthesizedPageLoadingForUrlBarUpdate(url);
mDidSynthesizePageLoad = true;
}
}
@Override
public void enterFullscreenModeForTab(boolean prefersNavigationBar) {
enterFullscreen();
}
@Override
public void exitFullscreenModeForTab() {
exitFullscreen();
}
@Override
public int getDisplayMode() {
return mAwContents.getDisplayMode();
}
@Override
public void loadingStateChanged() {
mContentsClient.updateTitle(mAwContents.getTitle(), false);
}
/**
* Called to show the web contents in fullscreen mode.
*
* <p>If entering fullscreen on a video element the web contents will contain just
* the html5 video controls. {@link #enterFullscreenVideo(View)} will be called later
* once the ContentVideoView, which contains the hardware accelerated fullscreen video,
* is ready to be shown.
*/
private void enterFullscreen() {
if (mAwContents.isFullScreen()) {
return;
}
View fullscreenView = mAwContents.enterFullScreen();
if (fullscreenView == null) {
return;
}
AwContentsClient.CustomViewCallback cb = () -> {
if (mCustomView != null) {
mAwContents.requestExitFullscreen();
}
};
mCustomView = new FrameLayout(mContext);
mCustomView.addView(fullscreenView);
mContentsClient.onShowCustomView(mCustomView, cb);
}
/**
* Called to show the web contents in embedded mode.
*/
private void exitFullscreen() {
if (mCustomView != null) {
mCustomView = null;
mAwContents.exitFullScreen();
mContentsClient.onHideCustomView();
}
}
@Override
public boolean shouldBlockMediaRequest(GURL url) {
return mAwSettings != null
? mAwSettings.getBlockNetworkLoads() && URLUtil.isNetworkUrl(url.getSpec())
: true;
}
private static class GetDisplayNameTask extends AsyncTask<String[]> {
final int mProcessId;
final int mRenderId;
final int mModeFlags;
final String[] mFilePaths;
// The task doesn't run long, so we don't gain anything from a weak ref.
@SuppressLint("StaticFieldLeak")
final Context mContext;
public GetDisplayNameTask(
Context context, int processId, int renderId, int modeFlags, String[] filePaths) {
mProcessId = processId;
mRenderId = renderId;
mModeFlags = modeFlags;
mFilePaths = filePaths;
mContext = context;
}
@Override
protected String[] doInBackground() {
String[] displayNames = new String[mFilePaths.length];
for (int i = 0; i < mFilePaths.length; i++) {
displayNames[i] = resolveFileName(mFilePaths[i]);
}
return displayNames;
}
@Override
protected void onPostExecute(String[] result) {
AwWebContentsDelegateJni.get().filesSelectedInChooser(
mProcessId, mRenderId, mModeFlags, mFilePaths, result);
}
/**
* @return the display name of a path if it is a content URI and is present in the database
* or an empty string otherwise.
*/
private String resolveFileName(String filePath) {
if (filePath == null) return "";
Uri uri = Uri.parse(filePath);
return ContentUriUtils.getDisplayName(
uri, mContext, MediaStore.MediaColumns.DISPLAY_NAME);
}
}
}
| chromium/chromium | android_webview/java/src/org/chromium/android_webview/AwWebContentsDelegateAdapter.java | Java | bsd-3-clause | 14,739 |
require('should');
var option = require('..').sdk.option;
describe('option', function() {
it('can get default values', function() {
option.get('encoding').should.equal('utf8');
});
it('can set values', function() {
option.set('encoding', 'unicode');
option.get('encoding').should.equal('unicode');
option.clean();
option.get('encoding').should.equal('utf8');
option.option('encoding').should.equal('utf8');
option.option('encoding', 'unicode');
option.get('encoding').should.equal('unicode');
option.clean();
});
it('will init with some values', function() {
var o = new option.Option({foo: 'bar'});
o.get('foo').should.equal('bar');
});
it('can clean a key', function() {
var o = new option.Option({foo: 'bar'});
o.clean('foo');
o._cache.should.eql({});
});
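  // `defaults` deep-merges with values from later `set` calls instead of
  // being replaced wholesale, as the ownProperty checks below verify.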
it('can set defaults', function() {
option.defaults({
foo: {
foo: 'bar'
}
});
option.set('foo', {bar: 'foo'});
option.get('foo').should.have.ownProperty('foo');
option.get('foo').should.have.ownProperty('bar');
});
});
| thcode/nico | tests/sdk.option.test.js | JavaScript | bsd-3-clause | 1,096 |
import datetime
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.http import Http404
from django.utils import timezone
from django.utils.functional import cached_property
from django.utils.translation import gettext as _
from django.views.generic.base import View
from django.views.generic.detail import (
BaseDetailView, SingleObjectTemplateResponseMixin,
)
from django.views.generic.list import (
MultipleObjectMixin, MultipleObjectTemplateResponseMixin,
)
class YearMixin:
"""Mixin for views manipulating year-based data."""
year_format = '%Y'
year = None
def get_year_format(self):
"""
Get a year format string in strptime syntax to be used to parse the
year from url variables.
"""
return self.year_format
def get_year(self):
"""Return the year for which this view should display data."""
year = self.year
if year is None:
try:
year = self.kwargs['year']
except KeyError:
try:
year = self.request.GET['year']
except KeyError:
raise Http404(_("No year specified"))
return year
def get_next_year(self, date):
"""Get the next valid year."""
return _get_next_prev(self, date, is_previous=False, period='year')
def get_previous_year(self, date):
"""Get the previous valid year."""
return _get_next_prev(self, date, is_previous=True, period='year')
def _get_next_year(self, date):
"""
Return the start date of the next interval.
The interval is defined by start date <= item date < next start date.
"""
try:
return date.replace(year=date.year + 1, month=1, day=1)
except ValueError:
raise Http404(_("Date out of range"))
def _get_current_year(self, date):
"""Return the start date of the current interval."""
return date.replace(month=1, day=1)
class MonthMixin:
"""Mixin for views manipulating month-based data."""
month_format = '%b'
month = None
def get_month_format(self):
"""
Get a month format string in strptime syntax to be used to parse the
month from url variables.
"""
return self.month_format
def get_month(self):
"""Return the month for which this view should display data."""
month = self.month
if month is None:
try:
month = self.kwargs['month']
except KeyError:
try:
month = self.request.GET['month']
except KeyError:
raise Http404(_("No month specified"))
return month
def get_next_month(self, date):
"""Get the next valid month."""
return _get_next_prev(self, date, is_previous=False, period='month')
def get_previous_month(self, date):
"""Get the previous valid month."""
return _get_next_prev(self, date, is_previous=True, period='month')
def _get_next_month(self, date):
"""
Return the start date of the next interval.
The interval is defined by start date <= item date < next start date.
"""
if date.month == 12:
try:
return date.replace(year=date.year + 1, month=1, day=1)
except ValueError:
raise Http404(_("Date out of range"))
else:
return date.replace(month=date.month + 1, day=1)
def _get_current_month(self, date):
"""Return the start date of the previous interval."""
return date.replace(day=1)
class DayMixin:
"""Mixin for views manipulating day-based data."""
day_format = '%d'
day = None
def get_day_format(self):
"""
Get a day format string in strptime syntax to be used to parse the day
from url variables.
"""
return self.day_format
def get_day(self):
"""Return the day for which this view should display data."""
day = self.day
if day is None:
try:
day = self.kwargs['day']
except KeyError:
try:
day = self.request.GET['day']
except KeyError:
raise Http404(_("No day specified"))
return day
def get_next_day(self, date):
"""Get the next valid day."""
return _get_next_prev(self, date, is_previous=False, period='day')
def get_previous_day(self, date):
"""Get the previous valid day."""
return _get_next_prev(self, date, is_previous=True, period='day')
def _get_next_day(self, date):
"""
Return the start date of the next interval.
The interval is defined by start date <= item date < next start date.
"""
return date + datetime.timedelta(days=1)
def _get_current_day(self, date):
"""Return the start date of the current interval."""
return date
class WeekMixin:
"""Mixin for views manipulating week-based data."""
week_format = '%U'
week = None
def get_week_format(self):
"""
Get a week format string in strptime syntax to be used to parse the
week from url variables.
"""
return self.week_format
def get_week(self):
"""Return the week for which this view should display data."""
week = self.week
if week is None:
try:
week = self.kwargs['week']
except KeyError:
try:
week = self.request.GET['week']
except KeyError:
raise Http404(_("No week specified"))
return week
def get_next_week(self, date):
"""Get the next valid week."""
return _get_next_prev(self, date, is_previous=False, period='week')
def get_previous_week(self, date):
"""Get the previous valid week."""
return _get_next_prev(self, date, is_previous=True, period='week')
def _get_next_week(self, date):
"""
Return the start date of the next interval.
The interval is defined by start date <= item date < next start date.
"""
try:
return date + datetime.timedelta(days=7 - self._get_weekday(date))
except OverflowError:
raise Http404(_("Date out of range"))
def _get_current_week(self, date):
"""Return the start date of the current interval."""
return date - datetime.timedelta(self._get_weekday(date))
def _get_weekday(self, date):
"""
Return the weekday for a given date.
The first day according to the week format is 0 and the last day is 6.
"""
week_format = self.get_week_format()
if week_format == '%W': # week starts on Monday
return date.weekday()
elif week_format == '%U': # week starts on Sunday
return (date.weekday() + 1) % 7
else:
raise ValueError("unknown week format: %s" % week_format)
class DateMixin:
"""Mixin class for views manipulating date-based data."""
date_field = None
allow_future = False
def get_date_field(self):
"""Get the name of the date field to be used to filter by."""
if self.date_field is None:
raise ImproperlyConfigured("%s.date_field is required." % self.__class__.__name__)
return self.date_field
def get_allow_future(self):
"""
Return `True` if the view should be allowed to display objects from
the future.
"""
return self.allow_future
# Note: the following three methods only work in subclasses that also
# inherit SingleObjectMixin or MultipleObjectMixin.
@cached_property
def uses_datetime_field(self):
"""
Return `True` if the date field is a `DateTimeField` and `False`
if it's a `DateField`.
"""
model = self.get_queryset().model if self.model is None else self.model
field = model._meta.get_field(self.get_date_field())
return isinstance(field, models.DateTimeField)
def _make_date_lookup_arg(self, value):
"""
Convert a date into a datetime when the date field is a DateTimeField.
When time zone support is enabled, `date` is assumed to be in the
current time zone, so that displayed items are consistent with the URL.
"""
if self.uses_datetime_field:
value = datetime.datetime.combine(value, datetime.time.min)
if settings.USE_TZ:
value = timezone.make_aware(value, timezone.get_current_timezone())
return value
def _make_single_date_lookup(self, date):
"""
Get the lookup kwargs for filtering on a single date.
If the date field is a DateTimeField, we can't just filter on
date_field=date because that doesn't take the time into account.
"""
date_field = self.get_date_field()
if self.uses_datetime_field:
since = self._make_date_lookup_arg(date)
until = self._make_date_lookup_arg(date + datetime.timedelta(days=1))
return {
'%s__gte' % date_field: since,
'%s__lt' % date_field: until,
}
else:
# Skip self._make_date_lookup_arg, it's a no-op in this branch.
return {date_field: date}
class BaseDateListView(MultipleObjectMixin, DateMixin, View):
"""Abstract base class for date-based views displaying a list of objects."""
allow_empty = False
date_list_period = 'year'
def get(self, request, *args, **kwargs):
self.date_list, self.object_list, extra_context = self.get_dated_items()
context = self.get_context_data(
object_list=self.object_list,
date_list=self.date_list,
**extra_context
)
return self.render_to_response(context)
def get_dated_items(self):
"""Obtain the list of dates and items."""
raise NotImplementedError('A DateView must provide an implementation of get_dated_items()')
def get_ordering(self):
"""
Return the field or fields to use for ordering the queryset; use the
date field by default.
"""
return '-%s' % self.get_date_field() if self.ordering is None else self.ordering
def get_dated_queryset(self, **lookup):
"""
Get a queryset properly filtered according to `allow_future` and any
extra lookup kwargs.
"""
qs = self.get_queryset().filter(**lookup)
date_field = self.get_date_field()
allow_future = self.get_allow_future()
allow_empty = self.get_allow_empty()
paginate_by = self.get_paginate_by(qs)
if not allow_future:
now = timezone.now() if self.uses_datetime_field else timezone_today()
qs = qs.filter(**{'%s__lte' % date_field: now})
if not allow_empty:
# When pagination is enabled, it's better to do a cheap query
# than to load the unpaginated queryset in memory.
is_empty = len(qs) == 0 if paginate_by is None else not qs.exists()
if is_empty:
raise Http404(_("No %(verbose_name_plural)s available") % {
'verbose_name_plural': qs.model._meta.verbose_name_plural,
})
return qs
def get_date_list_period(self):
"""
Get the aggregation period for the list of dates: 'year', 'month', or
'day'.
"""
return self.date_list_period
def get_date_list(self, queryset, date_type=None, ordering='ASC'):
"""
Get a date list by calling `queryset.dates/datetimes()`, checking
along the way for empty lists that aren't allowed.
"""
date_field = self.get_date_field()
allow_empty = self.get_allow_empty()
if date_type is None:
date_type = self.get_date_list_period()
if self.uses_datetime_field:
date_list = queryset.datetimes(date_field, date_type, ordering)
else:
date_list = queryset.dates(date_field, date_type, ordering)
if date_list is not None and not date_list and not allow_empty:
raise Http404(
_("No %(verbose_name_plural)s available") % {
'verbose_name_plural': queryset.model._meta.verbose_name_plural,
}
)
return date_list
class BaseArchiveIndexView(BaseDateListView):
"""
Base class for archives of date-based items. Requires a response mixin.
"""
context_object_name = 'latest'
def get_dated_items(self):
"""Return (date_list, items, extra_context) for this request."""
qs = self.get_dated_queryset()
date_list = self.get_date_list(qs, ordering='DESC')
if not date_list:
qs = qs.none()
return (date_list, qs, {})
class ArchiveIndexView(MultipleObjectTemplateResponseMixin, BaseArchiveIndexView):
"""Top-level archive of date-based items."""
template_name_suffix = '_archive'
class BaseYearArchiveView(YearMixin, BaseDateListView):
"""List of objects published in a given year."""
date_list_period = 'month'
make_object_list = False
def get_dated_items(self):
"""Return (date_list, items, extra_context) for this request."""
year = self.get_year()
date_field = self.get_date_field()
date = _date_from_string(year, self.get_year_format())
since = self._make_date_lookup_arg(date)
until = self._make_date_lookup_arg(self._get_next_year(date))
lookup_kwargs = {
'%s__gte' % date_field: since,
'%s__lt' % date_field: until,
}
qs = self.get_dated_queryset(**lookup_kwargs)
date_list = self.get_date_list(qs)
if not self.get_make_object_list():
# We need this to be a queryset since parent classes introspect it
# to find information about the model.
qs = qs.none()
return (date_list, qs, {
'year': date,
'next_year': self.get_next_year(date),
'previous_year': self.get_previous_year(date),
})
def get_make_object_list(self):
"""
Return `True` if this view should contain the full list of objects in
the given year.
"""
return self.make_object_list
class YearArchiveView(MultipleObjectTemplateResponseMixin, BaseYearArchiveView):
"""List of objects published in a given year."""
template_name_suffix = '_archive_year'
class BaseMonthArchiveView(YearMixin, MonthMixin, BaseDateListView):
"""List of objects published in a given month."""
date_list_period = 'day'
def get_dated_items(self):
"""Return (date_list, items, extra_context) for this request."""
year = self.get_year()
month = self.get_month()
date_field = self.get_date_field()
date = _date_from_string(year, self.get_year_format(),
month, self.get_month_format())
since = self._make_date_lookup_arg(date)
until = self._make_date_lookup_arg(self._get_next_month(date))
lookup_kwargs = {
'%s__gte' % date_field: since,
'%s__lt' % date_field: until,
}
qs = self.get_dated_queryset(**lookup_kwargs)
date_list = self.get_date_list(qs)
return (date_list, qs, {
'month': date,
'next_month': self.get_next_month(date),
'previous_month': self.get_previous_month(date),
})
class MonthArchiveView(MultipleObjectTemplateResponseMixin, BaseMonthArchiveView):
"""List of objects published in a given month."""
template_name_suffix = '_archive_month'
class BaseWeekArchiveView(YearMixin, WeekMixin, BaseDateListView):
"""List of objects published in a given week."""
def get_dated_items(self):
"""Return (date_list, items, extra_context) for this request."""
year = self.get_year()
week = self.get_week()
date_field = self.get_date_field()
week_format = self.get_week_format()
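        # strptime needs an explicit weekday to resolve a year/week pair to a
        # date; '%w' counts days with 0=Sunday, so '%W' weeks anchor on Monday
        # ('1') and '%U' weeks on Sunday ('0').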
week_start = {
'%W': '1',
'%U': '0',
}[week_format]
date = _date_from_string(year, self.get_year_format(),
week_start, '%w',
week, week_format)
since = self._make_date_lookup_arg(date)
until = self._make_date_lookup_arg(self._get_next_week(date))
lookup_kwargs = {
'%s__gte' % date_field: since,
'%s__lt' % date_field: until,
}
qs = self.get_dated_queryset(**lookup_kwargs)
return (None, qs, {
'week': date,
'next_week': self.get_next_week(date),
'previous_week': self.get_previous_week(date),
})
class WeekArchiveView(MultipleObjectTemplateResponseMixin, BaseWeekArchiveView):
"""List of objects published in a given week."""
template_name_suffix = '_archive_week'
class BaseDayArchiveView(YearMixin, MonthMixin, DayMixin, BaseDateListView):
"""List of objects published on a given day."""
def get_dated_items(self):
"""Return (date_list, items, extra_context) for this request."""
year = self.get_year()
month = self.get_month()
day = self.get_day()
date = _date_from_string(year, self.get_year_format(),
month, self.get_month_format(),
day, self.get_day_format())
return self._get_dated_items(date)
def _get_dated_items(self, date):
"""
Do the actual heavy lifting of getting the dated items; this accepts a
date object so that TodayArchiveView can be trivial.
"""
lookup_kwargs = self._make_single_date_lookup(date)
qs = self.get_dated_queryset(**lookup_kwargs)
return (None, qs, {
'day': date,
'previous_day': self.get_previous_day(date),
'next_day': self.get_next_day(date),
'previous_month': self.get_previous_month(date),
'next_month': self.get_next_month(date)
})
class DayArchiveView(MultipleObjectTemplateResponseMixin, BaseDayArchiveView):
"""List of objects published on a given day."""
template_name_suffix = "_archive_day"
class BaseTodayArchiveView(BaseDayArchiveView):
"""List of objects published today."""
def get_dated_items(self):
"""Return (date_list, items, extra_context) for this request."""
return self._get_dated_items(datetime.date.today())
class TodayArchiveView(MultipleObjectTemplateResponseMixin, BaseTodayArchiveView):
"""List of objects published today."""
template_name_suffix = "_archive_day"
class BaseDateDetailView(YearMixin, MonthMixin, DayMixin, DateMixin, BaseDetailView):
"""
Detail view of a single object on a single date; this differs from the
standard DetailView by accepting a year/month/day in the URL.
"""
def get_object(self, queryset=None):
"""Get the object this request displays."""
year = self.get_year()
month = self.get_month()
day = self.get_day()
date = _date_from_string(year, self.get_year_format(),
month, self.get_month_format(),
day, self.get_day_format())
# Use a custom queryset if provided
qs = self.get_queryset() if queryset is None else queryset
if not self.get_allow_future() and date > datetime.date.today():
raise Http404(_(
"Future %(verbose_name_plural)s not available because "
"%(class_name)s.allow_future is False."
) % {
'verbose_name_plural': qs.model._meta.verbose_name_plural,
'class_name': self.__class__.__name__,
})
# Filter down a queryset from self.queryset using the date from the
# URL. This'll get passed as the queryset to DetailView.get_object,
# which'll handle the 404
lookup_kwargs = self._make_single_date_lookup(date)
qs = qs.filter(**lookup_kwargs)
return super().get_object(queryset=qs)
class DateDetailView(SingleObjectTemplateResponseMixin, BaseDateDetailView):
"""
Detail view of a single object on a single date; this differs from the
standard DetailView by accepting a year/month/day in the URL.
"""
template_name_suffix = '_detail'
def _date_from_string(year, year_format, month='', month_format='', day='', day_format='', delim='__'):
"""
Get a datetime.date object given a format string and a year, month, and day
(only year is mandatory). Raise a 404 for an invalid date.
"""
format = year_format + delim + month_format + delim + day_format
datestr = str(year) + delim + str(month) + delim + str(day)
try:
return datetime.datetime.strptime(datestr, format).date()
except ValueError:
raise Http404(_("Invalid date string '%(datestr)s' given format '%(format)s'") % {
'datestr': datestr,
'format': format,
})
def _get_next_prev(generic_view, date, is_previous, period):
"""
Get the next or the previous valid date. The idea is to allow links on
month/day views to never be 404s by never providing a date that'll be
invalid for the given view.
This is a bit complicated since it handles different intervals of time,
hence the coupling to generic_view.
However in essence the logic comes down to:
* If allow_empty and allow_future are both true, this is easy: just
return the naive result (just the next/previous day/week/month,
regardless of object existence.)
* If allow_empty is true, allow_future is false, and the naive result
isn't in the future, then return it; otherwise return None.
* If allow_empty is false and allow_future is true, return the next
date *that contains a valid object*, even if it's in the future. If
there are no next objects, return None.
* If allow_empty is false and allow_future is false, return the next
date that contains a valid object. If that date is in the future, or
if there are no next objects, return None.
"""
date_field = generic_view.get_date_field()
allow_empty = generic_view.get_allow_empty()
allow_future = generic_view.get_allow_future()
get_current = getattr(generic_view, '_get_current_%s' % period)
get_next = getattr(generic_view, '_get_next_%s' % period)
# Bounds of the current interval
start, end = get_current(date), get_next(date)
# If allow_empty is True, the naive result will be valid
if allow_empty:
if is_previous:
result = get_current(start - datetime.timedelta(days=1))
else:
result = end
if allow_future or result <= timezone_today():
return result
else:
return None
# Otherwise, we'll need to go to the database to look for an object
# whose date_field is at least (greater than/less than) the given
# naive result
else:
# Construct a lookup and an ordering depending on whether we're doing
# a previous date or a next date lookup.
if is_previous:
lookup = {'%s__lt' % date_field: generic_view._make_date_lookup_arg(start)}
ordering = '-%s' % date_field
else:
lookup = {'%s__gte' % date_field: generic_view._make_date_lookup_arg(end)}
ordering = date_field
# Filter out objects in the future if appropriate.
if not allow_future:
# Fortunately, to match the implementation of allow_future,
# we need __lte, which doesn't conflict with __lt above.
if generic_view.uses_datetime_field:
now = timezone.now()
else:
now = timezone_today()
lookup['%s__lte' % date_field] = now
qs = generic_view.get_queryset().filter(**lookup).order_by(ordering)
# Snag the first object from the queryset; if it doesn't exist that
# means there's no next/previous link available.
try:
result = getattr(qs[0], date_field)
except IndexError:
return None
# Convert datetimes to dates in the current time zone.
if generic_view.uses_datetime_field:
if settings.USE_TZ:
result = timezone.localtime(result)
result = result.date()
# Return the first day of the period.
return get_current(result)
def timezone_today():
"""Return the current date in the current time zone."""
if settings.USE_TZ:
return timezone.localdate()
else:
return datetime.date.today()
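# A minimal URLconf sketch using this module; ``Article`` and ``pub_date``
# are illustrative names, not part of this file:
#
#     from django.urls import path
#     from django.views.generic.dates import ArchiveIndexView
#
#     urlpatterns = [
#         path('archive/', ArchiveIndexView.as_view(model=Article,
#                                                   date_field='pub_date')),
#     ]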
| shacker/django | django/views/generic/dates.py | Python | bsd-3-clause | 25,251 |
from __future__ import absolute_import, print_function
import inspect
import logging
import raven
import sentry
from django.conf import settings
from django.db.utils import DatabaseError
from raven.contrib.django.client import DjangoClient
from . import metrics
UNSAFE_FILES = (
'sentry/event_manager.py',
'sentry/tasks/process_buffer.py',
)
def can_record_current_event():
"""
Tests the current stack for unsafe locations that would likely cause
recursion if an attempt to send to Sentry was made.
"""
for _, filename, _, _, _, _ in inspect.stack():
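        # str.endswith accepts a tuple of suffixes, so this one check covers
        # every path in UNSAFE_FILES.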
if filename.endswith(UNSAFE_FILES):
return False
return True
class SentryInternalClient(DjangoClient):
def is_enabled(self):
if getattr(settings, 'DISABLE_RAVEN', False):
return False
return settings.SENTRY_PROJECT is not None
def capture(self, *args, **kwargs):
if not can_record_current_event():
metrics.incr('internal.uncaptured.events')
self.error_logger.error('Not capturing event due to unsafe stacktrace:\n%r', kwargs)
return
return super(SentryInternalClient, self).capture(*args, **kwargs)
def send(self, **kwargs):
# TODO(dcramer): this should respect rate limits/etc and use the normal
# pipeline
from sentry.app import tsdb
from sentry.coreapi import ClientApiHelper
from sentry.event_manager import EventManager
from sentry.models import Project
helper = ClientApiHelper(
agent='raven-python/%s (sentry %s)' % (raven.VERSION, sentry.VERSION),
project_id=settings.SENTRY_PROJECT,
version=self.protocol_version,
)
try:
project = Project.objects.get_from_cache(id=settings.SENTRY_PROJECT)
        except DatabaseError:
            self.error_logger.error('Unable to fetch internal project',
                                    exc_info=True)
            return
except Project.DoesNotExist:
self.error_logger.error('Internal project (id=%s) does not exist',
settings.SENTRY_PROJECT)
return
helper.context.bind_project(project)
metrics.incr('events.total', 1)
kwargs['project'] = project.id
try:
manager = EventManager(kwargs)
data = manager.normalize()
tsdb.incr_multi([
(tsdb.models.project_total_received, project.id),
(tsdb.models.organization_total_received, project.organization_id),
])
helper.insert_data_to_database(data)
except Exception as e:
if self.raise_send_errors:
raise
self.error_logger.error(
'Unable to record event: %s\nEvent was: %r', e,
kwargs['message'], exc_info=True)
class SentryInternalFilter(logging.Filter):
def filter(self, record):
metrics.incr('internal.uncaptured.logs')
return can_record_current_event()
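# A minimal logging-config sketch for wiring up the filter, assuming standard
# dictConfig syntax (the filter name is illustrative):
#
#     LOGGING = {
#         'version': 1,
#         'filters': {
#             'safe_internal': {'()': SentryInternalFilter},
#         },
#     }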
| hongliang5623/sentry | src/sentry/utils/raven.py | Python | bsd-3-clause | 3,051 |
function setSearchTextField(paramname, field) {
var passed = location.search.substring(1);
var query = getParm(passed,paramname);
query = query.replace(/\+/g," ");
var loc = document.location;
if(/.*search.html/.test(loc)) {
document.title = decodeURIComponent(query) + ' - Wolfram Search';
}
field.value = decodeURIComponent(query);
}
function getParm(string,parm) {
// returns value of parm from string
var startPos = string.indexOf(parm + "=");
if (startPos > -1) {
startPos = startPos + parm.length + 1;
var endPos = string.indexOf("&",startPos);
if (endPos == -1)
endPos = string.length;
return string.substring(startPos,endPos);
}
return '';
}
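// For example, getParm("q=foo&lang=en", "lang") returns "en"; an absent
// parameter returns the empty string.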
| mfroeling/DTITools | docs/htmldoc/standard/javascript/search.js | JavaScript | bsd-3-clause | 723 |
# Copyright 2012, Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can
# be found in the LICENSE file.
import os
import shlex
from subprocess import Popen, PIPE
import time
import unittest
import utils
class TestCase(unittest.TestCase):
@classmethod
def setenv(cls, env):
cls.env = env
def assertContains(self, b, a):
self.assertTrue(a in b, "%r not found in %r" % (a, b))
class MultiDict(dict):
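    # Adds attribute access and dotted-path lookup: for
    # d = MultiDict({'a': {'b': 1}}), both d.a.b and d.mget('a.b') return 1.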
def __getattr__(self, name):
v = self[name]
        if type(v) == dict:
            v = MultiDict(v)
return v
def mget(self, mkey, default=None):
keys = mkey.split(".")
try:
v = self
for key in keys:
v = v[key]
except KeyError:
v = default
        if type(v) == dict:
v = MultiDict(v)
return v
class Tailer(object):
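    # Tails a growing file: call reset() before triggering the action under
    # test, then read()/readLines() to collect only output written since the
    # reset.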
def __init__(self, filepath, flush=None, sleep=0, timeout=10.0):
self.filepath = filepath
self.flush = flush
self.sleep = sleep
self.timeout = timeout
self.f = None
self.reset()
def reset(self):
"""Call reset when you want to start using the tailer."""
if self.flush:
self.flush()
else:
time.sleep(self.sleep)
# Re-open the file if open.
if self.f:
self.f.close()
self.f = None
# Wait for file to exist.
timeout = self.timeout
while not os.path.exists(self.filepath):
timeout = utils.wait_step('file exists: ' + self.filepath, timeout)
self.f = open(self.filepath)
self.f.seek(0, os.SEEK_END)
self.pos = self.f.tell()
def read(self):
"""Returns a string which may contain multiple lines."""
if self.flush:
self.flush()
else:
time.sleep(self.sleep)
self.f.seek(0, os.SEEK_END)
newpos = self.f.tell()
if newpos < self.pos:
return ""
self.f.seek(self.pos, os.SEEK_SET)
size = newpos-self.pos
self.pos = newpos
return self.f.read(size)
def readLines(self):
"""Returns a list of read lines."""
return self.read().splitlines()
# FIXME: Hijacked from go/vt/tabletserver/test.py
# Reuse when things come together
def execute(cmd, trap_output=False, verbose=False, **kargs):
args = shlex.split(cmd)
if trap_output:
kargs['stdout'] = PIPE
kargs['stderr'] = PIPE
if verbose:
print "Execute:", cmd, ', '.join('%s=%s' % x for x in kargs.iteritems())
proc = Popen(args, **kargs)
proc.args = args
stdout, stderr = proc.communicate()
if proc.returncode:
raise Exception('FAIL: %s %s %s' % (args, stdout, stderr))
return stdout, stderr
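# Typical call (illustrative):
#     stdout, stderr = execute('ls /tmp', trap_output=True)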
| anusornc/vitess | test/framework.py | Python | bsd-3-clause | 2,552 |