code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9 values | license stringclasses 15 values | size int32 3 1.05M |
---|---|---|---|---|---|
package amidst.settings;
import java.util.Objects;
import java.util.function.Consumer;
import java.util.function.UnaryOperator;
import amidst.documentation.ThreadSafe;
@ThreadSafe
public class SettingBase<T> implements Setting<T> {
private final Consumer<T> setter;
private volatile T value;
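// Added note (not part of the original source): the constructor applies the supplied
// getter to the default value and immediately routes the result through set(), so the
// setter Consumer fires once at construction time and again on every later set() call.
// Illustrative use, with a hypothetical preferences store (prefs::putShowGrid is assumed):
//   Setting<Boolean> showGrid = new SettingBase<>(Boolean.TRUE, v -> v, prefs::putShowGrid);
//   showGrid.set(Boolean.FALSE); // stores the value and forwards it to the setter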
public SettingBase(T defaultValue, UnaryOperator<T> getter, Consumer<T> setter) {
Objects.requireNonNull(defaultValue);
this.setter = setter;
this.set(getter.apply(defaultValue));
}
@Override
public T get() {
return value;
}
@Override
public synchronized void set(T value) {
Objects.requireNonNull(value);
this.value = value;
setter.accept(value);
}
}
| BlueSteelAUS/amidst | src/main/java/amidst/settings/SettingBase.java | Java | gpl-3.0 | 674 |
/*
Stockfish, a UCI chess playing engine derived from Glaurung 2.1
Copyright (C) 2004-2008 Tord Romstad (Glaurung author)
Copyright (C) 2008-2015 Marco Costalba, Joona Kiiski, Tord Romstad
Copyright (C) 2015-2016 Marco Costalba, Joona Kiiski, Gary Linscott, Tord Romstad
Stockfish is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Stockfish is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <fstream>
#include <iomanip>
#include <iostream>
#include <sstream>
#include "misc.h"
#include "thread.h"
using namespace std;
namespace {
/// Version number. If Version is left empty, then the compile date in the format
/// DD-MM-YY is shown in engine_info.
const string Version = "8";
/// Our fancy logging facility. The trick here is to replace cin.rdbuf() and
/// cout.rdbuf() with two Tie objects that tie cin and cout to a file stream. We
/// can toggle the logging of std::cout and std::cin at runtime whilst preserving
/// usual I/O functionality, all without changing a single line of code!
/// Idea from http://groups.google.com/group/comp.lang.c++/msg/1d941c0f26ea0d81
struct Tie: public streambuf { // MSVC requires split streambuf for cin and cout
Tie(streambuf* b, streambuf* l) : buf(b), logBuf(l) {}
int sync() { return logBuf->pubsync(), buf->pubsync(); }
int overflow(int c) { return log(buf->sputc((char)c), "<< "); }
int underflow() { return buf->sgetc(); }
int uflow() { return log(buf->sbumpc(), ">> "); }
streambuf *buf, *logBuf;
int log(int c, const char* prefix) {
static int last = '\n'; // Single log file
if (last == '\n')
logBuf->sputn(prefix, 3);
return last = logBuf->sputc((char)c);
}
};
class Logger {
Logger() : in(cin.rdbuf(), file.rdbuf()), out(cout.rdbuf(), file.rdbuf()) {}
~Logger() { start(""); }
ofstream file;
Tie in, out;
public:
static void start(const std::string& fname) {
static Logger l;
if (!fname.empty() && !l.file.is_open())
{
l.file.open(fname, ifstream::out);
cin.rdbuf(&l.in);
cout.rdbuf(&l.out);
}
else if (fname.empty() && l.file.is_open())
{
cout.rdbuf(l.out.buf);
cin.rdbuf(l.in.buf);
l.file.close();
}
}
};
} // namespace
/// engine_info() returns the full name of the current Stockfish version. This
/// will be either "Stockfish <Tag> DD-MM-YY" (where DD-MM-YY is the date when
/// the program was compiled) or "Stockfish <Version>", depending on whether
/// Version is empty.
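/// Illustrative note (added, not part of the original source): with Version = "8" a
/// 64-bit POPCNT build reports e.g. "Stockfish 8 64 POPCNT by T. Romstad, M. Costalba,
/// J. Kiiski, G. Linscott"; with an empty Version the compile date is substituted
/// instead (zero-padded day, month number, two-digit year), e.g. "210908" for Sep 21 2008.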
const string engine_info(bool to_uci) {
const string months("Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec");
string month, day, year;
stringstream ss, date(__DATE__); // From compiler, format is "Sep 21 2008"
ss << "Stockfish " << Version << setfill('0');
if (Version.empty())
{
date >> month >> day >> year;
ss << setw(2) << day << setw(2) << (1 + months.find(month) / 4) << year.substr(2);
}
ss << (Is64Bit ? " 64" : "")
<< (HasPext ? " BMI2" : (HasPopCnt ? " POPCNT" : ""))
<< (to_uci ? "\nid author ": " by ")
<< "T. Romstad, M. Costalba, J. Kiiski, G. Linscott";
return ss.str();
}
/// Debug functions used mainly to collect run-time statistics
static int64_t hits[2], means[2];
void dbg_hit_on(bool b) { ++hits[0]; if (b) ++hits[1]; }
void dbg_hit_on(bool c, bool b) { if (c) dbg_hit_on(b); }
void dbg_mean_of(int v) { ++means[0]; means[1] += v; }
void dbg_print() {
if (hits[0])
cerr << "Total " << hits[0] << " Hits " << hits[1]
<< " hit rate (%) " << 100 * hits[1] / hits[0] << endl;
if (means[0])
cerr << "Total " << means[0] << " Mean "
<< (double)means[1] / means[0] << endl;
}
/// Used to serialize access to std::cout to avoid multiple threads writing at
/// the same time.
std::ostream& operator<<(std::ostream& os, SyncCout sc) {
static Mutex m;
if (sc == IO_LOCK)
m.lock();
if (sc == IO_UNLOCK)
m.unlock();
return os;
}
/// Trampoline helper to avoid moving Logger to misc.h
void start_logger(const std::string& fname) { Logger::start(fname); }
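/// Illustrative sketch (added, not part of the original source): toggling the logger at
/// runtime. A non-empty filename tees all cin/cout traffic into that file, prefixing
/// input lines with ">> " and output lines with "<< "; an empty string restores the
/// original stream buffers and closes the file.
///
///   start_logger("io_log.txt");           // begin logging UCI I/O
///   std::cout << "readyok" << std::endl;  // also appears in io_log.txt as "<< readyok"
///   start_logger("");                     // stop logging, restore plain cin/cout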
/// prefetch() preloads the given address in L1/L2 cache. This is a non-blocking
/// function that doesn't stall the CPU waiting for data to be loaded from memory,
/// which can be quite slow.
#ifdef NO_PREFETCH
void prefetch(void*) {}
#else
void prefetch(void* addr) {
# if defined(__INTEL_COMPILER)
// This hack prevents prefetches from being optimized away by
   // Intel compiler. Both MSVC and gcc seem not to be affected by this.
__asm__ ("");
# endif
# if defined(__INTEL_COMPILER) || defined(_MSC_VER)
_mm_prefetch((char*)addr, _MM_HINT_T0);
# else
__builtin_prefetch(addr);
# endif
}
#endif
| sovaz1997/ChessProblemGenerator | stockfish-8-src/src/misc.cpp | C++ | gpl-3.0 | 5,207 |
#ifndef _WIN32
#include <dlfcn.h>
#else
#include <windows.h>
#endif
#include "SymbolMatcher.h"
#include "config.h" // For UNDERSCORE_NEEDED etc
#include <cstdio>   // fprintf
#include <iostream>
#include <string>
#define FACTORY_PROC "getInstanceFor"
SymbolMatcher * SymbolMatcherFactory_getInstanceFor(Prog *prog, const char *sSymbolContainer, const char *hint)
{
std::string libName = "libid";
SymbolMatcher *res;
// Load the specific loader library
#ifndef _WIN32 // Cygwin, Unix/Linux
libName = std::string("lib/lib") + libName;
#ifdef __CYGWIN__
libName += ".dll"; // Cygwin wants .dll, but is otherwise like Unix
#else
#if HOST_OSX
libName += ".dylib";
#else
libName += ".so";
#endif
#endif
static void* dlHandle = dlopen(libName.c_str(), RTLD_LAZY);
if (dlHandle == NULL)
{
fprintf( stderr, "Could not open dynamic loader library %s\n", libName.c_str());
fprintf( stderr, "%s\n", dlerror());
//fclose(f);
return NULL;
}
// Use the handle to find the "construct" function
#if UNDERSCORE_NEEDED
#define UNDERSCORE "_"
#else
#define UNDERSCORE
#endif
SYMMATCH_FACTORY pFcn = (SYMMATCH_FACTORY) dlsym(dlHandle, UNDERSCORE FACTORY_PROC);
#else // Else MSVC, MinGW
libName += ".dll"; // Example: ElfBinaryFile.dll (same dir as boomerang.exe)
#ifdef __MINGW32__
libName = "lib/lib" + libName;
#endif
static HMODULE hModule = LoadLibrary(libName.c_str());
if(hModule == NULL)
{
int err = GetLastError();
fprintf( stderr, "Could not open dynamic loader library %s (error #%d)\n", libName.c_str(), err);
return NULL;
}
// Use the handle to find the "construct" function
SYMMATCH_FACTORY pFcn = (SYMMATCH_FACTORY) GetProcAddress((HINSTANCE)hModule, FACTORY_PROC);
#endif
if (pFcn == NULL)
{
        fprintf( stderr, "Loader library %s does not have a " FACTORY_PROC " function\n", libName.c_str());
#ifndef _WIN32
fprintf( stderr, "dlerror returns %s\n", dlerror());
#endif
return NULL;
}
// Call the construct function
res = (*pFcn)(prog, sSymbolContainer, hint);
return res;
}
| aidanhs/boomerang | symbols/libidloader.cpp | C++ | gpl-3.0 | 2,186 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package java.util;
/**
* Timers schedule one-shot or recurring {@link TimerTask tasks} for execution.
* Prefer {@link java.util.concurrent.ScheduledThreadPoolExecutor
* ScheduledThreadPoolExecutor} for new code.
*
* <p>Each timer has one thread on which tasks are executed sequentially. When
* this thread is busy running a task, runnable tasks may be subject to delays.
*
* <p>One-shot tasks are scheduled to run at an absolute time or after a relative
* delay.
*
* <p>Recurring tasks are scheduled with either a fixed period or a fixed rate:
* <ul>
* <li>With the default <strong>fixed-period execution</strong>, each
* successive run of a task is scheduled relative to the start time of
* the previous run, so two runs are never fired closer together in time
* than the specified {@code period}.
* <li>With <strong>fixed-rate execution</strong>, the start time of each
* successive run of a task is scheduled without regard for when the
* previous run took place. This may result in a series of bunched-up runs
* (one launched immediately after another) if delays prevent the timer
* from starting tasks on time.
* </ul>
*
* <p>When a timer is no longer needed, users should call {@link #cancel}, which
* releases the timer's thread and other resources. Timers not explicitly
* cancelled may hold resources indefinitely.
*
* <p>This class does not offer guarantees about the real-time nature of task
* scheduling. Multiple threads can share a single timer without
* synchronization.
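 *
 * <p>Illustrative usage sketch (added; not part of the original documentation), using only
 * the constructors and the {@code schedule}, {@code scheduleAtFixedRate} and {@code cancel}
 * methods declared below:
 * <pre>
 * Timer timer = new Timer("cleanup", true);            // named daemon timer thread
 * timer.schedule(new TimerTask() {
 *     public void run() { System.out.println("fixed-delay tick"); }
 * }, 1000, 5000);                                       // first run after 1 s, then every 5 s
 * timer.scheduleAtFixedRate(new TimerTask() {
 *     public void run() { System.out.println("fixed-rate tick"); }
 * }, 1000, 5000);                                       // start times anchored to the schedule
 * // ... later, when the timer is no longer needed:
 * timer.cancel();                                       // releases the timer thread
 * </pre>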
*/
public class Timer {
private static final class TimerImpl extends Thread {
private static final class TimerHeap {
private int DEFAULT_HEAP_SIZE = 256;
private TimerTask[] timers = new TimerTask[DEFAULT_HEAP_SIZE];
private int size = 0;
private int deletedCancelledNumber = 0;
public TimerTask minimum() {
return timers[0];
}
public boolean isEmpty() {
return size == 0;
}
public void insert(TimerTask task) {
if (timers.length == size) {
TimerTask[] appendedTimers = new TimerTask[size * 2];
System.arraycopy(timers, 0, appendedTimers, 0, size);
timers = appendedTimers;
}
timers[size++] = task;
upHeap();
}
public void delete(int pos) {
                // possible to delete any position of the heap
if (pos >= 0 && pos < size) {
timers[pos] = timers[--size];
timers[size] = null;
downHeap(pos);
}
}
private void upHeap() {
int current = size - 1;
int parent = (current - 1) / 2;
while (timers[current].when < timers[parent].when) {
// swap the two
TimerTask tmp = timers[current];
timers[current] = timers[parent];
timers[parent] = tmp;
// update pos and current
current = parent;
parent = (current - 1) / 2;
}
}
private void downHeap(int pos) {
int current = pos;
int child = 2 * current + 1;
while (child < size && size > 0) {
// compare the children if they exist
if (child + 1 < size
&& timers[child + 1].when < timers[child].when) {
child++;
}
// compare selected child with parent
if (timers[current].when < timers[child].when) {
break;
}
// swap the two
TimerTask tmp = timers[current];
timers[current] = timers[child];
timers[child] = tmp;
// update pos and current
current = child;
child = 2 * current + 1;
}
}
public void reset() {
timers = new TimerTask[DEFAULT_HEAP_SIZE];
size = 0;
}
public void adjustMinimum() {
downHeap(0);
}
public void deleteIfCancelled() {
for (int i = 0; i < size; i++) {
if (timers[i].cancelled) {
deletedCancelledNumber++;
delete(i);
// re-try this point
i--;
}
}
}
private int getTask(TimerTask task) {
for (int i = 0; i < timers.length; i++) {
if (timers[i] == task) {
return i;
}
}
return -1;
}
}
/**
         * True if the cancel() method of the Timer was called or the stop()
         * method was invoked
*/
private boolean cancelled;
/**
* True if the Timer has become garbage
*/
private boolean finished;
/**
* Contains scheduled events, sorted according to
* {@code when} field of TaskScheduled object.
*/
private TimerHeap tasks = new TimerHeap();
/**
* Starts a new timer.
*
* @param name thread's name
* @param isDaemon daemon thread or not
*/
TimerImpl(String name, boolean isDaemon) {
this.setName(name);
this.setDaemon(isDaemon);
this.start();
}
/**
         * This method will be launched on a separate thread for each Timer
* object.
*/
@Override
public void run() {
while (true) {
TimerTask task;
synchronized (this) {
// need to check cancelled inside the synchronized block
if (cancelled) {
return;
}
if (tasks.isEmpty()) {
if (finished) {
return;
}
// no tasks scheduled -- sleep until any task appear
try {
this.wait();
} catch (InterruptedException ignored) {
}
continue;
}
long currentTime = System.currentTimeMillis();
task = tasks.minimum();
long timeToSleep;
synchronized (task.lock) {
if (task.cancelled) {
tasks.delete(0);
continue;
}
// check the time to sleep for the first task scheduled
timeToSleep = task.when - currentTime;
}
if (timeToSleep > 0) {
// sleep!
try {
this.wait(timeToSleep);
} catch (InterruptedException ignored) {
}
continue;
}
// no sleep is necessary before launching the task
synchronized (task.lock) {
int pos = 0;
if (tasks.minimum().when != task.when) {
pos = tasks.getTask(task);
}
if (task.cancelled) {
tasks.delete(tasks.getTask(task));
continue;
}
// set time to schedule
task.setScheduledTime(task.when);
// remove task from queue
tasks.delete(pos);
// set when the next task should be launched
if (task.period >= 0) {
// this is a repeating task,
if (task.fixedRate) {
// task is scheduled at fixed rate
task.when = task.when + task.period;
} else {
// task is scheduled at fixed delay
task.when = System.currentTimeMillis()
+ task.period;
}
// insert this task into queue
insertTask(task);
} else {
task.when = 0;
}
}
}
boolean taskCompletedNormally = false;
try {
task.run();
taskCompletedNormally = true;
} finally {
if (!taskCompletedNormally) {
synchronized (this) {
cancelled = true;
}
}
}
}
}
private void insertTask(TimerTask newTask) {
// callers are synchronized
tasks.insert(newTask);
this.notify();
}
/**
* Cancels timer.
*/
public synchronized void cancel() {
cancelled = true;
tasks.reset();
this.notify();
}
public int purge() {
if (tasks.isEmpty()) {
return 0;
}
// callers are synchronized
tasks.deletedCancelledNumber = 0;
tasks.deleteIfCancelled();
return tasks.deletedCancelledNumber;
}
}
private static final class FinalizerHelper {
private final TimerImpl impl;
FinalizerHelper(TimerImpl impl) {
this.impl = impl;
}
@Override protected void finalize() throws Throwable {
try {
synchronized (impl) {
impl.finished = true;
impl.notify();
}
} finally {
super.finalize();
}
}
}
private static long timerId;
private synchronized static long nextId() {
return timerId++;
}
    /* This object will be used for synchronization purposes */
private final TimerImpl impl;
// Used to finalize thread
@SuppressWarnings("unused")
private final FinalizerHelper finalizer;
/**
* Creates a new named {@code Timer} which may be specified to be run as a
* daemon thread.
*
* @throws NullPointerException if {@code name == null}
*/
public Timer(String name, boolean isDaemon) {
if (name == null) {
throw new NullPointerException("name == null");
}
this.impl = new TimerImpl(name, isDaemon);
this.finalizer = new FinalizerHelper(impl);
}
/**
* Creates a new named {@code Timer} which does not run as a daemon thread.
*
* @throws NullPointerException if {@code name == null}
*/
public Timer(String name) {
this(name, false);
}
/**
* Creates a new {@code Timer} which may be specified to be run as a daemon thread.
*
* @param isDaemon {@code true} if the {@code Timer}'s thread should be a daemon thread.
*/
public Timer(boolean isDaemon) {
this("Timer-" + Timer.nextId(), isDaemon);
}
/**
* Creates a new non-daemon {@code Timer}.
*/
public Timer() {
this(false);
}
/**
* Cancels the {@code Timer} and all scheduled tasks. If there is a
* currently running task it is not affected. No more tasks may be scheduled
* on this {@code Timer}. Subsequent calls do nothing.
*/
public void cancel() {
impl.cancel();
}
/**
* Removes all canceled tasks from the task queue. If there are no
* other references on the tasks, then after this call they are free
* to be garbage collected.
*
* @return the number of canceled tasks that were removed from the task
* queue.
*/
public int purge() {
synchronized (impl) {
return impl.purge();
}
}
/**
* Schedule a task for single execution. If {@code when} is less than the
* current time, it will be scheduled to be executed as soon as possible.
*
* @param task
* the task to schedule.
* @param when
* time of execution.
* @throws IllegalArgumentException
* if {@code when.getTime() < 0}.
* @throws IllegalStateException
* if the {@code Timer} has been canceled, or if the task has been
* scheduled or canceled.
*/
public void schedule(TimerTask task, Date when) {
if (when.getTime() < 0) {
throw new IllegalArgumentException("when < 0: " + when.getTime());
}
long delay = when.getTime() - System.currentTimeMillis();
scheduleImpl(task, delay < 0 ? 0 : delay, -1, false);
}
/**
* Schedule a task for single execution after a specified delay.
*
* @param task
* the task to schedule.
* @param delay
* amount of time in milliseconds before execution.
* @throws IllegalArgumentException
* if {@code delay < 0}.
* @throws IllegalStateException
* if the {@code Timer} has been canceled, or if the task has been
* scheduled or canceled.
*/
public void schedule(TimerTask task, long delay) {
if (delay < 0) {
throw new IllegalArgumentException("delay < 0: " + delay);
}
scheduleImpl(task, delay, -1, false);
}
/**
* Schedule a task for repeated fixed-delay execution after a specific delay.
*
* @param task
* the task to schedule.
* @param delay
* amount of time in milliseconds before first execution.
* @param period
* amount of time in milliseconds between subsequent executions.
* @throws IllegalArgumentException
* if {@code delay < 0} or {@code period <= 0}.
* @throws IllegalStateException
* if the {@code Timer} has been canceled, or if the task has been
* scheduled or canceled.
*/
public void schedule(TimerTask task, long delay, long period) {
if (delay < 0 || period <= 0) {
throw new IllegalArgumentException();
}
scheduleImpl(task, delay, period, false);
}
/**
* Schedule a task for repeated fixed-delay execution after a specific time
* has been reached.
*
* @param task
* the task to schedule.
* @param when
* time of first execution.
* @param period
* amount of time in milliseconds between subsequent executions.
* @throws IllegalArgumentException
* if {@code when.getTime() < 0} or {@code period <= 0}.
* @throws IllegalStateException
* if the {@code Timer} has been canceled, or if the task has been
* scheduled or canceled.
*/
public void schedule(TimerTask task, Date when, long period) {
if (period <= 0 || when.getTime() < 0) {
throw new IllegalArgumentException();
}
long delay = when.getTime() - System.currentTimeMillis();
scheduleImpl(task, delay < 0 ? 0 : delay, period, false);
}
/**
* Schedule a task for repeated fixed-rate execution after a specific delay
* has passed.
*
* @param task
* the task to schedule.
* @param delay
* amount of time in milliseconds before first execution.
* @param period
* amount of time in milliseconds between subsequent executions.
* @throws IllegalArgumentException
* if {@code delay < 0} or {@code period <= 0}.
* @throws IllegalStateException
* if the {@code Timer} has been canceled, or if the task has been
* scheduled or canceled.
*/
public void scheduleAtFixedRate(TimerTask task, long delay, long period) {
if (delay < 0 || period <= 0) {
throw new IllegalArgumentException();
}
scheduleImpl(task, delay, period, true);
}
/**
* Schedule a task for repeated fixed-rate execution after a specific time
* has been reached.
*
* @param task
* the task to schedule.
* @param when
* time of first execution.
* @param period
* amount of time in milliseconds between subsequent executions.
* @throws IllegalArgumentException
* if {@code when.getTime() < 0} or {@code period <= 0}.
* @throws IllegalStateException
* if the {@code Timer} has been canceled, or if the task has been
* scheduled or canceled.
*/
public void scheduleAtFixedRate(TimerTask task, Date when, long period) {
if (period <= 0 || when.getTime() < 0) {
throw new IllegalArgumentException();
}
long delay = when.getTime() - System.currentTimeMillis();
scheduleImpl(task, delay, period, true);
}
/*
* Schedule a task.
*/
private void scheduleImpl(TimerTask task, long delay, long period, boolean fixed) {
synchronized (impl) {
if (impl.cancelled) {
throw new IllegalStateException("Timer was canceled");
}
long when = delay + System.currentTimeMillis();
if (when < 0) {
throw new IllegalArgumentException("Illegal delay to start the TimerTask: " + when);
}
synchronized (task.lock) {
if (task.isScheduled()) {
throw new IllegalStateException("TimerTask is scheduled already");
}
if (task.cancelled) {
throw new IllegalStateException("TimerTask is canceled");
}
task.when = when;
task.period = period;
task.fixedRate = fixed;
}
// insert the newTask into queue
impl.insertTask(task);
}
}
}
| s20121035/rk3288_android5.1_repo | libcore/luni/src/main/java/java/util/Timer.java | Java | gpl-3.0 | 19,820 |
# -*- coding: utf-8 -*-
# Copyright (C) 2012 VT SuperDARN Lab
# Full license can be found in LICENSE.txt
"""tsyganenko module
This module contains the following object(s):
Classes
-------------------------------------------------------------
tsygTrace Wraps fortran subroutines in one convenient class
-------------------------------------------------------------
Module
-------------------------------
tsygFort Fortran subroutines
-------------------------------
"""
import tsygFort
import logging
class tsygTrace(object):
"""models.tsyganenko.trace
Trace magnetic field line(s) from point(s)
Parameters
----------
lat : Optional[ ]
latitude [degrees]
lon : Optional[ ]
longitude [degrees]
rho : Optional[ ]
distance from center of the Earth [km]
filename : Optional[ ]
load a trace object directly from a file
coords : Optional[str]
coordinates used for start point ['geo']
datetime : Optional[datetime]
a python datetime object
vswgse : Optional[list, float]
solar wind velocity in GSE coordinates [m/s, m/s, m/s]
pdyn : Optional[float]
solar wind dynamic pressure [nPa]
    dst : Optional[float]
Dst index [nT]
byimf : Optional[float]
IMF By [nT]
bzimf : Optional[float]
IMF Bz [nT]
lmax : Optional[int]
maximum number of points to trace
rmax : Optional[float]
upper trace boundary in Re
rmin : Optional[float]
lower trace boundary in Re
dsmax : Optional[float]
maximum tracing step size
err : Optional[float]
tracing step tolerance
Attributes
----------
lat :
latitude [degrees]
lon :
longitude [degrees]
rho :
distance from center of the Earth [km]
coords : str
coordinates used for start point ['geo']
vswgse : list
solar wind velocity in GSE coordinates [m/s, m/s, m/s]
pdyn : float
solar wind dynamic pressure [nPa]
    dst : float
Dst index [nT]
byimf : float
IMF By [nT]
bzimf : float
IMF Bz [nT]
datetime : Optional[datetime]
a python datetime object
Returns
-------
Elements of this object:
lat[N/S]H :
        latitude of the trace footpoint in Northern/Southern hemisphere
    lon[N/S]H :
        longitude of the trace footpoint in Northern/Southern hemisphere
    rho[N/S]H :
        distance of the trace footpoint in Northern/Southern hemisphere
Examples
--------
from numpy import arange, zeros, ones
import tsyganenko
# trace a series of points
lats = arange(10, 90, 10)
lons = zeros(len(lats))
rhos = 6372.*ones(len(lats))
trace = tsyganenko.tsygTrace(lats, lons, rhos)
# Print the results nicely
print trace
# Plot the traced field lines
ax = trace.plot()
# Or generate a 3d view of the traced field lines
ax = trace.plot3d()
# Save your trace to a file for later use
trace.save('trace.dat')
# And when you want to re-use the saved trace
trace = tsyganenko.tsygTrace(filename='trace.dat')
Notes
-----
**FUNCTION**: trace(lat, lon, rho, coords='geo', datetime=None,
    vswgse=[-400.,0.,0.], Pdyn=2., Dst=-5., ByIMF=0., BzIMF=-5.,
lmax=5000, rmax=60., rmin=1., dsmax=0.01, err=0.000001)
Written by Sebastien 2012-10
"""
def __init__(self, lat=None, lon=None, rho=None, filename=None,
coords='geo', datetime=None,
vswgse=[-400.,0.,0.], pdyn=2., dst=-5., byimf=0., bzimf=-5.,
lmax=5000, rmax=60., rmin=1., dsmax=0.01, err=0.000001):
from datetime import datetime as pydt
assert (None not in [lat, lon, rho]) or filename, 'You must provide either (lat, lon, rho) or a filename to read from'
if None not in [lat, lon, rho]:
self.lat = lat
self.lon = lon
self.rho = rho
self.coords = coords
self.vswgse = vswgse
self.pdyn = pdyn
self.dst = dst
self.byimf = byimf
self.bzimf = bzimf
# If no datetime is provided, defaults to today
if datetime is None: datetime = pydt.utcnow()
self.datetime = datetime
iTest = self.__test_valid__()
if not iTest: self.__del__()
self.trace()
elif filename:
self.load(filename)
def __test_valid__(self):
"""Test the validity of input arguments to the tsygTrace class and trace method
Written by Sebastien 2012-10
"""
assert (len(self.vswgse) == 3), 'vswgse must have 3 elements'
        assert (self.coords.lower() == 'geo'), '{}: this coordinate system is not supported'.format(self.coords.lower())
# A provision for those who want to batch trace
try:
[l for l in self.lat]
except:
self.lat = [self.lat]
try:
[l for l in self.lon]
except:
self.lon = [self.lon]
try:
[r for r in self.rho]
except:
self.rho = [self.rho]
try:
[d for d in self.datetime]
except:
self.datetime = [self.datetime for l in self.lat]
        # Make sure they're all the same length
        assert (len(self.lat) == len(self.lon) == len(self.rho) == len(self.datetime)), \
            'lat, lon, rho and datetime must be the same length'
return True
def trace(self, lat=None, lon=None, rho=None, coords=None, datetime=None,
vswgse=None, pdyn=None, dst=None, byimf=None, bzimf=None,
lmax=5000, rmax=60., rmin=1., dsmax=0.01, err=0.000001):
"""See tsygTrace for a description of each parameter
Any unspecified parameter default to the one stored in the object
Unspecified lmax, rmax, rmin, dsmax, err has a set default value
Parameters
----------
lat : Optional[ ]
latitude [degrees]
lon : Optional[ ]
longitude [degrees]
rho : Optional[ ]
distance from center of the Earth [km]
coords : Optional[str]
coordinates used for start point ['geo']
datetime : Optional[datetime]
a python datetime object
vswgse : Optional[list, float]
solar wind velocity in GSE coordinates [m/s, m/s, m/s]
pdyn : Optional[float]
solar wind dynamic pressure [nPa]
        dst : Optional[float]
Dst index [nT]
byimf : Optional[float]
IMF By [nT]
bzimf : Optional[float]
IMF Bz [nT]
lmax : Optional[int]
maximum number of points to trace
rmax : Optional[float]
upper trace boundary in Re
rmin : Optional[float]
lower trace boundary in Re
dsmax : Optional[float]
maximum tracing step size
err : Optional[float]
tracing step tolerance
Written by Sebastien 2012-10
"""
from numpy import radians, degrees, zeros
# Store existing values of class attributes in case something is wrong
# and we need to revert back to them
if lat: _lat = self.lat
if lon: _lon = self.lon
if rho: _rho = self.rho
if coords: _coords = self.coords
if vswgse: _vswgse = self.vswgse
if not datetime is None: _datetime = self.datetime
# Pass position if new
if lat: self.lat = lat
lat = self.lat
if lon: self.lon = lon
lon = self.lon
if rho: self.rho = rho
rho = self.rho
if not datetime is None: self.datetime = datetime
datetime = self.datetime
# Set necessary parameters if new
if coords: self.coords = coords
coords = self.coords
if not datetime is None: self.datetime = datetime
datetime = self.datetime
if vswgse: self.vswgse = vswgse
vswgse = self.vswgse
if pdyn: self.pdyn = pdyn
pdyn = self.pdyn
if dst: self.dst = dst
dst = self.dst
if byimf: self.byimf = byimf
byimf = self.byimf
if bzimf: self.bzimf = bzimf
bzimf = self.bzimf
# Test that everything is in order, if not revert to existing values
iTest = self.__test_valid__()
if not iTest:
if lat: self.lat = _lat
            if lon: self.lon = _lon
if rho: self.rho = _rho
if coords: self.coords = _coords
if vswgse: self.vswgse = _vswgse
if not datetime is None: self.datetime = _datetime
# Declare the same Re as used in Tsyganenko models [km]
Re = 6371.2
# Initialize trace array
self.l = zeros(len(lat))
self.xTrace = zeros((len(lat),2*lmax))
self.yTrace = self.xTrace.copy()
self.zTrace = self.xTrace.copy()
self.xGsw = self.l.copy()
self.yGsw = self.l.copy()
self.zGsw = self.l.copy()
self.latNH = self.l.copy()
self.lonNH = self.l.copy()
self.rhoNH = self.l.copy()
self.latSH = self.l.copy()
self.lonSH = self.l.copy()
self.rhoSH = self.l.copy()
# And now iterate through the desired points
for ip in xrange(len(lat)):
# This has to be called first
tsygFort.recalc_08(datetime[ip].year,datetime[ip].timetuple().tm_yday,
datetime[ip].hour,datetime[ip].minute,datetime[ip].second,
vswgse[0],vswgse[1],vswgse[2])
# Convert lat,lon to geographic cartesian and then gsw
r, theta, phi, xgeo, ygeo, zgeo = tsygFort.sphcar_08(
rho[ip]/Re, radians(90.-lat[ip]), radians(lon[ip]),
0., 0., 0.,
1)
if coords.lower() == 'geo':
xgeo, ygeo, zgeo, xgsw, ygsw, zgsw = tsygFort.geogsw_08(
xgeo, ygeo, zgeo,
0. ,0. ,0. ,
1)
self.xGsw[ip] = xgsw
self.yGsw[ip] = ygsw
self.zGsw[ip] = zgsw
# Trace field line
inmod = 'IGRF_GSW_08'
exmod = 'T96_01'
parmod = [pdyn, dst, byimf, bzimf, 0, 0, 0, 0, 0, 0]
# First towards southern hemisphere
maptoL = [-1, 1]
for mapto in maptoL:
xfgsw, yfgsw, zfgsw, xarr, yarr, zarr, l = tsygFort.trace_08( xgsw, ygsw, zgsw,
mapto, dsmax, err, rmax, rmin, 0,
parmod, exmod, inmod,
lmax )
# Convert back to spherical geographic coords
xfgeo, yfgeo, zfgeo, xfgsw, yfgsw, zfgsw = tsygFort.geogsw_08(
0. ,0. ,0. ,
xfgsw, yfgsw, zfgsw,
-1)
geoR, geoColat, geoLon, xgeo, ygeo, zgeo = tsygFort.sphcar_08(
0., 0., 0.,
xfgeo, yfgeo, zfgeo,
-1)
# Get coordinates of traced point
if mapto == 1:
self.latSH[ip] = 90. - degrees(geoColat)
self.lonSH[ip] = degrees(geoLon)
self.rhoSH[ip] = geoR*Re
elif mapto == -1:
self.latNH[ip] = 90. - degrees(geoColat)
self.lonNH[ip] = degrees(geoLon)
self.rhoNH[ip] = geoR*Re
# Store trace
if mapto == -1:
self.xTrace[ip,0:l] = xarr[l-1::-1]
self.yTrace[ip,0:l] = yarr[l-1::-1]
self.zTrace[ip,0:l] = zarr[l-1::-1]
elif mapto == 1:
self.xTrace[ip,self.l[ip]:self.l[ip]+l] = xarr[0:l]
self.yTrace[ip,self.l[ip]:self.l[ip]+l] = yarr[0:l]
self.zTrace[ip,self.l[ip]:self.l[ip]+l] = zarr[0:l]
self.l[ip] += l
        # Resize trace output to the minimum possible length
self.xTrace = self.xTrace[:,0:self.l.max()]
self.yTrace = self.yTrace[:,0:self.l.max()]
self.zTrace = self.zTrace[:,0:self.l.max()]
def __str__(self):
"""Print object information in a nice way
Written by Sebastien 2012-10
"""
# Declare print format
outstr = '''
vswgse=[{:6.0f},{:6.0f},{:6.0f}] [m/s]
pdyn={:3.0f} [nPa]
dst={:3.0f} [nT]
byimf={:3.0f} [nT]
bzimf={:3.0f} [nT]
'''.format(self.vswgse[0],
self.vswgse[1],
self.vswgse[2],
self.pdyn,
self.dst,
self.byimf,
self.bzimf)
outstr += '\nCoords: {}\n'.format(self.coords)
outstr += '(latitude [degrees], longitude [degrees], distance from center of the Earth [km])\n'
# Print stuff
for ip in xrange(len(self.lat)):
outstr += '''
({:6.3f}, {:6.3f}, {:6.3f}) @ {}
--> NH({:6.3f}, {:6.3f}, {:6.3f})
--> SH({:6.3f}, {:6.3f}, {:6.3f})
'''.format(self.lat[ip], self.lon[ip], self.rho[ip],
self.datetime[ip].strftime('%H:%M UT (%d-%b-%y)'),
self.latNH[ip], self.lonNH[ip], self.rhoNH[ip],
self.latSH[ip], self.lonSH[ip], self.rhoSH[ip])
return outstr
def save(self, filename):
"""Save trace information to a file
Parameters
----------
filename : str
Written by Sebastien 2012-10
"""
import cPickle as pickle
with open( filename, "wb" ) as fileObj:
pickle.dump(self, fileObj)
def load(self, filename):
"""load trace information from a file
Parameters
----------
filename : str
Written by Sebastien 2012-10
"""
import cPickle as pickle
with open( filename, "rb" ) as fileObj:
obj = pickle.load(fileObj)
for k, v in obj.__dict__.items():
self.__dict__[k] = v
def plot(self, proj='xz', color='b', onlyPts=None, showPts=False,
showEarth=True, disp=True, **kwargs):
"""Generate a 2D plot of the trace projected onto a given plane
Graphic keywords apply to the plot method for the field lines
Parameters
----------
proj : Optional[str]
the projection plane in GSW coordinates
color : Optional[char]
field line color
onlyPts : Optional[ ]
            if the trace contains multiple points, only show the specified indices (list)
showEarth : Optional[bool]
Toggle Earth disk visibility on/off
showPts : Optional[bool]
Toggle start points visibility on/off
disp : Optional[bool]
invoke pylab.show()
**kwargs :
see matplotlib.axes.Axes.plot
Returns
-------
ax : matplotlib axes object
Written by Sebastien 2012-10
"""
from pylab import gcf, gca, show
from matplotlib.patches import Circle
from numpy import pi, linspace, outer, ones, size, cos, sin, radians, cross
from numpy.ma import masked_array
assert (len(proj) == 2) or \
(proj[0] in ['x','y','z'] and proj[1] in ['x','y','z']) or \
(proj[0] != proj[1]), 'Invalid projection plane'
fig = gcf()
ax = fig.gca()
ax.set_aspect('equal')
# First plot a nice disk for the Earth
if showEarth:
circ = Circle(xy=(0,0), radius=1, facecolor='0.8', edgecolor='k', alpha=.5, zorder=0)
ax.add_patch(circ)
# Select indices to show
if onlyPts is None:
inds = xrange(len(self.lat))
else:
try:
inds = [ip for ip in onlyPts]
except:
inds = [onlyPts]
# Then plot the traced field line
for ip in inds:
# Select projection plane
if proj[0] == 'x':
xx = self.xTrace[ip,0:self.l[ip]]
xpt = self.xGsw[ip]
ax.set_xlabel(r'$X_{GSW}$')
xdir = [1,0,0]
elif proj[0] == 'y':
xx = self.yTrace[ip,0:self.l[ip]]
xpt = self.yGsw[ip]
ax.set_xlabel(r'$Y_{GSW}$')
xdir = [0,1,0]
elif proj[0] == 'z':
xx = self.zTrace[ip,0:self.l[ip]]
xpt = self.zGsw[ip]
ax.set_xlabel(r'$Z_{GSW}$')
xdir = [0,0,1]
if proj[1] == 'x':
yy = self.xTrace[ip,0:self.l[ip]]
ypt = self.xGsw[ip]
ax.set_ylabel(r'$X_{GSW}$')
ydir = [1,0,0]
elif proj[1] == 'y':
yy = self.yTrace[ip,0:self.l[ip]]
ypt = self.yGsw[ip]
ax.set_ylabel(r'$Y_{GSW}$')
ydir = [0,1,0]
elif proj[1] == 'z':
yy = self.zTrace[ip,0:self.l[ip]]
ypt = self.zGsw[ip]
ax.set_ylabel(r'$Z_{GSW}$')
ydir = [0,0,1]
sign = 1 if -1 not in cross(xdir,ydir) else -1
if 'x' not in proj:
zz = sign*self.xGsw[ip]
indMask = sign*self.xTrace[ip,0:self.l[ip]] < 0
if 'y' not in proj:
zz = sign*self.yGsw[ip]
indMask = sign*self.yTrace[ip,0:self.l[ip]] < 0
if 'z' not in proj:
zz = sign*self.zGsw[ip]
indMask = sign*self.zTrace[ip,0:self.l[ip]] < 0
# Plot
ax.plot(masked_array(xx, mask=~indMask),
masked_array(yy, mask=~indMask),
zorder=-1, color=color, **kwargs)
ax.plot(masked_array(xx, mask=indMask),
masked_array(yy, mask=indMask),
zorder=1, color=color, **kwargs)
if showPts:
ax.scatter(xpt, ypt, c='k', s=40, zorder=zz)
if disp: show()
return ax
def plot3d(self, onlyPts=None, showEarth=True, showPts=False, disp=True,
xyzlim=None, zorder=1, linewidth=2, color='b', **kwargs):
"""Generate a 3D plot of the trace
Graphic keywords apply to the plot3d method for the field lines
Parameters
----------
onlyPts : Optional[ ]
            if the trace contains multiple points, only show the specified indices (list)
showEarth : Optional[bool]
Toggle Earth sphere visibility on/off
showPts : Optional[bool]
Toggle start points visibility on/off
disp : Optional[bool]
invoke pylab.show()
xyzlim : Optional[ ]
3D axis limits
zorder : Optional[int]
3D layers ordering
linewidth : Optional[int]
field line width
color : Optional[char]
field line color
**kwargs :
see mpl_toolkits.mplot3d.axes3d.Axes3D.plot3D
Returns
-------
ax : matplotlib axes
axes object
Written by Sebastien 2012-10
"""
from mpl_toolkits.mplot3d import proj3d
from numpy import pi, linspace, outer, ones, size, cos, sin, radians
from pylab import gca, gcf, show
fig = gcf()
ax = fig.gca(projection='3d')
# First plot a nice sphere for the Earth
if showEarth:
u = linspace(0, 2 * pi, 179)
v = linspace(0, pi, 179)
tx = outer(cos(u), sin(v))
ty = outer(sin(u), sin(v))
tz = outer(ones(size(u)), cos(v))
ax.plot_surface(tx,ty,tz,rstride=10, cstride=10, color='grey', alpha=.5, zorder=0, linewidth=0.5)
# Select indices to show
if onlyPts is None:
inds = xrange(len(self.lat))
else:
try:
inds = [ip for ip in onlyPts]
except:
inds = [onlyPts]
# Then plot the traced field line
for ip in inds:
ax.plot3D( self.xTrace[ip,0:self.l[ip]],
self.yTrace[ip,0:self.l[ip]],
self.zTrace[ip,0:self.l[ip]],
zorder=zorder, linewidth=linewidth, color=color, **kwargs)
if showPts:
ax.scatter3D(self.xGsw[ip], self.yGsw[ip], self.zGsw[ip], c='k')
# Set plot limits
if not xyzlim:
xyzlim = max( [ ax.get_xlim3d().max(),
ax.get_ylim3d().max(),
ax.get_zlim3d().max(), ] )
ax.set_xlim3d([-xyzlim,xyzlim])
ax.set_ylim3d([-xyzlim,xyzlim])
ax.set_zlim3d([-xyzlim,xyzlim])
if disp: show()
return ax
| aburrell/davitpy | davitpy/models/tsyganenko/__init__.py | Python | gpl-3.0 | 21,946 |
#!/usr/bin/env python
#Pools assigned OTUs with identical names and renumbers the remaining distinct
#OTUs. Also allows filtering out OTUs with less than "min_cts" in at least
#one sample.
# Copyright (C) <2012> <Benjamin C. Smith>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys, os, re, argparse, csv
from numpy import array, sum, append, amax, hstack, savetxt, linspace
from itertools import product
from time import strftime
parser = argparse.ArgumentParser(description='''Filter an OTU table by pooling
OTUs with identical names. (Optionally: discard OTUs with less than a specified
minimum proportion of counts in any one sample)''')
parser.add_argument('-i','--infile', required=True, nargs=1,
type=str, help='''input filepath. Should be a tab-delimited
OTU table.''')
parser.add_argument('-o', '--outfile', required=True, nargs=1, type=str,
help='''output filepath. The resulting pooled OTU table is
written here.''')
parser.add_argument('-k', '--keep', nargs=1, default=[0], type=float,
help='''set the minimum percentile of matched taxa to keep
based on maximum reads per sample for each taxon.
E.g., setting 50 will keep the taxon with a maximum number of
reads per sample that represents the 50th
percentile and all taxa above. In microbial communities,
    there is usually a high degree of taxon unevenness and their
distribution may have a long tail. For this reason, you may be
required to set this value much higher than you would normally
expect, to filter out taxa with very small read numbers.''')
parser.add_argument('-r', '--reads', action='store_true',
help='''print information about number of reads''')
args = parser.parse_args()
min_cts = args.keep[0]
if min_cts >= 100 or min_cts < 0:
print "Invalid minimum count threshold (-k/--keep parameter). \
Value must be >= 0 and < 100 ."
sys.exit(1)
infile = args.infile[0]
outfile = args.outfile[0]
print "\nRun started " + strftime("%Y-%m-%d %H:%M:%S") + "."
#collect sample names, using first line of file
inhandle = csv.reader(open(infile, 'rU'), delimiter='\t')
outhandle = csv.writer(open(outfile, 'wb'), delimiter='\t')
for line in inhandle:
if line[0][0] == "#":
if line[0]=="#OTU ID":
sample_ids = [column for column in line if \
re.search(column, "#OTU ID"'|'"Consensus Lineage")==None]
outhandle.writerow(line)
else:
break
otu_names = []
otu_dict = {}
#build list of OTU names
inhandle = csv.reader(open(infile, 'rU'), delimiter='\t')
for line in inhandle :
if line[0][0]!="#":
if line[-1] not in otu_names:
otu_names.append(line[-1])
# K,V = name of taxon, list of number of occurrences in each sample
#there may be more than one V for each K.
otu_dict[line[-1]] = [line[1:-1]]
else :
otu_dict[line[-1]].append(line[1:-1])
#create array of total counts per sample per otu by summing columns for all lists of
#counts for each otu
counts_per_otu=array([array(lists, dtype=int).sum(axis=0) for lists in
otu_dict.values()])
#Calculate the total reads in the table prior to filtering
tot_start_cts = counts_per_otu.sum()
#Order the taxa according to maximum number of counts in a sample
ordered_taxa=sorted([(name, max(counts)) for name, counts in
zip(otu_dict.keys(), counts_per_otu)],
key=lambda taxon: taxon[1])
#Calculate the rank above which to keep taxa based on the specified percentile.
#Subtract 1 because python list numbering starts at 0.
keep_rank=int(round((min_cts/100)*len(ordered_taxa)+0.5))-1
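# Worked example (comment added for clarity, not in the original script): with --keep 50
# and 10 taxa, keep_rank = int(round(0.5*10 + 0.5)) - 1 = 5, so only ordered_taxa[5:]
# (the upper half of taxa, ranked by their per-sample maximum) survive the filter below.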
otu_table = [] #empty array that will be filled with filtered count data
ictr = 1 #counter for assigning new OTU IDs.
#counters for tracking numbers of reads in intial and final OTU tables
tot_end_cts = 0
for i, entry in enumerate(ordered_taxa):
key=entry[0]
if i >= keep_rank and entry[1]>0:
#create row for output
if key != 'Noise' : #if not the "Noise" OTU add otu_id from ictr
# and increment it by 1.
otu_id = array( [ictr], dtype=object)
ictr += 1
else: # if "Noise" OTU, set otu_id to '0' and don't increment ictr.
otu_id = array( [0], dtype=object)
otu_counts=array(otu_dict[key], dtype=int).sum(axis=0)
otu_name = array( [key], dtype=object)
otu_row = hstack( (otu_id, otu_counts, otu_name) )
tot_end_cts += otu_counts.sum()
otu_table.append(otu_row.tolist())
final_otu_table=otu_table
#otu_table = array(otu_table) # convert to numpy array to allow easy sorting
#final_otu_table = otu_table[otu_table[:,0].argsort(),:].tolist() # sort
#otu_table by otu_id and convert back to list
for row in final_otu_table:
outhandle.writerow(row)
print "Finished.\n"
print "Final OTU table preview: "
print array(final_otu_table)
# Write log
logpath = open(str(os.path.splitext(outfile)[0]) + ".log","wb")
logpath.write("Logfile for OTU pooling of " \
+ infile + "\n" + strftime("%Y-%m-%d %H:%M:%S") + "\n\n" \
"Parameters specified:\n" \
"Minimum read threshold: " + str(min_cts) + "\n" \
"Counts:"
"\nTotal reads in input OTU table: " + str(tot_start_cts) + "\n" \
"Total reads in output OTU table: " + str(tot_end_cts) + "\n" \
"Reads discarded through retaining " + str(min_cts) \
+ " percentile and above: " + str(tot_start_cts-tot_end_cts) + "\n" \
"Maximum reads per sample of " + str(min_cts) + " percentile: " + str(ordered_taxa[keep_rank][1]) + "\n" )
logpath.close()
print "\n\nLog file written (" + str(os.path.splitext(outfile)[0]) + ".log" + ")\n"
if args.reads:
print '\nTotal reads in input OTU table: ' + str(tot_start_cts)
print 'Total reads in output OTU table: ' + str(tot_end_cts)
    print 'Reads discarded through retaining ' + str(min_cts) \
+ ' percentile and above: ' + str(tot_start_cts-tot_end_cts)
print 'Maximum reads per sample of ' + str(min_cts) + ' percentile: ' \
+ str(ordered_taxa[keep_rank][1]) + "\n"
| benjsmith/mubiomics | scripts/pool_otus.py | Python | gpl-3.0 | 6,450 |
using System;
using System.Globalization;
using SmartStore.Utilities;
namespace SmartStore.Core.Search.Facets
{
[Serializable]
public class FacetValue : IEquatable<FacetValue>, ICloneable<FacetValue>
{
public FacetValue()
{
}
public FacetValue(object value, IndexTypeCode typeCode)
{
Value = value;
TypeCode = typeCode;
IsRange = false;
}
public FacetValue(object value, object upperValue, IndexTypeCode typeCode, bool includesLower, bool includesUpper)
{
Value = value;
UpperValue = upperValue;
TypeCode = typeCode;
IncludesLower = includesLower;
IncludesUpper = includesUpper;
IsRange = true;
}
public object Value
{
get;
set;
}
public object UpperValue
{
get;
set;
}
public IndexTypeCode TypeCode
{
get;
set;
}
public bool IncludesLower
{
get;
set;
}
public bool IncludesUpper
{
get;
set;
}
public bool IsRange
{
get;
set;
}
public bool IsSelected
{
get;
set;
}
public bool IsEmpty
{
get
{
return TypeCode == IndexTypeCode.Empty && Value == null;
}
}
#region Metadata
public string Label { get; set; }
public int ParentId { get; set; }
public int DisplayOrder { get; set; }
public FacetSorting? Sorting { get; set; }
public string PictureUrl { get; set; }
public string Color { get; set; }
#endregion
public override int GetHashCode()
{
if (Value != null && UpperValue != null)
{
var combiner = HashCodeCombiner
.Start()
.Add(Value.GetHashCode())
.Add(UpperValue.GetHashCode());
return combiner.CombinedHash;
}
else if (UpperValue != null)
{
return UpperValue.GetHashCode();
}
else if (Value != null)
{
return Value.GetHashCode();
}
return 0;
}
public bool Equals(FacetValue other)
{
if (other == null || other.TypeCode != TypeCode || other.IsRange != IsRange)
{
return false;
}
if (other.IsRange)
{
if (other.IncludesLower != IncludesLower || other.IncludesUpper != IncludesUpper)
{
return false;
}
if (other.Value == null && Value == null && other.UpperValue == null && UpperValue == null)
{
return true;
}
if (other.UpperValue != null && !other.UpperValue.Equals(UpperValue))
{
return false;
}
}
if (other.Value == null && Value == null)
{
return true;
}
return other.Value != null && other.Value.Equals(Value);
}
public override bool Equals(object obj)
{
return this.Equals(obj as FacetValue);
}
public override string ToString()
{
var result = string.Empty;
var valueStr = Value != null
? Convert.ToString(Value, CultureInfo.InvariantCulture)
: string.Empty;
if (IsRange)
{
var upperValueStr = UpperValue != null
? Convert.ToString(UpperValue, CultureInfo.InvariantCulture)
: string.Empty;
if (upperValueStr.HasValue())
{
result = string.Concat(valueStr, "~", upperValueStr);
}
else
{
result = valueStr;
}
}
else
{
result = valueStr;
}
return result;
}
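        // Added note (not part of the original source): a range facet with Value = 10 and
        // UpperValue = 20 is rendered by ToString() as "10~20"; a non-range facet renders
        // only its Value, formatted with the invariant culture.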
public FacetValue Clone()
{
return (FacetValue)this.MemberwiseClone();
}
object ICloneable.Clone()
{
return this.MemberwiseClone();
}
}
}
| nitware/estore | src/Libraries/SmartStore.Core/Search/Facets/FacetValue.cs | C# | gpl-3.0 | 3,289 |
class CreateProjectSubjectTable < ActiveRecord::Migration[5.0]
def change
create_table :project_subjects do |t|
t.uuid :project_id, foreign_key: true
t.uuid :subject_id, foreign_key: true
end
end
end
| zdavis/manifold | api/db/migrate/20161112145302_create_project_subject_table.rb | Ruby | gpl-3.0 | 224 |
<?php
// Heading
$_['heading_title'] = '訂單計算項目(Order Totals)';
// Text
$_['text_install'] = '安裝(Install)';
$_['text_uninstall'] = '解除安裝(Uninstall)';
// Column
$_['column_name'] = '項目(Order Totals)';
$_['column_status'] = '狀態(Status)';
$_['column_sort_order'] = '排序(Sort Order)';
$_['column_action'] = '動作(Action)';
// Error
$_['error_permission'] = '你沒有權限更改訂單計算項目的設置';
?> | alvinhsian/Opencart1556 | upload/admin/language/zh-TW/extension/total.php | PHP | gpl-3.0 | 473 |
'use strict';
var desks = require('./helpers/desks');
describe('desks management', function () {
beforeEach(function() {
desks.openDesksSettings();
});
it('lists macros under the Macro tab for new desks', function () {
desks.newDeskBtn.click();
desks.showTab('macros');
expect(desks.listedMacros.count()).toBeGreaterThan(0);
});
});
| nistormihai/superdesk | client/spec/desks_management_spec.js | JavaScript | gpl-3.0 | 386 |
/*
* Copyright (C) 2010 Pavel Stastny
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package cz.incad.kramerius.security.impl.criteria;
import java.util.logging.Level;
import java.util.regex.Pattern;
import cz.incad.kramerius.security.RightCriterium;
public abstract class AbstractIPAddressFilter extends AbstractCriterium implements RightCriterium {
static java.util.logging.Logger LOGGER = java.util.logging.Logger.getLogger(AbstractIPAddressFilter.class.getName());
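    /**
     * Added descriptive note (not part of the original source): each element of
     * {@code objs} is treated as a regular expression and matched against the client's
     * remote address; a leading '!' negates the pattern. For example, the pattern
     * 10\.0\..* accepts any address beginning with "10.0.", while !10\.0\..* accepts
     * only addresses that do not match that prefix.
     */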
protected boolean matchIPAddresses(Object[] objs) {
String remoteAddr = this.getEvaluateContext().getRemoteAddr();
return matchIPAddresses(objs, remoteAddr);
}
protected boolean matchIPAddresses(Object[] objs, String remoteAddr) {
for (Object pattern : objs) {
boolean negativePattern = false;
String patternStr = pattern.toString();
if (patternStr.startsWith("!")) {
patternStr = patternStr.substring(1);
negativePattern = true;
}
boolean matched = remoteAddr.matches(patternStr);
if ((matched) && (!negativePattern)) {
LOGGER.fine("\t regexpattern '"+patternStr+"' trying to match with address '"+remoteAddr+"' - ACCEPTING");
return true;
} else if ((!matched) && (negativePattern)) {
LOGGER.fine("\t regexpattern '"+patternStr+"' trying to match with address '"+remoteAddr+"' - (negative pattern) ACCEPTING");
return true;
}
// only debug
if ((!matched) && (!negativePattern)) {
LOGGER.fine("\t regexpattern '"+patternStr+"' trying to match with address '"+remoteAddr+"' - NOT ACCEPTING");
} else if ((matched) && (negativePattern)) {
LOGGER.fine("\t regexpattern '"+patternStr+"' trying to match with address '"+remoteAddr+"' -(negative pattern) NOT ACCEPTING");
}
}
return false;
}
@Override
public boolean isParamsNecessary() {
return true;
}
@Override
public boolean validateParams(Object[] vals) {
try {
for (Object pattern : vals) {
String patternStr = pattern.toString();
Pattern compiled = Pattern.compile(patternStr);
if (compiled == null) return false;
}
return true;
} catch (Exception e) {
LOGGER.log(Level.SEVERE, e.getMessage(), e);
return false;
}
}
}
| moravianlibrary/kramerius | common/src/main/java/cz/incad/kramerius/security/impl/criteria/AbstractIPAddressFilter.java | Java | gpl-3.0 | 3,192 |
/*
* Copyright (c) 2006 Genome Research Limited.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Library General Public License as published
* by the Free Software Foundation; either version 2 of the License or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public License
* along with this program; see the file COPYING.LIB. If not, write to
* the Free Software Foundation Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307 USA
*/
package org.genedb.web.mvc.controller;
import org.genedb.db.dao.SequenceDao;
import org.genedb.querying.history.HistoryManager;
import org.genedb.querying.history.HistoryType;
import org.gmod.schema.feature.Transcript;
import org.gmod.schema.mapped.Feature;
import org.apache.log4j.Logger;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
/**
* Looks up a feature by unique name
*
* @author Chinmay Patel (cp2)
* @author Adrian Tivey (art)
*/
@Controller
@RequestMapping("/Basket")
public class BasketController {
private static final Logger logger = Logger.getLogger(BasketController.class);
private SequenceDao sequenceDao;
private HistoryManagerFactory hmFactory;
//private ModelBuilder modelBuilder;
@RequestMapping(method=RequestMethod.GET, value="/{name}")
protected void addFeatureToBasket(
@PathVariable("name") String name,
@RequestParam(value="historyType", required=true) HistoryType historyType,
HttpSession session,
HttpServletResponse response
) throws Exception {
logger.info("Trying to store in basket " + name + ", history type " + historyType + " for session " + session.getId());
Feature feature = sequenceDao.getFeatureByUniqueName(name, Feature.class);
if (feature == null) {
logger.warn(String.format("Failed to find feature '%s'", name));
return;
}
// Transcript transcript = modelBuilder.findTranscriptForFeature(feature);
// if (transcript == null) {
// // If feature isn't transcript redirect - include model
// // is it part of a gene
// logger.warn(String.format("Failed to find transcript for an id of '%s'", name));
// //be.reject("no.results");
// return;
// }
//logger.trace("dto cache hit for '"+feature.getUniqueName());
HistoryManager hm = hmFactory.getHistoryManager(session);
hm.addHistoryItem(historyType, feature.getUniqueName());
        // Add message
response.setStatus(HttpServletResponse.SC_OK);
return;
}
// public void setModelBuilder(ModelBuilder modelBuilder) {
// this.modelBuilder = modelBuilder;
// }
public void setHistoryManagerFactory(HistoryManagerFactory hmFactory) {
this.hmFactory = hmFactory;
}
public void setSequenceDao(SequenceDao sequenceDao) {
this.sequenceDao = sequenceDao;
}
}
| satta/GeneDB | ng/src/org/genedb/web/mvc/controller/BasketController.java | Java | gpl-3.0 | 3,651 |
<?php
namespace Alchemy\Tests\Phrasea\SearchEngine;
use Alchemy\Phrasea\SearchEngine\SearchEngineOptions;
use Symfony\Component\HttpFoundation\Request;
/**
* @group functional
* @group legacy
*/
class SearchEngineOptionsTest extends \PhraseanetTestCase
{
/**
* @covers Alchemy\Phrasea\SearchEngine\SearchEngineOptions
*/
public function testSerialize()
{
$options = new SearchEngineOptions(self::$DI['app']);
$options->onCollections([self::$DI['collection']]);
$options->allowBusinessFieldsOn([self::$DI['collection']]);
foreach (self::$DI['collection']->get_databox()->get_meta_structure() as $field) {
$options->setFields([$field]);
$options->setDateFields([$field]);
break;
}
$min_date = new \DateTime('-5 days');
$max_date = new \DateTime('+5 days');
$options->setMinDate(\DateTime::createFromFormat(DATE_ATOM, $min_date->format(DATE_ATOM)));
$options->setMaxDate(\DateTime::createFromFormat(DATE_ATOM, $max_date->format(DATE_ATOM)));
$serialized = $options->serialize();
$this->assertEquals($options, SearchEngineOptions::hydrate(self::$DI['app'], $serialized));
}
/**
* @covers Alchemy\Phrasea\SearchEngine\SearchEngineOptions::fromRequest
*/
public function testFromRequest()
{
$this->authenticate(self::$DI['app']);
foreach ($this->provideRequestData() as $pack) {
list ($query, $request, $field, $dateField) = $pack;
$httpRequest = new Request($query, $request);
$options = SearchEngineOptions::fromRequest(self::$DI['app'], $httpRequest);
// Check done this way because returned array can be indexed differently
$collections = $options->getCollections();
$this->assertCount(1, $collections);
$this->assertContains(self::$DI['collection'], $collections);
$this->assertEquals([$field], $options->getFields());
$this->assertEquals('video', $options->getRecordType());
$this->assertEquals('1', $options->getSearchType());
$this->assertEquals('2012/12/21', $options->getMaxDate()->format('Y/m/d'));
$this->assertEquals('2009/04/24', $options->getMinDate()->format('Y/m/d'));
$this->assertEquals([$dateField], $options->getDateFields());
$this->assertEquals('asc', $options->getSortOrder());
$this->assertEquals('topinambour', $options->getSortBy());
$this->assertEquals(true, $options->isStemmed());
}
}
/**
* @covers Alchemy\Phrasea\SearchEngine\SearchEngineOptions::fromRequest
*/
public function testFromRequestUnauthenticated()
{
foreach ($this->provideRequestData() as $pack) {
list ($query, $request, $field, $dateField) = $pack;
$httpRequest = new Request($query, $request);
$options = SearchEngineOptions::fromRequest(self::$DI['app'], $httpRequest);
$this->assertEquals([], $options->getCollections());
$this->assertEquals([], $options->getFields());
$this->assertEquals('video', $options->getRecordType());
$this->assertEquals('1', $options->getSearchType());
$this->assertEquals('2012/12/21', $options->getMaxDate()->format('Y/m/d'));
$this->assertEquals('2009/04/24', $options->getMinDate()->format('Y/m/d'));
$this->assertEquals([], $options->getDateFields());
$this->assertEquals('asc', $options->getSortOrder());
$this->assertEquals('topinambour', $options->getSortBy());
$this->assertEquals(true, $options->isStemmed());
}
}
/**
* @covers Alchemy\Phrasea\SearchEngine\SearchEngineOptions::fromRequest
*/
public function testFromRequestEmptyUnauthenticated()
{
$options = SearchEngineOptions::fromRequest(self::$DI['app'], new Request());
$this->assertEquals([], $options->getCollections());
$this->assertEquals([], $options->getFields());
$this->assertEquals(null, $options->getRecordType());
$this->assertEquals('0', $options->getSearchType());
$this->assertEquals(null, $options->getMaxDate());
$this->assertEquals(null, $options->getMinDate());
$this->assertEquals([], $options->getDateFields());
$this->assertEquals('desc', $options->getSortOrder());
$this->assertEquals(null, $options->getSortBy());
$this->assertEquals(false, $options->isStemmed());
}
private function provideRequestData()
{
$field = $dateField = null;
foreach (self::$DI['collection']->get_databox()->get_meta_structure() as $db_field) {
if (!$field) {
$field = $db_field;
} elseif (!$dateField) {
$dateField = $db_field;
} else {
break;
}
}
if (!$field || !$dateField) {
$this->fail('Unable to get a field');
}
$data = [
'bases' => [self::$DI['collection']->get_base_id()],
'status' => ['4' => ['on' => [self::$DI['collection']->get_databox()->get_sbas_id()]]],
'fields' => [$field->get_name()],
'record_type' => 'video',
'search_type' => '1',
'date_min' => '2009/04/24',
'date_max' => '2012/12/21',
'date_field' => $dateField->get_name(),
'ord' => 'asc',
'sort' => 'topinambour',
'stemme' => 'true',
];
$dataWithoutBases = $data;
unset($dataWithoutBases['bases']);
return [
[[], $data, $field, $dateField],
[$data, [], $field, $dateField],
];
}
}
| kwemi/Phraseanet | tests/Alchemy/Tests/Phrasea/SearchEngine/SearchEngineOptionsTest.php | PHP | gpl-3.0 | 5,841 |
Bitrix 16.5 Business Demo = be1a97cd8e66f427f52f0ba0dc4d6ffb
| gohdan/DFC | known_files/hashes/bitrix/modules/photogallery/install/components/bitrix/photogallery_user/templates/old/bitrix/photogallery.section.edit/.default/lang/ru/template.php | PHP | gpl-3.0 | 61 |
<?php
// This file is part of Moodle - http://moodle.org/
//
// Moodle is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Moodle is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Moodle. If not, see <http://www.gnu.org/licenses/>.
/**
* External user API
*
* @package core_user
* @copyright 2009 Moodle Pty Ltd (http://moodle.com)
* @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
*/
/**
* Creates a user
*
* @throws moodle_exception
* @param stdClass $user user to create
* @param bool $updatepassword if true, authentication plugin will update password.
 * @param bool $triggerevent set false if user_created event should not be triggered.
* This will not affect user_password_updated event triggering.
* @return int id of the newly created user
*/
function user_create_user($user, $updatepassword = true, $triggerevent = true) {
global $DB;
    // Set the timecreated field to the current time.
if (!is_object($user)) {
$user = (object) $user;
}
// Check username.
if ($user->username !== core_text::strtolower($user->username)) {
throw new moodle_exception('usernamelowercase');
} else {
if ($user->username !== core_user::clean_field($user->username, 'username')) {
throw new moodle_exception('invalidusername');
}
}
// Save the password in a temp value for later.
if ($updatepassword && isset($user->password)) {
// Check password toward the password policy.
if (!check_password_policy($user->password, $errmsg)) {
throw new moodle_exception($errmsg);
}
$userpassword = $user->password;
unset($user->password);
}
// Apply default values for user preferences that are stored in users table.
if (!isset($user->calendartype)) {
$user->calendartype = core_user::get_property_default('calendartype');
}
if (!isset($user->maildisplay)) {
$user->maildisplay = core_user::get_property_default('maildisplay');
}
if (!isset($user->mailformat)) {
$user->mailformat = core_user::get_property_default('mailformat');
}
if (!isset($user->maildigest)) {
$user->maildigest = core_user::get_property_default('maildigest');
}
if (!isset($user->autosubscribe)) {
$user->autosubscribe = core_user::get_property_default('autosubscribe');
}
if (!isset($user->trackforums)) {
$user->trackforums = core_user::get_property_default('trackforums');
}
if (!isset($user->lang)) {
$user->lang = core_user::get_property_default('lang');
}
$user->timecreated = time();
$user->timemodified = $user->timecreated;
// Validate user data object.
$uservalidation = core_user::validate($user);
if ($uservalidation !== true) {
foreach ($uservalidation as $field => $message) {
debugging("The property '$field' has invalid data and has been cleaned.", DEBUG_DEVELOPER);
$user->$field = core_user::clean_field($user->$field, $field);
}
}
// Insert the user into the database.
$newuserid = $DB->insert_record('user', $user);
// Create USER context for this user.
$usercontext = context_user::instance($newuserid);
// Update user password if necessary.
if (isset($userpassword)) {
// Get full database user row, in case auth is default.
$newuser = $DB->get_record('user', array('id' => $newuserid));
$authplugin = get_auth_plugin($newuser->auth);
$authplugin->user_update_password($newuser, $userpassword);
}
// Trigger event If required.
if ($triggerevent) {
\core\event\user_created::create_from_userid($newuserid)->trigger();
}
return $newuserid;
}
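// A minimal usage sketch for user_create_user(); the field values below are
// illustrative assumptions, and other commonly required fields (auth, mnethostid,
// confirmed, ...) are omitted for brevity.
//
//     $user = new stdClass();
//     $user->username  = 'jdoe';              // must already be lowercase
//     $user->password  = 'Example-Pass1!';    // validated against the password policy
//     $user->firstname = 'Jane';
//     $user->lastname  = 'Doe';
//     $user->email     = 'jdoe@example.com';
//     $newuserid = user_create_user($user);            // triggers user_created
//     // user_create_user($user, true, false);         // same, but without the event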
/**
* Update a user with a user object (will compare against the ID)
*
* @throws moodle_exception
* @param stdClass $user the user to update
* @param bool $updatepassword if true, authentication plugin will update password.
 * @param bool $triggerevent set false if user_updated event should not be triggered.
* This will not affect user_password_updated event triggering.
*/
function user_update_user($user, $updatepassword = true, $triggerevent = true) {
global $DB;
    // Set the timemodified field to the current time.
if (!is_object($user)) {
$user = (object) $user;
}
// Check username.
if (isset($user->username)) {
if ($user->username !== core_text::strtolower($user->username)) {
throw new moodle_exception('usernamelowercase');
} else {
if ($user->username !== core_user::clean_field($user->username, 'username')) {
throw new moodle_exception('invalidusername');
}
}
}
// Unset password here, for updating later, if password update is required.
if ($updatepassword && isset($user->password)) {
// Check password toward the password policy.
if (!check_password_policy($user->password, $errmsg)) {
throw new moodle_exception($errmsg);
}
$passwd = $user->password;
unset($user->password);
}
// Make sure calendartype, if set, is valid.
if (empty($user->calendartype)) {
// Unset this variable, must be an empty string, which we do not want to update the calendartype to.
unset($user->calendartype);
}
$user->timemodified = time();
// Validate user data object.
$uservalidation = core_user::validate($user);
if ($uservalidation !== true) {
foreach ($uservalidation as $field => $message) {
debugging("The property '$field' has invalid data and has been cleaned.", DEBUG_DEVELOPER);
$user->$field = core_user::clean_field($user->$field, $field);
}
}
$DB->update_record('user', $user);
if ($updatepassword) {
// Get full user record.
$updateduser = $DB->get_record('user', array('id' => $user->id));
// If password was set, then update its hash.
if (isset($passwd)) {
$authplugin = get_auth_plugin($updateduser->auth);
if ($authplugin->can_change_password()) {
$authplugin->user_update_password($updateduser, $passwd);
}
}
}
// Trigger event if required.
if ($triggerevent) {
\core\event\user_updated::create_from_userid($user->id)->trigger();
}
}
/**
* Marks user deleted in internal user database and notifies the auth plugin.
* Also unenrols user from all roles and does other cleanup.
*
* @todo Decide if this transaction is really needed (look for internal TODO:)
* @param object $user Userobject before delete (without system magic quotes)
* @return boolean success
*/
function user_delete_user($user) {
return delete_user($user);
}
/**
* Get users by id
*
* @param array $userids id of users to retrieve
* @return array
*/
function user_get_users_by_id($userids) {
global $DB;
return $DB->get_records_list('user', 'id', $userids);
}
/**
* Returns the list of default 'displayable' fields
*
* Contains database field names but also names used to generate information, such as enrolledcourses
*
* @return array of user fields
*/
function user_get_default_fields() {
return array( 'id', 'username', 'fullname', 'firstname', 'lastname', 'email',
'address', 'phone1', 'phone2', 'icq', 'skype', 'yahoo', 'aim', 'msn', 'department',
'institution', 'interests', 'firstaccess', 'lastaccess', 'auth', 'confirmed',
'idnumber', 'lang', 'theme', 'timezone', 'mailformat', 'description', 'descriptionformat',
'city', 'url', 'country', 'profileimageurlsmall', 'profileimageurl', 'customfields',
'groups', 'roles', 'preferences', 'enrolledcourses', 'suspended'
);
}
/**
*
 * Given a user record from mdl_user, build an array that contains all user details.
*
 * Warning: description file urls are rewritten to use 'webservice/pluginfile.php'.
 * This can be changed with $CFG->moodlewstextformatlinkstoimagesfile.
*
* @throws moodle_exception
* @param stdClass $user user record from mdl_user
* @param stdClass $course moodle course
* @param array $userfields required fields
* @return array|null
*/
function user_get_user_details($user, $course = null, array $userfields = array()) {
global $USER, $DB, $CFG, $PAGE;
require_once($CFG->dirroot . "/user/profile/lib.php"); // Custom field library.
require_once($CFG->dirroot . "/lib/filelib.php"); // File handling on description and friends.
$defaultfields = user_get_default_fields();
if (empty($userfields)) {
$userfields = $defaultfields;
}
foreach ($userfields as $thefield) {
if (!in_array($thefield, $defaultfields)) {
throw new moodle_exception('invaliduserfield', 'error', '', $thefield);
}
}
// Make sure id and fullname are included.
if (!in_array('id', $userfields)) {
$userfields[] = 'id';
}
if (!in_array('fullname', $userfields)) {
$userfields[] = 'fullname';
}
if (!empty($course)) {
$context = context_course::instance($course->id);
$usercontext = context_user::instance($user->id);
$canviewdetailscap = (has_capability('moodle/user:viewdetails', $context) || has_capability('moodle/user:viewdetails', $usercontext));
} else {
$context = context_user::instance($user->id);
$usercontext = $context;
$canviewdetailscap = has_capability('moodle/user:viewdetails', $usercontext);
}
$currentuser = ($user->id == $USER->id);
$isadmin = is_siteadmin($USER);
$showuseridentityfields = get_extra_user_fields($context);
if (!empty($course)) {
$canviewhiddenuserfields = has_capability('moodle/course:viewhiddenuserfields', $context);
} else {
$canviewhiddenuserfields = has_capability('moodle/user:viewhiddendetails', $context);
}
$canviewfullnames = has_capability('moodle/site:viewfullnames', $context);
if (!empty($course)) {
$canviewuseremail = has_capability('moodle/course:useremail', $context);
} else {
$canviewuseremail = false;
}
$cannotviewdescription = !empty($CFG->profilesforenrolledusersonly) && !$currentuser && !$DB->record_exists('role_assignments', array('userid' => $user->id));
if (!empty($course)) {
$canaccessallgroups = has_capability('moodle/site:accessallgroups', $context);
} else {
$canaccessallgroups = false;
}
if (!$currentuser && !$canviewdetailscap && !has_coursecontact_role($user->id)) {
// Skip this user details.
return null;
}
$userdetails = array();
$userdetails['id'] = $user->id;
if (in_array('username', $userfields)) {
if ($currentuser or has_capability('moodle/user:viewalldetails', $context)) {
$userdetails['username'] = $user->username;
}
}
if ($isadmin or $canviewfullnames) {
if (in_array('firstname', $userfields)) {
$userdetails['firstname'] = $user->firstname;
}
if (in_array('lastname', $userfields)) {
$userdetails['lastname'] = $user->lastname;
}
}
$userdetails['fullname'] = fullname($user);
if (in_array('customfields', $userfields)) {
$fields = $DB->get_recordset_sql("SELECT f.*
FROM {user_info_field} f
JOIN {user_info_category} c
ON f.categoryid=c.id
ORDER BY c.sortorder ASC, f.sortorder ASC");
$userdetails['customfields'] = array();
foreach ($fields as $field) {
require_once($CFG->dirroot.'/user/profile/field/'.$field->datatype.'/field.class.php');
$newfield = 'profile_field_'.$field->datatype;
$formfield = new $newfield($field->id, $user->id);
if ($formfield->is_visible() and !$formfield->is_empty()) {
// TODO: Part of MDL-50728, this conditional coding must be moved to
// proper profile fields API so they are self-contained.
// We only use display_data in fields that require text formatting.
if ($field->datatype == 'text' or $field->datatype == 'textarea') {
$fieldvalue = $formfield->display_data();
} else {
// Cases: datetime, checkbox and menu.
$fieldvalue = $formfield->data;
}
$userdetails['customfields'][] =
array('name' => $formfield->field->name, 'value' => $fieldvalue,
'type' => $field->datatype, 'shortname' => $formfield->field->shortname);
}
}
$fields->close();
// Unset customfields if it's empty.
if (empty($userdetails['customfields'])) {
unset($userdetails['customfields']);
}
}
// Profile image.
if (in_array('profileimageurl', $userfields)) {
$userpicture = new user_picture($user);
$userpicture->size = 1; // Size f1.
$userdetails['profileimageurl'] = $userpicture->get_url($PAGE)->out(false);
}
if (in_array('profileimageurlsmall', $userfields)) {
if (!isset($userpicture)) {
$userpicture = new user_picture($user);
}
$userpicture->size = 0; // Size f2.
$userdetails['profileimageurlsmall'] = $userpicture->get_url($PAGE)->out(false);
}
// Hidden user field.
if ($canviewhiddenuserfields) {
$hiddenfields = array();
        // Address, phone1 and phone2 do not appear in the hidden fields list but require the
        // viewhiddenfields capability according to user/profile.php.
if (!empty($user->address) && in_array('address', $userfields)) {
$userdetails['address'] = $user->address;
}
} else {
$hiddenfields = array_flip(explode(',', $CFG->hiddenuserfields));
}
if (!empty($user->phone1) && in_array('phone1', $userfields) &&
(in_array('phone1', $showuseridentityfields) or $canviewhiddenuserfields)) {
$userdetails['phone1'] = $user->phone1;
}
if (!empty($user->phone2) && in_array('phone2', $userfields) &&
(in_array('phone2', $showuseridentityfields) or $canviewhiddenuserfields)) {
$userdetails['phone2'] = $user->phone2;
}
if (isset($user->description) &&
((!isset($hiddenfields['description']) && !$cannotviewdescription) or $isadmin)) {
if (in_array('description', $userfields)) {
// Always return the descriptionformat if description is requested.
list($userdetails['description'], $userdetails['descriptionformat']) =
external_format_text($user->description, $user->descriptionformat,
$usercontext->id, 'user', 'profile', null);
}
}
if (in_array('country', $userfields) && (!isset($hiddenfields['country']) or $isadmin) && $user->country) {
$userdetails['country'] = $user->country;
}
if (in_array('city', $userfields) && (!isset($hiddenfields['city']) or $isadmin) && $user->city) {
$userdetails['city'] = $user->city;
}
if (in_array('url', $userfields) && $user->url && (!isset($hiddenfields['webpage']) or $isadmin)) {
$url = $user->url;
if (strpos($user->url, '://') === false) {
$url = 'http://'. $url;
}
$user->url = clean_param($user->url, PARAM_URL);
$userdetails['url'] = $user->url;
}
if (in_array('icq', $userfields) && $user->icq && (!isset($hiddenfields['icqnumber']) or $isadmin)) {
$userdetails['icq'] = $user->icq;
}
if (in_array('skype', $userfields) && $user->skype && (!isset($hiddenfields['skypeid']) or $isadmin)) {
$userdetails['skype'] = $user->skype;
}
if (in_array('yahoo', $userfields) && $user->yahoo && (!isset($hiddenfields['yahooid']) or $isadmin)) {
$userdetails['yahoo'] = $user->yahoo;
}
if (in_array('aim', $userfields) && $user->aim && (!isset($hiddenfields['aimid']) or $isadmin)) {
$userdetails['aim'] = $user->aim;
}
if (in_array('msn', $userfields) && $user->msn && (!isset($hiddenfields['msnid']) or $isadmin)) {
$userdetails['msn'] = $user->msn;
}
if (in_array('suspended', $userfields) && (!isset($hiddenfields['suspended']) or $isadmin)) {
$userdetails['suspended'] = (bool)$user->suspended;
}
if (in_array('firstaccess', $userfields) && (!isset($hiddenfields['firstaccess']) or $isadmin)) {
if ($user->firstaccess) {
$userdetails['firstaccess'] = $user->firstaccess;
} else {
$userdetails['firstaccess'] = 0;
}
}
if (in_array('lastaccess', $userfields) && (!isset($hiddenfields['lastaccess']) or $isadmin)) {
if ($user->lastaccess) {
$userdetails['lastaccess'] = $user->lastaccess;
} else {
$userdetails['lastaccess'] = 0;
}
}
if (in_array('email', $userfields) && ($isadmin // The admin is allowed the users email.
or $currentuser // Of course the current user is as well.
or $canviewuseremail // This is a capability in course context, it will be false in usercontext.
or in_array('email', $showuseridentityfields)
or $user->maildisplay == 1
or ($user->maildisplay == 2 and enrol_sharing_course($user, $USER)))) {
$userdetails['email'] = $user->email;
}
if (in_array('interests', $userfields)) {
$interests = core_tag_tag::get_item_tags_array('core', 'user', $user->id, core_tag_tag::BOTH_STANDARD_AND_NOT, 0, false);
if ($interests) {
$userdetails['interests'] = join(', ', $interests);
}
}
    // Department/Institution/Idnumber are not displayed on any profile; however, you can get them from the edit profile page.
if (in_array('idnumber', $userfields) && $user->idnumber) {
if (in_array('idnumber', $showuseridentityfields) or $currentuser or
has_capability('moodle/user:viewalldetails', $context)) {
$userdetails['idnumber'] = $user->idnumber;
}
}
if (in_array('institution', $userfields) && $user->institution) {
if (in_array('institution', $showuseridentityfields) or $currentuser or
has_capability('moodle/user:viewalldetails', $context)) {
$userdetails['institution'] = $user->institution;
}
}
// Isset because it's ok to have department 0.
if (in_array('department', $userfields) && isset($user->department)) {
if (in_array('department', $showuseridentityfields) or $currentuser or
has_capability('moodle/user:viewalldetails', $context)) {
$userdetails['department'] = $user->department;
}
}
if (in_array('roles', $userfields) && !empty($course)) {
// Not a big secret.
$roles = get_user_roles($context, $user->id, false);
$userdetails['roles'] = array();
foreach ($roles as $role) {
$userdetails['roles'][] = array(
'roleid' => $role->roleid,
'name' => $role->name,
'shortname' => $role->shortname,
'sortorder' => $role->sortorder
);
}
}
// If groups are in use and enforced throughout the course, then make sure we can meet in at least one course level group.
if (in_array('groups', $userfields) && !empty($course) && $canaccessallgroups) {
$usergroups = groups_get_all_groups($course->id, $user->id, $course->defaultgroupingid,
'g.id, g.name,g.description,g.descriptionformat');
$userdetails['groups'] = array();
foreach ($usergroups as $group) {
list($group->description, $group->descriptionformat) =
external_format_text($group->description, $group->descriptionformat,
$context->id, 'group', 'description', $group->id);
$userdetails['groups'][] = array('id' => $group->id, 'name' => $group->name,
'description' => $group->description, 'descriptionformat' => $group->descriptionformat);
}
}
// List of courses where the user is enrolled.
if (in_array('enrolledcourses', $userfields) && !isset($hiddenfields['mycourses'])) {
$enrolledcourses = array();
if ($mycourses = enrol_get_users_courses($user->id, true)) {
foreach ($mycourses as $mycourse) {
if ($mycourse->category) {
$coursecontext = context_course::instance($mycourse->id);
$enrolledcourse = array();
$enrolledcourse['id'] = $mycourse->id;
$enrolledcourse['fullname'] = format_string($mycourse->fullname, true, array('context' => $coursecontext));
$enrolledcourse['shortname'] = format_string($mycourse->shortname, true, array('context' => $coursecontext));
$enrolledcourses[] = $enrolledcourse;
}
}
$userdetails['enrolledcourses'] = $enrolledcourses;
}
}
// User preferences.
if (in_array('preferences', $userfields) && $currentuser) {
$preferences = array();
$userpreferences = get_user_preferences();
foreach ($userpreferences as $prefname => $prefvalue) {
$preferences[] = array('name' => $prefname, 'value' => $prefvalue);
}
$userdetails['preferences'] = $preferences;
}
if ($currentuser or has_capability('moodle/user:viewalldetails', $context)) {
$extrafields = ['auth', 'confirmed', 'lang', 'theme', 'timezone', 'mailformat'];
foreach ($extrafields as $extrafield) {
if (in_array($extrafield, $userfields) && isset($user->$extrafield)) {
$userdetails[$extrafield] = $user->$extrafield;
}
}
}
return $userdetails;
}
/**
 * Tries to obtain user details, either directly from the user's system profile
* or through one of the user's course enrollments (course profile).
*
* @param stdClass $user The user.
 * @return array|null The allowed user details, or null if unsuccessful.
*/
function user_get_user_details_courses($user) {
global $USER;
$userdetails = null;
// Get the courses that the user is enrolled in (only active).
$courses = enrol_get_users_courses($user->id, true);
$systemprofile = false;
if (can_view_user_details_cap($user) || ($user->id == $USER->id) || has_coursecontact_role($user->id)) {
$systemprofile = true;
}
// Try using system profile.
if ($systemprofile) {
$userdetails = user_get_user_details($user, null);
} else {
// Try through course profile.
foreach ($courses as $course) {
if (can_view_user_details_cap($user, $course) || ($user->id == $USER->id) || has_coursecontact_role($user->id)) {
$userdetails = user_get_user_details($user, $course);
}
}
}
return $userdetails;
}
/**
* Check if $USER have the necessary capabilities to obtain user details.
*
* @param stdClass $user
* @param stdClass $course if null then only consider system profile otherwise also consider the course's profile.
* @return bool true if $USER can view user details.
*/
function can_view_user_details_cap($user, $course = null) {
// Check $USER has the capability to view the user details at user context.
$usercontext = context_user::instance($user->id);
$result = has_capability('moodle/user:viewdetails', $usercontext);
// Otherwise can $USER see them at course context.
if (!$result && !empty($course)) {
$context = context_course::instance($course->id);
$result = has_capability('moodle/user:viewdetails', $context);
}
return $result;
}
/**
* Return a list of page types
* @param string $pagetype current page type
* @param stdClass $parentcontext Block's parent context
* @param stdClass $currentcontext Current context of block
* @return array
*/
function user_page_type_list($pagetype, $parentcontext, $currentcontext) {
return array('user-profile' => get_string('page-user-profile', 'pagetype'));
}
/**
* Count the number of failed login attempts for the given user, since last successful login.
*
* @param int|stdclass $user user id or object.
* @param bool $reset Resets failed login count, if set to true.
*
* @return int number of failed login attempts since the last successful login.
*/
function user_count_login_failures($user, $reset = true) {
global $DB;
if (!is_object($user)) {
$user = $DB->get_record('user', array('id' => $user), '*', MUST_EXIST);
}
if ($user->deleted) {
// Deleted user, nothing to do.
return 0;
}
$count = get_user_preferences('login_failed_count_since_success', 0, $user);
if ($reset) {
set_user_preference('login_failed_count_since_success', 0, $user);
}
return $count;
}
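// Hedged example of the intended call pattern (the surrounding login flow is an
// assumption): peek at the counter first, then let the default call reset it once
// the warning has been shown.
//
//     $failures = user_count_login_failures($USER, false);  // read without resetting
//     if ($failures) {
//         // ... render the "failed login attempts" notice ...
//         user_count_login_failures($USER);                  // now reset the counter
//     }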
/**
 * Converts a string into a flat array of menu items, where each menu item is a
 * stdClass with fields itemtype, url, title, pix, and imgsrc.
*
* @param string $text the menu items definition
* @param moodle_page $page the current page
* @return array
*/
function user_convert_text_to_menu_items($text, $page) {
global $OUTPUT, $CFG;
$lines = explode("\n", $text);
$items = array();
$lastchild = null;
$lastdepth = null;
$lastsort = 0;
$children = array();
foreach ($lines as $line) {
$line = trim($line);
$bits = explode('|', $line, 3);
$itemtype = 'link';
if (preg_match("/^#+$/", $line)) {
$itemtype = 'divider';
} else if (!array_key_exists(0, $bits) or empty($bits[0])) {
// Every item must have a name to be valid.
continue;
} else {
$bits[0] = ltrim($bits[0], '-');
}
// Create the child.
$child = new stdClass();
$child->itemtype = $itemtype;
if ($itemtype === 'divider') {
// Add the divider to the list of children and skip link
// processing.
$children[] = $child;
continue;
}
// Name processing.
$namebits = explode(',', $bits[0], 2);
if (count($namebits) == 2) {
// Check the validity of the identifier part of the string.
if (clean_param($namebits[0], PARAM_STRINGID) !== '') {
// Treat this as a language string.
$child->title = get_string($namebits[0], $namebits[1]);
$child->titleidentifier = implode(',', $namebits);
}
}
if (empty($child->title)) {
// Use it as is, don't even clean it.
$child->title = $bits[0];
$child->titleidentifier = str_replace(" ", "-", $bits[0]);
}
// URL processing.
if (!array_key_exists(1, $bits) or empty($bits[1])) {
// Set the url to null, and set the itemtype to invalid.
$bits[1] = null;
$child->itemtype = "invalid";
} else {
// Nasty hack to replace the grades with the direct url.
if (strpos($bits[1], '/grade/report/mygrades.php') !== false) {
$bits[1] = user_mygrades_url();
}
// Make sure the url is a moodle url.
$bits[1] = new moodle_url(trim($bits[1]));
}
$child->url = $bits[1];
// PIX processing.
$pixpath = "t/edit";
if (!array_key_exists(2, $bits) or empty($bits[2])) {
// Use the default.
$child->pix = $pixpath;
} else {
// Check for the specified image existing.
$pixpath = "t/" . $bits[2];
if ($page->theme->resolve_image_location($pixpath, 'moodle', true)) {
// Use the image.
$child->pix = $pixpath;
} else {
// Treat it like a URL.
$child->pix = null;
$child->imgsrc = $bits[2];
}
}
// Add this child to the list of children.
$children[] = $child;
}
return $children;
}
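// Illustrative sketch of the format parsed by user_convert_text_to_menu_items()
// (the menu lines themselves are assumptions, not shipped defaults). Each line is
// "name|url|pix"; a "langkey,component" name is resolved via get_string(), a line of
// only '#' characters becomes a divider, and the pix key is looked up as "t/<pix>".
//
//     $text  = "profile,moodle|/user/profile.php|edit\n" .
//              "###\n" .
//              "My custom page|/local/mypage/index.php";
//     $items = user_convert_text_to_menu_items($text, $PAGE);
//     // $items[0]->title === get_string('profile', 'moodle'); $items[1] is a divider.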
/**
* Get a list of essential user navigation items.
*
* @param stdclass $user user object.
* @param moodle_page $page page object.
* @param array $options associative array.
* options are:
* - avatarsize=35 (size of avatar image)
* @return stdClass $returnobj navigation information object, where:
*
* $returnobj->navitems array array of links where each link is a
* stdClass with fields url, title, and
* pix
* $returnobj->metadata array array of useful user metadata to be
* used when constructing navigation;
* fields include:
*
* ROLE FIELDS
* asotherrole bool whether viewing as another role
* rolename string name of the role
*
* USER FIELDS
* These fields are for the currently-logged in user, or for
* the user that the real user is currently logged in as.
*
* userid int the id of the user in question
* userfullname string the user's full name
* userprofileurl moodle_url the url of the user's profile
* useravatar string a HTML fragment - the rendered
* user_picture for this user
* userloginfail string an error string denoting the number
* of login failures since last login
*
* "REAL USER" FIELDS
* These fields are for when asotheruser is true, and
* correspond to the underlying "real user".
*
* asotheruser bool whether viewing as another user
* realuserid int the id of the user in question
* realuserfullname string the user's full name
* realuserprofileurl moodle_url the url of the user's profile
* realuseravatar string a HTML fragment - the rendered
* user_picture for this user
*
* MNET PROVIDER FIELDS
* asmnetuser bool whether viewing as a user from an
* MNet provider
* mnetidprovidername string name of the MNet provider
* mnetidproviderwwwroot string URL of the MNet provider
*/
function user_get_user_navigation_info($user, $page, $options = array()) {
global $OUTPUT, $DB, $SESSION, $CFG;
$returnobject = new stdClass();
$returnobject->navitems = array();
$returnobject->metadata = array();
$course = $page->course;
// Query the environment.
$context = context_course::instance($course->id);
// Get basic user metadata.
$returnobject->metadata['userid'] = $user->id;
$returnobject->metadata['userfullname'] = fullname($user, true);
$returnobject->metadata['userprofileurl'] = new moodle_url('/user/profile.php', array(
'id' => $user->id
));
$avataroptions = array('link' => false, 'visibletoscreenreaders' => false);
if (!empty($options['avatarsize'])) {
$avataroptions['size'] = $options['avatarsize'];
}
$returnobject->metadata['useravatar'] = $OUTPUT->user_picture (
$user, $avataroptions
);
// Build a list of items for a regular user.
// Query MNet status.
if ($returnobject->metadata['asmnetuser'] = is_mnet_remote_user($user)) {
$mnetidprovider = $DB->get_record('mnet_host', array('id' => $user->mnethostid));
$returnobject->metadata['mnetidprovidername'] = $mnetidprovider->name;
$returnobject->metadata['mnetidproviderwwwroot'] = $mnetidprovider->wwwroot;
}
// Did the user just log in?
if (isset($SESSION->justloggedin)) {
// Don't unset this flag as login_info still needs it.
if (!empty($CFG->displayloginfailures)) {
// Don't reset the count either, as login_info() still needs it too.
if ($count = user_count_login_failures($user, false)) {
// Get login failures string.
$a = new stdClass();
$a->attempts = html_writer::tag('span', $count, array('class' => 'value'));
$returnobject->metadata['userloginfail'] =
get_string('failedloginattempts', '', $a);
}
}
}
// Links: Dashboard.
$myhome = new stdClass();
$myhome->itemtype = 'link';
$myhome->url = new moodle_url('/my/');
$myhome->title = get_string('mymoodle', 'admin');
$myhome->titleidentifier = 'mymoodle,admin';
$myhome->pix = "i/dashboard";
$returnobject->navitems[] = $myhome;
// Links: My Profile.
$myprofile = new stdClass();
$myprofile->itemtype = 'link';
$myprofile->url = new moodle_url('/user/profile.php', array('id' => $user->id));
$myprofile->title = get_string('profile');
$myprofile->titleidentifier = 'profile,moodle';
$myprofile->pix = "i/user";
$returnobject->navitems[] = $myprofile;
$returnobject->metadata['asotherrole'] = false;
// Before we add the last items (usually a logout + switch role link), add any
// custom-defined items.
$customitems = user_convert_text_to_menu_items($CFG->customusermenuitems, $page);
foreach ($customitems as $item) {
$returnobject->navitems[] = $item;
}
if ($returnobject->metadata['asotheruser'] = \core\session\manager::is_loggedinas()) {
$realuser = \core\session\manager::get_realuser();
// Save values for the real user, as $user will be full of data for the
// user the user is disguised as.
$returnobject->metadata['realuserid'] = $realuser->id;
$returnobject->metadata['realuserfullname'] = fullname($realuser, true);
$returnobject->metadata['realuserprofileurl'] = new moodle_url('/user/profile.php', array(
'id' => $realuser->id
));
$returnobject->metadata['realuseravatar'] = $OUTPUT->user_picture($realuser, $avataroptions);
// Build a user-revert link.
$userrevert = new stdClass();
$userrevert->itemtype = 'link';
$userrevert->url = new moodle_url('/course/loginas.php', array(
'id' => $course->id,
'sesskey' => sesskey()
));
$userrevert->pix = "a/logout";
$userrevert->title = get_string('logout');
$userrevert->titleidentifier = 'logout,moodle';
$returnobject->navitems[] = $userrevert;
} else {
// Build a logout link.
$logout = new stdClass();
$logout->itemtype = 'link';
$logout->url = new moodle_url('/login/logout.php', array('sesskey' => sesskey()));
$logout->pix = "a/logout";
$logout->title = get_string('logout');
$logout->titleidentifier = 'logout,moodle';
$returnobject->navitems[] = $logout;
}
if (is_role_switched($course->id)) {
if ($role = $DB->get_record('role', array('id' => $user->access['rsw'][$context->path]))) {
// Build role-return link instead of logout link.
$rolereturn = new stdClass();
$rolereturn->itemtype = 'link';
$rolereturn->url = new moodle_url('/course/switchrole.php', array(
'id' => $course->id,
'sesskey' => sesskey(),
'switchrole' => 0,
'returnurl' => $page->url->out_as_local_url(false)
));
$rolereturn->pix = "a/logout";
$rolereturn->title = get_string('switchrolereturn');
$rolereturn->titleidentifier = 'switchrolereturn,moodle';
$returnobject->navitems[] = $rolereturn;
$returnobject->metadata['asotherrole'] = true;
$returnobject->metadata['rolename'] = role_get_name($role, $context);
}
} else {
// Build switch role link.
$roles = get_switchable_roles($context);
if (is_array($roles) && (count($roles) > 0)) {
$switchrole = new stdClass();
$switchrole->itemtype = 'link';
$switchrole->url = new moodle_url('/course/switchrole.php', array(
'id' => $course->id,
'switchrole' => -1,
'returnurl' => $page->url->out_as_local_url(false)
));
$switchrole->pix = "i/switchrole";
$switchrole->title = get_string('switchroleto');
$switchrole->titleidentifier = 'switchroleto,moodle';
$returnobject->navitems[] = $switchrole;
}
}
return $returnobject;
}
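// Minimal usage sketch (assumed calling context, e.g. a theme renderer building the
// user menu): only 'avatarsize' is read from the options array above.
//
//     $opts = user_get_user_navigation_info($USER, $PAGE, array('avatarsize' => 100));
//     foreach ($opts->navitems as $item) {
//         // $item->itemtype is 'link', 'divider' or 'invalid'; links carry
//         // $item->url, $item->title and $item->pix (or $item->imgsrc).
//     }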
/**
* Add password to the list of used hashes for this user.
*
* This is supposed to be used from:
* 1/ change own password form
* 2/ password reset process
* 3/ user signup in auth plugins if password changing supported
*
* @param int $userid user id
* @param string $password plaintext password
* @return void
*/
function user_add_password_history($userid, $password) {
global $CFG, $DB;
if (empty($CFG->passwordreuselimit) or $CFG->passwordreuselimit < 0) {
return;
}
    // Note: this is using separate code from normal password hashing because
// we need to have this under control in the future. Also the auth
// plugin might not store the passwords locally at all.
$record = new stdClass();
$record->userid = $userid;
$record->hash = password_hash($password, PASSWORD_DEFAULT);
$record->timecreated = time();
$DB->insert_record('user_password_history', $record);
$i = 0;
$records = $DB->get_records('user_password_history', array('userid' => $userid), 'timecreated DESC, id DESC');
foreach ($records as $record) {
$i++;
if ($i > $CFG->passwordreuselimit) {
$DB->delete_records('user_password_history', array('id' => $record->id));
}
}
}
/**
* Was this password used before on change or reset password page?
*
* The $CFG->passwordreuselimit setting determines
* how many times different password needs to be used
* before allowing previously used password again.
*
* @param int $userid user id
* @param string $password plaintext password
* @return bool true if password reused
*/
function user_is_previously_used_password($userid, $password) {
global $CFG, $DB;
if (empty($CFG->passwordreuselimit) or $CFG->passwordreuselimit < 0) {
return false;
}
$reused = false;
$i = 0;
$records = $DB->get_records('user_password_history', array('userid' => $userid), 'timecreated DESC, id DESC');
foreach ($records as $record) {
$i++;
if ($i > $CFG->passwordreuselimit) {
$DB->delete_records('user_password_history', array('id' => $record->id));
continue;
}
// NOTE: this is slow but we cannot compare the hashes directly any more.
if (password_verify($password, $record->hash)) {
$reused = true;
}
}
return $reused;
}
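// Hedged sketch of how the two helpers above are typically combined on a
// change-password path (the surrounding flow is an assumption, not part of this file):
//
//     if (user_is_previously_used_password($user->id, $newpassword)) {
//         // reject: the password was used within the last $CFG->passwordreuselimit changes
//     } else {
//         update_internal_user_password($user, $newpassword);
//         user_add_password_history($user->id, $newpassword);
//     }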
/**
* Remove a user device from the Moodle database (for PUSH notifications usually).
*
* @param string $uuid The device UUID.
* @param string $appid The app id. If empty all the devices matching the UUID for the user will be removed.
* @return bool true if removed, false if the device didn't exists in the database
* @since Moodle 2.9
*/
function user_remove_user_device($uuid, $appid = "") {
global $DB, $USER;
$conditions = array('uuid' => $uuid, 'userid' => $USER->id);
if (!empty($appid)) {
$conditions['appid'] = $appid;
}
if (!$DB->count_records('user_devices', $conditions)) {
return false;
}
$DB->delete_records('user_devices', $conditions);
return true;
}
/**
* Trigger user_list_viewed event.
*
* @param stdClass $course course object
* @param stdClass $context course context object
* @since Moodle 2.9
*/
function user_list_view($course, $context) {
$event = \core\event\user_list_viewed::create(array(
'objectid' => $course->id,
'courseid' => $course->id,
'context' => $context,
'other' => array(
'courseshortname' => $course->shortname,
'coursefullname' => $course->fullname
)
));
$event->trigger();
}
/**
* Returns the url to use for the "Grades" link in the user navigation.
*
* @param int $userid The user's ID.
* @param int $courseid The course ID if available.
* @return mixed A URL to be directed to for "Grades".
*/
function user_mygrades_url($userid = null, $courseid = SITEID) {
global $CFG, $USER;
$url = null;
if (isset($CFG->grade_mygrades_report) && $CFG->grade_mygrades_report != 'external') {
if (isset($userid) && $USER->id != $userid) {
// Send to the gradebook report.
$url = new moodle_url('/grade/report/' . $CFG->grade_mygrades_report . '/index.php',
array('id' => $courseid, 'userid' => $userid));
} else {
$url = new moodle_url('/grade/report/' . $CFG->grade_mygrades_report . '/index.php');
}
} else if (isset($CFG->grade_mygrades_report) && $CFG->grade_mygrades_report == 'external'
&& !empty($CFG->gradereport_mygradeurl)) {
$url = $CFG->gradereport_mygradeurl;
} else {
$url = $CFG->wwwroot;
}
return $url;
}
/**
* Check if a user has the permission to viewdetails in a shared course's context.
*
* @param object $user The other user's details.
* @param object $course Use this course to see if we have permission to see this user's profile.
* @param context $usercontext The user context if available.
* @return bool true for ability to view this user, else false.
*/
function user_can_view_profile($user, $course = null, $usercontext = null) {
global $USER, $CFG;
if ($user->deleted) {
return false;
}
// Perform some quick checks and eventually return early.
// Number 1.
if (empty($CFG->forceloginforprofiles)) {
return true;
} else {
if (!isloggedin() || isguestuser()) {
// User is not logged in and forceloginforprofile is set, we need to return now.
return false;
}
}
// Number 2.
if ($USER->id == $user->id) {
return true;
}
if (empty($usercontext)) {
$usercontext = context_user::instance($user->id);
}
// Number 3.
if (has_capability('moodle/user:viewdetails', $usercontext) || has_capability('moodle/user:viewalldetails', $usercontext)) {
return true;
}
// Number 4.
if (has_coursecontact_role($user->id)) {
return true;
}
if (isset($course)) {
$sharedcourses = array($course);
} else {
$sharedcourses = enrol_get_shared_courses($USER->id, $user->id, true);
}
if (empty($sharedcourses)) {
return false;
}
foreach ($sharedcourses as $sharedcourse) {
$coursecontext = context_course::instance($sharedcourse->id);
if (has_capability('moodle/user:viewdetails', $coursecontext)) {
if (!groups_user_groups_visible($sharedcourse, $user->id)) {
// Not a member of the same group.
continue;
}
return true;
}
}
return false;
}
/**
* Returns users tagged with a specified tag.
*
* @param core_tag_tag $tag
* @param bool $exclusivemode if set to true it means that no other entities tagged with this tag
* are displayed on the page and the per-page limit may be bigger
* @param int $fromctx context id where the link was displayed, may be used by callbacks
* to display items in the same context first
* @param int $ctx context id where to search for records
* @param bool $rec search in subcontexts as well
* @param int $page 0-based number of page being displayed
* @return \core_tag\output\tagindex
*/
function user_get_tagged_users($tag, $exclusivemode = false, $fromctx = 0, $ctx = 0, $rec = 1, $page = 0) {
global $PAGE;
if ($ctx && $ctx != context_system::instance()->id) {
$usercount = 0;
} else {
// Users can only be displayed in system context.
$usercount = $tag->count_tagged_items('core', 'user',
'it.deleted=:notdeleted', array('notdeleted' => 0));
}
$perpage = $exclusivemode ? 24 : 5;
$content = '';
$totalpages = ceil($usercount / $perpage);
if ($usercount) {
$userlist = $tag->get_tagged_items('core', 'user', $page * $perpage, $perpage,
'it.deleted=:notdeleted', array('notdeleted' => 0));
$renderer = $PAGE->get_renderer('core', 'user');
$content .= $renderer->user_list($userlist, $exclusivemode);
}
return new core_tag\output\tagindex($tag, 'core', 'user', $content,
$exclusivemode, $fromctx, $ctx, $rec, $page, $totalpages);
}
/**
* Returns the SQL used by the participants table.
*
* @param int $courseid The course id
* @param int $groupid The groupid, 0 means all groups
* @param int $accesssince The time since last access, 0 means any time
* @param int $roleid The role id, 0 means all roles
* @param int $enrolid The enrolment id, 0 means all enrolment methods will be returned.
* @param int $statusid The user enrolment status, -1 means all enrolments regardless of the status will be returned, if allowed.
* @param string|array $search The search that was performed, empty means perform no search
* @param string $additionalwhere Any additional SQL to add to where
* @param array $additionalparams The additional params
* @return array
*/
function user_get_participants_sql($courseid, $groupid = 0, $accesssince = 0, $roleid = 0, $enrolid = 0, $statusid = -1,
$search = '', $additionalwhere = '', $additionalparams = array()) {
global $DB;
// Get the context.
$context = \context_course::instance($courseid, MUST_EXIST);
$isfrontpage = ($courseid == SITEID);
// Default filter settings. We only show active by default, especially if the user has no capability to review enrolments.
$onlyactive = true;
$onlysuspended = false;
if (has_capability('moodle/course:enrolreview', $context)) {
switch ($statusid) {
case ENROL_USER_ACTIVE:
// Nothing to do here.
break;
case ENROL_USER_SUSPENDED:
$onlyactive = false;
$onlysuspended = true;
break;
default:
// If the user has capability to review user enrolments, but statusid is set to -1, set $onlyactive to false.
$onlyactive = false;
break;
}
}
list($esql, $params) = get_enrolled_sql($context, null, $groupid, $onlyactive, $onlysuspended, $enrolid);
$joins = array('FROM {user} u');
$wheres = array();
$userfields = get_extra_user_fields($context, array('username', 'lang', 'timezone', 'maildisplay'));
$userfieldssql = user_picture::fields('u', $userfields);
if ($isfrontpage) {
$select = "SELECT $userfieldssql, u.lastaccess";
$joins[] = "JOIN ($esql) e ON e.id = u.id"; // Everybody on the frontpage usually.
if ($accesssince) {
$wheres[] = user_get_user_lastaccess_sql($accesssince);
}
} else {
$select = "SELECT $userfieldssql, COALESCE(ul.timeaccess, 0) AS lastaccess";
$joins[] = "JOIN ($esql) e ON e.id = u.id"; // Course enrolled users only.
// Not everybody has accessed the course yet.
$joins[] = 'LEFT JOIN {user_lastaccess} ul ON (ul.userid = u.id AND ul.courseid = :courseid)';
$params['courseid'] = $courseid;
if ($accesssince) {
$wheres[] = user_get_course_lastaccess_sql($accesssince);
}
}
// Performance hacks - we preload user contexts together with accounts.
$ccselect = ', ' . context_helper::get_preload_record_columns_sql('ctx');
$ccjoin = 'LEFT JOIN {context} ctx ON (ctx.instanceid = u.id AND ctx.contextlevel = :contextlevel)';
$params['contextlevel'] = CONTEXT_USER;
$select .= $ccselect;
$joins[] = $ccjoin;
// Limit list to users with some role only.
if ($roleid) {
// We want to query both the current context and parent contexts.
list($relatedctxsql, $relatedctxparams) = $DB->get_in_or_equal($context->get_parent_context_ids(true),
SQL_PARAMS_NAMED, 'relatedctx');
$wheres[] = "u.id IN (SELECT userid FROM {role_assignments} WHERE roleid = :roleid AND contextid $relatedctxsql)";
$params = array_merge($params, array('roleid' => $roleid), $relatedctxparams);
}
if (!empty($search)) {
if (!is_array($search)) {
$search = [$search];
}
foreach ($search as $index => $keyword) {
$searchkey1 = 'search' . $index . '1';
$searchkey2 = 'search' . $index . '2';
$searchkey3 = 'search' . $index . '3';
$fullname = $DB->sql_fullname('u.firstname', 'u.lastname');
$wheres[] = '(' . $DB->sql_like($fullname, ':' . $searchkey1, false, false) .
' OR ' . $DB->sql_like('email', ':' . $searchkey2, false, false) .
' OR ' . $DB->sql_like('idnumber', ':' . $searchkey3, false, false) . ') ';
$params[$searchkey1] = "%$keyword%";
$params[$searchkey2] = "%$keyword%";
$params[$searchkey3] = "%$keyword%";
}
}
if (!empty($additionalwhere)) {
$wheres[] = $additionalwhere;
$params = array_merge($params, $additionalparams);
}
$from = implode("\n", $joins);
if ($wheres) {
$where = 'WHERE ' . implode(' AND ', $wheres);
} else {
$where = '';
}
return array($select, $from, $where, $params);
}
/**
* Returns the total number of participants for a given course.
*
* @param int $courseid The course id
* @param int $groupid The groupid, 0 means all groups
* @param int $accesssince The time since last access, 0 means any time
* @param int $roleid The role id, 0 means all roles
* @param int $enrolid The applied filter for the user enrolment ID.
 * @param int $statusid The applied filter for the user's enrolment status.
* @param string|array $search The search that was performed, empty means perform no search
* @param string $additionalwhere Any additional SQL to add to where
* @param array $additionalparams The additional params
* @return int
*/
function user_get_total_participants($courseid, $groupid = 0, $accesssince = 0, $roleid = 0, $enrolid = 0, $statusid = -1,
$search = '', $additionalwhere = '', $additionalparams = array()) {
global $DB;
list($select, $from, $where, $params) = user_get_participants_sql($courseid, $groupid, $accesssince, $roleid, $enrolid,
$statusid, $search, $additionalwhere, $additionalparams);
return $DB->count_records_sql("SELECT COUNT(u.id) $from $where", $params);
}
/**
* Returns the participants for a given course.
*
* @param int $courseid The course id
* @param int $groupid The group id
* @param int $accesssince The time since last access
* @param int $roleid The role id
* @param int $enrolid The applied filter for the user enrolment ID.
 * @param int $statusid The applied filter for the user's enrolment status.
* @param string $search The search that was performed
* @param string $additionalwhere Any additional SQL to add to where
* @param array $additionalparams The additional params
* @param string $sort The SQL sort
* @param int $limitfrom return a subset of records, starting at this point (optional).
* @param int $limitnum return a subset comprising this many records (optional, required if $limitfrom is set).
* @return moodle_recordset
*/
function user_get_participants($courseid, $groupid = 0, $accesssince, $roleid, $enrolid = 0, $statusid, $search,
$additionalwhere = '', $additionalparams = array(), $sort = '', $limitfrom = 0, $limitnum = 0) {
global $DB;
list($select, $from, $where, $params) = user_get_participants_sql($courseid, $groupid, $accesssince, $roleid, $enrolid,
$statusid, $search, $additionalwhere, $additionalparams);
return $DB->get_recordset_sql("$select $from $where $sort", $params, $limitfrom, $limitnum);
}
/**
* Returns SQL that can be used to limit a query to a period where the user last accessed a course.
*
* @param int $accesssince The time since last access
* @param string $tableprefix
* @return string
*/
function user_get_course_lastaccess_sql($accesssince = null, $tableprefix = 'ul') {
if (empty($accesssince)) {
return '';
}
if ($accesssince == -1) { // Never.
return $tableprefix . '.timeaccess = 0';
} else {
return $tableprefix . '.timeaccess != 0 AND ul.timeaccess < ' . $accesssince;
}
}
/**
* Returns SQL that can be used to limit a query to a period where the user last accessed the system.
*
* @param int $accesssince The time since last access
* @param string $tableprefix
* @return string
*/
function user_get_user_lastaccess_sql($accesssince = null, $tableprefix = 'u') {
if (empty($accesssince)) {
return '';
}
if ($accesssince == -1) { // Never.
return $tableprefix . '.lastaccess = 0';
} else {
return $tableprefix . '.lastaccess != 0 AND u.lastaccess < ' . $accesssince;
}
}
/**
* Callback for inplace editable API.
*
* @param string $itemtype - Only user_roles is supported.
* @param string $itemid - Courseid and userid separated by a :
* @param string $newvalue - json encoded list of roleids.
* @return \core\output\inplace_editable
*/
function core_user_inplace_editable($itemtype, $itemid, $newvalue) {
if ($itemtype === 'user_roles') {
return \core_user\output\user_roles_editable::update($itemid, $newvalue);
}
}
| pramithkm/moodle | user/lib.php | PHP | gpl-3.0 | 54,890 |
var searchData=
[
['file',['File',['../class_gra_vito_n_1_1_utils_1_1_file.html',1,'GraVitoN::Utils']]]
];
| null--/graviton | doc/GraVitoN/html/search/classes_66.js | JavaScript | gpl-3.0 | 109 |
#include <AP_HAL/AP_HAL.h>
#if HAL_WITH_UAVCAN
#include "AP_RangeFinder_UAVCAN.h"
#include <AP_BoardConfig/AP_BoardConfig_CAN.h>
#include <AP_UAVCAN/AP_UAVCAN.h>
#include <uavcan/equipment/range_sensor/Measurement.hpp>
extern const AP_HAL::HAL& hal;
#define debug_range_finder_uavcan(level_debug, can_driver, fmt, args...) do { if ((level_debug) <= AP::can().get_debug_level_driver(can_driver)) { hal.console->printf(fmt, ##args); }} while (0)
//UAVCAN Frontend Registry Binder
UC_REGISTRY_BINDER(MeasurementCb, uavcan::equipment::range_sensor::Measurement);
/*
constructor - registers instance at top RangeFinder driver
*/
AP_RangeFinder_UAVCAN::AP_RangeFinder_UAVCAN(RangeFinder::RangeFinder_State &_state, AP_RangeFinder_Params &_params) :
AP_RangeFinder_Backend(_state, _params)
{}
//links the rangefinder uavcan message to this backend
void AP_RangeFinder_UAVCAN::subscribe_msgs(AP_UAVCAN* ap_uavcan)
{
if (ap_uavcan == nullptr) {
return;
}
auto* node = ap_uavcan->get_node();
uavcan::Subscriber<uavcan::equipment::range_sensor::Measurement, MeasurementCb> *measurement_listener;
measurement_listener = new uavcan::Subscriber<uavcan::equipment::range_sensor::Measurement, MeasurementCb>(*node);
// Register method to handle incoming RangeFinder measurement
const int measurement_listener_res = measurement_listener->start(MeasurementCb(ap_uavcan, &handle_measurement));
if (measurement_listener_res < 0) {
AP_HAL::panic("UAVCAN RangeFinder subscriber start problem\n\r");
return;
}
}
//Method to find the backend relating to the node id
AP_RangeFinder_UAVCAN* AP_RangeFinder_UAVCAN::get_uavcan_backend(AP_UAVCAN* ap_uavcan, uint8_t node_id, uint8_t address, bool create_new)
{
if (ap_uavcan == nullptr) {
return nullptr;
}
AP_RangeFinder_UAVCAN* driver = nullptr;
//Scan through the Rangefinder params to find UAVCAN RFND with matching address.
for (uint8_t i = 0; i < RANGEFINDER_MAX_INSTANCES; i++) {
if ((RangeFinder::Type)AP::rangefinder()->params[i].type.get() == RangeFinder::Type::UAVCAN &&
AP::rangefinder()->params[i].address == address) {
driver = (AP_RangeFinder_UAVCAN*)AP::rangefinder()->drivers[i];
}
//Double check if the driver was initialised as UAVCAN Type
if (driver != nullptr && (driver->_backend_type == RangeFinder::Type::UAVCAN)) {
if (driver->_ap_uavcan == ap_uavcan &&
driver->_node_id == node_id) {
return driver;
} else {
//we found a possible duplicate addressed sensor
//we return nothing in such scenario
return nullptr;
}
}
}
if (create_new) {
for (uint8_t i = 0; i < RANGEFINDER_MAX_INSTANCES; i++) {
if ((RangeFinder::Type)AP::rangefinder()->params[i].type.get() == RangeFinder::Type::UAVCAN &&
AP::rangefinder()->params[i].address == address) {
if (AP::rangefinder()->drivers[i] != nullptr) {
//we probably initialised this driver as something else, reboot is required for setting
//it up as UAVCAN type
return nullptr;
}
AP::rangefinder()->drivers[i] = new AP_RangeFinder_UAVCAN(AP::rangefinder()->state[i], AP::rangefinder()->params[i]);
driver = (AP_RangeFinder_UAVCAN*)AP::rangefinder()->drivers[i];
if (driver == nullptr) {
break;
}
AP::rangefinder()->num_instances = MAX(i+1, AP::rangefinder()->num_instances);
//Assign node id and respective uavcan driver, for identification
if (driver->_ap_uavcan == nullptr) {
driver->_ap_uavcan = ap_uavcan;
driver->_node_id = node_id;
break;
}
}
}
}
return driver;
}
//Called from frontend to update with the readings received by handler
void AP_RangeFinder_UAVCAN::update()
{
WITH_SEMAPHORE(_sem);
if ((AP_HAL::millis() - _last_reading_ms) > 500) {
//if data is older than 500ms, report NoData
set_status(RangeFinder::Status::NoData);
} else if (_status == RangeFinder::Status::Good && new_data) {
//copy over states
state.distance_cm = _distance_cm;
state.last_reading_ms = _last_reading_ms;
update_status();
new_data = false;
} else if (_status != RangeFinder::Status::Good) {
//handle additional states received by measurement handler
set_status(_status);
}
}
//RangeFinder message handler
void AP_RangeFinder_UAVCAN::handle_measurement(AP_UAVCAN* ap_uavcan, uint8_t node_id, const MeasurementCb &cb)
{
//fetch the matching uavcan driver, node id and sensor id backend instance
AP_RangeFinder_UAVCAN* driver = get_uavcan_backend(ap_uavcan, node_id, cb.msg->sensor_id, true);
if (driver == nullptr) {
return;
}
WITH_SEMAPHORE(driver->_sem);
switch (cb.msg->reading_type) {
case uavcan::equipment::range_sensor::Measurement::READING_TYPE_VALID_RANGE:
{
//update the states in backend instance
driver->_distance_cm = cb.msg->range*100.0f;
driver->_last_reading_ms = AP_HAL::millis();
driver->_status = RangeFinder::Status::Good;
driver->new_data = true;
break;
}
//Additional states supported by RFND message
case uavcan::equipment::range_sensor::Measurement::READING_TYPE_TOO_CLOSE:
{
driver->_last_reading_ms = AP_HAL::millis();
driver->_status = RangeFinder::Status::OutOfRangeLow;
break;
}
case uavcan::equipment::range_sensor::Measurement::READING_TYPE_TOO_FAR:
{
driver->_last_reading_ms = AP_HAL::millis();
driver->_status = RangeFinder::Status::OutOfRangeHigh;
break;
}
default:
{
break;
}
}
//copy over the sensor type of Rangefinder
switch (cb.msg->sensor_type) {
case uavcan::equipment::range_sensor::Measurement::SENSOR_TYPE_SONAR:
{
driver->_sensor_type = MAV_DISTANCE_SENSOR_ULTRASOUND;
break;
}
case uavcan::equipment::range_sensor::Measurement::SENSOR_TYPE_LIDAR:
{
driver->_sensor_type = MAV_DISTANCE_SENSOR_LASER;
break;
}
case uavcan::equipment::range_sensor::Measurement::SENSOR_TYPE_RADAR:
{
driver->_sensor_type = MAV_DISTANCE_SENSOR_RADAR;
break;
}
default:
{
driver->_sensor_type = MAV_DISTANCE_SENSOR_UNKNOWN;
break;
}
}
}
#endif // HAL_WITH_UAVCAN
| R-Lefebvre/ardupilot | libraries/AP_RangeFinder/AP_RangeFinder_UAVCAN.cpp | C++ | gpl-3.0 | 6,923 |
/**
* This file is part of Graylog.
*
* Graylog is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Graylog is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Graylog. If not, see <http://www.gnu.org/licenses/>.
*/
package org.graylog2.alerts;
import org.graylog2.plugin.alarms.AlertCondition;
import org.graylog2.plugin.database.ValidationException;
import org.graylog2.plugin.streams.Stream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import java.util.Optional;
public class AlertScanner {
private static final Logger LOG = LoggerFactory.getLogger(AlertScanner.class);
private final AlertService alertService;
private final AlertNotificationsSender alertNotificationsSender;
@Inject
public AlertScanner(AlertService alertService, AlertNotificationsSender alertNotificationsSender) {
this.alertService = alertService;
this.alertNotificationsSender = alertNotificationsSender;
}
private Alert handleTriggeredAlert(AlertCondition.CheckResult result, Stream stream, AlertCondition alertCondition) throws ValidationException {
// Persist alert.
final Alert alert = alertService.factory(result);
alertService.save(alert);
alertNotificationsSender.send(result, stream, alert, alertCondition);
return alert;
}
private void handleRepeatedAlert(Stream stream, AlertCondition alertCondition, AlertCondition.CheckResult result, Alert alert2) {
alertNotificationsSender.send(result, stream, alert2, alertCondition);
}
private void handleResolveAlert(Alert alert) {
alertService.resolveAlert(alert);
// TODO: Send resolve notifications
}
public boolean checkAlertCondition(Stream stream, AlertCondition alertCondition) {
if (stream.isPaused() || alertService.inGracePeriod(alertCondition)) {
return false;
}
try {
final AlertCondition.CheckResult result = alertCondition.runCheck();
final Optional<Alert> alert = alertService.getLastTriggeredAlert(stream.getId(), alertCondition.getId());
if (result.isTriggered()) {
if (!alert.isPresent() || alertService.isResolved(alert.get())) {
// Alert is triggered for the first time
LOG.debug("Alert condition [{}] is triggered. Sending alerts.", alertCondition);
handleTriggeredAlert(result, stream, alertCondition);
} else {
final Alert triggeredAlert = alert.get();
// There is already an alert for this condition and it is still unresolved
if (alertService.shouldRepeatNotifications(alertCondition, triggeredAlert)) {
// Repeat notifications because user wants to do that
LOG.debug("Alert condition [{}] is triggered and configured to repeat alert notifications. Sending alerts.", alertCondition);
handleRepeatedAlert(stream, alertCondition, result, triggeredAlert);
} else {
LOG.debug("Alert condition [{}] is triggered but alerts were already sent. Nothing to do.", alertCondition);
}
}
return true;
} else {
// if the stream and condition already had an alert, mark it as resolved
if (alert.isPresent() && !alertService.isResolved(alert.get())) {
LOG.debug("Alert condition [{}] is not triggered anymore. Resolving alert.", alertCondition);
handleResolveAlert(alert.get());
} else {
LOG.debug("Alert condition [{}] is not triggered and is marked as resolved. Nothing to do.", alertCondition);
}
}
} catch (Exception e) {
if (LOG.isDebugEnabled()) {
LOG.error("Skipping alert check <{}/{}>", alertCondition.getTitle(), alertCondition.getId(), e);
} else {
LOG.error("Skipping alert check <{}/{}>: {} ({})", alertCondition.getTitle(),
alertCondition.getId(), e.getMessage(), e.getClass().getSimpleName());
}
}
return false;
}
}
| hellasmoon/graylog2-server | graylog2-server/src/main/java/org/graylog2/alerts/AlertScanner.java | Java | gpl-3.0 | 4,756 |
# coding: utf-8
from handlers import base
from common import functions
class IndexHandler(base.BaseHandler):
def get(self, *args, **kwargs):
self.render('index.html')
class InfoHandler(base.SocketHandler):
def on_message(self, message):
data = functions.jsonToObject(message)
if not data:
return None
if not data.get('target') or not isinstance(data['target'], basestring):
return self.write_message('done')
base.SocketHandler.status = True # reset the query status
findRes = self.db.targets.find_one({'target': data['target']})
if not findRes:
result = self._insertTarget(data['target'])
if not result:
return self.write_message('done')
findRes = {'plugins': []}
# if the database already has records for some plugins, output those first, then check the plugins that have no record
for pluginName in findRes['plugins']:
tempObj = self.getPlugins.get(pluginName)
# a plugin name may have changed and no longer match the database, so remove the stale plugin record
if not tempObj:
self._removePlugin(data['target'], pluginName)
continue
self.write_message({
'title': tempObj.__title__,
'url': tempObj.__url__
})
# compute the set difference, then run the check with the plugins that have no record in the database
diffList = list(set(self.getPlugins.keys()).difference(set(findRes['plugins'])))
if diffList:
map(lambda x: self.taskQueue.put(self.getPlugins[x]), diffList)
self.start(data['target'])
else:
self.write_message('done')
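# Illustrative sketch of the exchange this handler implements (the client side shown
# here is an assumption, not taken from this repo; field names come from the code above):
#   client -> {"target": "example.com"}
#   server -> {"title": "<plugin title>", "url": "<plugin url>"}   (one message per matching plugin)
#   server -> 'done'                                               (sent when the scan is finished)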
def _insertTarget(self, target):
insertRes = self.db.targets.insert_one({
'target': target,
'plugins': []
})
if insertRes.inserted_id:
return True
else:
return False
def _removePlugin(self, target, name):
updateRes = self.db.targets.update_one({
'target': target
}, {
'$pull': {
'plugins': name
}
})
# mongodb < 2.6 has no modified_count, so use the value of 'n' in raw_result to tell whether the update succeeded
if not updateRes.raw_result.has_key('n'):
return False
if updateRes.raw_result['n']:
return True
else:
return False
| tonybreak/Registered | handlers/index.py | Python | gpl-3.0 | 2,512 |
<?php
/**
+-----------------------------------------------------------------------+
| This file is part of the Roundcube Webmail client |
| |
| Copyright (C) 2011-2013, Kolab Systems AG |
| Copyright (C) 2008-2013, The Roundcube Dev Team |
| |
| Licensed under the GNU General Public License version 3 or |
| any later version with exceptions for skins & plugins. |
| See the README file for a full license statement. |
| |
| PURPOSE: |
| Interface class for a spell-checking backend |
+-----------------------------------------------------------------------+
| Author: Thomas Bruederli <roundcube@gmail.com> |
+-----------------------------------------------------------------------+
*/
/**
* Interface class for a spell-checking backend
*
* @package Framework
* @subpackage Utils
*/
abstract class rcube_spellchecker_engine
{
const MAX_SUGGESTIONS = 10;
protected $lang;
protected $error;
protected $dictionary;
protected $separator = '/[\s\r\n\t\(\)\/\[\]{}<>\\"]+|[:;?!,\.](?=\W|$)/';
/**
* Default constructor
*/
public function __construct($dict, $lang)
{
$this->dictionary = $dict;
$this->lang = $lang;
}
/**
* Return a list of languages supported by this backend
*
* @return array Indexed list of language codes
*/
abstract function languages();
/**
* Set content and check spelling
*
* @param string $text Text content for spellchecking
*
* @return bool True when no misspelling was found, otherwise false
*/
abstract function check($text);
/**
* Returns suggestions for the specified word
*
* @param string $word The word
*
* @return array Suggestions list
*/
abstract function get_suggestions($word);
/**
* Returns misspelled words
*
* @param string $text The content for spellchecking. If empty, the content
*                     passed to the check() method will be used.
*
* @return array List of misspelled words
*/
abstract function get_words($text = null);
/**
* Returns error message
*
* @return string Error message
*/
public function error()
{
return $this->error;
}
}
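/*
 * A minimal sketch of a concrete backend built on this interface (illustrative only;
 * "rcube_spellchecker_dummy" and its length-based rule are assumptions, not part of Roundcube):
 *
 * class rcube_spellchecker_dummy extends rcube_spellchecker_engine
 * {
 *     private $matches = array();
 *
 *     function languages()
 *     {
 *         return array('en');
 *     }
 *
 *     function check($text)
 *     {
 *         $this->matches = array();
 *         foreach (preg_split($this->separator, $text, -1, PREG_SPLIT_NO_EMPTY) as $word) {
 *             if (strlen($word) > 15) { // toy rule standing in for a real dictionary lookup
 *                 $this->matches[] = $word;
 *             }
 *         }
 *         return empty($this->matches);
 *     }
 *
 *     function get_suggestions($word)
 *     {
 *         return array();
 *     }
 *
 *     function get_words($text = null)
 *     {
 *         return $this->matches;
 *     }
 * }
 */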
| raoulbhatia/roundcubemail | program/lib/Roundcube/spellchecker/engine.php | PHP | gpl-3.0 | 2,697 |
/**
* (C) Moorfields Eye Hospital NHS Foundation Trust, 2008-2011
* (C) OpenEyes Foundation, 2011-2014
* This file is part of OpenEyes.
*
* OpenEyes is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* OpenEyes is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with OpenEyes. If not, see <http://www.gnu.org/licenses/>.
*/
/**
* Represents a range of numerical values
*
* @class Range
* @property {Float} min Minimum value
* @property {Float} max Maximum value
* @param {Float} _min
* @param {Float} _max
*/
ED.Range = function(_min, _max) {
// Properties
this.min = _min;
this.max = _max;
}
/**
* Set min and max with one function call
*
* @param {Float} _min
* @param {Float} _max
*/
ED.Range.prototype.setMinAndMax = function(_min, _max) {
// Set properties
this.min = _min;
this.max = _max;
}
/**
* Returns true if the parameter is less than the minimum of the range
*
* @param {Float} _num
* @returns {Bool} True if the parameter is less than the minimum
*/
ED.Range.prototype.isBelow = function(_num) {
if (_num < this.min) {
return true;
} else {
return false;
}
}
/**
* Returns true if the parameter is more than the maximum of the range
*
* @param {Float} _num
* @returns {Bool} True if the parameter is more than the maximum
*/
ED.Range.prototype.isAbove = function(_num) {
if (_num > this.max) {
return true;
} else {
return false;
}
}
/**
* Returns true if the parameter is inclusively within the range
*
* @param {Float} _num
* @returns {Bool} True if the parameter is within the range
*/
ED.Range.prototype.includes = function(_num) {
if (_num < this.min || _num > this.max) {
return false;
} else {
return true;
}
}
/**
* Constrains a value to the limits of the range
*
* @param {Float} _num
* @param {Float} _scaleLevel The drawing scale level.
* @returns {Float} The constrained value
*/
ED.Range.prototype.constrain = function(_num, _scaleLevel) {
_scaleLevel = _scaleLevel === undefined ? 1 : _scaleLevel
var min = this.min * _scaleLevel;
var max = this.max * _scaleLevel;
if (_num < min) {
return min;
} else if (_num > max) {
return max;
} else {
return _num;
}
}
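// Brief usage sketch (values are illustrative only):
//
//   var range = new ED.Range(-100, 100);
//   range.includes(50);       // true
//   range.isAbove(150);       // true
//   range.constrain(150);     // 100 (scale level defaults to 1)
//   range.constrain(150, 2);  // 150 (limits scaled to -200..200)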
/**
* Returns true if the parameter is within the 'clockface' range represented by the min and max values
*
* @param {Float} _angle Angle to test
* @param {Bool} _isDegrees Flag indicating range is in degrees rather than radians
* @returns {Bool} True if the parameter is within the range
*/
ED.Range.prototype.includesInAngularRange = function(_angle, _isDegrees) {
// Arbitrary radius
var r = 100;
// Points representing vectors of angles within range
var min = new ED.Point(0, 0);
var max = new ED.Point(0, 0);
var angle = new ED.Point(0, 0);
// Set points using polar coordinates
if (!_isDegrees) {
min.setWithPolars(r, this.min);
max.setWithPolars(r, this.max);
angle.setWithPolars(r, _angle);
} else {
min.setWithPolars(r, this.min * Math.PI / 180);
max.setWithPolars(r, this.max * Math.PI / 180);
angle.setWithPolars(r, _angle * Math.PI / 180);
}
return (min.clockwiseAngleTo(angle) <= min.clockwiseAngleTo(max));
}
/**
* Constrains a value to the limits of the angular range
*
* @param {Float} _angle Angle to test
* @param {Bool} _isDegrees Flag indicating range is in degrees rather than radians
* @returns {Float} The constrained value
*/
ED.Range.prototype.constrainToAngularRange = function(_angle, _isDegrees) {
// No point in constraining unless range is less than 360 degrees!
if ((this.max - this.min) < (_isDegrees ? 360 : (2 * Math.PI))) {
// Arbitrary radius
var r = 100;
// Points representing vectors of angles within range
var min = new ED.Point(0, 0);
var max = new ED.Point(0, 0);
var angle = new ED.Point(0, 0);
// Set points using polar coordinates
if (!_isDegrees) {
min.setWithPolars(r, this.min);
max.setWithPolars(r, this.max);
angle.setWithPolars(r, _angle);
} else {
min.setWithPolars(r, this.min * Math.PI / 180);
max.setWithPolars(r, this.max * Math.PI / 180);
angle.setWithPolars(r, _angle * Math.PI / 180);
}
// Return appropriate value depending on relationship to range
if (min.clockwiseAngleTo(angle) <= min.clockwiseAngleTo(max)) {
return _angle;
} else {
if (angle.clockwiseAngleTo(min) < max.clockwiseAngleTo(angle)) {
return this.min;
} else {
return this.max;
}
}
} else {
return _angle;
}
}
 | openeyesarchive/eyedraw | src/ED/Drawing/Range.js | JavaScript | gpl-3.0 | 4875
require 'package'
class Openjpeg < Package
description 'OpenJPEG is an open-source JPEG 2000 codec written in C language.'
homepage 'https://github.com/uclouvain/openjpeg/'
version '2.1.2'
source_url 'https://github.com/uclouvain/openjpeg/archive/v2.1.2.tar.gz'
source_sha256 '4ce77b6ef538ef090d9bde1d5eeff8b3069ab56c4906f083475517c2c023dfa7'
binary_url ({
aarch64: 'https://dl.bintray.com/chromebrew/chromebrew/openjpeg-2.1.2-chromeos-armv7l.tar.xz',
armv7l: 'https://dl.bintray.com/chromebrew/chromebrew/openjpeg-2.1.2-chromeos-armv7l.tar.xz',
i686: 'https://dl.bintray.com/chromebrew/chromebrew/openjpeg-2.1.2-chromeos-i686.tar.xz',
x86_64: 'https://dl.bintray.com/chromebrew/chromebrew/openjpeg-2.1.2-chromeos-x86_64.tar.xz',
})
binary_sha256 ({
aarch64: '5a757e5b3576e636c9b04def1784dab0d54fb2d2b397a8f41f96e973920b5dad',
armv7l: '5a757e5b3576e636c9b04def1784dab0d54fb2d2b397a8f41f96e973920b5dad',
i686: '023b8baa817e114c2fa97a5cc0a0e79728d3587c0fd8d385b13d1d5a0994470f',
x86_64: '218d4224019530780f6b739b4f28e3c3a29d04a0f471f49290961d3956d7d9aa',
})
depends_on 'cmake'
def self.build
system "cmake ."
system "make"
end
def self.install
system "make DESTDIR=#{CREW_DEST_DIR} install"
end
end
| jam7/chromebrew | packages/openjpeg.rb | Ruby | gpl-3.0 | 1,284 |
package pneumaticCraft.client.gui;
import java.awt.Point;
import java.util.ArrayList;
import java.util.List;
import net.minecraft.client.resources.I18n;
import net.minecraft.entity.player.InventoryPlayer;
import net.minecraft.init.Items;
import net.minecraft.item.ItemStack;
import net.minecraft.util.MathHelper;
import pneumaticCraft.client.gui.widget.GuiAnimatedStat;
import pneumaticCraft.client.gui.widget.GuiCheckBox;
import pneumaticCraft.client.gui.widget.WidgetTank;
import pneumaticCraft.client.gui.widget.WidgetTemperature;
import pneumaticCraft.common.inventory.ContainerPlasticMixer;
import pneumaticCraft.common.item.Itemss;
import pneumaticCraft.common.tileentity.TileEntityPlasticMixer;
import pneumaticCraft.lib.PneumaticValues;
import pneumaticCraft.lib.Textures;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
@SideOnly(Side.CLIENT)
public class GuiPlasticMixer extends GuiPneumaticContainerBase<TileEntityPlasticMixer>{
private GuiButtonSpecial[] buttons;
private GuiCheckBox lockSelection;
public GuiPlasticMixer(InventoryPlayer player, TileEntityPlasticMixer te){
super(new ContainerPlasticMixer(player, te), te, Textures.GUI_PLASTIC_MIXER);
}
@Override
public void initGui(){
super.initGui();
addWidget(new WidgetTemperature(0, guiLeft + 55, guiTop + 25, 295, 500, te.getLogic(0)));
addWidget(new WidgetTemperature(1, guiLeft + 82, guiTop + 25, 295, 500, te.getLogic(1), PneumaticValues.PLASTIC_MIXER_MELTING_TEMP));
addWidget(new WidgetTank(3, guiLeft + 152, guiTop + 14, te.getFluidTank()));
GuiAnimatedStat stat = addAnimatedStat("gui.tab.plasticMixer.plasticSelection", new ItemStack(Itemss.plastic, 1, 1), 0xFF005500, false);
List<String> text = new ArrayList<String>();
for(int i = 0; i < 12; i++) {
text.add(" ");
}
stat.setTextWithoutCuttingString(text);
buttons = new GuiButtonSpecial[16];
for(int x = 0; x < 4; x++) {
for(int y = 0; y < 4; y++) {
int index = y * 4 + x;
ItemStack plastic = new ItemStack(Itemss.plastic, 1, index);
buttons[index] = new GuiButtonSpecial(index, x * 21 + 4, y * 21 + 30, 20, 20, "").setRenderStacks(plastic).setTooltipText(plastic.getDisplayName());
stat.addWidget(buttons[index]);
}
}
stat.addWidget(lockSelection = new GuiCheckBox(16, 4, 18, 0xFF000000, "gui.plasticMixer.lockSelection").setChecked(te.lockSelection).setTooltip(I18n.format("gui.plasticMixer.lockSelection.tooltip")));
}
@Override
public void updateScreen(){
super.updateScreen();
for(int i = 0; i < buttons.length; i++) {
buttons[i].enabled = te.selectedPlastic != i;
}
lockSelection.checked = te.lockSelection;
}
@Override
protected void drawGuiContainerForegroundLayer(int x, int y){
super.drawGuiContainerForegroundLayer(x, y);
fontRendererObj.drawString("Upgr.", 15, 19, 4210752);
fontRendererObj.drawString("Hull", 56, 16, 4210752);
fontRendererObj.drawString("Item", 88, 16, 4210752);
}
@Override
protected void drawGuiContainerBackgroundLayer(float partialTicks, int x, int y){
super.drawGuiContainerBackgroundLayer(partialTicks, x, y);
for(int i = 0; i < 3; i++) {
double percentage = (double)te.dyeBuffers[i] / TileEntityPlasticMixer.DYE_BUFFER_MAX;
drawVerticalLine(guiLeft + 123, guiTop + 37 + i * 18, guiTop + 37 - MathHelper.clamp_int((int)(percentage * 16), 1, 15) + i * 18, 0xFF000000 | 0xFF0000 >> 8 * i);
}
}
@Override
protected Point getInvNameOffset(){
return new Point(0, -1);
}
@Override
protected Point getInvTextOffset(){
return null;
}
@Override
protected void addProblems(List<String> curInfo){
super.addProblems(curInfo);
if(te.getFluidTank().getFluidAmount() == 0) {
if(te.getStackInSlot(4) == null) {
curInfo.add("gui.tab.problems.plasticMixer.noPlastic");
} else {
curInfo.add("gui.tab.problems.notEnoughHeat");
}
} else {
if(te.getStackInSlot(4) != null) {
if(te.getLogic(1).getTemperature() >= PneumaticValues.PLASTIC_MIXER_MELTING_TEMP && te.getFluidTank().getCapacity() - te.getFluidTank().getFluidAmount() < 1000) {
curInfo.add("gui.tab.problems.plasticMixer.plasticLiquidOverflow");
}
}
}
if(te.getStackInSlot(TileEntityPlasticMixer.INV_DYE_RED) == null) {
curInfo.add(I18n.format("gui.tab.problems.plasticMixer.noDye", new ItemStack(Items.dye, 1, 1).getDisplayName()));
}
if(te.getStackInSlot(TileEntityPlasticMixer.INV_DYE_GREEN) == null) {
curInfo.add(I18n.format("gui.tab.problems.plasticMixer.noDye", new ItemStack(Items.dye, 1, 2).getDisplayName()));
}
if(te.getStackInSlot(TileEntityPlasticMixer.INV_DYE_BLUE) == null) {
curInfo.add(I18n.format("gui.tab.problems.plasticMixer.noDye", new ItemStack(Items.dye, 1, 4).getDisplayName()));
}
if(curInfo.size() == 0) {
curInfo.add(I18n.format("gui.tab.problems.plasticMixer.noProblems"));
}
}
}
| islanderz/pneumaticcraft | src/pneumaticCraft/client/gui/GuiPlasticMixer.java | Java | gpl-3.0 | 5,420 |
#ifndef _TUTTLE_PLUGIN_NORMALIZE_DEFINITIONS_HPP_
#define _TUTTLE_PLUGIN_NORMALIZE_DEFINITIONS_HPP_
#include <tuttle/plugin/global.hpp>
namespace tuttle {
namespace plugin {
namespace normalize {
static const std::string kParamHelpButton = "Help";
static const std::string kParamMode = "mode";
static const std::string kParamModeAnalyse = "analyse";
static const std::string kParamModeCustom = "custom";
enum EParamMode
{
eParamModeAnalyse = 0,
eParamModeCustom
};
static const std::string kParamAnalyseNow = "analyseNow";
static const std::string kParamAnalyseMode = "analyseMode";
static const std::string kParamAnalysePerChannel = "perChannel";
static const std::string kParamAnalyseLuminosity = "luminosity";
static const std::string kParamAnalyseR = "r";
static const std::string kParamAnalyseG = "g";
static const std::string kParamAnalyseB = "b";
static const std::string kParamAnalyseA = "a";
enum EParamAnalyseMode
{
eParamAnalyseModePerChannel = 0,
eParamAnalyseModeLuminosity,
eParamAnalyseModeR,
eParamAnalyseModeG,
eParamAnalyseModeB,
eParamAnalyseModeA
};
static const std::string kParamSrcGroup = "srcGroup";
static const std::string kParamSrcCustomColorMin = "srcColorMin";
static const std::string kParamSrcCustomColorMax = "srcColorMax";
static const std::string kParamSrcCustomValueMin = "srcValueMin";
static const std::string kParamSrcCustomValueMax = "srcValueMax";
static const std::string kParamDstGroup = "dstGroup";
static const std::string kParamDstCustomColorMin = "dstColorMin";
static const std::string kParamDstCustomColorMax = "dstColorMax";
static const std::string kParamDstCustomValueMin = "dstValueMin";
static const std::string kParamDstCustomValueMax = "dstValueMax";
static const std::string kParamProcessGroup = "processGroup";
static const std::string kParamProcessR = "processR";
static const std::string kParamProcessG = "processG";
static const std::string kParamProcessB = "processB";
static const std::string kParamProcessA = "processA";
}
}
}
#endif
| cchampet/TuttleOFX | plugins/image/process/color/Normalize/src/NormalizeDefinitions.hpp | C++ | gpl-3.0 | 2,016 |
/*
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
Copyright (c) 2014 Mozilla Corporation
Contributors:
Jeff Bryner jbryner@mozilla.com
Anthony Verez averez@mozilla.com
Yash Mehrotra yashmehrotra95@gmail.com
*/
//collections shared by client/server
events = new Meteor.Collection("events");
alerts = new Meteor.Collection("alerts");
investigations = new Meteor.Collection("investigations");
incidents = new Meteor.Collection("incidents");
veris = new Meteor.Collection("veris");
kibanadashboards = new Meteor.Collection("kibanadashboards");
mozdefsettings = new Meteor.Collection("mozdefsettings");
healthfrontend = new Meteor.Collection("healthfrontend");
healthescluster = new Meteor.Collection("healthescluster");
healthesnodes = new Meteor.Collection("healthesnodes");
healtheshotthreads = new Meteor.Collection("healtheshotthreads");
attackers = new Meteor.Collection("attackers");
actions = new Meteor.Collection("actions");
userActivity = new Meteor.Collection("userActivity");
if (Meteor.isServer) {
//Publishing setups
Meteor.publish("mozdefsettings",function(){
return mozdefsettings.find();
});
Meteor.publish("alerts-summary", function (searchregex,timeperiod,recordlimit) {
//tail the last 100 records by default
//default parameters
timeperiod = typeof timeperiod !== 'undefined' ? timeperiod: 'tail';
searchregex = typeof searchregex !== 'undefined' ? searchregex: '';
recordlimit = ['number'].indexOf(typeof(recordlimit)) ? 100:recordlimit;
//sanity check the record limit
if ( recordlimit >10000 || recordlimit < 1){
recordlimit = 100;
}
if ( timeperiod ==='tail' || timeperiod == 'none' ){
return alerts.find(
{summary: {$regex:searchregex}},
{fields:{
_id:1,
esmetadata:1,
utctimestamp:1,
utcepoch:1,
summary:1,
severity:1,
category:1,
acknowledged:1,
acknowledgedby:1,
url:1
},
sort: {utcepoch: -1},
limit:recordlimit}
);
} else {
//determine the utcepoch range
var beginningtime = moment().utc();
//expect timeperiod like '1 days'
var timevalue = Number(timeperiod.split(" ")[0]);
var timeunits = timeperiod.split(" ")[1];
beginningtime.subtract(timevalue,timeunits);
return alerts.find(
{summary: {$regex:searchregex},
utcepoch: {$gte: beginningtime.unix()}},
{fields:{
_id:1,
esmetadata:1,
utctimestamp:1,
utcepoch:1,
summary:1,
severity:1,
category:1,
acknowledged:1
},
sort: {utcepoch: -1},
limit:recordlimit}
);
}
});
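//client-side usage sketch (parameter values are illustrative, not taken from this file):
//
//  Meteor.subscribe('alerts-summary', 'sshd', '2 days', 100); // regex, time period, record limit
//  Meteor.subscribe('alerts-summary', '', 'tail', 50);        // tail the 50 most recent alerts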
Meteor.publish("alerts-details",function(alertid,includeEvents){
//return alerts.find({'esmetadata.id': alertid});
//alert ids can be either mongo or elastic search IDs
//look for both to publish to the collection.
//default parameters
includeEvents = typeof includeEvents !== 'undefined' ? includeEvents: true;
if ( includeEvents ){
return alerts.find({
$or:[
{'esmetadata.id': alertid},
{'_id': alertid},
]
});
}else{
return alerts.find({
$or:[
{'esmetadata.id': alertid},
{'_id': alertid},
]
},
{fields:{events:0},
});
}
});
Meteor.publish("alerts-count", function () {
var self = this;
var count = 0;
var initializing = true;
var recordID=Meteor.uuid();
//get a count by watching for only 1 new entry sorted in reverse date order.
//use that hook to return a find().count rather than iterating the entire result set over and over
var handle = alerts.find({}, {sort: {utcepoch: -1},limit:1}).observeChanges({
added: function (newDoc,oldDoc) {
count=alerts.find().count();
if (!initializing) {
self.changed("alerts-count", recordID,{count: count});
//console.log('added alerts count to' + count);
}
},
changed: function (newDoc,oldDoc) {
count=alerts.find().count();
if (!initializing) {
self.changed("alerts-count", recordID,{count: count});
//console.log('changed alerts count to' + count);
}
},
removed: function (newDoc,oldDoc) {
count=alerts.find().count();
if (!initializing) {
self.changed("alerts-count", recordID,{count: count});
//console.log('changed alerts count to' + count);
}
}
});
initializing = false;
self.added("alerts-count", recordID,{count: count});
//console.log('count is ready: ' + count);
self.ready();
// Stop observing the cursor when client unsubs.
// Stopping a subscription automatically takes
// care of sending the client any removed messages.
self.onStop(function () {
//console.log('stopped publishing alerts count.')
handle.stop();
});
});
//publish the last X event/alerts
//using document index instead of date
// Meteor.publish("attacker-details",function(attackerid){
// return attackers.find({'_id': attackerid},
// {fields: {
// events:{$slice: 20,
// $sort: { documentindex: -1 }},
// alerts:{$slice: -10}
// }}
// );
// });
Meteor.publish("attacker-details",function(attackerid){
return attackers.find({'_id': attackerid},
{fields: {
events:{$slice: -20},
alerts:{$slice: -10}
},
sort: { 'events.documentsource.utctimestamp': -1 },
reactive:false
}
);
});
Meteor.publish("attackers-summary", function () {
//limit to the last 100 records by default
//to ease the sync transfer to dc.js/crossfilter
return attackers.find({},
{fields:{
events:0,
alerts:0,
},
sort: {lastseentimestamp: -1},
limit:100});
});
Meteor.publish("attackers-summary-landmass", function () {
//limit to the last 100 records by default
//to ease the sync transfer to dc.js/crossfilter
var inModifier = { $in: ["broxss", "brotunnel", "brosqli"]};
return attackers.find({"events.documentsource.category": inModifier},
{sort: {lastseentimestamp: -1},
limit: 100});
});
Meteor.publish("investigations-summary", function () {
return investigations.find({},
{fields: {
_id:1,
summary:1,
phase:1,
dateOpened:1,
dateClosed:1,
creator:1
},
sort: {dateOpened: -1},
limit:100});
});
Meteor.publish("investigation-details",function(investigationid){
return investigations.find({'_id': investigationid});
});
Meteor.publish("incidents-summary", function () {
return incidents.find({},
{fields: {
_id:1,
summary:1,
phase:1,
dateOpened:1,
dateClosed:1,
creator:1
},
sort: {dateOpened: -1},
limit:100});
});
Meteor.publish("incident-details",function(incidentid){
return incidents.find({'_id': incidentid});
});
Meteor.publish("veris", function () {
return veris.find({}, {limit:0});
});
Meteor.publish("healthfrontend", function () {
return healthfrontend.find({}, {limit:0});
});
Meteor.publish("healthescluster", function () {
return healthescluster.find({}, {limit:0});
});
Meteor.publish("healthesnodes", function () {
return healthesnodes.find({}, {limit:0});
});
Meteor.publish("healtheshotthreads", function () {
return healtheshotthreads.find({}, {limit:0});
});
Meteor.publish("kibanadashboards", function () {
return kibanadashboards.find({},{sort:{name:1}, limit:30});
});
Meteor.publish("userActivity", function () {
return userActivity.find({},{sort:{userID:1}, limit:100});
});
//access rules from clients
//barebones to allow you to specify rules
//currently incidents collection is the only one updated by clients
//for speed of access
//the only rule is that the incident creator is the only one who can delete an incident.
incidents.allow({
insert: function (userId, doc) {
// the user must be logged in
return (userId);
},
update: function (userId, doc, fields, modifier) {
// the user must be logged in
return (userId);
},
remove: function (userId, doc) {
// can only remove one's own incidents
return doc.creator === Meteor.user().profile.email;
},
fetch: ['creator']
});
attackers.allow({
update: function (userId, doc, fields, modifier) {
// the user must be logged in
return (userId);
}
});
alerts.allow({
update: function (userId, doc, fields, modifier) {
// the user must be logged in
return (userId);
}
});
investigations.allow({
insert: function (userId, doc) {
// the user must be logged in
return (userId);
},
update: function (userId, doc, fields, modifier) {
// the user must be logged in
return (userId);
},
remove: function (userId, doc) {
// can only remove one's own items
return doc.creator === Meteor.user().profile.email;
},
fetch: ['creator']
});
userActivity.allow({
insert: function (userId, doc) {
// the user must be logged in
return (userId);
},
remove: function (userId, doc) {
// can only remove one's own items
return doc.userId === Meteor.user().profile.email;
},
});
};
if (Meteor.isClient) {
//client side collections:
alertsCount = new Meteor.Collection("alerts-count");
//client-side subscriptions
Meteor.subscribe("mozdefsettings");
Meteor.subscribe("veris");
Meteor.subscribe("kibanadashboards");
Meteor.subscribe("userActivity");
};
| ameihm0912/MozDef | meteor/app/lib/collections.js | JavaScript | mpl-2.0 | 12,418 |
//////////////////////////////////////////////////////////////////////////////////
// //
// This Source Code Form is subject to the terms of the Mozilla Public //
// License, v. 2.0. If a copy of the MPL was not distributed with this //
// file, You can obtain one at http://mozilla.org/MPL/2.0/. //
// //
//////////////////////////////////////////////////////////////////////////////////
package com.github.tiwindetea.raoulthegame.model.livings;
import com.github.tiwindetea.raoulthegame.model.space.Vector2i;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.lang.ref.WeakReference;
import java.util.Collection;
import java.util.List;
/**
* Created by organic-code on 7/3/16.
*/
public abstract class Pet extends LivingThing {
protected WeakReference<LivingThing> owner;
protected LivingThing target;
public Pet(LivingThing owner,
String name,
int level,
double maxHitPoints,
double attackPower,
double defensePower,
@Nonnull Vector2i position) {
super();
this.name = name;
this.level = level;
this.maxHitPoints = maxHitPoints;
this.hitPoints = 0;
this.attackPower = attackPower;
this.defensePower = defensePower;
this.position = position.copy();
this.owner = new WeakReference<>(owner);
}
protected LivingThing getOwner() {
return this.owner.get();
}
@Override
public final LivingThingType getType() {
return LivingThingType.PET;
}
/**
* This function should be called when you consider this should be upgraded
*/
public abstract void levelUp();
/**
* Handler. This function should be called each time the pet's owner is attacked
*
* @param source the source of the damages
*/
public abstract void ownerDamaged(@Nullable LivingThing source);
/**
* Handler. This function should be called each time the pet's owner is attacking
*
* @param target the damages' target
*/
public abstract void ownerAttacking(@Nonnull LivingThing target);
@Override
public void live(List<Mob> mobs, Collection<Player> players, Collection<LivingThing> others, boolean[][] los) {
this.live(mobs, players, null);
}
public abstract void live(List<Mob> mobs, Collection<Player> players, Collection<LivingThing> all);
}
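// A minimal sketch of a concrete pet built on this class (illustrative only; "GuardDog",
// its stats and its behaviour are assumptions, not part of the game):
//
//   public class GuardDog extends Pet {
//       public GuardDog(LivingThing owner, Vector2i position) {
//           super(owner, "Guard Dog", 1, 20, 3, 1, position);
//       }
//       @Override public void levelUp() { ++this.level; this.maxHitPoints += 5; }
//       @Override public void ownerDamaged(LivingThing source) { this.target = source; }
//       @Override public void ownerAttacking(LivingThing target) { this.target = target; }
//       @Override public void live(List<Mob> mobs, Collection<Player> players, Collection<LivingThing> all) {
//           // move toward this.target and attack when adjacent (omitted)
//       }
//   }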
| TiWinDeTea/Raoul-the-Game | src/main/java/com/github/tiwindetea/raoulthegame/model/livings/Pet.java | Java | mpl-2.0 | 2,638 |
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
package org.mozilla.gecko.background.bagheera;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import ch.boye.httpclientandroidlib.entity.AbstractHttpEntity;
import ch.boye.httpclientandroidlib.entity.ByteArrayEntity;
/**
* An entity that acts like {@link ByteArrayEntity}, but exposes a window onto
* the byte array that is a subsection of the array. The purpose of this is to
* allow a smaller entity to be created without having to resize the source
* array.
*/
public class BoundedByteArrayEntity extends AbstractHttpEntity implements
Cloneable {
protected final byte[] content;
protected final int start;
protected final int end;
protected final int length;
/**
* Create a new entity that behaves exactly like a {@link ByteArrayEntity}
* created with a copy of <code>b</code> truncated to (
* <code>end - start</code>) bytes, starting at <code>start</code>.
*
* @param b the byte array to use.
* @param start the start index.
* @param end the end index.
*/
public BoundedByteArrayEntity(final byte[] b, final int start, final int end) {
if (b == null) {
throw new IllegalArgumentException("Source byte array may not be null.");
}
if (end < start ||
start < 0 ||
end < 0 ||
start > b.length ||
end > b.length) {
throw new IllegalArgumentException("Bounds out of range.");
}
this.content = b;
this.start = start;
this.end = end;
this.length = end - start;
}
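// Usage sketch (values are illustrative): expose bytes 10..30 of a larger buffer
// without copying or resizing it:
//
//   HttpEntity entity = new BoundedByteArrayEntity(payload, 10, 30);
//   entity.getContentLength(); // 20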
@Override
public boolean isRepeatable() {
return true;
}
@Override
public long getContentLength() {
return this.length;
}
@Override
public InputStream getContent() {
return new ByteArrayInputStream(this.content, this.start, this.length);
}
@Override
public void writeTo(final OutputStream outstream) throws IOException {
if (outstream == null) {
throw new IllegalArgumentException("Output stream may not be null.");
}
outstream.write(this.content, this.start, this.length);
outstream.flush();
}
@Override
public boolean isStreaming() {
return false;
}
@Override
public Object clone() throws CloneNotSupportedException {
return super.clone();
}
}
 | mkodekar/Fennece-Browser | base/background/bagheera/BoundedByteArrayEntity.java | Java | mpl-2.0 | 2490
////////////////////////////////////////////////////////////////////////////////
///
/// General FIR digital filter routines with MMX optimization.
///
/// Note : MMX optimized functions reside in a separate, platform-specific file,
/// e.g. 'mmx_win.cpp' or 'mmx_gcc.cpp'
///
/// Author : Copyright (c) Olli Parviainen
/// Author e-mail : oparviai 'at' iki.fi
/// SoundTouch WWW: http://www.surina.net/soundtouch
///
////////////////////////////////////////////////////////////////////////////////
//
// Last changed : $Date: 2015-02-21 21:24:29 +0000 (Sat, 21 Feb 2015) $
// File revision : $Revision: 4 $
//
// $Id: FIRFilter.cpp 202 2015-02-21 21:24:29Z oparviai $
//
////////////////////////////////////////////////////////////////////////////////
//
// License :
//
// SoundTouch audio processing library
// Copyright (c) Olli Parviainen
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
//
////////////////////////////////////////////////////////////////////////////////
#include <memory.h>
#include <assert.h>
#include <math.h>
#include <stdlib.h>
#include "FIRFilter.h"
#include "cpu_detect.h"
using namespace soundtouch;
/*****************************************************************************
*
* Implementation of the class 'FIRFilter'
*
*****************************************************************************/
FIRFilter::FIRFilter()
{
resultDivFactor = 0;
resultDivider = 0;
length = 0;
lengthDiv8 = 0;
filterCoeffs = NULL;
}
FIRFilter::~FIRFilter()
{
delete[] filterCoeffs;
}
// Usual C-version of the filter routine for stereo sound
uint FIRFilter::evaluateFilterStereo(SAMPLETYPE *dest, const SAMPLETYPE *src, uint numSamples) const
{
int j, end;
#ifdef SOUNDTOUCH_FLOAT_SAMPLES
// when using floating point samples, use a scaler instead of a divider
// because division is much slower operation than multiplying.
double dScaler = 1.0 / (double)resultDivider;
#endif
assert(length != 0);
assert(src != NULL);
assert(dest != NULL);
assert(filterCoeffs != NULL);
end = 2 * (numSamples - length);
#pragma omp parallel for
for (j = 0; j < end; j += 2)
{
const SAMPLETYPE *ptr;
LONG_SAMPLETYPE suml, sumr;
uint i;
suml = sumr = 0;
ptr = src + j;
for (i = 0; i < length; i += 4)
{
// loop is unrolled by factor of 4 here for efficiency
suml += ptr[2 * i + 0] * filterCoeffs[i + 0] +
ptr[2 * i + 2] * filterCoeffs[i + 1] +
ptr[2 * i + 4] * filterCoeffs[i + 2] +
ptr[2 * i + 6] * filterCoeffs[i + 3];
sumr += ptr[2 * i + 1] * filterCoeffs[i + 0] +
ptr[2 * i + 3] * filterCoeffs[i + 1] +
ptr[2 * i + 5] * filterCoeffs[i + 2] +
ptr[2 * i + 7] * filterCoeffs[i + 3];
}
#ifdef SOUNDTOUCH_INTEGER_SAMPLES
suml >>= resultDivFactor;
sumr >>= resultDivFactor;
// saturate to 16 bit integer limits
suml = (suml < -32768) ? -32768 : (suml > 32767) ? 32767 : suml;
// saturate to 16 bit integer limits
sumr = (sumr < -32768) ? -32768 : (sumr > 32767) ? 32767 : sumr;
#else
suml *= dScaler;
sumr *= dScaler;
#endif // SOUNDTOUCH_INTEGER_SAMPLES
dest[j] = (SAMPLETYPE)suml;
dest[j + 1] = (SAMPLETYPE)sumr;
}
return numSamples - length;
}
// Usual C-version of the filter routine for mono sound
uint FIRFilter::evaluateFilterMono(SAMPLETYPE *dest, const SAMPLETYPE *src, uint numSamples) const
{
int j, end;
#ifdef SOUNDTOUCH_FLOAT_SAMPLES
// when using floating point samples, use a scaler instead of a divider
// because division is much slower operation than multiplying.
double dScaler = 1.0 / (double)resultDivider;
#endif
assert(length != 0);
end = numSamples - length;
#pragma omp parallel for
for (j = 0; j < end; j ++)
{
const SAMPLETYPE *pSrc = src + j;
LONG_SAMPLETYPE sum;
uint i;
sum = 0;
for (i = 0; i < length; i += 4)
{
// loop is unrolled by factor of 4 here for efficiency
sum += pSrc[i + 0] * filterCoeffs[i + 0] +
pSrc[i + 1] * filterCoeffs[i + 1] +
pSrc[i + 2] * filterCoeffs[i + 2] +
pSrc[i + 3] * filterCoeffs[i + 3];
}
#ifdef SOUNDTOUCH_INTEGER_SAMPLES
sum >>= resultDivFactor;
// saturate to 16 bit integer limits
sum = (sum < -32768) ? -32768 : (sum > 32767) ? 32767 : sum;
#else
sum *= dScaler;
#endif // SOUNDTOUCH_INTEGER_SAMPLES
dest[j] = (SAMPLETYPE)sum;
}
return end;
}
uint FIRFilter::evaluateFilterMulti(SAMPLETYPE *dest, const SAMPLETYPE *src, uint numSamples, uint numChannels)
{
int j, end;
#ifdef SOUNDTOUCH_FLOAT_SAMPLES
// when using floating point samples, use a scaler instead of a divider
// because division is much slower operation than multiplying.
double dScaler = 1.0 / (double)resultDivider;
#endif
assert(length != 0);
assert(src != NULL);
assert(dest != NULL);
assert(filterCoeffs != NULL);
assert(numChannels < 16);
end = numChannels * (numSamples - length);
#pragma omp parallel for
for (j = 0; j < end; j += numChannels)
{
const SAMPLETYPE *ptr;
LONG_SAMPLETYPE sums[16];
uint c, i;
for (c = 0; c < numChannels; c ++)
{
sums[c] = 0;
}
ptr = src + j;
for (i = 0; i < length; i ++)
{
SAMPLETYPE coef=filterCoeffs[i];
for (c = 0; c < numChannels; c ++)
{
sums[c] += ptr[0] * coef;
ptr ++;
}
}
for (c = 0; c < numChannels; c ++)
{
#ifdef SOUNDTOUCH_INTEGER_SAMPLES
sums[c] >>= resultDivFactor;
#else
sums[c] *= dScaler;
#endif // SOUNDTOUCH_INTEGER_SAMPLES
dest[j+c] = (SAMPLETYPE)sums[c];
}
}
return numSamples - length;
}
// Set filter coefficients and length.
//
// Throws an exception if filter length isn't divisible by 8
void FIRFilter::setCoefficients(const SAMPLETYPE *coeffs, uint newLength, uint uResultDivFactor)
{
assert(newLength > 0);
if (newLength % 8) ST_THROW_RT_ERROR("FIR filter length not divisible by 8");
lengthDiv8 = newLength / 8;
length = lengthDiv8 * 8;
assert(length == newLength);
resultDivFactor = uResultDivFactor;
resultDivider = (SAMPLETYPE)::pow(2.0, (int)resultDivFactor);
delete[] filterCoeffs;
filterCoeffs = new SAMPLETYPE[length];
memcpy(filterCoeffs, coeffs, length * sizeof(SAMPLETYPE));
}
uint FIRFilter::getLength() const
{
return length;
}
// Applies the filter to the given sequence of samples.
//
// Note : The amount of outputted samples is by value of 'filter_length'
// smaller than the amount of input samples.
uint FIRFilter::evaluate(SAMPLETYPE *dest, const SAMPLETYPE *src, uint numSamples, uint numChannels)
{
assert(length > 0);
assert(lengthDiv8 * 8 == length);
if (numSamples < length) return 0;
#ifndef USE_MULTICH_ALWAYS
if (numChannels == 1)
{
return evaluateFilterMono(dest, src, numSamples);
}
else if (numChannels == 2)
{
return evaluateFilterStereo(dest, src, numSamples);
}
else
#endif // USE_MULTICH_ALWAYS
{
assert(numChannels > 0);
return evaluateFilterMulti(dest, src, numSamples, numChannels);
}
}
// Operator 'new' is overloaded so that it automatically creates a suitable instance
// depending on whether an MMX-capable CPU is available or not.
void * FIRFilter::operator new(size_t s)
{
// Notice! don't use "new FIRFilter" directly, use "newInstance" to create a new instance instead!
ST_THROW_RT_ERROR("Error in FIRFilter::new: Don't use 'new FIRFilter', use 'newInstance' member instead!");
return newInstance();
}
FIRFilter * FIRFilter::newInstance()
{
#if defined(SOUNDTOUCH_ALLOW_MMX) || defined(SOUNDTOUCH_ALLOW_SSE)
uint uExtensions;
uExtensions = detectCPUextensions();
#endif
// Check if MMX/SSE instruction set extensions supported by CPU
#ifdef SOUNDTOUCH_ALLOW_MMX
// MMX routines available only with integer sample types
if (uExtensions & SUPPORT_MMX)
{
return ::new FIRFilterMMX;
}
else
#endif // SOUNDTOUCH_ALLOW_MMX
#ifdef SOUNDTOUCH_ALLOW_SSE
if (uExtensions & SUPPORT_SSE)
{
// SSE support
return ::new FIRFilterSSE;
}
else
#endif // SOUNDTOUCH_ALLOW_SSE
{
// ISA optimizations not supported, use plain C version
return ::new FIRFilter;
}
}
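// Usage sketch (tap values and lengths are illustrative only):
//
// FIRFilter *filter = FIRFilter::newInstance(); // picks the MMX/SSE/plain C version at runtime
// SAMPLETYPE coeffs[8] = { /* 8 taps; length must be divisible by 8 */ };
// filter->setCoefficients(coeffs, 8, 0);
// uint produced = filter->evaluate(dest, src, numSamples, 2); // stereo
// // 'produced' is numSamples minus the filter length
// delete filter;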
| Yukarumya/Yukarum-Redfoxes | media/libsoundtouch/src/FIRFilter.cpp | C++ | mpl-2.0 | 9,499 |
package org.openmrs.module.appframework.test;
import org.hamcrest.BaseMatcher;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.SelfDescribing;
import org.openmrs.util.OpenmrsUtil;
import java.util.Map;
public class Matchers {
/**
* @param key
* @param expected a Matcher (or an Object, which will be tested with OpenmrsUtil.nullSafeEquals)
* @return a matcher that matches a Map entry (for key) matching expected
*/
public static Matcher<? super Map<String, ?>> hasEntry(final String key, final Object expected) {
return new BaseMatcher<Map<String, ?>>() {
@Override
public boolean matches(Object o) {
Object actual = ((Map) o).get(key);
if (expected instanceof Matcher) {
return ((Matcher) expected).matches(actual);
}
else {
return OpenmrsUtil.nullSafeEquals(actual, expected);
}
}
@Override
public void describeTo(Description description) {
description.appendText("map entry " + key + " should ");
if (expected instanceof Matcher) {
description.appendDescriptionOf((SelfDescribing) expected);
}
else {
description.appendText("equal " + expected);
}
}
};
}
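// Usage sketch in a test (map contents are illustrative):
//
//   Map<String, Object> extension = new HashMap<String, Object>();
//   extension.put("label", "Visit Note");
//   assertThat(extension, Matchers.hasEntry("label", "Visit Note"));
//   assertThat(extension, Matchers.hasEntry("label", org.hamcrest.Matchers.notNullValue()));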
}
| openmrs-gci/openmrs-module-appframework | omod/src/test/java/org/openmrs/module/appframework/test/Matchers.java | Java | mpl-2.0 | 1,461 |
var FileBrowserComponent = BaseComponent.extend({
update: function(){
var myself = this,
$ph = $("#"+this.htmlObject),
root = this.rootFolder.charAt(this.rootFolder.length - 1) == "/" ? this.rootFolder : this.rootFolder+"/",
$content;
if (!this.fileExtensions)
this.fileExtensions = "";
$ph.addClass('fileBrowserComponent');
if(this.chartDefinition.height != undefined){
$ph.css('height',this.chartDefinition.height+'px');
}
if(this.chartDefinition.width != undefined){
$ph.css('width',this.chartDefinition.width+'px');
}
$ph.css('overflow','auto');
$ph.fileTree(
{
root: root,
script: myself.buildTreeURL(),
expandSpeed: 1,
collapseSpeed: 1,
multiFolder: true,
htmlTreeModifier: function(content){
return myself.modifyTree(content);
}
},
function(){});
},
getValue: function() {
},
buildTreeURL: function(){
return Endpoints.getListFiles() + "?fileExtensions=" + this.fileExtensions;
},
buildGetURL: function(rel){
return Endpoints.getFile() + "?fileName=" + rel;
},
modifyTree: function(content){
var myself = this;
var $content = content;
if(!$content.hasClass('directory'))
$content.find('ul').addClass("treeview filetree");
$content.find('li:last').addClass("last");
$.each($content.find('li.directory'),function(){
//get rel from a
var rel = $(this).find('a').attr('rel');
$("<div/>").addClass("hitarea expandable-hitarea").attr('rel',rel).prependTo($(this));
});
$.each($content.find('li.directory a'), function(){
$(this).addClass('folder');
});
$.each($content.find('li.file'), function(){
$("<div/>").addClass("file").prependTo($(this));
});
$.each($content.find('li.file a'), function(){
var rel = $(this).attr('rel');
//$(this).attr({target: '_blank', href : myself.buildGetURL(rel)});
$(this).click(function(){
window.location.href = myself.buildGetURL(rel);
});
});
return $content;
},
downloadDataURI :function(options) {
if(!options) {
return;
}
$.isPlainObject(options) || (options = {data: options});
if(!$.browser.webkit) {
location.href = options.data;
}
options.filename || (options.filename = "download." + options.data.split(",")[0].split(";")[0].substring(5).split("/")[1]);
$('<form method="post" action="'+options.url+'" style="display:none"><input type="hidden" name="filename" value="'+options.filename+'"/><input type="hidden" name="data" value="'+options.data+'"/></form>').submit().remove();
}
});
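// Usage sketch as a CDF/CDE component definition (property values are illustrative and the
// dashboard wiring is an assumption, not part of this file):
//
//   var browser = new FileBrowserComponent();
//   browser.name = 'fileBrowser';
//   browser.htmlObject = 'fileBrowserDiv'; // id of the placeholder element
//   browser.rootFolder = '/public/reports';
//   browser.fileExtensions = '.csv|.xls'; // passed straight to the listFiles endpoint
//   browser.chartDefinition = { width: 400, height: 300 };
//   Dashboards.addComponent(browser); // assumes the standard CDF 'Dashboards' object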
| jvelasques/cfr | cfr-core/resources/cdeComponents/FileBrowserComponent/FileBrowserComponent.js | JavaScript | mpl-2.0 | 2,715 |
/*
MusicXML Library
Copyright (C) Grame 2006-2013
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
Grame Research Laboratory, 11, cours de Verdun Gensoul 69002 Lyon - France
research@grame.fr
*/
#include "utilities.h"
#include "messagesHandling.h"
#include "musicXMLOah.h"
#include "generalOah.h"
using namespace std;
namespace MusicXML2
{
//______________________________________________________________________________
void msrAssert (
bool condition,
string messageIfFalse)
{
if (! condition) {
gLogOstream <<
"#### msrAssert failure: " << messageIfFalse <<
", aborting." <<
endl;
abort ();
}
}
//______________________________________________________________________________
void msrWarning (
string context,
string inputSourceName,
int inputLineNumber,
string message)
{
if (! gGeneralOah->fQuiet) {
gLogOstream <<
"*** " << context << " warning *** " <<
inputSourceName << ":" << inputLineNumber << ": " <<message <<
endl;
gWarningsInputLineNumbers.insert (inputLineNumber);
}
}
//______________________________________________________________________________
void lpsrMusicXMLWarning (
string inputSourceName,
int inputLineNumber,
string message)
{
msrWarning (
"LPSR",
inputSourceName,
inputLineNumber,
message);
}
//______________________________________________________________________________
void msrMusicXMLWarning (
string inputSourceName,
int inputLineNumber,
string message)
{
msrWarning (
"MusicXML",
inputSourceName,
inputLineNumber,
message);
}
//______________________________________________________________________________
void msrInternalWarning (
string inputSourceName,
int inputLineNumber,
string message)
{
msrWarning (
"INTERNAL",
inputSourceName,
inputLineNumber,
message);
}
//______________________________________________________________________________
void msrError (
string context,
string inputSourceName,
int inputLineNumber,
string sourceCodeFileName,
int sourceCodeLineNumber,
string message)
{
if (! gGeneralOah->fQuiet) {
if (gGeneralOah->fDisplaySourceCodePosition) {
gLogOstream <<
baseName (sourceCodeFileName) << ":" << sourceCodeLineNumber <<
" ";
}
if (! gGeneralOah->fDontShowErrors) {
gLogOstream <<
"### " << context << " ERROR ### " <<
inputSourceName << ":" << inputLineNumber << ": " << message <<
endl;
gErrorsInputLineNumbers.insert (inputLineNumber);
}
}
}
//______________________________________________________________________________
void msrMusicXMLError (
string inputSourceName,
int inputLineNumber,
string sourceCodeFileName,
int sourceCodeLineNumber,
string message)
{
msrError (
"MusicXML",
inputSourceName,
inputLineNumber,
sourceCodeFileName,
sourceCodeLineNumber,
message);
if (! gGeneralOah->fDontShowErrors) {
if (! gGeneralOah->fDontAbortOnErrors) {
abort ();
}
else {
exit (15);
}
}
}
//______________________________________________________________________________
void lpsrMusicXMLError (
string inputSourceName,
int inputLineNumber,
string sourceCodeFileName,
int sourceCodeLineNumber,
string message)
{
msrError (
"LPSR",
inputSourceName,
inputLineNumber,
sourceCodeFileName,
sourceCodeLineNumber,
message);
if (! gGeneralOah->fDontShowErrors) {
exit (16);
}
}
//______________________________________________________________________________
void msrInternalError (
string inputSourceName,
int inputLineNumber,
string sourceCodeFileName,
int sourceCodeLineNumber,
string message)
{
msrError (
"MSR INTERNAL",
inputSourceName,
inputLineNumber,
sourceCodeFileName,
sourceCodeLineNumber,
message);
abort ();
}
void msrLimitation (
string inputSourceName,
int inputLineNumber,
string sourceCodeFileName,
int sourceCodeLineNumber,
string message)
{
if (! (gGeneralOah->fQuiet && gGeneralOah->fDontShowErrors)) {
if (gGeneralOah->fDisplaySourceCodePosition) {
gLogOstream <<
baseName (sourceCodeFileName) << ":" << sourceCodeLineNumber <<
" ";
}
gLogOstream <<
"### MSR LIMITATION ### " <<
inputSourceName << ":" << inputLineNumber << ": " << message <<
endl;
abort ();
}
}
//______________________________________________________________________________
void msrStreamsError (
int inputLineNumber,
string sourceCodeFileName,
int sourceCodeLineNumber,
string message)
{
if (! (gGeneralOah->fQuiet && gGeneralOah->fDontShowErrors)) {
if (gGeneralOah->fDisplaySourceCodePosition) {
gLogOstream <<
baseName (sourceCodeFileName) << ":" << sourceCodeLineNumber <<
" ";
}
gLogOstream <<
"### " << "MSR STREAMS" << " ERROR ### " <<
"fake line number" << ":" << inputLineNumber << ": " << message <<
endl;
}
abort ();
}
void msrStreamsWarning (
int inputLineNumber,
string sourceCodeFileName,
int sourceCodeLineNumber,
string message)
{
if (! (gGeneralOah->fQuiet && gGeneralOah->fDontShowErrors)) {
if (gGeneralOah->fDisplaySourceCodePosition) {
gLogOstream <<
baseName (sourceCodeFileName) << ":" << sourceCodeLineNumber <<
" ";
}
gLogOstream <<
"*** " << "MSR STREAMS" << " warning *** " <<
" ### " << "MSR STREAMS" << " ERROR ### " <<
"fake line number" << ":" << inputLineNumber << ": " << message <<
endl;
}
abort ();
}
//______________________________________________________________________________
std::set<int> gWarningsInputLineNumbers;
std::set<int> gErrorsInputLineNumbers;
void displayWarningsAndErrorsInputLineNumbers ()
{
int warningsInputLineNumbersSize =
gWarningsInputLineNumbers.size ();
if (warningsInputLineNumbersSize && ! gGeneralOah->fQuiet) {
gLogOstream <<
"Warning message(s) were issued for input " <<
singularOrPluralWithoutNumber (
warningsInputLineNumbersSize, "line", "lines") <<
" ";
set<int>::const_iterator
iBegin = gWarningsInputLineNumbers.begin (),
iEnd = gWarningsInputLineNumbers.end (),
i = iBegin;
for ( ; ; ) {
gLogOstream << (*i);
if (++i == iEnd) break;
gLogOstream << ", ";
} // for
gLogOstream << endl;
}
int errorsInputLineNumbersSize =
gErrorsInputLineNumbers.size ();
if (errorsInputLineNumbersSize) {
gLogOstream <<
endl <<
"Error message(s) were issued for input " <<
singularOrPluralWithoutNumber (
errorsInputLineNumbersSize, "line", "lines") <<
" ";
set<int>::const_iterator
iBegin = gErrorsInputLineNumbers.begin (),
iEnd = gErrorsInputLineNumbers.end (),
i = iBegin;
for ( ; ; ) {
gLogOstream << (*i);
if (++i == iEnd) break;
gLogOstream << ", ";
} // for
gLogOstream << endl;
}
}
}
| dfober/libmusicxml | src/lilypond/messagesHandling.cpp | C++ | mpl-2.0 | 7,291 |
package terraform
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"log"
"reflect"
"runtime"
"sort"
"strings"
"sync"
"sync/atomic"
"testing"
"time"
"github.com/davecgh/go-spew/spew"
"github.com/go-test/deep"
"github.com/google/go-cmp/cmp"
"github.com/hashicorp/terraform/addrs"
"github.com/hashicorp/terraform/configs"
"github.com/hashicorp/terraform/configs/configschema"
"github.com/hashicorp/terraform/configs/hcl2shim"
"github.com/hashicorp/terraform/plans"
"github.com/hashicorp/terraform/providers"
"github.com/hashicorp/terraform/provisioners"
"github.com/hashicorp/terraform/states"
"github.com/hashicorp/terraform/states/statefile"
"github.com/hashicorp/terraform/tfdiags"
"github.com/zclconf/go-cty/cty"
)
func TestContext2Apply_basic(t *testing.T) {
m := testModule(t, "apply-good")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
mod := state.RootModule()
if len(mod.Resources) < 2 {
t.Fatalf("bad: %#v", mod.Resources)
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
func TestContext2Apply_unstable(t *testing.T) {
// This tests behavior when the configuration contains an unstable value,
// such as the result of uuid() or timestamp(), where each call produces
// a different result.
//
// This is an important case to test because we need to ensure that
// we don't re-call the function during the apply phase: the value should
// be fixed during plan
m := testModule(t, "apply-unstable")
p := testProvider("test")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"test": testProviderFuncFixed(p),
},
),
})
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("unexpected error during Plan: %s", diags.Err())
}
addr := addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_resource",
Name: "foo",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance)
schema := p.GetSchemaReturn.ResourceTypes["test_resource"] // automatically available in mock
rds := plan.Changes.ResourceInstance(addr)
rd, err := rds.Decode(schema.ImpliedType())
if err != nil {
t.Fatal(err)
}
if rd.After.GetAttr("random").IsKnown() {
t.Fatalf("Attribute 'random' has known value %#v; should be unknown in plan", rd.After.GetAttr("random"))
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("unexpected error during Apply: %s", diags.Err())
}
mod := state.Module(addr.Module)
rss := state.ResourceInstance(addr)
if len(mod.Resources) != 1 {
t.Fatalf("wrong number of resources %d; want 1", len(mod.Resources))
}
rs, err := rss.Current.Decode(schema.ImpliedType())
if err != nil {
t.Fatal(err)
}
got := rs.Value.GetAttr("random")
if !got.IsKnown() {
t.Fatalf("random is still unknown after apply")
}
if got, want := len(got.AsString()), 36; got != want {
t.Fatalf("random string has wrong length %d; want %d", got, want)
}
}
func TestContext2Apply_escape(t *testing.T) {
m := testModule(t, "apply-escape")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `
aws_instance.bar:
ID = foo
provider = provider.aws
foo = "bar"
type = aws_instance
`)
}
func TestContext2Apply_resourceCountOneList(t *testing.T) {
m := testModule(t, "apply-resource-count-one-list")
p := testProvider("null")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"null": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
assertNoDiagnostics(t, diags)
got := strings.TrimSpace(state.String())
want := strings.TrimSpace(`null_resource.foo.0:
ID = foo
provider = provider.null
Outputs:
test = [foo]`)
if got != want {
t.Fatalf("got:\n%s\n\nwant:\n%s\n", got, want)
}
}
func TestContext2Apply_resourceCountZeroList(t *testing.T) {
m := testModule(t, "apply-resource-count-zero-list")
p := testProvider("null")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"null": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
got := strings.TrimSpace(state.String())
want := strings.TrimSpace(`Outputs:
test = []`)
if got != want {
t.Fatalf("wrong state\n\ngot:\n%s\n\nwant:\n%s\n", got, want)
}
}
func TestContext2Apply_resourceDependsOnModule(t *testing.T) {
m := testModule(t, "apply-resource-depends-on-module")
p := testProvider("aws")
p.DiffFn = testDiffFn
// verify the apply happens in the correct order
var mu sync.Mutex
var order []string
p.ApplyFn = func(
info *InstanceInfo,
is *InstanceState,
id *InstanceDiff) (*InstanceState, error) {
if id.Attributes["ami"].New == "child" {
// make the child slower than the parent
time.Sleep(50 * time.Millisecond)
mu.Lock()
order = append(order, "child")
mu.Unlock()
} else {
mu.Lock()
order = append(order, "parent")
mu.Unlock()
}
return testApplyFn(info, is, id)
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
if !reflect.DeepEqual(order, []string{"child", "parent"}) {
t.Fatal("resources applied out of order")
}
checkStateString(t, state, testTerraformApplyResourceDependsOnModuleStr)
}
// Test that without a config, the Dependencies in the state are enough
// to maintain proper ordering.
func TestContext2Apply_resourceDependsOnModuleStateOnly(t *testing.T) {
m := testModule(t, "apply-resource-depends-on-module-empty")
p := testProvider("aws")
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.a": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "parent",
},
Dependencies: []string{"module.child"},
Provider: "provider.aws",
},
},
},
&ModuleState{
Path: []string{"root", "child"},
Resources: map[string]*ResourceState{
"aws_instance.child": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "child",
},
Provider: "provider.aws",
},
},
},
},
})
{
// verify the apply happens in the correct order
var mu sync.Mutex
var order []string
p.ApplyFn = func(
info *InstanceInfo,
is *InstanceState,
id *InstanceDiff) (*InstanceState, error) {
if is.ID == "parent" {
// make the dep slower than the parent
time.Sleep(50 * time.Millisecond)
mu.Lock()
order = append(order, "child")
mu.Unlock()
} else {
mu.Lock()
order = append(order, "parent")
mu.Unlock()
}
return testApplyFn(info, is, id)
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
state, diags := ctx.Apply()
assertNoErrors(t, diags)
if !reflect.DeepEqual(order, []string{"child", "parent"}) {
t.Fatal("resources applied out of order")
}
checkStateString(t, state, "<no state>")
}
}
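// Test that during destroy the resource that depends on the module is
// destroyed before the module's resources.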
func TestContext2Apply_resourceDependsOnModuleDestroy(t *testing.T) {
m := testModule(t, "apply-resource-depends-on-module")
p := testProvider("aws")
p.DiffFn = testDiffFn
var globalState *states.State
{
p.ApplyFn = testApplyFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
globalState = state
}
{
// When destroying the resource that depends on the module, sleep and
// verify that the module's resource has not been destroyed yet.
var called int32
var checked bool
p.ApplyFn = func(
info *InstanceInfo,
is *InstanceState,
id *InstanceDiff) (*InstanceState, error) {
if is.Attributes["ami"] == "parent" {
checked = true
// Sleep to allow parallel execution
time.Sleep(50 * time.Millisecond)
// Verify that called is 0 (dep not called)
if atomic.LoadInt32(&called) != 0 {
return nil, fmt.Errorf("module child should not be called")
}
}
atomic.AddInt32(&called, 1)
return testApplyFn(info, is, id)
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: globalState,
Destroy: true,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
if !checked {
t.Fatal("should check")
}
checkStateString(t, state, `<no state>`)
}
}
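// Test that depends_on a module is honored when the dependency lives in a
// more deeply nested (grandchild) module.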
func TestContext2Apply_resourceDependsOnModuleGrandchild(t *testing.T) {
m := testModule(t, "apply-resource-depends-on-module-deep")
p := testProvider("aws")
p.DiffFn = testDiffFn
{
// When applying the grandchild (the dependency), sleep and verify that
// nothing else has been applied yet.
var called int32
var checked bool
p.ApplyFn = func(
info *InstanceInfo,
is *InstanceState,
id *InstanceDiff) (*InstanceState, error) {
if id.Attributes["ami"].New == "grandchild" {
checked = true
// Sleep to allow parallel execution
time.Sleep(50 * time.Millisecond)
// Verify that called is 0 (dep not called)
if atomic.LoadInt32(&called) != 0 {
return nil, fmt.Errorf("aws_instance.a should not be called")
}
}
atomic.AddInt32(&called, 1)
return testApplyFn(info, is, id)
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
if !checked {
t.Fatal("should check")
}
checkStateString(t, state, testTerraformApplyResourceDependsOnModuleDeepStr)
}
}
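// Test that depends_on a module works when the depending resource itself
// lives inside a module.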
func TestContext2Apply_resourceDependsOnModuleInModule(t *testing.T) {
m := testModule(t, "apply-resource-depends-on-module-in-module")
p := testProvider("aws")
p.DiffFn = testDiffFn
{
// When applying the grandchild (the dependency), sleep and verify that
// nothing else has been applied yet.
var called int32
var checked bool
p.ApplyFn = func(info *InstanceInfo, is *InstanceState, id *InstanceDiff) (*InstanceState, error) {
if id.Attributes["ami"].New == "grandchild" {
checked = true
// Sleep to allow parallel execution
time.Sleep(50 * time.Millisecond)
// Verify that called is 0 (dep not called)
if atomic.LoadInt32(&called) != 0 {
return nil, fmt.Errorf("something else was applied before grandchild; grandchild should be first")
}
}
atomic.AddInt32(&called, 1)
return testApplyFn(info, is, id)
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
if !checked {
t.Fatal("should check")
}
checkStateString(t, state, testTerraformApplyResourceDependsOnModuleInModuleStr)
}
}
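// Test that a map variable can be passed into a module and back out
// through outputs unchanged.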
func TestContext2Apply_mapVarBetweenModules(t *testing.T) {
m := testModule(t, "apply-map-var-through-module")
p := testProvider("null")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"null": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(`<no state>
Outputs:
amis_from_module = {eu-west-1:ami-789012 eu-west-2:ami-989484 us-west-1:ami-123456 us-west-2:ami-456789 }
module.test:
null_resource.noop:
ID = foo
provider = provider.null
Outputs:
amis_out = {eu-west-1:ami-789012 eu-west-2:ami-989484 us-west-1:ami-123456 us-west-2:ami-456789 }`)
if actual != expected {
t.Fatalf("expected: \n%s\n\ngot: \n%s\n", expected, actual)
}
}
func TestContext2Apply_refCount(t *testing.T) {
m := testModule(t, "apply-ref-count")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
mod := state.RootModule()
if len(mod.Resources) < 2 {
t.Fatalf("bad: %#v", mod.Resources)
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyRefCountStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
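// Test that resources can be applied using an aliased provider configuration.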
func TestContext2Apply_providerAlias(t *testing.T) {
m := testModule(t, "apply-provider-alias")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
mod := state.RootModule()
if len(mod.Resources) < 2 {
t.Fatalf("bad: %#v", mod.Resources)
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyProviderAliasStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
// Two configurations of the same provider should both be configured prior to apply
func TestContext2Apply_providerAliasConfigure(t *testing.T) {
m := testModule(t, "apply-provider-alias-configure")
p2 := testProvider("another")
p2.ApplyFn = testApplyFn
p2.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"another": testProviderFuncFixed(p2),
},
),
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
t.Logf(legacyDiffComparisonString(p.Changes))
}
// Set ConfigureFn only now, so that we record Configure calls made AFTER the Plan above
var configCount int32
p2.ConfigureFn = func(c *ResourceConfig) error {
atomic.AddInt32(&configCount, 1)
foo, ok := c.Get("foo")
if !ok {
return fmt.Errorf("foo is not found")
}
if foo != "bar" {
return fmt.Errorf("foo: %#v", foo)
}
return nil
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
if configCount != 2 {
t.Fatalf("provider config expected 2 calls, got: %d", configCount)
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyProviderAliasConfigStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
// GH-2870
func TestContext2Apply_providerWarning(t *testing.T) {
m := testModule(t, "apply-provider-warning")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
p.ValidateFn = func(c *ResourceConfig) (ws []string, es []error) {
ws = append(ws, "Just a warning")
return
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(`
aws_instance.foo:
ID = foo
provider = provider.aws
`)
if actual != expected {
t.Fatalf("got: \n%s\n\nexpected:\n%s", actual, expected)
}
if !p.ConfigureCalled {
t.Fatalf("provider Configure() was never called!")
}
}
func TestContext2Apply_emptyModule(t *testing.T) {
m := testModule(t, "apply-empty-module")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
actual = strings.Replace(actual, " ", "", -1)
expected := strings.TrimSpace(testTerraformApplyEmptyModuleStr)
if actual != expected {
t.Fatalf("bad: \n%s\nexpect:\n%s", actual, expected)
}
}
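// Test a basic create_before_destroy replacement forced by a change to a
// require_new attribute.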
func TestContext2Apply_createBeforeDestroy(t *testing.T) {
m := testModule(t, "apply-good-create-before")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"require_new": "abc",
},
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
t.Logf(legacyDiffComparisonString(p.Changes))
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
mod := state.RootModule()
if got, want := len(mod.Resources), 1; got != want {
t.Logf("state:\n%s", state)
t.Fatalf("wrong number of resources %d; want %d", got, want)
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyCreateBeforeStr)
if actual != expected {
t.Fatalf("expected:\n%s\ngot:\n%s", expected, actual)
}
}
func TestContext2Apply_createBeforeDestroyUpdate(t *testing.T) {
m := testModule(t, "apply-good-create-before-update")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"foo": "bar",
},
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
t.Logf(legacyDiffComparisonString(p.Changes))
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
mod := state.RootModule()
if len(mod.Resources) != 1 {
t.Fatalf("bad: %s", state)
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyCreateBeforeUpdateStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
// This tests that when a CBD resource depends on a non-CBD resource,
// we can still properly apply changes that require new for both.
func TestContext2Apply_createBeforeDestroy_dependsNonCBD(t *testing.T) {
m := testModule(t, "apply-cbd-depends-non-cbd")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"require_new": "abc",
},
},
},
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "foo",
Attributes: map[string]string{
"require_new": "abc",
},
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
t.Logf(legacyDiffComparisonString(p.Changes))
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `
aws_instance.bar:
ID = foo
provider = provider.aws
require_new = yes
type = aws_instance
value = foo
Dependencies:
aws_instance.foo
aws_instance.foo:
ID = foo
provider = provider.aws
require_new = yes
type = aws_instance
`)
}
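// Test that PostApply hooks observe the new object value and then the nil
// value for the destroyed deposed object during a create_before_destroy
// replacement.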
func TestContext2Apply_createBeforeDestroy_hook(t *testing.T) {
h := new(MockHook)
m := testModule(t, "apply-good-create-before")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"require_new": "abc",
},
},
Provider: "provider.aws",
},
},
},
},
})
var actual []cty.Value
var actualLock sync.Mutex
h.PostApplyFn = func(addr addrs.AbsResourceInstance, gen states.Generation, sv cty.Value, e error) (HookAction, error) {
actualLock.Lock()
defer actualLock.Unlock()
actual = append(actual, sv)
return HookActionContinue, nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
Hooks: []Hook{h},
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
t.Logf(legacyDiffComparisonString(p.Changes))
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
expected := []cty.Value{
cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("foo"),
"require_new": cty.StringVal("xyz"),
"type": cty.StringVal("aws_instance"),
}),
cty.NullVal(cty.DynamicPseudoType),
}
cmpOpt := cmp.Transformer("ctyshim", hcl2shim.ConfigValueFromHCL2)
if !cmp.Equal(actual, expected, cmpOpt) {
t.Fatalf("wrong state snapshot sequence\n%s", cmp.Diff(expected, actual, cmpOpt))
}
}
// Test that we can perform an apply with CBD in a count with deposed instances.
func TestContext2Apply_createBeforeDestroy_deposedCount(t *testing.T) {
m := testModule(t, "apply-cbd-count")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.bar.0": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Tainted: true,
},
Deposed: []*InstanceState{
&InstanceState{
ID: "foo",
},
},
},
"aws_instance.bar.1": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Tainted: true,
},
Deposed: []*InstanceState{
&InstanceState{
ID: "bar",
},
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
t.Logf(legacyDiffComparisonString(p.Changes))
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `
aws_instance.bar.0:
ID = foo
provider = provider.aws
foo = bar
type = aws_instance
aws_instance.bar.1:
ID = foo
provider = provider.aws
foo = bar
type = aws_instance
`)
}
// Test that when we have a deposed instance but a good primary, we still
// destroy the deposed instance.
func TestContext2Apply_createBeforeDestroy_deposedOnly(t *testing.T) {
m := testModule(t, "apply-cbd-deposed-only")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
Deposed: []*InstanceState{
&InstanceState{
ID: "foo",
},
},
Provider: "provider.aws",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
t.Logf(legacyDiffComparisonString(p.Changes))
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `
aws_instance.bar:
ID = bar
provider = provider.aws
`)
}
func TestContext2Apply_destroyComputed(t *testing.T) {
m := testModule(t, "apply-destroy-computed")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "foo",
Attributes: map[string]string{
"output": "value",
},
},
Provider: "provider.aws",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
Destroy: true,
})
if p, diags := ctx.Plan(); diags.HasErrors() {
logDiagnostics(t, diags)
t.Fatal("plan failed")
} else {
t.Logf("plan:\n\n%s", legacyDiffComparisonString(p.Changes))
}
if _, diags := ctx.Apply(); diags.HasErrors() {
logDiagnostics(t, diags)
t.Fatal("apply failed")
}
}
// Test that the destroy operation uses depends_on as a source of ordering.
func TestContext2Apply_destroyDependsOn(t *testing.T) {
// It is possible for this to be racy, so we loop a number of times
// just to check.
for i := 0; i < 10; i++ {
testContext2Apply_destroyDependsOn(t)
}
}
func testContext2Apply_destroyDependsOn(t *testing.T) {
m := testModule(t, "apply-destroy-depends-on")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "foo",
Attributes: map[string]string{},
},
},
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{},
},
},
},
},
},
})
// Record the order in which we see Apply calls
var actual []string
var actualLock sync.Mutex
p.ApplyFn = func(
_ *InstanceInfo, is *InstanceState, _ *InstanceDiff) (*InstanceState, error) {
actualLock.Lock()
defer actualLock.Unlock()
actual = append(actual, is.ID)
return nil, nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
Destroy: true,
Parallelism: 1, // To check ordering
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
expected := []string{"foo", "bar"}
if !reflect.DeepEqual(actual, expected) {
t.Fatalf("wrong order\ngot: %#v\nwant: %#v", actual, expected)
}
}
// Test that destroy ordering is correct with dependencies only
// in the state.
func TestContext2Apply_destroyDependsOnStateOnly(t *testing.T) {
legacyState := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "foo",
Attributes: map[string]string{},
},
Provider: "provider.aws",
},
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{},
},
Dependencies: []string{"aws_instance.foo"},
Provider: "provider.aws",
},
},
},
},
})
newState := states.NewState()
root := newState.EnsureModule(addrs.RootModuleInstance)
root.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "foo",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"foo"}`),
Dependencies: []addrs.AbsResource{},
},
addrs.ProviderConfig{
Type: "aws",
}.Absolute(addrs.RootModuleInstance),
)
root.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "bar",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"bar"}`),
Dependencies: []addrs.AbsResource{
addrs.AbsResource{
Resource: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "foo",
},
Module: root.Addr,
},
},
},
addrs.ProviderConfig{
Type: "aws",
}.Absolute(addrs.RootModuleInstance),
)
// It is possible for this to be racy, so we loop a number of times
// just to check.
for i := 0; i < 10; i++ {
t.Run("legacy", func(t *testing.T) {
testContext2Apply_destroyDependsOnStateOnly(t, legacyState)
})
t.Run("new", func(t *testing.T) {
testContext2Apply_destroyDependsOnStateOnly(t, newState)
})
}
}
func testContext2Apply_destroyDependsOnStateOnly(t *testing.T, state *states.State) {
m := testModule(t, "empty")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
// Record the order in which we see Apply calls
var actual []string
var actualLock sync.Mutex
p.ApplyFn = func(
_ *InstanceInfo, is *InstanceState, _ *InstanceDiff) (*InstanceState, error) {
actualLock.Lock()
defer actualLock.Unlock()
actual = append(actual, is.ID)
return nil, nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
Destroy: true,
Parallelism: 1, // To check ordering
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
expected := []string{"bar", "foo"}
if !reflect.DeepEqual(actual, expected) {
t.Fatalf("wrong order\ngot: %#v\nwant: %#v", actual, expected)
}
}
// Test that destroy ordering is correct with dependencies only
// in the state within a module (GH-11749)
func TestContext2Apply_destroyDependsOnStateOnlyModule(t *testing.T) {
legacyState := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root", "child"},
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "foo",
Attributes: map[string]string{},
},
Provider: "provider.aws",
},
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{},
},
Dependencies: []string{"aws_instance.foo"},
Provider: "provider.aws",
},
},
},
},
})
newState := states.NewState()
child := newState.EnsureModule(addrs.RootModuleInstance.Child("child", addrs.NoKey))
child.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "foo",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"foo"}`),
Dependencies: []addrs.AbsResource{},
},
addrs.ProviderConfig{
Type: "aws",
}.Absolute(addrs.RootModuleInstance),
)
child.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "bar",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"bar"}`),
Dependencies: []addrs.AbsResource{
addrs.AbsResource{
Resource: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "foo",
},
Module: child.Addr,
},
},
},
addrs.ProviderConfig{
Type: "aws",
}.Absolute(addrs.RootModuleInstance),
)
// It is possible for this to be racy, so we loop a number of times
// just to check.
for i := 0; i < 10; i++ {
t.Run("legacy", func(t *testing.T) {
testContext2Apply_destroyDependsOnStateOnlyModule(t, legacyState)
})
t.Run("new", func(t *testing.T) {
testContext2Apply_destroyDependsOnStateOnlyModule(t, newState)
})
}
}
func testContext2Apply_destroyDependsOnStateOnlyModule(t *testing.T, state *states.State) {
m := testModule(t, "empty")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
// Record the order in which we see Apply calls
var actual []string
var actualLock sync.Mutex
p.ApplyFn = func(
_ *InstanceInfo, is *InstanceState, _ *InstanceDiff) (*InstanceState, error) {
actualLock.Lock()
defer actualLock.Unlock()
actual = append(actual, is.ID)
return nil, nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
Destroy: true,
Parallelism: 1, // To check ordering
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
expected := []string{"bar", "foo"}
if !reflect.DeepEqual(actual, expected) {
t.Fatalf("wrong order\ngot: %#v\nwant: %#v", actual, expected)
}
}
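// Test a basic apply that reads a data source.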
func TestContext2Apply_dataBasic(t *testing.T) {
m := testModule(t, "apply-data-basic")
p := testProvider("null")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
p.ReadDataSourceResponse = providers.ReadDataSourceResponse{
State: cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("yo"),
"foo": cty.NullVal(cty.String),
}),
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"null": testProviderFuncFixed(p),
},
),
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
t.Logf(legacyDiffComparisonString(p.Changes))
}
state, diags := ctx.Apply()
assertNoErrors(t, diags)
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyDataBasicStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
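// Test that destroy removes a data resource from state and fires only the
// expected hooks.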
func TestContext2Apply_destroyData(t *testing.T) {
m := testModule(t, "apply-destroy-data-resource")
p := testProvider("null")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"data.null_data_source.testing": &ResourceState{
Type: "null_data_source",
Primary: &InstanceState{
ID: "-",
Attributes: map[string]string{
"inputs.#": "1",
"inputs.test": "yes",
},
},
},
},
},
},
})
hook := &testHook{}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"null": testProviderFuncFixed(p),
},
),
State: state,
Destroy: true,
Hooks: []Hook{hook},
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
t.Logf(legacyDiffComparisonString(p.Changes))
}
newState, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
if got := len(newState.Modules); got != 1 {
t.Fatalf("state has %d modules after destroy; want 1", got)
}
if got := len(newState.RootModule().Resources); got != 0 {
t.Fatalf("state has %d resources after destroy; want 0", got)
}
wantHookCalls := []*testHookCall{
{"PreDiff", "data.null_data_source.testing"},
{"PostDiff", "data.null_data_source.testing"},
{"PostStateUpdate", ""},
}
if !reflect.DeepEqual(hook.Calls, wantHookCalls) {
t.Errorf("wrong hook calls\ngot: %swant: %s", spew.Sdump(hook.Calls), spew.Sdump(wantHookCalls))
}
}
// https://github.com/hashicorp/terraform/pull/5096
func TestContext2Apply_destroySkipsCBD(t *testing.T) {
// Config contains CBD resource depending on non-CBD resource, which triggers
// a cycle if they are both replaced, but should _not_ trigger a cycle when
// just doing a `terraform destroy`.
m := testModule(t, "apply-destroy-cbd")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "foo",
},
},
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "foo",
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
Destroy: true,
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
t.Logf(legacyDiffComparisonString(p.Changes))
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
}
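// Test destroy when a child module's provider configuration refers to a
// module variable.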
func TestContext2Apply_destroyModuleVarProviderConfig(t *testing.T) {
m := testModule(t, "apply-destroy-mod-var-provider-config")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root", "child"},
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "foo",
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
Destroy: true,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
_, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
}
// https://github.com/hashicorp/terraform/issues/2892
func TestContext2Apply_destroyCrossProviders(t *testing.T) {
m := testModule(t, "apply-destroy-cross-providers")
p_aws := testProvider("aws")
p_aws.ApplyFn = testApplyFn
p_aws.DiffFn = testDiffFn
p_aws.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"aws_instance": {
Attributes: map[string]*configschema.Attribute{
"id": {
Type: cty.String,
Computed: true,
},
},
},
"aws_vpc": {
Attributes: map[string]*configschema.Attribute{
"value": {
Type: cty.String,
Optional: true,
},
},
},
},
}
providers := map[string]providers.Factory{
"aws": testProviderFuncFixed(p_aws),
}
// The bug only appears intermittently, so we run this test multiple
// times to check for the race condition.
// FIXME: this test flaps now, so run it more times
for i := 0; i <= 100; i++ {
ctx := getContextForApply_destroyCrossProviders(t, m, providers)
if _, diags := ctx.Plan(); diags.HasErrors() {
logDiagnostics(t, diags)
t.Fatal("plan failed")
}
if _, diags := ctx.Apply(); diags.HasErrors() {
logDiagnostics(t, diags)
t.Fatal("apply failed")
}
}
}
func getContextForApply_destroyCrossProviders(t *testing.T, m *configs.Config, providerFactories map[string]providers.Factory) *Context {
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.shared": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "remote-2652591293",
Attributes: map[string]string{
"id": "test",
},
},
Provider: "provider.aws",
},
},
},
&ModuleState{
Path: []string{"root", "child"},
Resources: map[string]*ResourceState{
"aws_vpc.bar": &ResourceState{
Type: "aws_vpc",
Primary: &InstanceState{
ID: "vpc-aaabbb12",
Attributes: map[string]string{
"value": "test",
},
},
Provider: "provider.aws",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(providerFactories),
State: state,
Destroy: true,
})
return ctx
}
func TestContext2Apply_minimal(t *testing.T) {
m := testModule(t, "apply-minimal")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyMinimalStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
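// Test that apply fails when the provider returns a diff that does not
// match what was planned.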
func TestContext2Apply_badDiff(t *testing.T) {
m := testModule(t, "apply-good")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
p.DiffFn = func(*InstanceInfo, *InstanceState, *ResourceConfig) (*InstanceDiff, error) {
return &InstanceDiff{
Attributes: map[string]*ResourceAttrDiff{
"newp": &ResourceAttrDiff{
Old: "",
New: "",
NewComputed: true,
},
},
}, nil
}
if _, diags := ctx.Apply(); !diags.HasErrors() {
t.Fatal("should error")
}
}
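// Test that an in-progress apply can be stopped and that the provider's
// Stop method is called.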
func TestContext2Apply_cancel(t *testing.T) {
stopped := false
m := testModule(t, "apply-cancel")
p := testProvider("aws")
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
p.ApplyFn = func(*InstanceInfo, *InstanceState, *InstanceDiff) (*InstanceState, error) {
if !stopped {
stopped = true
go ctx.Stop()
for {
if ctx.sh.Stopped() {
break
}
time.Sleep(10 * time.Millisecond)
}
}
return &InstanceState{
ID: "foo",
Attributes: map[string]string{
"value": "2",
},
}, nil
}
p.DiffFn = func(info *InstanceInfo, s *InstanceState, rc *ResourceConfig) (*InstanceDiff, error) {
d := &InstanceDiff{
Attributes: map[string]*ResourceAttrDiff{},
}
if new, ok := rc.Get("value"); ok {
d.Attributes["value"] = &ResourceAttrDiff{
New: new.(string),
}
}
if new, ok := rc.Get("foo"); ok {
d.Attributes["foo"] = &ResourceAttrDiff{
New: new.(string),
}
}
return d, nil
}
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
// Start the Apply in a goroutine
var applyDiags tfdiags.Diagnostics
stateCh := make(chan *states.State)
go func() {
state, diags := ctx.Apply()
applyDiags = diags
stateCh <- state
}()
state := <-stateCh
if applyDiags.HasErrors() {
t.Fatalf("unexpected errors: %s", applyDiags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyCancelStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
if !p.StopCalled {
t.Fatal("stop should be called")
}
}
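// Test that Stop blocks until the in-flight apply operation has completed.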
func TestContext2Apply_cancelBlock(t *testing.T) {
m := testModule(t, "apply-cancel-block")
p := testProvider("aws")
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
applyCh := make(chan struct{})
p.DiffFn = func(*InstanceInfo, *InstanceState, *ResourceConfig) (*InstanceDiff, error) {
return &InstanceDiff{
Attributes: map[string]*ResourceAttrDiff{
"id": &ResourceAttrDiff{
New: "foo",
},
"num": &ResourceAttrDiff{
New: "2",
},
},
}, nil
}
p.ApplyFn = func(*InstanceInfo, *InstanceState, *InstanceDiff) (*InstanceState, error) {
close(applyCh)
for !ctx.sh.Stopped() {
// Wait for stop to be called. We call Gosched here so that
// the other goroutines can always be scheduled to set Stopped.
runtime.Gosched()
}
// Keep working a little longer so that Stop is forced to block until we return
time.Sleep(100 * time.Millisecond)
return &InstanceState{
ID: "foo",
Attributes: map[string]string{
"num": "2",
},
}, nil
}
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
// Start the Apply in a goroutine
var applyDiags tfdiags.Diagnostics
stateCh := make(chan *states.State)
go func() {
state, diags := ctx.Apply()
applyDiags = diags
stateCh <- state
}()
stopDone := make(chan struct{})
go func() {
defer close(stopDone)
<-applyCh
ctx.Stop()
}()
// Make sure that stop blocks
select {
case <-stopDone:
t.Fatal("stop should block")
case <-time.After(10 * time.Millisecond):
}
// Wait for stop
select {
case <-stopDone:
case <-time.After(500 * time.Millisecond):
t.Fatal("stop should be done")
}
// Wait for apply to complete
state := <-stateCh
if applyDiags.HasErrors() {
t.Fatalf("unexpected error: %s", applyDiags.Err())
}
checkStateString(t, state, `
aws_instance.foo:
ID = foo
provider = provider.aws
num = 2
`)
}
// for_each values cannot be used in the provisioner during destroy.
// There may be a way to handle this, but for now make sure we print an error
// rather than crashing with an invalid config.
func TestContext2Apply_provisionerDestroyForEach(t *testing.T) {
m := testModule(t, "apply-provisioner-each")
p := testProvider("aws")
pr := testProvisioner()
p.DiffFn = testDiffFn
p.ApplyFn = testApplyFn
s := &states.State{
Modules: map[string]*states.Module{
"": &states.Module{
Resources: map[string]*states.Resource{
"aws_instance.bar": &states.Resource{
Addr: addrs.Resource{Mode: addrs.ManagedResourceMode, Type: "aws_instance", Name: "bar"},
EachMode: states.EachMap,
Instances: map[addrs.InstanceKey]*states.ResourceInstance{
addrs.StringKey("a"): &states.ResourceInstance{
Current: &states.ResourceInstanceObjectSrc{
AttrsJSON: []byte(`{"foo":"bar","id":"foo"}`),
},
},
addrs.StringKey("b"): &states.ResourceInstance{
Current: &states.ResourceInstanceObjectSrc{
AttrsJSON: []byte(`{"foo":"bar","id":"foo"}`),
},
},
},
ProviderConfig: addrs.AbsProviderConfig{
Module: addrs.ModuleInstance(nil),
ProviderConfig: addrs.ProviderConfig{Type: "aws", Alias: ""},
},
},
},
},
},
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
State: s,
Destroy: true,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
_, diags := ctx.Apply()
if !diags.HasErrors() {
t.Fatal("should error")
}
if !strings.Contains(diags.Err().Error(), "each.value cannot be used in this context") {
t.Fatal("unexpected error:", diags.Err())
}
}
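// Test that Stop interrupts a running provisioner, leaving the resource
// tainted.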
func TestContext2Apply_cancelProvisioner(t *testing.T) {
m := testModule(t, "apply-cancel-provisioner")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr := testProvisioner()
pr.GetSchemaResponse = provisioners.GetSchemaResponse{
Provisioner: &configschema.Block{
Attributes: map[string]*configschema.Attribute{
"foo": {
Type: cty.String,
Optional: true,
},
},
},
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
prStopped := make(chan struct{})
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
// Start the stop process
go ctx.Stop()
<-prStopped
return nil
}
pr.StopFn = func() error {
close(prStopped)
return nil
}
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
// Start the Apply in a goroutine
var applyDiags tfdiags.Diagnostics
stateCh := make(chan *states.State)
go func() {
state, diags := ctx.Apply()
applyDiags = diags
stateCh <- state
}()
// Wait for completion
state := <-stateCh
assertNoErrors(t, applyDiags)
checkStateString(t, state, `
aws_instance.foo: (tainted)
ID = foo
provider = provider.aws
num = 2
type = aws_instance
`)
if !pr.StopCalled {
t.Fatal("stop should be called")
}
}
func TestContext2Apply_compute(t *testing.T) {
m := testModule(t, "apply-compute")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
p.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"aws_instance": {
Attributes: map[string]*configschema.Attribute{
"num": {
Type: cty.Number,
Optional: true,
},
"compute": {
Type: cty.String,
Optional: true,
},
"compute_value": {
Type: cty.String,
Optional: true,
},
"foo": {
Type: cty.String,
Optional: true,
},
"id": {
Type: cty.String,
Computed: true,
},
"type": {
Type: cty.String,
Computed: true,
},
"value": { // Populated from compute_value because compute = "value" in the config fixture
Type: cty.String,
Computed: true,
},
},
},
},
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
ctx.variables = InputValues{
"value": &InputValue{
Value: cty.NumberIntVal(1),
SourceType: ValueFromCaller,
},
}
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("unexpected errors: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyComputeStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
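// Test that decreasing count destroys the extra instances while keeping
// the remaining ones.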
func TestContext2Apply_countDecrease(t *testing.T) {
m := testModule(t, "apply-count-dec")
p := testProvider("aws")
p.DiffFn = testDiffFn
p.ApplyFn = testApplyFn
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo.0": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"foo": "foo",
"type": "aws_instance",
},
},
},
"aws_instance.foo.1": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"foo": "foo",
"type": "aws_instance",
},
},
},
"aws_instance.foo.2": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"foo": "foo",
"type": "aws_instance",
},
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: s,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
logDiagnostics(t, diags)
t.Fatal("plan failed")
}
state, diags := ctx.Apply()
assertNoErrors(t, diags)
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyCountDecStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
func TestContext2Apply_countDecreaseToOneX(t *testing.T) {
m := testModule(t, "apply-count-dec-one")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo.0": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"foo": "foo",
"type": "aws_instance",
},
},
},
"aws_instance.foo.1": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
},
"aws_instance.foo.2": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: s,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyCountDecToOneStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
// https://github.com/PeoplePerHour/terraform/pull/11
//
// This tests a case where both a "resource" and "resource.0" are in
// the state file, which apparently is a reasonable backwards compatibility
// concern found in the above 3rd party repo.
func TestContext2Apply_countDecreaseToOneCorrupted(t *testing.T) {
m := testModule(t, "apply-count-dec-one")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"foo": "foo",
"type": "aws_instance",
},
},
},
"aws_instance.foo.0": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "baz",
Attributes: map[string]string{
"type": "aws_instance",
},
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: s,
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
got := strings.TrimSpace(legacyPlanComparisonString(ctx.State(), p.Changes))
want := strings.TrimSpace(testTerraformApplyCountDecToOneCorruptedPlanStr)
if got != want {
t.Fatalf("wrong plan result\ngot:\n%s\nwant:\n%s", got, want)
}
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyCountDecToOneCorruptedStr)
if actual != expected {
t.Fatalf("wrong final state\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
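// Test that a tainted instance in a counted resource is replaced while the
// additional instances are created.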
func TestContext2Apply_countTainted(t *testing.T) {
m := testModule(t, "apply-count-tainted")
p := testProvider("aws")
p.DiffFn = testDiffFn
p.ApplyFn = testApplyFn
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo.0": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"foo": "foo",
"type": "aws_instance",
},
Tainted: true,
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: s,
})
{
plan, diags := ctx.Plan()
assertNoErrors(t, diags)
got := strings.TrimSpace(legacyDiffComparisonString(plan.Changes))
want := strings.TrimSpace(`
DESTROY/CREATE: aws_instance.foo[0]
foo: "foo" => "foo"
id: "bar" => "<computed>"
type: "aws_instance" => "aws_instance"
CREATE: aws_instance.foo[1]
foo: "" => "foo"
id: "" => "<computed>"
type: "" => "aws_instance"
`)
if got != want {
t.Fatalf("wrong plan\n\ngot:\n%s\n\nwant:\n%s", got, want)
}
}
state, diags := ctx.Apply()
assertNoErrors(t, diags)
got := strings.TrimSpace(state.String())
want := strings.TrimSpace(`
aws_instance.foo.0:
ID = foo
provider = provider.aws
foo = foo
type = aws_instance
aws_instance.foo.1:
ID = foo
provider = provider.aws
foo = foo
type = aws_instance
`)
if got != want {
t.Fatalf("wrong final state\n\ngot:\n%s\n\nwant:\n%s", got, want)
}
}
func TestContext2Apply_countVariable(t *testing.T) {
m := testModule(t, "apply-count-variable")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyCountVariableStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
func TestContext2Apply_countVariableRef(t *testing.T) {
m := testModule(t, "apply-count-variable-ref")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyCountVariableRefStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
func TestContext2Apply_provisionerInterpCount(t *testing.T) {
// This test ensures that a provisioner can interpolate a resource count
// even though the provisioner expression is evaluated during the plan
// walk. https://github.com/hashicorp/terraform/issues/16840
m, snap := testModuleWithSnapshot(t, "apply-provisioner-interp-count")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr := testProvisioner()
providerResolver := providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
)
provisioners := map[string]ProvisionerFactory{
"local-exec": testProvisionerFuncFixed(pr),
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providerResolver,
Provisioners: provisioners,
})
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("plan failed unexpectedly: %s", diags.Err())
}
state := ctx.State()
// We'll marshal and unmarshal the plan here, to ensure that we have
// a clean new context as would be created if we separately ran
// terraform plan -out=tfplan && terraform apply tfplan
ctxOpts, err := contextOptsForPlanViaFile(snap, state, plan)
if err != nil {
t.Fatal(err)
}
ctxOpts.ProviderResolver = providerResolver
ctxOpts.Provisioners = provisioners
ctx, diags = NewContext(ctxOpts)
if diags.HasErrors() {
t.Fatalf("failed to create context for plan: %s", diags.Err())
}
// Applying the plan should now succeed
_, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("apply failed unexpectedly: %s", diags.Err())
}
// Verify apply was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner was not called")
}
}
func TestContext2Apply_foreachVariable(t *testing.T) {
m := testModule(t, "plan-for-each-unknown-value")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Variables: InputValues{
"foo": &InputValue{
Value: cty.StringVal("hello"),
},
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyForEachVariableStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
func TestContext2Apply_moduleBasic(t *testing.T) {
m := testModule(t, "apply-module")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyModuleStr)
if actual != expected {
t.Fatalf("bad, expected:\n%s\n\nactual:\n%s", expected, actual)
}
}
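// Test that destroy order follows cross-module dependencies: the dependent
// root resource is destroyed before the module resource it depends on.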
func TestContext2Apply_moduleDestroyOrder(t *testing.T) {
m := testModule(t, "apply-module-destroy-order")
p := testProvider("aws")
p.DiffFn = testDiffFn
// Create a custom apply function to track the order they were destroyed
var order []string
var orderLock sync.Mutex
p.ApplyFn = func(
info *InstanceInfo,
is *InstanceState,
id *InstanceDiff) (*InstanceState, error) {
if is.ID == "b" {
// Pause briefly to make any race conditions more visible, since
// missing edges here can cause nondeterministic ordering.
time.Sleep(100 * time.Millisecond)
}
orderLock.Lock()
defer orderLock.Unlock()
order = append(order, is.ID)
return nil, nil
}
p.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"aws_instance": {
Attributes: map[string]*configschema.Attribute{
"id": {Type: cty.String, Required: true},
"blah": {Type: cty.String, Optional: true},
"value": {Type: cty.String, Optional: true},
},
},
},
}
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.b": resourceState("aws_instance", "b"),
},
},
&ModuleState{
Path: []string{"root", "child"},
Resources: map[string]*ResourceState{
"aws_instance.a": resourceState("aws_instance", "a"),
},
Outputs: map[string]*OutputState{
"a_output": &OutputState{
Type: "string",
Sensitive: false,
Value: "a",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
Destroy: true,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
expected := []string{"b", "a"}
if !reflect.DeepEqual(order, expected) {
t.Errorf("wrong order\ngot: %#v\nwant: %#v", order, expected)
}
{
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyModuleDestroyOrderStr)
if actual != expected {
t.Errorf("wrong final state\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
}
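// Test that a child module can inherit an aliased provider configuration
// from the root module.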
func TestContext2Apply_moduleInheritAlias(t *testing.T) {
m := testModule(t, "apply-module-provider-inherit-alias")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
p.ConfigureFn = func(c *ResourceConfig) error {
if _, ok := c.Get("value"); !ok {
return nil
}
if _, ok := c.Get("root"); ok {
return fmt.Errorf("child should not get root")
}
return nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `
<no state>
module.child:
aws_instance.foo:
ID = foo
provider = provider.aws.eu
`)
}
func TestContext2Apply_orphanResource(t *testing.T) {
// This is a two-step test:
// 1. Apply a configuration with resources that have count set.
// This should place the empty resource object in the state to record
// that each exists, and record any instances.
// 2. Apply an empty configuration against the same state, which should
// then clean up both the instances and the containing resource objects.
p := testProvider("test")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
p.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"test_thing": {},
},
}
// Step 1: create the resources and instances
m := testModule(t, "apply-orphan-resource")
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"test": testProviderFuncFixed(p),
},
),
})
_, diags := ctx.Plan()
assertNoErrors(t, diags)
state, diags := ctx.Apply()
assertNoErrors(t, diags)
// At this point both resources should be recorded in the state, along
// with the single instance associated with test_thing.one.
want := states.BuildState(func(s *states.SyncState) {
providerAddr := addrs.ProviderConfig{
Type: "test",
}.Absolute(addrs.RootModuleInstance)
zeroAddr := addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_thing",
Name: "zero",
}.Absolute(addrs.RootModuleInstance)
oneAddr := addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_thing",
Name: "one",
}.Absolute(addrs.RootModuleInstance)
s.SetResourceMeta(zeroAddr, states.EachList, providerAddr)
s.SetResourceMeta(oneAddr, states.EachList, providerAddr)
s.SetResourceInstanceCurrent(oneAddr.Instance(addrs.IntKey(0)), &states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{}`),
}, providerAddr)
})
// compare the marshaled form to easily remove empty and nil slices
if !statefile.StatesMarshalEqual(state, want) {
t.Fatalf("wrong state after step 1\n%s", cmp.Diff(want, state))
}
// Step 2: update with an empty config, to destroy everything
m = testModule(t, "empty")
ctx = testContext2(t, &ContextOpts{
Config: m,
State: state,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"test": testProviderFuncFixed(p),
},
),
})
_, diags = ctx.Plan()
assertNoErrors(t, diags)
state, diags = ctx.Apply()
assertNoErrors(t, diags)
// The state should now be _totally_ empty, with just an empty root module
// (since that always exists) and no resources at all.
want = states.NewState()
if !cmp.Equal(state, want) {
t.Fatalf("wrong state after step 2\ngot: %swant: %s", spew.Sdump(state), spew.Sdump(want))
}
}
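// Tests that resources in an orphaned child module that used an inherited
// provider alias can still be destroyed, and that the provider is
// configured before doing so.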
func TestContext2Apply_moduleOrphanInheritAlias(t *testing.T) {
m := testModule(t, "apply-module-provider-inherit-alias-orphan")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
called := false
p.ConfigureFn = func(c *ResourceConfig) error {
called = true
if _, ok := c.Get("child"); !ok {
return nil
}
if _, ok := c.Get("root"); ok {
return fmt.Errorf("child should not get root")
}
return nil
}
// Create a state with an orphan module
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root", "child"},
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
Provider: "provider.aws.eu",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
State: state,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
if !called {
t.Fatal("must call configure")
}
checkStateString(t, state, "<no state>")
}
func TestContext2Apply_moduleOrphanProvider(t *testing.T) {
m := testModule(t, "apply-module-orphan-provider-inherit")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
p.ConfigureFn = func(c *ResourceConfig) error {
if _, ok := c.Get("value"); !ok {
return fmt.Errorf("value is not found")
}
return nil
}
// Create a state with an orphan module
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root", "child"},
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
Provider: "provider.aws",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
State: state,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
}
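// Like the test above, but the orphaned module is nested two levels deep
// (a grandchild of the root).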
func TestContext2Apply_moduleOrphanGrandchildProvider(t *testing.T) {
m := testModule(t, "apply-module-orphan-provider-inherit")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
p.ConfigureFn = func(c *ResourceConfig) error {
if _, ok := c.Get("value"); !ok {
return fmt.Errorf("value is not found")
}
return nil
}
// Create a state with an orphan module that is nested (grandchild)
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root", "parent", "child"},
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
Provider: "provider.aws",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
State: state,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
}
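// Tests that a provider configuration is inherited by a grandchild module
// and that the provider is configured with the expected value.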
func TestContext2Apply_moduleGrandchildProvider(t *testing.T) {
m := testModule(t, "apply-module-grandchild-provider-inherit")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
var callLock sync.Mutex
called := false
p.ConfigureFn = func(c *ResourceConfig) error {
if _, ok := c.Get("value"); !ok {
return fmt.Errorf("value is not found")
}
callLock.Lock()
called = true
callLock.Unlock()
return nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
callLock.Lock()
defer callLock.Unlock()
if !called {
t.Fatal("configure never called")
}
}
// This tests an issue where providers used only within a module (and not
// in the root) weren't being added to the root properly. In this test
// case: aws is explicitly added to the root, but "test" should be added
// too, implicitly. With the bug, it wasn't.
func TestContext2Apply_moduleOnlyProvider(t *testing.T) {
m := testModule(t, "apply-module-only-provider")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pTest := testProvider("test")
pTest.ApplyFn = testApplyFn
pTest.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
"test": testProviderFuncFixed(pTest),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyModuleOnlyProviderStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
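// Tests a straightforward apply of a configuration in which a module uses
// an aliased provider.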
func TestContext2Apply_moduleProviderAlias(t *testing.T) {
m := testModule(t, "apply-module-provider-alias")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyModuleProviderAliasStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
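// Tests that targeting a resource address that doesn't exist in the
// configuration applies nothing, leaving an empty state.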
func TestContext2Apply_moduleProviderAliasTargets(t *testing.T) {
m := testModule(t, "apply-module-provider-alias")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Targets: []addrs.Targetable{
addrs.AbsResource{
Module: addrs.RootModuleInstance,
Resource: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "nonexistent",
Name: "thing",
},
},
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(`
<no state>
`)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
func TestContext2Apply_moduleProviderCloseNested(t *testing.T) {
m := testModule(t, "apply-module-provider-close-nested")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root", "child", "subchild"},
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
},
},
},
},
}),
Destroy: true,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
}
// Tests that variables used as module vars that reference data which
// already exists in the state and requires no diff work properly. This
// fixes an issue where module variables were pruned because they were
// accessing "non-existent" resources (they existed, just not in the graph
// because they weren't in the diff).
func TestContext2Apply_moduleVarRefExisting(t *testing.T) {
m := testModule(t, "apply-ref-existing")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "foo",
Attributes: map[string]string{
"foo": "bar",
},
},
Provider: "provider.aws",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyModuleVarRefExistingStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
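// Tests that a variable passed into a module and used as a resource count
// works for both a destroy and a subsequent apply with a different value.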
func TestContext2Apply_moduleVarResourceCount(t *testing.T) {
m := testModule(t, "apply-module-var-resource-count")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Variables: InputValues{
"num": &InputValue{
Value: cty.NumberIntVal(2),
SourceType: ValueFromCaller,
},
},
Destroy: true,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
ctx = testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Variables: InputValues{
"num": &InputValue{
Value: cty.NumberIntVal(5),
SourceType: ValueFromCaller,
},
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
}
// GH-819
func TestContext2Apply_moduleBool(t *testing.T) {
m := testModule(t, "apply-module-bool")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyModuleBoolStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
// Tests that a module can be targeted and everything is properly created.
// This adds to the plan test to also just verify that apply works.
func TestContext2Apply_moduleTarget(t *testing.T) {
m := testModule(t, "plan-targeted-cross-module")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Child("B", addrs.NoKey),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `
<no state>
module.A:
aws_instance.foo:
ID = foo
provider = provider.aws
foo = bar
type = aws_instance
Outputs:
value = foo
module.B:
aws_instance.bar:
ID = foo
provider = provider.aws
foo = foo
type = aws_instance
Dependencies:
module.A.aws_instance.foo
`)
}
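// Tests an apply of a configuration that uses resources from two different
// providers.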
func TestContext2Apply_multiProvider(t *testing.T) {
m := testModule(t, "apply-multi-provider")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pDO := testProvider("do")
pDO.ApplyFn = testApplyFn
pDO.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
"do": testProviderFuncFixed(pDO),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
mod := state.RootModule()
if len(mod.Resources) < 2 {
t.Fatalf("bad: %#v", mod.Resources)
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyMultiProviderStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
func TestContext2Apply_multiProviderDestroy(t *testing.T) {
m := testModule(t, "apply-multi-provider-destroy")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
p.GetSchemaReturn = &ProviderSchema{
Provider: &configschema.Block{
Attributes: map[string]*configschema.Attribute{
"addr": {Type: cty.String, Optional: true},
},
},
ResourceTypes: map[string]*configschema.Block{
"aws_instance": {
Attributes: map[string]*configschema.Attribute{
"foo": {Type: cty.String, Optional: true},
},
},
},
}
p2 := testProvider("vault")
p2.ApplyFn = testApplyFn
p2.DiffFn = testDiffFn
p2.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"vault_instance": {
Attributes: map[string]*configschema.Attribute{
"id": {Type: cty.String, Computed: true},
},
},
},
}
var state *states.State
// First, create the instances
{
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
"vault": testProviderFuncFixed(p2),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("errors during create plan: %s", diags.Err())
}
s, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("errors during create apply: %s", diags.Err())
}
state = s
}
// Destroy them
{
// Verify that aws_instance.bar is destroyed first
var checked bool
var called int32
var lock sync.Mutex
applyFn := func(
info *InstanceInfo,
is *InstanceState,
id *InstanceDiff) (*InstanceState, error) {
lock.Lock()
defer lock.Unlock()
if info.Type == "aws_instance" {
checked = true
// Sleep to allow parallel execution
time.Sleep(50 * time.Millisecond)
// Verify that called is 0 (dep not called)
if atomic.LoadInt32(&called) != 0 {
return nil, fmt.Errorf("nothing else should be called")
}
}
atomic.AddInt32(&called, 1)
return testApplyFn(info, is, id)
}
// Set the apply functions
p.ApplyFn = applyFn
p2.ApplyFn = applyFn
ctx := testContext2(t, &ContextOpts{
Destroy: true,
State: state,
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
"vault": testProviderFuncFixed(p2),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("errors during destroy plan: %s", diags.Err())
}
s, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("errors during destroy apply: %s", diags.Err())
}
if !checked {
t.Fatal("should be checked")
}
state = s
}
checkStateString(t, state, `<no state>`)
}
// This is like the multiProviderDestroy test except it tests that
// dependent resources within a child module that inherit provider
// configuration are still destroyed first.
func TestContext2Apply_multiProviderDestroyChild(t *testing.T) {
m := testModule(t, "apply-multi-provider-destroy-child")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
p.GetSchemaReturn = &ProviderSchema{
Provider: &configschema.Block{
Attributes: map[string]*configschema.Attribute{
"value": {Type: cty.String, Optional: true},
},
},
ResourceTypes: map[string]*configschema.Block{
"aws_instance": {
Attributes: map[string]*configschema.Attribute{
"foo": {Type: cty.String, Optional: true},
},
},
},
}
p2 := testProvider("vault")
p2.ApplyFn = testApplyFn
p2.DiffFn = testDiffFn
p2.GetSchemaReturn = &ProviderSchema{
Provider: &configschema.Block{},
ResourceTypes: map[string]*configschema.Block{
"vault_instance": {
Attributes: map[string]*configschema.Attribute{
"id": {Type: cty.String, Computed: true},
},
},
},
}
var state *states.State
// First, create the instances
{
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
"vault": testProviderFuncFixed(p2),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
s, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
state = s
}
// Destroy them
{
// Verify that aws_instance.bar is destroyed first
var checked bool
var called int32
var lock sync.Mutex
applyFn := func(
info *InstanceInfo,
is *InstanceState,
id *InstanceDiff) (*InstanceState, error) {
lock.Lock()
defer lock.Unlock()
if info.Type == "aws_instance" {
checked = true
// Sleep to allow parallel execution
time.Sleep(50 * time.Millisecond)
// Verify that called is 0 (dep not called)
if atomic.LoadInt32(&called) != 0 {
return nil, fmt.Errorf("nothing else should be called")
}
}
atomic.AddInt32(&called, 1)
return testApplyFn(info, is, id)
}
// Set the apply functions
p.ApplyFn = applyFn
p2.ApplyFn = applyFn
ctx := testContext2(t, &ContextOpts{
Destroy: true,
State: state,
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
"vault": testProviderFuncFixed(p2),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
s, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
if !checked {
t.Fatal("should be checked")
}
state = s
}
checkStateString(t, state, `
<no state>
`)
}
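// Tests that a splat-derived output tracks count changes: applying with a
// count of 3 and then reducing it to 1 updates the output accordingly.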
func TestContext2Apply_multiVar(t *testing.T) {
m := testModule(t, "apply-multi-var")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
// First, apply with a count of 3
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Variables: InputValues{
"num": &InputValue{
Value: cty.NumberIntVal(3),
SourceType: ValueFromCaller,
},
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := state.RootModule().OutputValues["output"]
expected := cty.StringVal("bar0,bar1,bar2")
if actual == nil {
t.Fatal("missing output")
}
if actual.Value != expected {
t.Fatalf("wrong value\ngot: %#v\nwant: %#v", actual.Value, expected)
}
t.Logf("Initial state: %s", state.String())
// Apply again, reduce the count to 1
{
ctx := testContext2(t, &ContextOpts{
Config: m,
State: state,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Variables: InputValues{
"num": &InputValue{
Value: cty.NumberIntVal(1),
SourceType: ValueFromCaller,
},
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
t.Logf("End state: %s", state.String())
actual := state.RootModule().OutputValues["output"]
if actual == nil {
t.Fatal("missing output")
}
expected := cty.StringVal("bar0")
if actual.Value != expected {
t.Fatalf("wrong value\ngot: %#v\nwant: %#v", actual.Value, expected)
}
}
}
// This is a holistic test of multi-var (aka "splat variable") handling
// across several different Terraform subsystems. This is here because
// historically there were quirky differences in handling across different
// parts of Terraform and so here we want to assert the expected behavior and
// ensure that it remains consistent in future.
func TestContext2Apply_multiVarComprehensive(t *testing.T) {
m := testModule(t, "apply-multi-var-comprehensive")
p := testProvider("test")
configs := map[string]*ResourceConfig{}
var configsLock sync.Mutex
p.ApplyFn = testApplyFn
p.PlanResourceChangeFn = func(req providers.PlanResourceChangeRequest) providers.PlanResourceChangeResponse {
proposed := req.ProposedNewState
configsLock.Lock()
defer configsLock.Unlock()
key := proposed.GetAttr("key").AsString()
// This test was originally written using the legacy p.DiffFn interface,
// and so the assertions below expect an old-style ResourceConfig, which
// we'll construct via our shim for now to avoid rewriting all of the
// assertions.
configs[key] = NewResourceConfigShimmed(req.Config, p.GetSchemaReturn.ResourceTypes["test_thing"])
retVals := make(map[string]cty.Value)
for it := proposed.ElementIterator(); it.Next(); {
idxVal, val := it.Element()
idx := idxVal.AsString()
switch idx {
case "id":
retVals[idx] = cty.UnknownVal(cty.String)
case "name":
retVals[idx] = cty.StringVal(key)
default:
retVals[idx] = val
}
}
return providers.PlanResourceChangeResponse{
PlannedState: cty.ObjectVal(retVals),
}
}
p.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"test_thing": {
Attributes: map[string]*configschema.Attribute{
"key": {Type: cty.String, Required: true},
"source_id": {Type: cty.String, Optional: true},
"source_name": {Type: cty.String, Optional: true},
"first_source_id": {Type: cty.String, Optional: true},
"first_source_name": {Type: cty.String, Optional: true},
"source_ids": {Type: cty.List(cty.String), Optional: true},
"source_names": {Type: cty.List(cty.String), Optional: true},
"source_ids_from_func": {Type: cty.List(cty.String), Optional: true},
"source_names_from_func": {Type: cty.List(cty.String), Optional: true},
"source_ids_wrapped": {Type: cty.List(cty.List(cty.String)), Optional: true},
"source_names_wrapped": {Type: cty.List(cty.List(cty.String)), Optional: true},
"id": {Type: cty.String, Computed: true},
"name": {Type: cty.String, Computed: true},
},
},
},
}
// First, apply with a count of 3
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"test": testProviderFuncFixed(p),
},
),
Variables: InputValues{
"num": &InputValue{
Value: cty.NumberIntVal(3),
SourceType: ValueFromCaller,
},
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
logDiagnostics(t, diags)
t.Fatalf("errors during plan")
}
checkConfig := func(key string, want map[string]interface{}) {
configsLock.Lock()
defer configsLock.Unlock()
if _, ok := configs[key]; !ok {
t.Errorf("no config recorded for %s; expected a configuration", key)
return
}
got := configs[key].Config
t.Run("config for "+key, func(t *testing.T) {
want["key"] = key // to avoid doing this for every example
for _, problem := range deep.Equal(got, want) {
t.Error(problem)
}
})
}
checkConfig("multi_count_var.0", map[string]interface{}{
"source_id": hcl2shim.UnknownVariableValue,
"source_name": "source.0",
})
checkConfig("multi_count_var.2", map[string]interface{}{
"source_id": hcl2shim.UnknownVariableValue,
"source_name": "source.2",
})
checkConfig("multi_count_derived.0", map[string]interface{}{
"source_id": hcl2shim.UnknownVariableValue,
"source_name": "source.0",
})
checkConfig("multi_count_derived.2", map[string]interface{}{
"source_id": hcl2shim.UnknownVariableValue,
"source_name": "source.2",
})
checkConfig("whole_splat", map[string]interface{}{
"source_ids": []interface{}{
hcl2shim.UnknownVariableValue,
hcl2shim.UnknownVariableValue,
hcl2shim.UnknownVariableValue,
},
"source_names": []interface{}{
"source.0",
"source.1",
"source.2",
},
"source_ids_from_func": hcl2shim.UnknownVariableValue,
"source_names_from_func": []interface{}{
"source.0",
"source.1",
"source.2",
},
"source_ids_wrapped": []interface{}{
[]interface{}{
hcl2shim.UnknownVariableValue,
hcl2shim.UnknownVariableValue,
hcl2shim.UnknownVariableValue,
},
},
"source_names_wrapped": []interface{}{
[]interface{}{
"source.0",
"source.1",
"source.2",
},
},
"first_source_id": hcl2shim.UnknownVariableValue,
"first_source_name": "source.0",
})
checkConfig("child.whole_splat", map[string]interface{}{
"source_ids": []interface{}{
hcl2shim.UnknownVariableValue,
hcl2shim.UnknownVariableValue,
hcl2shim.UnknownVariableValue,
},
"source_names": []interface{}{
"source.0",
"source.1",
"source.2",
},
"source_ids_wrapped": []interface{}{
[]interface{}{
hcl2shim.UnknownVariableValue,
hcl2shim.UnknownVariableValue,
hcl2shim.UnknownVariableValue,
},
},
"source_names_wrapped": []interface{}{
[]interface{}{
"source.0",
"source.1",
"source.2",
},
},
})
t.Run("apply", func(t *testing.T) {
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("error during apply: %s", diags.Err())
}
want := map[string]interface{}{
"source_ids": []interface{}{"foo", "foo", "foo"},
"source_names": []interface{}{
"source.0",
"source.1",
"source.2",
},
}
got := map[string]interface{}{}
for k, s := range state.RootModule().OutputValues {
got[k] = hcl2shim.ConfigValueFromHCL2(s.Value)
}
if !reflect.DeepEqual(got, want) {
t.Errorf(
"wrong outputs\ngot: %s\nwant: %s",
spew.Sdump(got), spew.Sdump(want),
)
}
})
}
// Test that multi-var (splat) access is ordered by count, not by
// value.
func TestContext2Apply_multiVarOrder(t *testing.T) {
m := testModule(t, "apply-multi-var-order")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
// First, apply with a count of 3
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
t.Logf("State: %s", state.String())
actual := state.RootModule().OutputValues["should-be-11"]
expected := cty.StringVal("index-11")
if actual == nil {
t.Fatal("missing output")
}
if actual.Value != expected {
t.Fatalf("wrong value\ngot: %#v\nwant: %#v", actual.Value, expected)
}
}
// Test that multi-var (splat) access is ordered by count, not by
// value, through interpolations.
func TestContext2Apply_multiVarOrderInterp(t *testing.T) {
m := testModule(t, "apply-multi-var-order-interp")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
// First, apply with a count of 3
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
t.Logf("State: %s", state.String())
actual := state.RootModule().OutputValues["should-be-11"]
expected := cty.StringVal("baz-index-11")
if actual == nil {
t.Fatal("missing output")
}
if actual.Value != expected {
t.Fatalf("wrong value\ngot: %#v\nwant: %#v", actual.Value, expected)
}
}
// Based on GH-10440 where a graph edge wasn't properly being created
// between a modified resource and a count instance being destroyed.
func TestContext2Apply_multiVarCountDec(t *testing.T) {
var s *states.State
// First create resources. Nothing sneaky here.
{
m := testModule(t, "apply-multi-var-count-dec")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Variables: InputValues{
"num": &InputValue{
Value: cty.NumberIntVal(2),
SourceType: ValueFromCaller,
},
},
})
log.Print("\n========\nStep 1 Plan\n========")
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
log.Print("\n========\nStep 1 Apply\n========")
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
t.Logf("Step 1 state:\n%s", state)
s = state
}
// Decrease the count by 1 and verify that everything happens in the
// right order.
{
m := testModule(t, "apply-multi-var-count-dec")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
// Verify that aws_instance.bar is modified first and nothing
// else happens at the same time.
var checked bool
var called int32
var lock sync.Mutex
p.ApplyFn = func(
info *InstanceInfo,
is *InstanceState,
id *InstanceDiff) (*InstanceState, error) {
lock.Lock()
defer lock.Unlock()
if id != nil && id.Attributes != nil && id.Attributes["ami"] != nil && id.Attributes["ami"].New == "special" {
checked = true
// Sleep to allow parallel execution
time.Sleep(50 * time.Millisecond)
// Verify that called is 1 (only the expected dependency has been applied so far)
if atomic.LoadInt32(&called) != 1 {
return nil, fmt.Errorf("nothing else should be called")
}
}
atomic.AddInt32(&called, 1)
return testApplyFn(info, is, id)
}
ctx := testContext2(t, &ContextOpts{
State: s,
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Variables: InputValues{
"num": &InputValue{
Value: cty.NumberIntVal(1),
SourceType: ValueFromCaller,
},
},
})
log.Print("\n========\nStep 2 Plan\n========")
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
t.Logf("Step 2 plan:\n%s", legacyDiffComparisonString(plan.Changes))
log.Print("\n========\nStep 2 Apply\n========")
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
if !checked {
t.Error("apply never called")
}
t.Logf("Step 2 state:\n%s", state)
s = state
}
}
// Test that we can resolve a multi-var (splat) for the first resource
// created in a non-root module, which happens when the module state doesn't
// exist yet.
// https://github.com/hashicorp/terraform/issues/14438
func TestContext2Apply_multiVarMissingState(t *testing.T) {
m := testModule(t, "apply-multi-var-missing-state")
p := testProvider("test")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
p.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"test_thing": {
Attributes: map[string]*configschema.Attribute{
"a_ids": {Type: cty.String, Optional: true},
"id": {Type: cty.String, Computed: true},
},
},
},
}
// First, apply with a count of 3
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"test": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan failed: %s", diags.Err())
}
// Before the relevant bug was fixed, Terraform would panic during apply.
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply failed: %s", diags.Err())
}
// If we get here with no errors or panics then our test was successful.
}
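// Tests that apply returns an error when the provider returns a nil diff
// for a resource that was planned for change.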
func TestContext2Apply_nilDiff(t *testing.T) {
m := testModule(t, "apply-good")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
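// Swap in a DiffFn that returns a nil diff; the apply below is then expected to fail.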
p.DiffFn = func(*InstanceInfo, *InstanceState, *ResourceConfig) (*InstanceDiff, error) {
return nil, nil
}
if _, diags := ctx.Apply(); diags == nil {
t.Fatal("should error")
}
}
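// Tests that an output depending on a resource is not recorded when that
// resource fails to apply, and is recorded once a later apply succeeds.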
func TestContext2Apply_outputDependsOn(t *testing.T) {
m := testModule(t, "apply-output-depends-on")
p := testProvider("aws")
p.DiffFn = testDiffFn
{
// Create a custom apply function that sleeps a bit (to allow parallel
// graph execution) and then returns an error to force a partial state
// return. We then verify the output is NOT there.
p.ApplyFn = func(
info *InstanceInfo,
is *InstanceState,
id *InstanceDiff) (*InstanceState, error) {
// Sleep to allow parallel execution
time.Sleep(50 * time.Millisecond)
// Return error to force partial state
return nil, fmt.Errorf("abcd")
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
state, diags := ctx.Apply()
if !diags.HasErrors() || !strings.Contains(diags.Err().Error(), "abcd") {
t.Fatalf("err: %s", diags.Err())
}
checkStateString(t, state, `<no state>`)
}
{
// Create the standard apply function and verify we get the output
p.ApplyFn = testApplyFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `
aws_instance.foo:
ID = foo
provider = provider.aws
Outputs:
value = result
`)
}
}
func TestContext2Apply_outputOrphan(t *testing.T) {
m := testModule(t, "apply-output-orphan")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Outputs: map[string]*OutputState{
"foo": &OutputState{
Type: "string",
Sensitive: false,
Value: "bar",
},
"bar": &OutputState{
Type: "string",
Sensitive: false,
Value: "baz",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyOutputOrphanStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
func TestContext2Apply_outputOrphanModule(t *testing.T) {
m := testModule(t, "apply-output-orphan-module")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root", "child"},
Outputs: map[string]*OutputState{
"foo": &OutputState{
Type: "string",
Value: "bar",
},
"bar": &OutputState{
Type: "string",
Value: "baz",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state.DeepCopy(),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyOutputOrphanModuleStr)
if actual != expected {
t.Fatalf("expected:\n%s\n\ngot:\n%s", expected, actual)
}
// now apply with no module in the config, which should remove the
// remaining output
ctx = testContext2(t, &ContextOpts{
Config: configs.NewEmptyConfig(),
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state.DeepCopy(),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
if !state.Empty() {
t.Fatalf("wrong final state %s\nwant empty state", spew.Sdump(state))
}
}
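// Tests that a provider configuration referencing a value computed by
// another provider's resource receives the resolved value (not an unknown)
// by the time the provider is configured.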
func TestContext2Apply_providerComputedVar(t *testing.T) {
m := testModule(t, "apply-provider-computed")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pTest := testProvider("test")
pTest.ApplyFn = testApplyFn
pTest.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
"test": testProviderFuncFixed(pTest),
},
),
})
p.ConfigureFn = func(c *ResourceConfig) error {
if c.IsComputed("value") {
return fmt.Errorf("value is computed")
}
v, ok := c.Get("value")
if !ok {
return fmt.Errorf("value is not found")
}
if v != "yes" {
return fmt.Errorf("value is not 'yes': %v", v)
}
return nil
}
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
}
func TestContext2Apply_providerConfigureDisabled(t *testing.T) {
m := testModule(t, "apply-provider-configure-disabled")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
called := false
p.ConfigureFn = func(c *ResourceConfig) error {
called = true
if _, ok := c.Get("value"); !ok {
return fmt.Errorf("value is not found")
}
return nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
if !called {
t.Fatal("configure never called")
}
}
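// Tests that a provisioner declared on a resource inside a module is
// invoked during apply.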
func TestContext2Apply_provisionerModule(t *testing.T) {
m := testModule(t, "apply-provisioner-module")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr := testProvisioner()
pr.GetSchemaResponse = provisioners.GetSchemaResponse{
Provisioner: &configschema.Block{
Attributes: map[string]*configschema.Attribute{
"foo": {Type: cty.String, Optional: true},
},
},
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyProvisionerModuleStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
// Verify apply was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner not invoked")
}
}
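// Tests that a provisioner referencing a computed value receives the
// resolved value for its command argument.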
func TestContext2Apply_Provisioner_compute(t *testing.T) {
m := testModule(t, "apply-provisioner-compute")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
val, ok := c.Config["command"]
if !ok || val != "computed_value" {
t.Fatalf("bad value for foo: %v %#v", val, c)
}
return nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
Variables: InputValues{
"value": &InputValue{
Value: cty.NumberIntVal(1),
SourceType: ValueFromCaller,
},
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyProvisionerStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
// Verify apply was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner not invoked")
}
}
func TestContext2Apply_provisionerCreateFail(t *testing.T) {
m := testModule(t, "apply-provisioner-fail-create")
p := testProvider("aws")
pr := testProvisioner()
p.DiffFn = testDiffFn
p.ApplyFn = func(info *InstanceInfo, is *InstanceState, id *InstanceDiff) (*InstanceState, error) {
is.ID = "foo"
return is, fmt.Errorf("error")
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags == nil {
t.Fatal("should error")
}
got := strings.TrimSpace(state.String())
want := strings.TrimSpace(testTerraformApplyProvisionerFailCreateStr)
if got != want {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", got, want)
}
}
func TestContext2Apply_provisionerCreateFailNoId(t *testing.T) {
m := testModule(t, "apply-provisioner-fail-create")
p := testProvider("aws")
pr := testProvisioner()
p.DiffFn = testDiffFn
p.ApplyFn = func(
info *InstanceInfo,
is *InstanceState,
id *InstanceDiff) (*InstanceState, error) {
return nil, fmt.Errorf("error")
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags == nil {
t.Fatal("should error")
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyProvisionerFailCreateNoIdStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
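// Tests the state that results when a provisioner fails after its resource
// has been created.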
func TestContext2Apply_provisionerFail(t *testing.T) {
m := testModule(t, "apply-provisioner-fail")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(*InstanceState, *ResourceConfig) error {
return fmt.Errorf("EXPLOSION")
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags == nil {
t.Fatal("should error")
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyProvisionerFailStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
func TestContext2Apply_provisionerFail_createBeforeDestroy(t *testing.T) {
m := testModule(t, "apply-provisioner-fail-create-before")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(*InstanceState, *ResourceConfig) error {
return fmt.Errorf("EXPLOSION")
}
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"require_new": "abc",
},
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
State: state,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags == nil {
t.Fatal("should error")
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyProvisionerFailCreateBeforeDestroyStr)
if actual != expected {
t.Fatalf("expected:\n%s\n:got\n%s", expected, actual)
}
}
func TestContext2Apply_error_createBeforeDestroy(t *testing.T) {
m := testModule(t, "apply-error-create-before")
p := testProvider("aws")
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"require_new": "abc",
},
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
})
p.ApplyFn = func(info *InstanceInfo, is *InstanceState, id *InstanceDiff) (*InstanceState, error) {
return nil, fmt.Errorf("error")
}
p.DiffFn = testDiffFn
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags == nil {
t.Fatal("should have error")
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyErrorCreateBeforeDestroyStr)
if actual != expected {
t.Fatalf("wrong final state\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
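// Tests the state that results when, under create_before_destroy, the
// replacement instance is created successfully but destroying the old
// instance fails.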
func TestContext2Apply_errorDestroy_createBeforeDestroy(t *testing.T) {
m := testModule(t, "apply-error-create-before")
p := testProvider("aws")
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"require_new": "abc",
},
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
})
p.ApplyFn = func(info *InstanceInfo, is *InstanceState, id *InstanceDiff) (*InstanceState, error) {
// Fail the destroy!
if id.Destroy {
return is, fmt.Errorf("error")
}
// Create should work
is = &InstanceState{
ID: "foo",
Attributes: map[string]string{
"type": "aws_instance",
"require_new": "xyz",
},
}
return is, nil
}
p.DiffFn = testDiffFn
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags == nil {
t.Fatal("should have error")
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyErrorDestroyCreateBeforeDestroyStr)
if actual != expected {
t.Fatalf("bad: actual:\n%s\n\nexpected:\n%s", actual, expected)
}
}
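// Tests create_before_destroy across repeated applies while the destroy
// step keeps failing: deposed objects accumulate and are cleaned up once
// destroy starts succeeding again.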
func TestContext2Apply_multiDepose_createBeforeDestroy(t *testing.T) {
m := testModule(t, "apply-multi-depose-create-before-destroy")
p := testProvider("aws")
p.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"aws_instance": {
Attributes: map[string]*configschema.Attribute{
"require_new": {Type: cty.String, Optional: true},
"id": {Type: cty.String, Computed: true},
},
},
},
}
ps := map[string]providers.Factory{"aws": testProviderFuncFixed(p)}
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.web": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{ID: "foo"},
},
},
},
},
})
p.DiffFn = func(info *InstanceInfo, s *InstanceState, rc *ResourceConfig) (*InstanceDiff, error) {
if rc == nil {
return &InstanceDiff{
Destroy: true,
}, nil
}
rn, _ := rc.Get("require_new")
return &InstanceDiff{
Attributes: map[string]*ResourceAttrDiff{
"id": {
New: hcl2shim.UnknownVariableValue,
NewComputed: true,
RequiresNew: true,
},
"require_new": {
Old: s.Attributes["require_new"],
New: rn.(string),
RequiresNew: true,
},
},
}, nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(ps),
State: state,
})
createdInstanceId := "bar"
// Create works
createFunc := func(is *InstanceState, id *InstanceDiff) (*InstanceState, error) {
return &InstanceState{
ID: createdInstanceId,
Attributes: map[string]string{
"require_new": id.Attributes["require_new"].New,
},
}, nil
}
// Destroy starts broken
destroyFunc := func(is *InstanceState) (*InstanceState, error) {
return is, fmt.Errorf("destroy failed")
}
p.ApplyFn = func(info *InstanceInfo, is *InstanceState, id *InstanceDiff) (*InstanceState, error) {
if id.Destroy {
return destroyFunc(is)
} else {
return createFunc(is, id)
}
}
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
// Destroy is broken, so even though CBD successfully replaces the instance,
// we'll have to save the Deposed instance to destroy later
state, diags := ctx.Apply()
if diags == nil {
t.Fatal("should have error")
}
checkStateString(t, state, `
aws_instance.web: (1 deposed)
ID = bar
provider = provider.aws
require_new = yes
Deposed ID 1 = foo
`)
createdInstanceId = "baz"
ctx = testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(ps),
State: state,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
// We're replacing the primary instance once again. Destroy is _still_
// broken, so the Deposed list gets longer
state, diags = ctx.Apply()
if diags == nil {
t.Fatal("should have error")
}
// For this one we can't rely on checkStateString because its result is
// not deterministic when multiple deposed objects are present. Instead,
// we will probe the state object directly.
{
is := state.RootModule().Resources["aws_instance.web"].Instances[addrs.NoKey]
t.Logf("aws_instance.web is %s", spew.Sdump(is))
if is.Current == nil {
t.Fatalf("no current object for aws_instance web; should have one")
}
if !bytes.Contains(is.Current.AttrsJSON, []byte("baz")) {
t.Fatalf("incorrect current object attrs %s; want id=baz", is.Current.AttrsJSON)
}
if got, want := len(is.Deposed), 2; got != want {
t.Fatalf("wrong number of deposed instances %d; want %d", got, want)
}
var foos, bars int
for _, obj := range is.Deposed {
if bytes.Contains(obj.AttrsJSON, []byte("foo")) {
foos++
}
if bytes.Contains(obj.AttrsJSON, []byte("bar")) {
bars++
}
}
if got, want := foos, 1; got != want {
t.Fatalf("wrong number of deposed instances with id=foo %d; want %d", got, want)
}
if got, want := bars, 1; got != want {
t.Fatalf("wrong number of deposed instances with id=bar %d; want %d", got, want)
}
}
// Destroy partially fixed!
destroyFunc = func(is *InstanceState) (*InstanceState, error) {
if is.ID == "foo" || is.ID == "baz" {
return nil, nil
} else {
return is, fmt.Errorf("destroy partially failed")
}
}
createdInstanceId = "qux"
ctx = testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(ps),
State: state,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags = ctx.Apply()
// Expect error because 1/2 of Deposed destroys failed
if diags == nil {
t.Fatal("should have error")
}
// foo and baz are now gone, bar sticks around
checkStateString(t, state, `
aws_instance.web: (1 deposed)
ID = qux
provider = provider.aws
require_new = yes
Deposed ID 1 = bar
`)
// Destroy working fully!
destroyFunc = func(is *InstanceState) (*InstanceState, error) {
return nil, nil
}
createdInstanceId = "quux"
ctx = testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(ps),
State: state,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatal("should not have error:", diags.Err())
}
// And finally the state is clean
checkStateString(t, state, `
aws_instance.web:
ID = quux
provider = provider.aws
require_new = yes
`)
}
// Verify that a normal provisioner with on_failure "continue" set won't
// taint the resource and continues executing.
func TestContext2Apply_provisionerFailContinue(t *testing.T) {
m := testModule(t, "apply-provisioner-fail-continue")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
return fmt.Errorf("provisioner error")
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `
aws_instance.foo:
ID = foo
provider = provider.aws
foo = bar
type = aws_instance
`)
// Verify apply was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner not invoked")
}
}
// Verify that a normal provisioner with on_failure "continue" records
// the error with the hook.
func TestContext2Apply_provisionerFailContinueHook(t *testing.T) {
h := new(MockHook)
m := testModule(t, "apply-provisioner-fail-continue")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
return fmt.Errorf("provisioner error")
}
ctx := testContext2(t, &ContextOpts{
Config: m,
Hooks: []Hook{h},
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
if !h.PostProvisionInstanceStepCalled {
t.Fatal("PostProvisionInstanceStep not called")
}
if h.PostProvisionInstanceStepErrorArg == nil {
t.Fatal("should have error")
}
}
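// Tests that a destroy-time provisioner runs when its resource is
// destroyed.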
func TestContext2Apply_provisionerDestroy(t *testing.T) {
m := testModule(t, "apply-provisioner-destroy")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
val, ok := c.Config["command"]
if !ok || val != "destroy" {
t.Fatalf("bad value for foo: %v %#v", val, c)
}
return nil
}
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
State: state,
Destroy: true,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `<no state>`)
// Verify apply was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner not invoked")
}
}
// Verify that on destroy provisioner failure, nothing happens to the instance
func TestContext2Apply_provisionerDestroyFail(t *testing.T) {
m := testModule(t, "apply-provisioner-destroy")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
return fmt.Errorf("provisioner error")
}
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
State: state,
Destroy: true,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if !diags.HasErrors() {
t.Fatal("should error")
}
checkStateString(t, state, `
aws_instance.foo:
ID = bar
provider = provider.aws
`)
// Verify apply was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner not invoked")
}
}
// Verify that when a destroy provisioner fails with on_failure "continue",
// we continue to the next provisioner.
func TestContext2Apply_provisionerDestroyFailContinue(t *testing.T) {
m := testModule(t, "apply-provisioner-destroy-continue")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
var l sync.Mutex
var calls []string
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
val, ok := c.Config["command"]
if !ok {
t.Fatalf("bad value for foo: %v %#v", val, c)
}
l.Lock()
defer l.Unlock()
calls = append(calls, val.(string))
return fmt.Errorf("provisioner error")
}
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
State: state,
Destroy: true,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `<no state>`)
// Verify apply was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner not invoked")
}
expected := []string{"one", "two"}
if !reflect.DeepEqual(calls, expected) {
t.Fatalf("wrong commands\ngot: %#v\nwant: %#v", calls, expected)
}
}
// Verify that when a destroy provisioner fails with on_failure "continue",
// we continue to the next provisioner. But if that next provisioner is
// configured to fail, then we fail after running it.
func TestContext2Apply_provisionerDestroyFailContinueFail(t *testing.T) {
m := testModule(t, "apply-provisioner-destroy-fail")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
var l sync.Mutex
var calls []string
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
val, ok := c.Config["command"]
if !ok {
t.Fatalf("bad value for foo: %v %#v", val, c)
}
l.Lock()
defer l.Unlock()
calls = append(calls, val.(string))
return fmt.Errorf("provisioner error")
}
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
State: state,
Destroy: true,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if !diags.HasErrors() {
t.Fatal("apply succeeded; wanted error from second provisioner")
}
checkStateString(t, state, `
aws_instance.foo:
ID = bar
provider = provider.aws
`)
// Verify apply was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner not invoked")
}
expected := []string{"one", "two"}
if !reflect.DeepEqual(calls, expected) {
t.Fatalf("bad: %#v", calls)
}
}
// Verify destroy provisioners are not run for tainted instances.
func TestContext2Apply_provisionerDestroyTainted(t *testing.T) {
m := testModule(t, "apply-provisioner-destroy")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
destroyCalled := false
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
expected := "create"
if rs.ID == "bar" {
destroyCalled = true
return nil
}
val, ok := c.Config["command"]
if !ok || val != expected {
t.Fatalf("bad value for command: %v %#v", val, c)
}
return nil
}
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Tainted: true,
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
State: state,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `
aws_instance.foo:
ID = foo
provider = provider.aws
foo = bar
type = aws_instance
`)
// Verify apply was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner not invoked")
}
if destroyCalled {
t.Fatal("destroy should not be called")
}
}
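// Verify that destroy provisioners run for resources inside a child module.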
func TestContext2Apply_provisionerDestroyModule(t *testing.T) {
m := testModule(t, "apply-provisioner-destroy-module")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
val, ok := c.Config["command"]
if !ok || val != "value" {
t.Fatalf("bad value for foo: %v %#v", val, c)
}
return nil
}
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root", "child"},
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
State: state,
Destroy: true,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `<no state>`)
// Verify apply was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner not invoked")
}
}
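// Verify that a destroy provisioner can reference an attribute of another
// resource that is still in state.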
func TestContext2Apply_provisionerDestroyRef(t *testing.T) {
m := testModule(t, "apply-provisioner-destroy-ref")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
val, ok := c.Config["command"]
if !ok || val != "hello" {
return fmt.Errorf("bad value for command: %v %#v", val, c)
}
return nil
}
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"value": "hello",
},
},
Provider: "provider.aws",
},
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
Provider: "provider.aws",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
State: state,
Destroy: true,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `<no state>`)
// Verify apply was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner not invoked")
}
}
// Test that a destroy provisioner referencing an invalid key errors.
func TestContext2Apply_provisionerDestroyRefInvalid(t *testing.T) {
m := testModule(t, "apply-provisioner-destroy-ref-invalid")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
return nil
}
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
},
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
State: state,
Destroy: true,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
// This was originally an apply test, but the invalid reference is now caught during validation.
if diags := ctx.Validate(); !diags.HasErrors() {
t.Fatal("expected error")
}
}
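// Verify that a provisioner configuration can reference resource attributes.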
func TestContext2Apply_provisionerResourceRef(t *testing.T) {
m := testModule(t, "apply-provisioner-resource-ref")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr := testProvisioner()
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
val, ok := c.Config["command"]
if !ok || val != "2" {
t.Fatalf("bad value for foo: %v %#v", val, c)
}
return nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyProvisionerResourceRefStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
// Verify apply was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner not invoked")
}
}
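// Verify that a provisioner can reference its own resource via self.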
func TestContext2Apply_provisionerSelfRef(t *testing.T) {
m := testModule(t, "apply-provisioner-self-ref")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
val, ok := c.Config["command"]
if !ok || val != "bar" {
t.Fatalf("bad value for command: %v %#v", val, c)
}
return nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyProvisionerSelfRefStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
// Verify apply was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner not invoked")
}
}
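// Verify self references in provisioners on a counted resource: each
// instance's provisioner should receive its own command value.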
func TestContext2Apply_provisionerMultiSelfRef(t *testing.T) {
var lock sync.Mutex
commands := make([]string, 0, 5)
m := testModule(t, "apply-provisioner-multi-self-ref")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
lock.Lock()
defer lock.Unlock()
val, ok := c.Config["command"]
if !ok {
t.Fatalf("bad value for command: %v %#v", val, c)
}
commands = append(commands, val.(string))
return nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyProvisionerMultiSelfRefStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
// Verify apply was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner not invoked")
}
// Verify our result
sort.Strings(commands)
expectedCommands := []string{"number 0", "number 1", "number 2"}
if !reflect.DeepEqual(commands, expectedCommands) {
t.Fatalf("bad: %#v", commands)
}
}
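// Variant of the multi self-ref test above: each provisioner records its
// "order" value and all three instances must be provisioned.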
func TestContext2Apply_provisionerMultiSelfRefSingle(t *testing.T) {
var lock sync.Mutex
order := make([]string, 0, 5)
m := testModule(t, "apply-provisioner-multi-self-ref-single")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
lock.Lock()
defer lock.Unlock()
val, ok := c.Config["order"]
if !ok {
t.Fatalf("bad value for order: %v %#v", val, c)
}
order = append(order, val.(string))
return nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyProvisionerMultiSelfRefSingleStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
// Verify apply was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner not invoked")
}
// Verify our result
sort.Strings(order)
expectedOrder := []string{"0", "1", "2"}
if !reflect.DeepEqual(order, expectedOrder) {
t.Fatalf("bad: %#v", order)
}
}
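// Verify that a provisioner explicitly referencing self works through a full
// create-then-destroy cycle.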
func TestContext2Apply_provisionerExplicitSelfRef(t *testing.T) {
m := testModule(t, "apply-provisioner-explicit-self-ref")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
val, ok := c.Config["command"]
if !ok || val != "bar" {
t.Fatalf("bad value for command: %v %#v", val, c)
}
return nil
}
var state *states.State
{
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
_, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
// Verify apply was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner not invoked")
}
}
{
ctx := testContext2(t, &ContextOpts{
Config: m,
Destroy: true,
State: state,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
_, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `<no state>`)
}
}
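// Verify self references in provisioners on a resource that uses for_each.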
func TestContext2Apply_provisionerForEachSelfRef(t *testing.T) {
m := testModule(t, "apply-provisioner-for-each-self")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
val, ok := c.Config["command"]
if !ok {
t.Fatalf("bad value for command: %v %#v", val, c)
}
return nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
_, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
}
// Provisioner should NOT run on a diff, only create
func TestContext2Apply_Provisioner_Diff(t *testing.T) {
m := testModule(t, "apply-provisioner-diff")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
return nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
logDiagnostics(t, diags)
t.Fatal("plan failed")
}
state, diags := ctx.Apply()
if diags.HasErrors() {
logDiagnostics(t, diags)
t.Fatal("apply failed")
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyProvisionerDiffStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
// Verify apply was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner was not called on first apply")
}
pr.ProvisionResourceCalled = false
// Change the state to force a diff
mod := state.RootModule()
obj := mod.Resources["aws_instance.bar"].Instances[addrs.NoKey].Current
var attrs map[string]interface{}
err := json.Unmarshal(obj.AttrsJSON, &attrs)
if err != nil {
t.Fatal(err)
}
attrs["foo"] = "baz"
obj.AttrsJSON, err = json.Marshal(attrs)
if err != nil {
t.Fatal(err)
}
// Re-create context with state
ctx = testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
State: state,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
logDiagnostics(t, diags)
t.Fatal("plan failed")
}
state2, diags := ctx.Apply()
if diags.HasErrors() {
logDiagnostics(t, diags)
t.Fatal("apply failed")
}
actual = strings.TrimSpace(state2.String())
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
// Verify apply was NOT invoked
if pr.ProvisionResourceCalled {
t.Fatalf("provisioner was called on second apply; should not have been")
}
}
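// Verify that plan and apply succeed when state contains an orphaned resource
// and the diff includes computed ("output") attributes.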
func TestContext2Apply_outputDiffVars(t *testing.T) {
m := testModule(t, "apply-good")
p := testProvider("aws")
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.baz": &ResourceState{ // This one is not in config, so should be destroyed
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: s,
})
p.ApplyFn = func(info *InstanceInfo, s *InstanceState, d *InstanceDiff) (*InstanceState, error) {
if d.Destroy {
return nil, nil
}
result := s.MergeDiff(d)
result.ID = "foo"
return result, nil
}
p.DiffFn = func(info *InstanceInfo, s *InstanceState, rc *ResourceConfig) (*InstanceDiff, error) {
d := &InstanceDiff{
Attributes: map[string]*ResourceAttrDiff{},
}
if new, ok := rc.Get("value"); ok {
d.Attributes["value"] = &ResourceAttrDiff{
New: new.(string),
}
}
if new, ok := rc.Get("foo"); ok {
d.Attributes["foo"] = &ResourceAttrDiff{
New: new.(string),
}
} else if rc.IsComputed("foo") {
d.Attributes["foo"] = &ResourceAttrDiff{
NewComputed: true,
Type: DiffAttrOutput, // This no longer has any real effect, but this test originally set it.
}
}
if new, ok := rc.Get("num"); ok {
d.Attributes["num"] = &ResourceAttrDiff{
New: fmt.Sprintf("%#v", new),
}
}
return d, nil
}
if _, diags := ctx.Plan(); diags.HasErrors() {
logDiagnostics(t, diags)
t.Fatal("plan failed")
}
if _, diags := ctx.Apply(); diags.HasErrors() {
logDiagnostics(t, diags)
t.Fatal("apply failed")
}
}
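// Verify a full create-then-destroy cycle: everything is removed from state
// and the instances are destroyed in the expected order.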
func TestContext2Apply_destroyX(t *testing.T) {
m := testModule(t, "apply-destroy")
h := new(HookRecordApplyOrder)
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
Hooks: []Hook{h},
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
// First plan and apply a create operation
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
// Next, plan and apply a destroy operation
h.Active = true
ctx = testContext2(t, &ContextOpts{
Destroy: true,
State: state,
Config: m,
Hooks: []Hook{h},
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
// Test that things were destroyed
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyDestroyStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
// Test that things were destroyed _in the right order_
expected2 := []string{"aws_instance.bar", "aws_instance.foo"}
actual2 := h.IDs
if !reflect.DeepEqual(actual2, expected2) {
t.Fatalf("expected: %#v\n\ngot:%#v", expected2, actual2)
}
}
func TestContext2Apply_destroyOrder(t *testing.T) {
m := testModule(t, "apply-destroy")
h := new(HookRecordApplyOrder)
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
Hooks: []Hook{h},
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
// First plan and apply a create operation
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
t.Logf("State 1: %s", state)
// Next, plan and apply a destroy
h.Active = true
ctx = testContext2(t, &ContextOpts{
Destroy: true,
State: state,
Config: m,
Hooks: []Hook{h},
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
// Test that things were destroyed
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyDestroyStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
// Test that things were destroyed _in the right order_
expected2 := []string{"aws_instance.bar", "aws_instance.foo"}
actual2 := h.IDs
if !reflect.DeepEqual(actual2, expected2) {
t.Fatalf("expected: %#v\n\ngot:%#v", expected2, actual2)
}
}
// https://github.com/hashicorp/terraform/issues/2767
func TestContext2Apply_destroyModulePrefix(t *testing.T) {
m := testModule(t, "apply-destroy-module-resource-prefix")
h := new(MockHook)
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
Hooks: []Hook{h},
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
// First plan and apply a create operation
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
// Verify that we got the apply info correct
if v := h.PreApplyAddr.String(); v != "module.child.aws_instance.foo" {
t.Fatalf("bad: %s", v)
}
// Next, plan and apply a destroy operation and reset the hook
h = new(MockHook)
ctx = testContext2(t, &ContextOpts{
Destroy: true,
State: state,
Config: m,
Hooks: []Hook{h},
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
// Test that things were destroyed
if v := h.PreApplyAddr.String(); v != "module.child.aws_instance.foo" {
t.Fatalf("bad: %s", v)
}
}
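// Verify that a resource recorded in a nested module's state is destroyed,
// leaving an empty state.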
func TestContext2Apply_destroyNestedModule(t *testing.T) {
m := testModule(t, "apply-destroy-nested-module")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root", "child", "subchild"},
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
Provider: "provider.aws",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: s,
})
// Plan and apply; the resource recorded in the nested module state should be destroyed
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
// Test that things were destroyed
actual := strings.TrimSpace(state.String())
if actual != "<no state>" {
t.Fatalf("expected no state, got: %s", actual)
}
}
func TestContext2Apply_destroyDeeplyNestedModule(t *testing.T) {
m := testModule(t, "apply-destroy-deeply-nested-module")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root", "child", "subchild", "subsubchild"},
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
Provider: "provider.aws",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: s,
})
// Plan and apply; the resource recorded in the deeply nested module state should be destroyed
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
// Test that things were destroyed
if !state.Empty() {
t.Fatalf("wrong final state %s\nwant empty state", spew.Sdump(state))
}
}
// https://github.com/hashicorp/terraform/issues/5440
func TestContext2Apply_destroyModuleWithAttrsReferencingResource(t *testing.T) {
m, snap := testModuleWithSnapshot(t, "apply-destroy-module-with-attrs")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
var state *states.State
{
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
// First plan and apply a create operation
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan diags: %s", diags.Err())
} else {
t.Logf("Step 1 plan: %s", legacyDiffComparisonString(p.Changes))
}
var diags tfdiags.Diagnostics
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("apply errs: %s", diags.Err())
}
t.Logf("Step 1 state: %s", state)
}
h := new(HookRecordApplyOrder)
h.Active = true
{
ctx := testContext2(t, &ContextOpts{
Destroy: true,
Config: m,
State: state,
Hooks: []Hook{h},
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
// Plan the destroy operation
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("destroy plan err: %s", diags.Err())
}
t.Logf("Step 2 plan: %s", legacyDiffComparisonString(plan.Changes))
ctxOpts, err := contextOptsForPlanViaFile(snap, state, plan)
if err != nil {
t.Fatalf("failed to round-trip through planfile: %s", err)
}
ctxOpts.ProviderResolver = providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
)
ctx, diags = NewContext(ctxOpts)
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("destroy apply err: %s", diags.Err())
}
t.Logf("Step 2 state: %s", state)
}
// Test that things were destroyed
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(`<no state>`)
if actual != expected {
t.Fatalf("expected:\n\n%s\n\nactual:\n\n%s", expected, actual)
}
}
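// Verify destroy of a configuration where a module uses a variable and count,
// round-tripping the destroy plan through a plan file before applying it.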
func TestContext2Apply_destroyWithModuleVariableAndCount(t *testing.T) {
m, snap := testModuleWithSnapshot(t, "apply-destroy-mod-var-and-count")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
var state *states.State
var diags tfdiags.Diagnostics
{
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
// First plan and apply a create operation
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan err: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("apply err: %s", diags.Err())
}
}
h := new(HookRecordApplyOrder)
h.Active = true
{
ctx := testContext2(t, &ContextOpts{
Destroy: true,
Config: m,
State: state,
Hooks: []Hook{h},
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
// Plan the destroy operation
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("destroy plan err: %s", diags.Err())
}
ctxOpts, err := contextOptsForPlanViaFile(snap, state, plan)
if err != nil {
t.Fatalf("failed to round-trip through planfile: %s", err)
}
ctxOpts.ProviderResolver = providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
)
ctx, diags = NewContext(ctxOpts)
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("destroy apply err: %s", diags.Err())
}
}
// Test that things were destroyed
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(`
<no state>
module.child:
`)
if actual != expected {
t.Fatalf("expected: \n%s\n\nbad: \n%s", expected, actual)
}
}
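// Verify a destroy targeted at a child module, including the warnings that
// resource targeting produces during plan and apply.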
func TestContext2Apply_destroyTargetWithModuleVariableAndCount(t *testing.T) {
m := testModule(t, "apply-destroy-mod-var-and-count")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
var state *states.State
var diags tfdiags.Diagnostics
{
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
// First plan and apply a create operation
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan err: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("apply err: %s", diags.Err())
}
}
{
ctx := testContext2(t, &ContextOpts{
Destroy: true,
Config: m,
State: state,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Child("child", addrs.NoKey),
},
})
_, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("plan err: %s", diags)
}
if len(diags) != 1 {
// Should have one warning that -target is in effect.
t.Fatalf("got %d diagnostics in plan; want 1", len(diags))
}
if got, want := diags[0].Severity(), tfdiags.Warning; got != want {
t.Errorf("wrong diagnostic severity %#v; want %#v", got, want)
}
if got, want := diags[0].Description().Summary, "Resource targeting is in effect"; got != want {
t.Errorf("wrong diagnostic summary %#v; want %#v", got, want)
}
// Destroy, targeting the module explicitly
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("destroy apply err: %s", diags)
}
if len(diags) != 1 {
t.Fatalf("got %d diagnostics; want 1", len(diags))
}
if got, want := diags[0].Severity(), tfdiags.Warning; got != want {
t.Errorf("wrong diagnostic severity %#v; want %#v", got, want)
}
if got, want := diags[0].Description().Summary, "Applied changes may be incomplete"; got != want {
t.Errorf("wrong diagnostic summary %#v; want %#v", got, want)
}
}
// Test that things were destroyed
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(`<no state>`)
if actual != expected {
t.Fatalf("expected: \n%s\n\nbad: \n%s", expected, actual)
}
}
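// Like TestContext2Apply_destroyWithModuleVariableAndCount, but with nested
// child modules.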
func TestContext2Apply_destroyWithModuleVariableAndCountNested(t *testing.T) {
m, snap := testModuleWithSnapshot(t, "apply-destroy-mod-var-and-count-nested")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
var state *states.State
var diags tfdiags.Diagnostics
{
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
// First plan and apply a create operation
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan err: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("apply err: %s", diags.Err())
}
}
h := new(HookRecordApplyOrder)
h.Active = true
{
ctx := testContext2(t, &ContextOpts{
Destroy: true,
Config: m,
State: state,
Hooks: []Hook{h},
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
// Plan the destroy operation
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("destroy plan err: %s", diags.Err())
}
ctxOpts, err := contextOptsForPlanViaFile(snap, state, plan)
if err != nil {
t.Fatalf("failed to round-trip through planfile: %s", err)
}
ctxOpts.ProviderResolver = providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
)
ctx, diags = NewContext(ctxOpts)
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("destroy apply err: %s", diags.Err())
}
}
// Test that things were destroyed
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(`
<no state>
module.child.child2:
`)
if actual != expected {
t.Fatalf("expected: \n%s\n\nbad: \n%s", expected, actual)
}
}
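// Verify that destroy removes all resources and outputs, and that destroying
// again from the resulting state produces no errors.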
func TestContext2Apply_destroyOutputs(t *testing.T) {
m := testModule(t, "apply-destroy-outputs")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
// First plan and apply a create operation
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
// Next, plan and apply a destroy operation
ctx = testContext2(t, &ContextOpts{
Destroy: true,
State: state,
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
mod := state.RootModule()
if len(mod.Resources) > 0 {
t.Fatalf("expected no resources, got: %#v", mod)
}
// destroying again should produce no errors
ctx = testContext2(t, &ContextOpts{
Destroy: true,
State: state,
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatal(diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatal(diags.Err())
}
}
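// Verify that a resource present in state but absent from configuration is
// destroyed and removed from state.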
func TestContext2Apply_destroyOrphan(t *testing.T) {
m := testModule(t, "apply-error")
p := testProvider("aws")
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.baz": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: s,
})
p.ApplyFn = func(info *InstanceInfo, s *InstanceState, d *InstanceDiff) (*InstanceState, error) {
if d.Destroy {
return nil, nil
}
result := s.MergeDiff(d)
result.ID = "foo"
return result, nil
}
p.DiffFn = func(info *InstanceInfo, s *InstanceState, rc *ResourceConfig) (*InstanceDiff, error) {
d := &InstanceDiff{
Attributes: map[string]*ResourceAttrDiff{},
}
if new, ok := rc.Get("value"); ok {
d.Attributes["value"] = &ResourceAttrDiff{
New: new.(string),
}
}
if new, ok := rc.Get("foo"); ok {
d.Attributes["foo"] = &ResourceAttrDiff{
New: new.(string),
}
}
return d, nil
}
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
mod := state.RootModule()
if _, ok := mod.Resources["aws_instance.baz"]; ok {
t.Fatalf("bad: %#v", mod.Resources)
}
}
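// Verify that provisioners are not run when destroying a tainted instance.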
func TestContext2Apply_destroyTaintedProvisioner(t *testing.T) {
m := testModule(t, "apply-destroy-provisioner")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
called := false
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
called = true
return nil
}
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"id": "bar",
},
Tainted: true,
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
State: s,
Destroy: true,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
if called {
t.Fatal("provisioner should not be called")
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace("<no state>")
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
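// Verify that when the provider fails partway through an apply, the resources
// that did apply successfully are still recorded in state.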
func TestContext2Apply_error(t *testing.T) {
errored := false
m := testModule(t, "apply-error")
p := testProvider("aws")
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
p.ApplyFn = func(*InstanceInfo, *InstanceState, *InstanceDiff) (*InstanceState, error) {
if errored {
state := &InstanceState{
ID: "bar",
}
return state, fmt.Errorf("error")
}
errored = true
return &InstanceState{
ID: "foo",
Attributes: map[string]string{
"value": "2",
},
}, nil
}
p.DiffFn = func(info *InstanceInfo, s *InstanceState, rc *ResourceConfig) (*InstanceDiff, error) {
d := &InstanceDiff{
Attributes: map[string]*ResourceAttrDiff{},
}
if new, ok := rc.Get("value"); ok {
d.Attributes["value"] = &ResourceAttrDiff{
New: new.(string),
}
}
if new, ok := rc.Get("foo"); ok {
d.Attributes["foo"] = &ResourceAttrDiff{
New: new.(string),
}
}
return d, nil
}
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if !diags.HasErrors() {
t.Fatal("should have error")
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyErrorStr)
if actual != expected {
t.Fatalf("expected:\n%s\n\ngot:\n%s", expected, actual)
}
}
func TestContext2Apply_errorDestroy(t *testing.T) {
m := testModule(t, "empty")
p := testProvider("test")
p.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"test_thing": {
Attributes: map[string]*configschema.Attribute{
"id": {Type: cty.String, Optional: true},
},
},
},
}
p.PlanResourceChangeFn = func(req providers.PlanResourceChangeRequest) providers.PlanResourceChangeResponse {
// Should actually be called for this test, because Terraform Core
// constructs the plan for a destroy operation itself.
return providers.PlanResourceChangeResponse{
PlannedState: req.ProposedNewState,
}
}
p.ApplyResourceChangeFn = func(req providers.ApplyResourceChangeRequest) providers.ApplyResourceChangeResponse {
// The apply (in this case, a destroy) always fails, so we can verify
// that the object stays in the state after a destroy fails even though
// we aren't returning a new state object here.
return providers.ApplyResourceChangeResponse{
Diagnostics: tfdiags.Diagnostics(nil).Append(fmt.Errorf("failed")),
}
}
ctx := testContext2(t, &ContextOpts{
Config: m,
State: states.BuildState(func(ss *states.SyncState) {
ss.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_thing",
Name: "foo",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"baz"}`),
},
addrs.ProviderConfig{
Type: "test",
}.Absolute(addrs.RootModuleInstance),
)
}),
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"test": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if !diags.HasErrors() {
t.Fatal("should have error")
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(`
test_thing.foo:
ID = baz
provider = provider.test
`) // test_thing.foo is still here, even though provider returned no new state along with its error
if actual != expected {
t.Fatalf("expected:\n%s\n\ngot:\n%s", expected, actual)
}
}
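// Verify that when a create fails and the provider also returns an
// inconsistent new object, only the provider's error is reported.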
func TestContext2Apply_errorCreateInvalidNew(t *testing.T) {
m := testModule(t, "apply-error")
p := testProvider("aws")
p.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"aws_instance": {
Attributes: map[string]*configschema.Attribute{
"value": {Type: cty.String, Optional: true},
"foo": {Type: cty.String, Optional: true},
},
},
},
}
p.PlanResourceChangeFn = func(req providers.PlanResourceChangeRequest) providers.PlanResourceChangeResponse {
return providers.PlanResourceChangeResponse{
PlannedState: req.ProposedNewState,
}
}
p.ApplyResourceChangeFn = func(req providers.ApplyResourceChangeRequest) providers.ApplyResourceChangeResponse {
// We're intentionally returning an inconsistent new state here
// because we want to test that Terraform ignores the inconsistency
// when accompanied by another error.
return providers.ApplyResourceChangeResponse{
NewState: cty.ObjectVal(map[string]cty.Value{
"value": cty.StringVal("wrong wrong wrong wrong"),
"foo": cty.StringVal("absolutely brimming over with wrongability"),
}),
Diagnostics: tfdiags.Diagnostics(nil).Append(fmt.Errorf("forced error")),
}
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if !diags.HasErrors() {
t.Fatal("should have error")
}
if got, want := len(diags), 1; got != want {
// There should be no additional diagnostics generated by Terraform's own eval logic,
// because the provider's own error supersedes them.
t.Errorf("wrong number of diagnostics %d; want %d\n%s", got, want, diags.Err())
}
if got, want := diags.Err().Error(), "forced error"; !strings.Contains(got, want) {
t.Errorf("returned error does not contain %q, but it should\n%s", want, diags.Err())
}
if got, want := len(state.RootModule().Resources), 2; got != want {
t.Errorf("%d resources in state before prune; should have %d\n%s", got, want, spew.Sdump(state))
}
state.PruneResourceHusks() // aws_instance.bar with no instances gets left behind when we bail out, but that's okay
if got, want := len(state.RootModule().Resources), 1; got != want {
t.Errorf("%d resources in state after prune; should have only one (aws_instance.foo, tainted)\n%s", got, spew.Sdump(state))
}
}
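// Verify that when an update fails and the provider returns no new object,
// the prior state for the instance is retained.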
func TestContext2Apply_errorUpdateNullNew(t *testing.T) {
m := testModule(t, "apply-error")
p := testProvider("aws")
p.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"aws_instance": {
Attributes: map[string]*configschema.Attribute{
"value": {Type: cty.String, Optional: true},
"foo": {Type: cty.String, Optional: true},
},
},
},
}
p.PlanResourceChangeFn = func(req providers.PlanResourceChangeRequest) providers.PlanResourceChangeResponse {
return providers.PlanResourceChangeResponse{
PlannedState: req.ProposedNewState,
}
}
p.ApplyResourceChangeFn = func(req providers.ApplyResourceChangeRequest) providers.ApplyResourceChangeResponse {
// We're intentionally returning no NewState here because we want to
// test that Terraform retains the prior state, rather than treating
// the returned null as "no state" (object deleted).
return providers.ApplyResourceChangeResponse{
Diagnostics: tfdiags.Diagnostics(nil).Append(fmt.Errorf("forced error")),
}
}
ctx := testContext2(t, &ContextOpts{
Config: m,
State: states.BuildState(func(ss *states.SyncState) {
ss.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "foo",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"value":"old"}`),
},
addrs.ProviderConfig{
Type: "aws",
}.Absolute(addrs.RootModuleInstance),
)
}),
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if !diags.HasErrors() {
t.Fatal("should have error")
}
if got, want := len(diags), 1; got != want {
// There should be no additional diagnostics generated by Terraform's own eval logic,
// because the provider's own error supersedes them.
t.Errorf("wrong number of diagnostics %d; want %d\n%s", got, want, diags.Err())
}
if got, want := diags.Err().Error(), "forced error"; !strings.Contains(got, want) {
t.Errorf("returned error does not contain %q, but it should\n%s", want, diags.Err())
}
state.PruneResourceHusks()
if got, want := len(state.RootModule().Resources), 1; got != want {
t.Fatalf("%d resources in state; should have only one (aws_instance.foo, unmodified)\n%s", got, spew.Sdump(state))
}
is := state.ResourceInstance(addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "foo",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance))
if is == nil {
t.Fatalf("aws_instance.foo is not in the state after apply")
}
if got, want := is.Current.AttrsJSON, []byte(`"old"`); !bytes.Contains(got, want) {
t.Fatalf("incorrect attributes for aws_instance.foo\ngot: %s\nwant: JSON containing %s\n\n%s", got, want, spew.Sdump(is))
}
}
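// Verify that when one resource applies successfully and another fails, both
// remain in state afterwards.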
func TestContext2Apply_errorPartial(t *testing.T) {
errored := false
m := testModule(t, "apply-error")
p := testProvider("aws")
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: s,
})
p.ApplyFn = func(info *InstanceInfo, s *InstanceState, d *InstanceDiff) (*InstanceState, error) {
if errored {
return s, fmt.Errorf("error")
}
errored = true
return &InstanceState{
ID: "foo",
Attributes: map[string]string{
"value": "2",
},
}, nil
}
p.DiffFn = func(info *InstanceInfo, s *InstanceState, rc *ResourceConfig) (*InstanceDiff, error) {
d := &InstanceDiff{
Attributes: map[string]*ResourceAttrDiff{},
}
if new, ok := rc.Get("value"); ok {
d.Attributes["value"] = &ResourceAttrDiff{
New: new.(string),
}
}
if new, ok := rc.Get("foo"); ok {
d.Attributes["foo"] = &ResourceAttrDiff{
New: new.(string),
}
}
return d, nil
}
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if !diags.HasErrors() {
t.Fatal("should have error")
}
mod := state.RootModule()
if len(mod.Resources) != 2 {
t.Fatalf("bad: %#v", mod.Resources)
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyErrorPartialStr)
if actual != expected {
t.Fatalf("expected:\n%s\n\ngot:\n%s", expected, actual)
}
}
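// Verify that apply invokes the pre-apply, post-apply, and post-state-update
// hooks.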
func TestContext2Apply_hook(t *testing.T) {
m := testModule(t, "apply-good")
h := new(MockHook)
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
Hooks: []Hook{h},
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
if !h.PreApplyCalled {
t.Fatal("should be called")
}
if !h.PostApplyCalled {
t.Fatal("should be called")
}
if !h.PostStateUpdateCalled {
t.Fatalf("should call post state update")
}
}
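// Verify that hooks are also invoked when apply destroys an orphaned resource.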
func TestContext2Apply_hookOrphan(t *testing.T) {
m := testModule(t, "apply-blank")
h := new(MockHook)
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
Provider: "provider.aws",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
State: state,
Hooks: []Hook{h},
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
if !h.PreApplyCalled {
t.Fatal("should be called")
}
if !h.PostApplyCalled {
t.Fatal("should be called")
}
if !h.PostStateUpdateCalled {
t.Fatalf("should call post state update")
}
}
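// Verify that the instance ID returned by the provider becomes the "id"
// attribute in state, even when the provider also sets a conflicting "id"
// attribute value.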
func TestContext2Apply_idAttr(t *testing.T) {
m := testModule(t, "apply-idattr")
p := testProvider("aws")
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
p.ApplyFn = func(info *InstanceInfo, s *InstanceState, d *InstanceDiff) (*InstanceState, error) {
result := s.MergeDiff(d)
result.ID = "foo"
result.Attributes = map[string]string{
"id": "bar",
"num": "42",
}
return result, nil
}
p.DiffFn = func(*InstanceInfo, *InstanceState, *ResourceConfig) (*InstanceDiff, error) {
return &InstanceDiff{
Attributes: map[string]*ResourceAttrDiff{
"num": &ResourceAttrDiff{
New: "42",
},
},
}, nil
}
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
mod := state.RootModule()
rs, ok := mod.Resources["aws_instance.foo"]
if !ok {
t.Fatal("not in state")
}
var attrs map[string]interface{}
err := json.Unmarshal(rs.Instances[addrs.NoKey].Current.AttrsJSON, &attrs)
if err != nil {
t.Fatal(err)
}
if got, want := attrs["id"], "foo"; got != want {
t.Fatalf("wrong id\ngot: %#v\nwant: %#v", got, want)
}
}
func TestContext2Apply_outputBasic(t *testing.T) {
m := testModule(t, "apply-output")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyOutputStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
func TestContext2Apply_outputAdd(t *testing.T) {
m1 := testModule(t, "apply-output-add-before")
p1 := testProvider("aws")
p1.ApplyFn = testApplyFn
p1.DiffFn = testDiffFn
ctx1 := testContext2(t, &ContextOpts{
Config: m1,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p1),
},
),
})
if _, diags := ctx1.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
state1, diags := ctx1.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
m2 := testModule(t, "apply-output-add-after")
p2 := testProvider("aws")
p2.ApplyFn = testApplyFn
p2.DiffFn = testDiffFn
ctx2 := testContext2(t, &ContextOpts{
Config: m2,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p2),
},
),
State: state1,
})
if _, diags := ctx2.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
state2, diags := ctx2.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state2.String())
expected := strings.TrimSpace(testTerraformApplyOutputAddStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
func TestContext2Apply_outputList(t *testing.T) {
m := testModule(t, "apply-output-list")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyOutputListStr)
if actual != expected {
t.Fatalf("expected: \n%s\n\nbad: \n%s", expected, actual)
}
}
func TestContext2Apply_outputMulti(t *testing.T) {
m := testModule(t, "apply-output-multi")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyOutputMultiStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
func TestContext2Apply_outputMultiIndex(t *testing.T) {
m := testModule(t, "apply-output-multi-index")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyOutputMultiIndexStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
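// Verify that a tainted instance is destroyed exactly once and then recreated.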
func TestContext2Apply_taintX(t *testing.T) {
m := testModule(t, "apply-taint")
p := testProvider("aws")
// destroyCount tests against regression of
// https://github.com/hashicorp/terraform/issues/1056
var destroyCount = int32(0)
var once sync.Once
simulateProviderDelay := func() {
time.Sleep(10 * time.Millisecond)
}
p.ApplyFn = func(info *InstanceInfo, s *InstanceState, d *InstanceDiff) (*InstanceState, error) {
once.Do(simulateProviderDelay)
if d.Destroy {
atomic.AddInt32(&destroyCount, 1)
}
return testApplyFn(info, s, d)
}
p.DiffFn = testDiffFn
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "baz",
Attributes: map[string]string{
"num": "2",
"type": "aws_instance",
},
Tainted: true,
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: s,
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
t.Logf("plan: %s", legacyDiffComparisonString(p.Changes))
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyTaintStr)
if actual != expected {
t.Fatalf("bad:\n%s", actual)
}
if destroyCount != 1 {
t.Fatalf("Expected 1 destroy, got %d", destroyCount)
}
}
func TestContext2Apply_taintDep(t *testing.T) {
m := testModule(t, "apply-taint-dep")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "baz",
Attributes: map[string]string{
"num": "2",
"type": "aws_instance",
},
Tainted: true,
},
},
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"foo": "baz",
"num": "2",
"type": "aws_instance",
},
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: s,
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
t.Logf("plan: %s", legacyDiffComparisonString(p.Changes))
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyTaintDepStr)
if actual != expected {
t.Fatalf("bad:\n%s", actual)
}
}
func TestContext2Apply_taintDepRequiresNew(t *testing.T) {
m := testModule(t, "apply-taint-dep-requires-new")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "baz",
Attributes: map[string]string{
"num": "2",
"type": "aws_instance",
},
Tainted: true,
},
},
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"foo": "baz",
"num": "2",
"type": "aws_instance",
},
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: s,
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
t.Logf("plan: %s", legacyDiffComparisonString(p.Changes))
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyTaintDepRequireNewStr)
if actual != expected {
t.Fatalf("bad:\n%s", actual)
}
}
func TestContext2Apply_targeted(t *testing.T) {
m := testModule(t, "apply-targeted")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Resource(
addrs.ManagedResourceMode, "aws_instance", "foo",
),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
mod := state.RootModule()
if len(mod.Resources) != 1 {
t.Fatalf("expected 1 resource, got: %#v", mod.Resources)
}
checkStateString(t, state, `
aws_instance.foo:
ID = foo
provider = provider.aws
num = 2
type = aws_instance
`)
}
func TestContext2Apply_targetedCount(t *testing.T) {
m := testModule(t, "apply-targeted-count")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Resource(
addrs.ManagedResourceMode, "aws_instance", "foo",
),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `
aws_instance.foo.0:
ID = foo
provider = provider.aws
aws_instance.foo.1:
ID = foo
provider = provider.aws
aws_instance.foo.2:
ID = foo
provider = provider.aws
`)
}
func TestContext2Apply_targetedCountIndex(t *testing.T) {
m := testModule(t, "apply-targeted-count")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Targets: []addrs.Targetable{
addrs.RootModuleInstance.ResourceInstance(
addrs.ManagedResourceMode, "aws_instance", "foo", addrs.IntKey(1),
),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `
aws_instance.foo.1:
ID = foo
provider = provider.aws
`)
}
func TestContext2Apply_targetedDestroy(t *testing.T) {
m := testModule(t, "apply-targeted")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": resourceState("aws_instance", "i-bcd345"),
"aws_instance.bar": resourceState("aws_instance", "i-abc123"),
},
},
},
}),
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Resource(
addrs.ManagedResourceMode, "aws_instance", "foo",
),
},
Destroy: true,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
mod := state.RootModule()
if len(mod.Resources) != 1 {
t.Fatalf("expected 1 resource, got: %#v", mod.Resources)
}
checkStateString(t, state, `
aws_instance.bar:
ID = i-abc123
provider = provider.aws
`)
}
func TestContext2Apply_destroyProvisionerWithLocals(t *testing.T) {
m := testModule(t, "apply-provisioner-destroy-locals")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr := testProvisioner()
pr.ApplyFn = func(_ *InstanceState, rc *ResourceConfig) error {
cmd, ok := rc.Get("command")
if !ok || cmd != "local" {
return fmt.Errorf("provisioner got %v:%s", ok, cmd)
}
return nil
}
pr.GetSchemaResponse = provisioners.GetSchemaResponse{
Provisioner: &configschema.Block{
Attributes: map[string]*configschema.Attribute{
"command": {
Type: cty.String,
Required: true,
},
"when": {
Type: cty.String,
Optional: true,
},
},
},
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
State: MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root"},
Resources: map[string]*ResourceState{
"aws_instance.foo": resourceState("aws_instance", "1234"),
},
},
},
}),
Destroy: true,
// the test works without targeting, but this also tests that the local
// node isn't inadvertently pruned because of the wrong evaluation
// order.
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Resource(
addrs.ManagedResourceMode, "aws_instance", "foo",
),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatal(diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatal(diags.Err())
}
if !pr.ProvisionResourceCalled {
t.Fatal("provisioner not called")
}
}
// this also tests a local value in the config referencing a resource that
// wasn't in the state during destroy.
func TestContext2Apply_destroyProvisionerWithMultipleLocals(t *testing.T) {
m := testModule(t, "apply-provisioner-destroy-multiple-locals")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr := testProvisioner()
pr.GetSchemaResponse = provisioners.GetSchemaResponse{
Provisioner: &configschema.Block{
Attributes: map[string]*configschema.Attribute{
"id": {
Type: cty.String,
Required: true,
},
"command": {
Type: cty.String,
Required: true,
},
"when": {
Type: cty.String,
Optional: true,
},
},
},
}
pr.ApplyFn = func(is *InstanceState, rc *ResourceConfig) error {
cmd, ok := rc.Get("command")
if !ok {
return errors.New("no command in provisioner")
}
id, ok := rc.Get("id")
if !ok {
return errors.New("no id in provisioner")
}
switch id {
case "1234":
if cmd != "local" {
return fmt.Errorf("provisioner %q got:%q", is.ID, cmd)
}
case "3456":
if cmd != "1234" {
return fmt.Errorf("provisioner %q got:%q", is.ID, cmd)
}
default:
t.Fatal("unknown instance")
}
return nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
State: MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root"},
Resources: map[string]*ResourceState{
"aws_instance.foo": resourceState("aws_instance", "1234"),
"aws_instance.bar": resourceState("aws_instance", "3456"),
},
},
},
}),
Destroy: true,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatal(diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatal(diags.Err())
}
if !pr.ProvisionResourceCalled {
t.Fatal("provisioner not called")
}
}
func TestContext2Apply_destroyProvisionerWithOutput(t *testing.T) {
m := testModule(t, "apply-provisioner-destroy-outputs")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr := testProvisioner()
pr.ApplyFn = func(is *InstanceState, rc *ResourceConfig) error {
cmd, ok := rc.Get("command")
if !ok || cmd != "3" {
return fmt.Errorf("provisioner for %s got %v:%s", is.ID, ok, cmd)
}
return nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
State: MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root"},
Resources: map[string]*ResourceState{
"aws_instance.foo": resourceState("aws_instance", "1"),
},
Outputs: map[string]*OutputState{
"value": {
Type: "string",
Value: "3",
},
},
},
&ModuleState{
Path: []string{"root", "mod"},
Resources: map[string]*ResourceState{
"aws_instance.baz": resourceState("aws_instance", "3"),
},
// state needs to be properly initialized
Outputs: map[string]*OutputState{},
},
&ModuleState{
Path: []string{"root", "mod2"},
Resources: map[string]*ResourceState{
"aws_instance.bar": resourceState("aws_instance", "2"),
},
},
},
}),
Destroy: true,
// targeting the source of the value used by all resources should still
// destroy them all.
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Child("mod", addrs.NoKey).Resource(
addrs.ManagedResourceMode, "aws_instance", "baz",
),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatal(diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatal(diags.Err())
}
if !pr.ProvisionResourceCalled {
t.Fatal("provisioner not called")
}
// confirm all outputs were removed too
for _, mod := range state.Modules {
if len(mod.OutputValues) > 0 {
t.Fatalf("output left in module state: %#v\n", mod)
}
}
}
func TestContext2Apply_targetedDestroyCountDeps(t *testing.T) {
m := testModule(t, "apply-destroy-targeted-count")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": resourceState("aws_instance", "i-bcd345"),
"aws_instance.bar": resourceState("aws_instance", "i-abc123"),
},
},
},
}),
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Resource(
addrs.ManagedResourceMode, "aws_instance", "foo",
),
},
Destroy: true,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `<no state>`)
}
// https://github.com/hashicorp/terraform/issues/4462
func TestContext2Apply_targetedDestroyModule(t *testing.T) {
m := testModule(t, "apply-targeted-module")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": resourceState("aws_instance", "i-bcd345"),
"aws_instance.bar": resourceState("aws_instance", "i-abc123"),
},
},
&ModuleState{
Path: []string{"root", "child"},
Resources: map[string]*ResourceState{
"aws_instance.foo": resourceState("aws_instance", "i-bcd345"),
"aws_instance.bar": resourceState("aws_instance", "i-abc123"),
},
},
},
}),
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Child("child", addrs.NoKey).Resource(
addrs.ManagedResourceMode, "aws_instance", "foo",
),
},
Destroy: true,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `
aws_instance.bar:
ID = i-abc123
provider = provider.aws
aws_instance.foo:
ID = i-bcd345
provider = provider.aws
module.child:
aws_instance.bar:
ID = i-abc123
provider = provider.aws
`)
}
func TestContext2Apply_targetedDestroyCountIndex(t *testing.T) {
m := testModule(t, "apply-targeted-count")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo.0": resourceState("aws_instance", "i-bcd345"),
"aws_instance.foo.1": resourceState("aws_instance", "i-bcd345"),
"aws_instance.foo.2": resourceState("aws_instance", "i-bcd345"),
"aws_instance.bar.0": resourceState("aws_instance", "i-abc123"),
"aws_instance.bar.1": resourceState("aws_instance", "i-abc123"),
"aws_instance.bar.2": resourceState("aws_instance", "i-abc123"),
},
},
},
}),
Targets: []addrs.Targetable{
addrs.RootModuleInstance.ResourceInstance(
addrs.ManagedResourceMode, "aws_instance", "foo", addrs.IntKey(2),
),
addrs.RootModuleInstance.ResourceInstance(
addrs.ManagedResourceMode, "aws_instance", "bar", addrs.IntKey(1),
),
},
Destroy: true,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `
aws_instance.bar.0:
ID = i-abc123
provider = provider.aws
aws_instance.bar.2:
ID = i-abc123
provider = provider.aws
aws_instance.foo.0:
ID = i-bcd345
provider = provider.aws
aws_instance.foo.1:
ID = i-bcd345
provider = provider.aws
`)
}
func TestContext2Apply_targetedModule(t *testing.T) {
m := testModule(t, "apply-targeted-module")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Child("child", addrs.NoKey),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
mod := state.Module(addrs.RootModuleInstance.Child("child", addrs.NoKey))
if mod == nil {
t.Fatalf("no child module found in the state!\n\n%#v", state)
}
if len(mod.Resources) != 2 {
t.Fatalf("expected 2 resources, got: %#v", mod.Resources)
}
checkStateString(t, state, `
<no state>
module.child:
aws_instance.bar:
ID = foo
provider = provider.aws
num = 2
type = aws_instance
aws_instance.foo:
ID = foo
provider = provider.aws
num = 2
type = aws_instance
`)
}
// GH-1858
func TestContext2Apply_targetedModuleDep(t *testing.T) {
m := testModule(t, "apply-targeted-module-dep")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Resource(
addrs.ManagedResourceMode, "aws_instance", "foo",
),
},
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
t.Logf("Diff: %s", legacyDiffComparisonString(p.Changes))
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `
aws_instance.foo:
ID = foo
provider = provider.aws
foo = foo
type = aws_instance
Dependencies:
module.child.aws_instance.mod
module.child:
aws_instance.mod:
ID = foo
provider = provider.aws
Outputs:
output = foo
`)
}
// GH-10911: untargeted outputs should not be in the graph, and therefore
// should not be evaluated.
func TestContext2Apply_targetedModuleUnrelatedOutputs(t *testing.T) {
m := testModule(t, "apply-targeted-module-unrelated-outputs")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Child("child2", addrs.NoKey),
},
State: MustShimLegacyState(&State{
Modules: []*ModuleState{
{
Path: []string{"root"},
Outputs: map[string]*OutputState{},
Resources: map[string]*ResourceState{},
},
{
Path: []string{"root", "child1"},
Outputs: map[string]*OutputState{
"instance_id": {
Type: "string",
Value: "foo-bar-baz",
},
},
Resources: map[string]*ResourceState{},
},
{
Path: []string{"root", "child2"},
Outputs: map[string]*OutputState{},
Resources: map[string]*ResourceState{},
},
},
}),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
// - module.child1's instance_id output is dropped because we don't preserve
// non-root module outputs between runs (they can be recalculated from config)
// - module.child2's instance_id is updated because its dependency is updated
// - child2_id is updated because of its transitive dependency via module.child2
checkStateString(t, state, `
<no state>
Outputs:
child2_id = foo
module.child2:
aws_instance.foo:
ID = foo
provider = provider.aws
Outputs:
instance_id = foo
`)
}
func TestContext2Apply_targetedModuleResource(t *testing.T) {
m := testModule(t, "apply-targeted-module-resource")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Child("child", addrs.NoKey).Resource(
addrs.ManagedResourceMode, "aws_instance", "foo",
),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
mod := state.Module(addrs.RootModuleInstance.Child("child", addrs.NoKey))
if mod == nil || len(mod.Resources) != 1 {
t.Fatalf("expected 1 resource, got: %#v", mod)
}
checkStateString(t, state, `
<no state>
module.child:
aws_instance.foo:
ID = foo
provider = provider.aws
num = 2
type = aws_instance
`)
}
func TestContext2Apply_targetedResourceOrphanModule(t *testing.T) {
m := testModule(t, "apply-targeted-resource-orphan-module")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
// Create a state with an orphan module
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root", "child"},
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{},
Provider: "provider.aws",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Resource(
addrs.ManagedResourceMode, "aws_instance", "foo",
),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
}
func TestContext2Apply_unknownAttribute(t *testing.T) {
m := testModule(t, "apply-unknown")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if !diags.HasErrors() {
t.Error("should error, because attribute 'unknown' is still unknown after apply")
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyUnknownAttrStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
func TestContext2Apply_unknownAttributeInterpolate(t *testing.T) {
m := testModule(t, "apply-unknown-interpolate")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); !diags.HasErrors() {
t.Fatal("plan should have returned errors")
}
}
func TestContext2Apply_vars(t *testing.T) {
fixture := contextFixtureApplyVars(t)
opts := fixture.ContextOpts()
opts.Variables = InputValues{
"foo": &InputValue{
Value: cty.StringVal("us-east-1"),
SourceType: ValueFromCaller,
},
"test_list": &InputValue{
Value: cty.ListVal([]cty.Value{
cty.StringVal("Hello"),
cty.StringVal("World"),
}),
SourceType: ValueFromCaller,
},
"test_map": &InputValue{
Value: cty.MapVal(map[string]cty.Value{
"Hello": cty.StringVal("World"),
"Foo": cty.StringVal("Bar"),
"Baz": cty.StringVal("Foo"),
}),
SourceType: ValueFromCaller,
},
"amis": &InputValue{
Value: cty.MapVal(map[string]cty.Value{
"us-east-1": cty.StringVal("override"),
}),
SourceType: ValueFromCaller,
},
}
ctx := testContext2(t, opts)
diags := ctx.Validate()
if len(diags) != 0 {
t.Fatalf("bad: %s", diags.ErrWithWarnings())
}
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
got := strings.TrimSpace(state.String())
want := strings.TrimSpace(testTerraformApplyVarsStr)
if got != want {
t.Errorf("wrong result\n\ngot:\n%s\n\nwant:\n%s", got, want)
}
}
func TestContext2Apply_varsEnv(t *testing.T) {
fixture := contextFixtureApplyVarsEnv(t)
opts := fixture.ContextOpts()
opts.Variables = InputValues{
"string": &InputValue{
Value: cty.StringVal("baz"),
SourceType: ValueFromEnvVar,
},
"list": &InputValue{
Value: cty.ListVal([]cty.Value{
cty.StringVal("Hello"),
cty.StringVal("World"),
}),
SourceType: ValueFromEnvVar,
},
"map": &InputValue{
Value: cty.MapVal(map[string]cty.Value{
"Hello": cty.StringVal("World"),
"Foo": cty.StringVal("Bar"),
"Baz": cty.StringVal("Foo"),
}),
SourceType: ValueFromEnvVar,
},
}
ctx := testContext2(t, opts)
diags := ctx.Validate()
if len(diags) != 0 {
t.Fatalf("bad: %s", diags.ErrWithWarnings())
}
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyVarsEnvStr)
if actual != expected {
t.Errorf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
func TestContext2Apply_createBefore_depends(t *testing.T) {
m := testModule(t, "apply-depends-create-before")
h := new(HookRecordApplyOrder)
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
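// Seed the state with an existing "web" instance (which the configuration is
// assumed to replace via create_before_destroy) and an "lb" instance that
// depends on it.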
state := states.NewState()
root := state.EnsureModule(addrs.RootModuleInstance)
root.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "web",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"bar","require_new":"ami-old"}`),
},
addrs.ProviderConfig{
Type: "aws",
}.Absolute(addrs.RootModuleInstance),
)
root.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "lb",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"baz","instance":"bar"}`),
Dependencies: []addrs.AbsResource{
addrs.AbsResource{
Resource: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "web",
},
Module: addrs.RootModuleInstance,
},
},
},
addrs.ProviderConfig{
Type: "aws",
}.Absolute(addrs.RootModuleInstance),
)
ctx := testContext2(t, &ContextOpts{
Config: m,
Hooks: []Hook{h},
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
})
if p, diags := ctx.Plan(); diags.HasErrors() {
logDiagnostics(t, diags)
t.Fatal("plan failed")
} else {
t.Logf("plan:\n%s", legacyDiffComparisonString(p.Changes))
}
h.Active = true
state, diags := ctx.Apply()
if diags.HasErrors() {
logDiagnostics(t, diags)
t.Fatal("apply failed")
}
mod := state.RootModule()
if len(mod.Resources) < 2 {
t.Logf("state after apply:\n%s", state.String())
t.Fatalf("only %d resources in root module; want at least 2", len(mod.Resources))
}
got := strings.TrimSpace(state.String())
want := strings.TrimSpace(testTerraformApplyDependsCreateBeforeStr)
if got != want {
t.Fatalf("wrong final state\ngot:\n%s\n\nwant:\n%s", got, want)
}
// Test that things were managed _in the right order_
order := h.States
diffs := h.Diffs
if !order[0].IsNull() || diffs[0].Action == plans.Delete {
t.Fatalf("should create new instance first: %#v", order)
}
if order[1].GetAttr("id").AsString() != "baz" {
t.Fatalf("update must happen after create: %#v", order[1])
}
if order[2].GetAttr("id").AsString() != "bar" || diffs[2].Action != plans.Delete {
t.Fatalf("destroy must happen after update: %#v", order[2])
}
}
func TestContext2Apply_singleDestroy(t *testing.T) {
m := testModule(t, "apply-depends-create-before")
h := new(HookRecordApplyOrder)
p := testProvider("aws")
invokeCount := 0
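// With create_before_destroy in play the provider should be invoked exactly
// three times: create the replacement "web" instance, update the dependent
// "lb" instance, and finally destroy the old "web" instance.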
p.ApplyFn = func(info *InstanceInfo, s *InstanceState, d *InstanceDiff) (*InstanceState, error) {
invokeCount++
switch invokeCount {
case 1:
if d.Destroy {
t.Fatalf("should not destroy")
}
if s.ID != "" {
t.Fatalf("should not have ID")
}
case 2:
if d.Destroy {
t.Fatalf("should not destroy")
}
if s.ID != "baz" {
t.Fatalf("should have id")
}
case 3:
if !d.Destroy {
t.Fatalf("should destroy")
}
if s.ID == "" {
t.Fatalf("should have ID")
}
default:
t.Fatalf("bad invoke count %d", invokeCount)
}
return testApplyFn(info, s, d)
}
p.DiffFn = testDiffFn
state := states.NewState()
root := state.EnsureModule(addrs.RootModuleInstance)
root.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "web",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"bar","require_new":"ami-old"}`),
},
addrs.ProviderConfig{
Type: "aws",
}.Absolute(addrs.RootModuleInstance),
)
root.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "lb",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"baz","instance":"bar"}`),
Dependencies: []addrs.AbsResource{
addrs.AbsResource{
Resource: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "web",
},
Module: addrs.RootModuleInstance,
},
},
},
addrs.ProviderConfig{
Type: "aws",
}.Absolute(addrs.RootModuleInstance),
)
ctx := testContext2(t, &ContextOpts{
Config: m,
Hooks: []Hook{h},
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
h.Active = true
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
if invokeCount != 3 {
t.Fatalf("bad: %d", invokeCount)
}
}
// GH-7824
func TestContext2Apply_issue7824(t *testing.T) {
p := testProvider("template")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
p.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"template_file": {
Attributes: map[string]*configschema.Attribute{
"template": {Type: cty.String, Optional: true},
"__template_requires_new": {Type: cty.Bool, Optional: true},
},
},
},
}
m, snap := testModuleWithSnapshot(t, "issue-7824")
// Apply step 0 cleanly
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"template": testProviderFuncFixed(p),
},
),
})
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
// Write / Read plan to simulate running it through a Plan file
ctxOpts, err := contextOptsForPlanViaFile(snap, ctx.State(), plan)
if err != nil {
t.Fatalf("failed to round-trip through planfile: %s", err)
}
ctxOpts.ProviderResolver = providers.ResolverFixed(
map[string]providers.Factory{
"template": testProviderFuncFixed(p),
},
)
ctx, diags = NewContext(ctxOpts)
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
_, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
}
// This deals with the situation where a splat expression is used referring
// to another resource whose count is non-constant.
func TestContext2Apply_issue5254(t *testing.T) {
// Create a provider. We use "template" here just to match the repro
// we got from the issue itself.
p := testProvider("template")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
p.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"template_file": {
Attributes: map[string]*configschema.Attribute{
"template": {Type: cty.String, Optional: true},
"__template_requires_new": {Type: cty.Bool, Optional: true},
"id": {Type: cty.String, Computed: true},
"type": {Type: cty.String, Computed: true},
},
},
},
}
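// The step-0 and step-1 fixtures are assumed to differ in how
// template_file.parent's count is derived, with template_file.child consuming
// template_file.parent.*.template through a splat, per the original report.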
// Apply step 0 cleanly
ctx := testContext2(t, &ContextOpts{
Config: testModule(t, "issue-5254/step-0"),
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"template": testProviderFuncFixed(p),
},
),
})
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
m, snap := testModuleWithSnapshot(t, "issue-5254/step-1")
// Apply succeeded. Now make the modification and store a plan
ctx = testContext2(t, &ContextOpts{
Config: m,
State: state,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"template": testProviderFuncFixed(p),
},
),
})
plan, diags = ctx.Plan()
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
// Write / Read plan to simulate running it through a Plan file
ctxOpts, err := contextOptsForPlanViaFile(snap, state, plan)
if err != nil {
t.Fatalf("failed to round-trip through planfile: %s", err)
}
ctxOpts.ProviderResolver = providers.ResolverFixed(
map[string]providers.Factory{
"template": testProviderFuncFixed(p),
},
)
ctx, diags = NewContext(ctxOpts)
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(`
template_file.child:
ID = foo
provider = provider.template
__template_requires_new = true
template = Hi
type = template_file
Dependencies:
template_file.parent
template_file.parent.0:
ID = foo
provider = provider.template
template = Hi
type = template_file
`)
if actual != expected {
t.Fatalf("wrong final state\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
func TestContext2Apply_targetedWithTaintedInState(t *testing.T) {
p := testProvider("aws")
p.DiffFn = testDiffFn
p.ApplyFn = testApplyFn
m, snap := testModuleWithSnapshot(t, "apply-tainted-targets")
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Resource(
addrs.ManagedResourceMode, "aws_instance", "iambeingadded",
),
},
State: MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.ifailedprovisioners": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "ifailedprovisioners",
Tainted: true,
},
},
},
},
},
}),
})
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
// Write / Read plan to simulate running it through a Plan file
ctxOpts, err := contextOptsForPlanViaFile(snap, ctx.State(), plan)
if err != nil {
t.Fatalf("failed to round-trip through planfile: %s", err)
}
ctxOpts.ProviderResolver = providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
)
ctx, diags = NewContext(ctxOpts)
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(`
aws_instance.iambeingadded:
ID = foo
provider = provider.aws
aws_instance.ifailedprovisioners: (tainted)
ID = ifailedprovisioners
provider = provider.aws
`)
if actual != expected {
t.Fatalf("expected state: \n%s\ngot: \n%s", expected, actual)
}
}
// Higher-level test exposing the bug that is covered more directly by
// TestResource_ignoreChangesRequired.
func TestContext2Apply_ignoreChangesCreate(t *testing.T) {
m := testModule(t, "apply-ignore-changes-create")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
instanceSchema := p.GetSchemaReturn.ResourceTypes["aws_instance"]
instanceSchema.Attributes["required_field"] = &configschema.Attribute{
Type: cty.String,
Required: true,
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
t.Logf("%s", legacyDiffComparisonString(p.Changes))
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
mod := state.RootModule()
if len(mod.Resources) != 1 {
t.Fatalf("bad: %s", state)
}
actual := strings.TrimSpace(state.String())
// Expect no changes from original state
expected := strings.TrimSpace(`
aws_instance.foo:
ID = foo
provider = provider.aws
required_field = set
type = aws_instance
`)
if actual != expected {
t.Fatalf("expected:\n%s\ngot:\n%s", expected, actual)
}
}
func TestContext2Apply_ignoreChangesWithDep(t *testing.T) {
m := testModule(t, "apply-ignore-changes-dep")
p := testProvider("aws")
p.ApplyFn = testApplyFn
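// The custom diff function below forces replacement of aws_instance whenever
// its "ami" changes, while aws_eip falls back to the standard test diff.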
p.DiffFn = func(i *InstanceInfo, s *InstanceState, c *ResourceConfig) (*InstanceDiff, error) {
switch i.Type {
case "aws_instance":
newAmi, _ := c.Get("ami")
return &InstanceDiff{
Attributes: map[string]*ResourceAttrDiff{
"ami": &ResourceAttrDiff{
Old: s.Attributes["ami"],
New: newAmi.(string),
RequiresNew: true,
},
},
}, nil
case "aws_eip":
return testDiffFn(i, s, c)
default:
t.Fatalf("Unexpected type: %s", i.Type)
return nil, nil
}
}
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo.0": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "i-abc123",
Attributes: map[string]string{
"ami": "ami-abcd1234",
"id": "i-abc123",
},
},
},
"aws_instance.foo.1": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "i-bcd234",
Attributes: map[string]string{
"ami": "ami-abcd1234",
"id": "i-bcd234",
},
},
},
"aws_eip.foo.0": &ResourceState{
Type: "aws_eip",
Primary: &InstanceState{
ID: "eip-abc123",
Attributes: map[string]string{
"id": "eip-abc123",
"instance": "i-abc123",
},
},
},
"aws_eip.foo.1": &ResourceState{
Type: "aws_eip",
Primary: &InstanceState{
ID: "eip-bcd234",
Attributes: map[string]string{
"id": "eip-bcd234",
"instance": "i-bcd234",
},
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: s,
})
_, diags := ctx.Plan()
assertNoErrors(t, diags)
state, diags := ctx.Apply()
assertNoErrors(t, diags)
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(s.String())
if actual != expected {
t.Fatalf("expected:\n%s\n\ngot:\n%s", expected, actual)
}
}
func TestContext2Apply_ignoreChangesWildcard(t *testing.T) {
m := testModule(t, "apply-ignore-changes-wildcard")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
instanceSchema := p.GetSchemaReturn.ResourceTypes["aws_instance"]
instanceSchema.Attributes["required_field"] = &configschema.Attribute{
Type: cty.String,
Required: true,
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if p, diags := ctx.Plan(); diags.HasErrors() {
logDiagnostics(t, diags)
t.Fatal("plan failed")
} else {
t.Logf("%s", legacyDiffComparisonString(p.Changes))
}
state, diags := ctx.Apply()
assertNoErrors(t, diags)
mod := state.RootModule()
if len(mod.Resources) != 1 {
t.Fatalf("bad: %s", state)
}
actual := strings.TrimSpace(state.String())
// Expect no changes from original state
expected := strings.TrimSpace(`
aws_instance.foo:
ID = foo
provider = provider.aws
required_field = set
type = aws_instance
`)
if actual != expected {
t.Fatalf("expected:\n%s\ngot:\n%s", expected, actual)
}
}
// https://github.com/hashicorp/terraform/issues/7378
func TestContext2Apply_destroyNestedModuleWithAttrsReferencingResource(t *testing.T) {
m, snap := testModuleWithSnapshot(t, "apply-destroy-nested-module-with-attrs")
p := testProvider("null")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
var state *states.State
var diags tfdiags.Diagnostics
{
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"null": testProviderFuncFixed(p),
},
),
})
// First plan and apply a create operation
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan err: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("apply err: %s", diags.Err())
}
}
{
ctx := testContext2(t, &ContextOpts{
Destroy: true,
Config: m,
State: state,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"null": testProviderFuncFixed(p),
},
),
})
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("destroy plan err: %s", diags.Err())
}
ctxOpts, err := contextOptsForPlanViaFile(snap, state, plan)
if err != nil {
t.Fatalf("failed to round-trip through planfile: %s", err)
}
ctxOpts.ProviderResolver = providers.ResolverFixed(
map[string]providers.Factory{
"null": testProviderFuncFixed(p),
},
)
ctx, diags = NewContext(ctxOpts)
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("destroy apply err: %s", diags.Err())
}
}
if !state.Empty() {
t.Fatalf("state after apply: %s\nwant empty state", spew.Sdump(state))
}
}
// If a data source explicitly depends on another resource, it's because we need
// that resource to be applied first.
func TestContext2Apply_dataDependsOn(t *testing.T) {
p := testProvider("null")
m := testModule(t, "apply-data-depends-on")
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"null": testProviderFuncFixed(p),
},
),
})
// the "provisioner" here writes to this variable, because the intent is to
// create a dependency which can't be viewed through the graph, and depends
// solely on the configuration providing "depends_on"
provisionerOutput := ""
p.ApplyFn = func(info *InstanceInfo, s *InstanceState, d *InstanceDiff) (*InstanceState, error) {
// the side effect of the resource being applied
provisionerOutput = "APPLIED"
return testApplyFn(info, s, d)
}
p.DiffFn = testDiffFn
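// The data source result below simply echoes whatever the provider's apply
// recorded in provisionerOutput, so a correct ordering yields foo == "APPLIED".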
p.ReadDataSourceFn = func(req providers.ReadDataSourceRequest) providers.ReadDataSourceResponse {
return providers.ReadDataSourceResponse{
State: cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("boop"),
"foo": cty.StringVal(provisionerOutput),
}),
}
}
_, diags := ctx.Refresh()
assertNoErrors(t, diags)
_, diags = ctx.Plan()
assertNoErrors(t, diags)
state, diags := ctx.Apply()
assertNoErrors(t, diags)
root := state.Module(addrs.RootModuleInstance)
is := root.ResourceInstance(addrs.Resource{
Mode: addrs.DataResourceMode,
Type: "null_data_source",
Name: "read",
}.Instance(addrs.NoKey))
if is == nil {
t.Fatal("data resource instance is not present in state; should be")
}
var attrs map[string]interface{}
err := json.Unmarshal(is.Current.AttrsJSON, &attrs)
if err != nil {
t.Fatal(err)
}
actual := attrs["foo"]
expected := "APPLIED"
if actual != expected {
t.Fatalf("bad:\n%s", strings.TrimSpace(state.String()))
}
}
func TestContext2Apply_terraformWorkspace(t *testing.T) {
m := testModule(t, "apply-terraform-workspace")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Meta: &ContextMeta{Env: "foo"},
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := state.RootModule().OutputValues["output"]
expected := cty.StringVal("foo")
if actual == nil {
t.Fatalf("output %q is not set in the final state", "output")
}
if actual.Value != expected {
t.Fatalf("wrong value\ngot: %#v\nwant: %#v", actual.Value, expected)
}
}
// verify that multiple config references only create a single depends_on entry
func TestContext2Apply_multiRef(t *testing.T) {
m := testModule(t, "apply-multi-ref")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
deps := state.Modules[""].Resources["aws_instance.other"].Instances[addrs.NoKey].Current.Dependencies
if len(deps) != 1 || deps[0].String() != "aws_instance.create" {
t.Fatalf("expected 1 depends_on entry for aws_instance.create, got %q", deps)
}
}
func TestContext2Apply_targetedModuleRecursive(t *testing.T) {
m := testModule(t, "apply-targeted-module-recursive")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Child("child", addrs.NoKey),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
mod := state.Module(
addrs.RootModuleInstance.Child("child", addrs.NoKey).Child("subchild", addrs.NoKey),
)
if mod == nil {
t.Fatalf("no subchild module found in the state!\n\n%#v", state)
}
if len(mod.Resources) != 1 {
t.Fatalf("expected 1 resources, got: %#v", mod.Resources)
}
checkStateString(t, state, `
<no state>
module.child.subchild:
aws_instance.foo:
ID = foo
provider = provider.aws
num = 2
type = aws_instance
`)
}
func TestContext2Apply_localVal(t *testing.T) {
m := testModule(t, "apply-local-val")
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("error during plan: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("error during apply: %s", diags.Err())
}
got := strings.TrimSpace(state.String())
want := strings.TrimSpace(`
<no state>
Outputs:
result_1 = hello
result_3 = hello world
module.child:
<no state>
Outputs:
result = hello
`)
if got != want {
t.Fatalf("wrong final state\ngot:\n%s\nwant:\n%s", got, want)
}
}
func TestContext2Apply_destroyWithLocals(t *testing.T) {
m := testModule(t, "apply-destroy-with-locals")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Outputs: map[string]*OutputState{
"name": &OutputState{
Type: "string",
Value: "test-bar",
},
},
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "foo",
// FIXME: id should only exist in one place
Attributes: map[string]string{
"id": "foo",
},
},
Provider: "provider.aws",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: s,
Destroy: true,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("error during apply: %s", diags.Err())
}
got := strings.TrimSpace(state.String())
want := strings.TrimSpace(`<no state>`)
if got != want {
t.Fatalf("wrong final state\ngot:\n%s\nwant:\n%s", got, want)
}
}
func TestContext2Apply_providerWithLocals(t *testing.T) {
m := testModule(t, "provider-with-locals")
p := testProvider("aws")
providerRegion := ""
// this should not be overridden during destroy
p.ConfigureFn = func(c *ResourceConfig) error {
if r, ok := c.Get("region"); ok {
providerRegion = r.(string)
}
return nil
}
p.DiffFn = testDiffFn
p.ApplyFn = testApplyFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
ctx = testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
Destroy: true,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
if state.HasResources() {
t.Fatal("expected no state, got:", state)
}
if providerRegion != "bar" {
t.Fatalf("expected region %q, got: %q", "bar", providerRegion)
}
}
func TestContext2Apply_destroyWithProviders(t *testing.T) {
m := testModule(t, "destroy-module-with-provider")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
},
&ModuleState{
Path: []string{"root", "child"},
},
&ModuleState{
Path: []string{"root", "mod", "removed"},
Resources: map[string]*ResourceState{
"aws_instance.child": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
// this provider doesn't exist
Provider: "provider.aws.baz",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: s,
Destroy: true,
})
// test that we can't destroy if the provider is missing
if _, diags := ctx.Plan(); !diags.HasErrors() {
t.Fatal("expected plan error, provider.aws.baz doesn't exist")
}
// correct the state
s.Modules["module.mod.module.removed"].Resources["aws_instance.child"].ProviderConfig = addrs.ProviderConfig{
Type: "aws",
Alias: "bar",
}.Absolute(addrs.RootModuleInstance)
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatal(diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("error during apply: %s", diags.Err())
}
got := strings.TrimSpace(state.String())
want := strings.TrimSpace("<no state>")
if got != want {
t.Fatalf("wrong final state\ngot:\n%s\nwant:\n%s", got, want)
}
}
func TestContext2Apply_providersFromState(t *testing.T) {
m := configs.NewEmptyConfig()
p := testProvider("aws")
p.DiffFn = testDiffFn
for _, tc := range []struct {
name string
state *states.State
output string
err bool
}{
{
name: "add implicit provider",
state: MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root"},
Resources: map[string]*ResourceState{
"aws_instance.a": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
Provider: "provider.aws",
},
},
},
},
}),
err: false,
output: "<no state>",
},
// an aliased provider must be in the config to remove a resource
{
name: "add aliased provider",
state: MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root"},
Resources: map[string]*ResourceState{
"aws_instance.a": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
Provider: "provider.aws.bar",
},
},
},
},
}),
err: true,
},
// a provider in a module implies some sort of config, so this isn't
// allowed even without an alias
{
name: "add unaliased module provider",
state: MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root", "child"},
Resources: map[string]*ResourceState{
"aws_instance.a": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
Provider: "module.child.provider.aws",
},
},
},
},
}),
err: true,
},
} {
t.Run(tc.name, func(t *testing.T) {
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: tc.state,
})
_, diags := ctx.Plan()
if tc.err {
if !diags.HasErrors() {
t.Fatal("expected plan error")
}
return
}
if !tc.err && diags.HasErrors() {
t.Fatal(diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, "<no state>")
})
}
}
func TestContext2Apply_plannedInterpolatedCount(t *testing.T) {
m, snap := testModuleWithSnapshot(t, "apply-interpolated-count")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
providerResolver := providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
)
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.test": {
Type: "aws_instance",
Primary: &InstanceState{
ID: "foo",
},
Provider: "provider.aws",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providerResolver,
State: s,
})
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("plan failed: %s", diags.Err())
}
// We'll marshal and unmarshal the plan here, to ensure that we have
// a clean new context as would be created if we separately ran
// terraform plan -out=tfplan && terraform apply tfplan
ctxOpts, err := contextOptsForPlanViaFile(snap, ctx.State(), plan)
if err != nil {
t.Fatalf("failed to round-trip through planfile: %s", err)
}
ctxOpts.ProviderResolver = providerResolver
ctx, diags = NewContext(ctxOpts)
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
// Applying the plan should now succeed
_, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("apply failed: %s", diags.Err())
}
}
func TestContext2Apply_plannedDestroyInterpolatedCount(t *testing.T) {
m, snap := testModuleWithSnapshot(t, "plan-destroy-interpolated-count")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
providerResolver := providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
)
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.a.0": {
Type: "aws_instance",
Primary: &InstanceState{
ID: "foo",
},
Provider: "provider.aws",
},
"aws_instance.a.1": {
Type: "aws_instance",
Primary: &InstanceState{
ID: "foo",
},
Provider: "provider.aws",
},
},
Outputs: map[string]*OutputState{
"out": {
Type: "list",
Value: []string{"foo", "foo"},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providerResolver,
State: s,
Destroy: true,
})
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("plan failed: %s", diags.Err())
}
// We'll marshal and unmarshal the plan here, to ensure that we have
// a clean new context as would be created if we separately ran
// terraform plan -out=tfplan && terraform apply tfplan
ctxOpts, err := contextOptsForPlanViaFile(snap, ctx.State(), plan)
if err != nil {
t.Fatalf("failed to round-trip through planfile: %s", err)
}
ctxOpts.ProviderResolver = providerResolver
ctxOpts.Destroy = true
ctx, diags = NewContext(ctxOpts)
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
// Applying the plan should now succeed
_, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("apply failed: %s", diags.Err())
}
}
func TestContext2Apply_scaleInMultivarRef(t *testing.T) {
m := testModule(t, "apply-resource-scale-in")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
providerResolver := providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
)
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.one": {
Type: "aws_instance",
Primary: &InstanceState{
ID: "foo",
},
Provider: "provider.aws",
},
"aws_instance.two": {
Type: "aws_instance",
Primary: &InstanceState{
ID: "foo",
Attributes: map[string]string{
"value": "foo",
},
},
Provider: "provider.aws",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providerResolver,
State: s,
Variables: InputValues{
"instance_count": {
Value: cty.NumberIntVal(0),
SourceType: ValueFromCaller,
},
},
})
_, diags := ctx.Plan()
assertNoErrors(t, diags)
// Applying the plan should now succeed
_, diags = ctx.Apply()
assertNoErrors(t, diags)
}
func TestContext2Apply_inconsistentWithPlan(t *testing.T) {
m := testModule(t, "apply-inconsistent-with-plan")
p := testProvider("test")
p.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"test": {
Attributes: map[string]*configschema.Attribute{
"id": {Type: cty.String, Computed: true},
},
},
},
}
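// Plan always proposes id="before"; apply then deliberately returns
// id="after", so Terraform core's post-apply consistency check must reject
// the provider's result.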
p.PlanResourceChangeFn = func(req providers.PlanResourceChangeRequest) providers.PlanResourceChangeResponse {
return providers.PlanResourceChangeResponse{
PlannedState: cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("before"),
}),
}
}
p.ApplyResourceChangeFn = func(req providers.ApplyResourceChangeRequest) providers.ApplyResourceChangeResponse {
return providers.ApplyResourceChangeResponse{
NewState: cty.ObjectVal(map[string]cty.Value{
// This is intentionally incorrect: because id was fixed at "before"
// during plan, it must not change during apply.
"id": cty.StringVal("after"),
}),
}
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"test": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
_, diags := ctx.Apply()
if !diags.HasErrors() {
t.Fatalf("apply succeeded; want error")
}
if got, want := diags.Err().Error(), "Provider produced inconsistent result after apply"; !strings.Contains(got, want) {
t.Fatalf("wrong error\ngot: %s\nshould contain: %s", got, want)
}
}
// Issue 19908 was about retaining an existing object in the state when an
// update to it fails and the provider does not return a partially-updated
// value for it. Previously we were incorrectly removing it from the state
// in that case, but instead it should be retained so the update can be
// retried.
func TestContext2Apply_issue19908(t *testing.T) {
m := testModule(t, "apply-issue19908")
p := testProvider("test")
p.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"test": {
Attributes: map[string]*configschema.Attribute{
"baz": {Type: cty.String, Required: true},
},
},
},
}
p.PlanResourceChangeFn = func(req providers.PlanResourceChangeRequest) providers.PlanResourceChangeResponse {
return providers.PlanResourceChangeResponse{
PlannedState: req.ProposedNewState,
}
}
p.ApplyResourceChangeFn = func(req providers.ApplyResourceChangeRequest) providers.ApplyResourceChangeResponse {
var diags tfdiags.Diagnostics
diags = diags.Append(fmt.Errorf("update failed"))
return providers.ApplyResourceChangeResponse{
Diagnostics: diags,
}
}
ctx := testContext2(t, &ContextOpts{
Config: m,
State: states.BuildState(func(s *states.SyncState) {
s.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test",
Name: "foo",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
&states.ResourceInstanceObjectSrc{
AttrsJSON: []byte(`{"baz":"old"}`),
Status: states.ObjectReady,
},
addrs.ProviderConfig{
Type: "test",
}.Absolute(addrs.RootModuleInstance),
)
}),
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"test": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if !diags.HasErrors() {
t.Fatalf("apply succeeded; want error")
}
if got, want := diags.Err().Error(), "update failed"; !strings.Contains(got, want) {
t.Fatalf("wrong error\ngot: %s\nshould contain: %s", got, want)
}
mod := state.RootModule()
rs := mod.Resources["test.foo"]
if rs == nil {
t.Fatalf("test.foo not in state after apply, but should be")
}
is := rs.Instances[addrs.NoKey]
if is == nil {
t.Fatalf("test.foo not in state after apply, but should be")
}
obj := is.Current
if obj == nil {
t.Fatalf("test.foo has no current object in state after apply, but should do")
}
if got, want := obj.Status, states.ObjectReady; got != want {
t.Errorf("test.foo has wrong status %s after apply; want %s", got, want)
}
if got, want := obj.AttrsJSON, []byte(`"old"`); !bytes.Contains(got, want) {
t.Errorf("test.foo attributes JSON doesn't contain %s after apply\ngot: %s", want, got)
}
}
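// TestContext2Apply_invalidIndexRef verifies that indexing a collection with a
// key that does not exist passes validation but is reported as an error at
// plan time.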
func TestContext2Apply_invalidIndexRef(t *testing.T) {
p := testProvider("test")
p.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"test_instance": {
Attributes: map[string]*configschema.Attribute{
"value": {Type: cty.String, Optional: true, Computed: true},
},
},
},
}
p.DiffFn = testDiffFn
m := testModule(t, "apply-invalid-index")
c := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"test": testProviderFuncFixed(p),
},
),
})
diags := c.Validate()
if diags.HasErrors() {
t.Fatalf("unexpected validation failure: %s", diags.Err())
}
wantErr := `The given key does not identify an element in this collection value`
_, diags = c.Plan()
if !diags.HasErrors() {
t.Fatalf("plan succeeded; want error")
}
gotErr := diags.Err().Error()
if !strings.Contains(gotErr, wantErr) {
t.Fatalf("missing expected error\ngot: %s\n\nwant: error containing %q", gotErr, wantErr)
}
}
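// TestContext2Apply_moduleReplaceCycle applies a manually-constructed set of
// changes that replace resources across two child modules, in both normal
// (delete-then-create) and create-before-destroy orderings, to ensure the
// replacement ordering does not introduce a graph cycle.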
func TestContext2Apply_moduleReplaceCycle(t *testing.T) {
for _, mode := range []string{"normal", "cbd"} {
var m *configs.Config
switch mode {
case "normal":
m = testModule(t, "apply-module-replace-cycle")
case "cbd":
m = testModule(t, "apply-module-replace-cycle-cbd")
}
p := testProvider("aws")
p.DiffFn = testDiffFn
p.ApplyFn = testApplyFn
instanceSchema := &configschema.Block{
Attributes: map[string]*configschema.Attribute{
"id": {Type: cty.String, Computed: true},
"require_new": {Type: cty.String, Optional: true},
},
}
p.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"aws_instance": instanceSchema,
},
}
state := states.NewState()
modA := state.EnsureModule(addrs.RootModuleInstance.Child("a", addrs.NoKey))
modA.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "a",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"a","require_new":"old"}`),
},
addrs.ProviderConfig{
Type: "aws",
}.Absolute(addrs.RootModuleInstance),
)
modB := state.EnsureModule(addrs.RootModuleInstance.Child("b", addrs.NoKey))
modB.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "b",
}.Instance(addrs.IntKey(0)),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"b","require_new":"old"}`),
},
addrs.ProviderConfig{
Type: "aws",
}.Absolute(addrs.RootModuleInstance),
)
aBefore, _ := plans.NewDynamicValue(
cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("a"),
"require_new": cty.StringVal("old"),
}), instanceSchema.ImpliedType())
aAfter, _ := plans.NewDynamicValue(
cty.ObjectVal(map[string]cty.Value{
"id": cty.UnknownVal(cty.String),
"require_new": cty.StringVal("new"),
}), instanceSchema.ImpliedType())
bBefore, _ := plans.NewDynamicValue(
cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("b"),
"require_new": cty.StringVal("old"),
}), instanceSchema.ImpliedType())
bAfter, _ := plans.NewDynamicValue(
cty.ObjectVal(map[string]cty.Value{
"id": cty.UnknownVal(cty.String),
"require_new": cty.UnknownVal(cty.String),
}), instanceSchema.ImpliedType())
var aAction plans.Action
switch mode {
case "normal":
aAction = plans.DeleteThenCreate
case "cbd":
aAction = plans.CreateThenDelete
}
changes := &plans.Changes{
Resources: []*plans.ResourceInstanceChangeSrc{
{
Addr: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "a",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance.Child("a", addrs.NoKey)),
ProviderAddr: addrs.ProviderConfig{
Type: "aws",
}.Absolute(addrs.RootModuleInstance),
ChangeSrc: plans.ChangeSrc{
Action: aAction,
Before: aBefore,
After: aAfter,
},
},
{
Addr: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "b",
}.Instance(addrs.IntKey(0)).Absolute(addrs.RootModuleInstance.Child("b", addrs.NoKey)),
ProviderAddr: addrs.ProviderConfig{
Type: "aws",
}.Absolute(addrs.RootModuleInstance),
ChangeSrc: plans.ChangeSrc{
Action: plans.DeleteThenCreate,
Before: bBefore,
After: bAfter,
},
},
},
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
Changes: changes,
})
t.Run(mode, func(t *testing.T) {
_, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatal(diags.Err())
}
})
}
}
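// TestContext2Apply_destroyDataCycle verifies that a destroy involving both a
// managed resource and a data source can be planned, round-tripped through a
// plan file, and applied without producing a dependency cycle.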
func TestContext2Apply_destroyDataCycle(t *testing.T) {
m, snap := testModuleWithSnapshot(t, "apply-destroy-data-cycle")
p := testProvider("null")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := states.NewState()
root := state.EnsureModule(addrs.RootModuleInstance)
root.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "null_resource",
Name: "a",
}.Instance(addrs.IntKey(0)),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"a"}`),
},
addrs.ProviderConfig{
Type: "null",
}.Absolute(addrs.RootModuleInstance),
)
root.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.DataResourceMode,
Type: "null_data_source",
Name: "d",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"data"}`),
},
addrs.ProviderConfig{
Type: "null",
}.Absolute(addrs.RootModuleInstance),
)
providerResolver := providers.ResolverFixed(
map[string]providers.Factory{
"null": testProviderFuncFixed(p),
},
)
hook := &testHook{}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providerResolver,
State: state,
Destroy: true,
Hooks: []Hook{hook},
})
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
// We'll marshal and unmarshal the plan here, to ensure that we have
// a clean new context as would be created if we separately ran
// terraform plan -out=tfplan && terraform apply tfplan
ctxOpts, err := contextOptsForPlanViaFile(snap, state, plan)
if err != nil {
t.Fatal(err)
}
ctxOpts.ProviderResolver = providerResolver
ctx, diags = NewContext(ctxOpts)
if diags.HasErrors() {
t.Fatalf("failed to create context for plan: %s", diags.Err())
}
_, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
}
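// TestContext2Apply_taintedDestroyFailure checks what ends up in state when
// destroying or replacing tainted instances fails: the instance whose destroy
// failed stays tainted, a create_before_destroy instance keeps its deposed
// object when the old copy fails to destroy, and a deposed object is promoted
// back to current (still tainted) when the replacement create fails.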
func TestContext2Apply_taintedDestroyFailure(t *testing.T) {
m := testModule(t, "apply-destroy-tainted")
p := testProvider("test")
p.DiffFn = testDiffFn
p.ApplyFn = func(info *InstanceInfo, s *InstanceState, d *InstanceDiff) (*InstanceState, error) {
// All destroys fail.
// c will also fail to create, meaning the existing tainted instance
		// becomes deposed, and is then promoted back to current.
		// Only c has a foo attribute.
attr := d.Attributes["foo"]
if d.Destroy || (attr != nil && attr.New == "c") {
return nil, errors.New("failure")
}
return testApplyFn(info, s, d)
}
state := states.NewState()
root := state.EnsureModule(addrs.RootModuleInstance)
root.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_instance",
Name: "a",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectTainted,
AttrsJSON: []byte(`{"id":"a","foo":"a"}`),
},
addrs.ProviderConfig{
Type: "test",
}.Absolute(addrs.RootModuleInstance),
)
root.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_instance",
Name: "b",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectTainted,
AttrsJSON: []byte(`{"id":"b","foo":"b"}`),
},
addrs.ProviderConfig{
Type: "test",
}.Absolute(addrs.RootModuleInstance),
)
root.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_instance",
Name: "c",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectTainted,
AttrsJSON: []byte(`{"id":"c","foo":"old"}`),
},
addrs.ProviderConfig{
Type: "test",
}.Absolute(addrs.RootModuleInstance),
)
providerResolver := providers.ResolverFixed(
map[string]providers.Factory{
"test": testProviderFuncFixed(p),
},
)
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providerResolver,
State: state,
Hooks: []Hook{&testHook{}},
})
_, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
state, diags = ctx.Apply()
if !diags.HasErrors() {
t.Fatal("expected error")
}
root = state.Module(addrs.RootModuleInstance)
// the instance that failed to destroy should remain tainted
a := root.ResourceInstance(addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_instance",
Name: "a",
}.Instance(addrs.NoKey))
if a.Current.Status != states.ObjectTainted {
t.Fatal("test_instance.a should be tainted")
}
// b is create_before_destroy, and the destroy failed, so there should be 1
// deposed instance.
b := root.ResourceInstance(addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_instance",
Name: "b",
}.Instance(addrs.NoKey))
if b.Current.Status != states.ObjectReady {
t.Fatal("test_instance.b should be Ready")
}
if len(b.Deposed) != 1 {
t.Fatal("test_instance.b failed to keep deposed instance")
}
	// the deposed c instance should be promoted back to Current, and remain
// tainted
c := root.ResourceInstance(addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_instance",
Name: "c",
}.Instance(addrs.NoKey))
if c.Current.Status != states.ObjectTainted {
t.Fatal("test_instance.c should be tainted")
}
if len(c.Deposed) != 0 {
t.Fatal("test_instance.c should have no deposed instances")
}
if string(c.Current.AttrsJSON) != `{"id":"c","foo":"old"}` {
t.Fatalf("unexpected attrs for c: %q\n", c.Current.AttrsJSON)
}
}
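// TestContext2Apply_cbdCycle verifies that a chain of create_before_destroy
// replacements with dependencies between the resources can be planned,
// round-tripped through a plan file, and applied without a cycle error.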
func TestContext2Apply_cbdCycle(t *testing.T) {
m, snap := testModuleWithSnapshot(t, "apply-cbd-cycle")
p := testProvider("test")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := states.NewState()
root := state.EnsureModule(addrs.RootModuleInstance)
root.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_instance",
Name: "a",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"a","require_new":"old","foo":"b"}`),
Dependencies: []addrs.AbsResource{
addrs.AbsResource{
Resource: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_instance",
Name: "b",
},
Module: addrs.RootModuleInstance,
},
addrs.AbsResource{
Resource: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_instance",
Name: "c",
},
Module: addrs.RootModuleInstance,
},
},
},
addrs.ProviderConfig{
Type: "test",
}.Absolute(addrs.RootModuleInstance),
)
root.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_instance",
Name: "b",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"b","require_new":"old","foo":"c"}`),
Dependencies: []addrs.AbsResource{
addrs.AbsResource{
Resource: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_instance",
Name: "c",
},
Module: addrs.RootModuleInstance,
},
},
},
addrs.ProviderConfig{
Type: "test",
}.Absolute(addrs.RootModuleInstance),
)
root.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_instance",
Name: "c",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"c","require_new":"old"}`),
},
addrs.ProviderConfig{
Type: "test",
}.Absolute(addrs.RootModuleInstance),
)
providerResolver := providers.ResolverFixed(
map[string]providers.Factory{
"test": testProviderFuncFixed(p),
},
)
hook := &testHook{}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providerResolver,
State: state,
Hooks: []Hook{hook},
})
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
// We'll marshal and unmarshal the plan here, to ensure that we have
// a clean new context as would be created if we separately ran
// terraform plan -out=tfplan && terraform apply tfplan
ctxOpts, err := contextOptsForPlanViaFile(snap, state, plan)
if err != nil {
t.Fatal(err)
}
ctxOpts.ProviderResolver = providerResolver
ctx, diags = NewContext(ctxOpts)
if diags.HasErrors() {
t.Fatalf("failed to create context for plan: %s", diags.Err())
}
_, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
}
| pradeepbhadani/terraform | terraform/context_apply_test.go | GO | mpl-2.0 | 275,129 |
/* Haplo Safe View Templates http://haplo.org
* (c) Haplo Services Ltd 2015 - 2016 http://www.haplo-services.com
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
package org.haplo.template.html;
final class NodeTag extends Node {
private String name;
private String start;
private Attribute attributesHead;
private Node attributeDictionaryValue;
public NodeTag(String name) {
this.name = name;
this.start = "<"+name;
}
public boolean allowedInURLContext() {
return false; // caught by Context.TEXT check as well
}
public String getName() {
return this.name;
}
public void addAttribute(String attributeName, Node value, Context valueContext, boolean tagQuoteMinimisationAllowed) {
if(value instanceof NodeLiteral) {
// Value is just a literal string, so can be optimised
// Literal values should not be escaped, because the author is trusted
String attributeValue = ((NodeLiteral)value).getLiteralString();
if(tagQuoteMinimisationAllowed && canOmitQuotesForValue(attributeValue)) {
this.start += " "+attributeName+"="+attributeValue;
} else {
this.start += " "+attributeName+"=\""+attributeValue+'"';
}
return;
}
Attribute attribute = new Attribute();
attribute.name = attributeName;
attribute.preparedNameEquals = " "+attributeName+"=\"";
attribute.value = value;
// If a URL, and the value is a single NodeValue element, it has to output as a URL path
if((valueContext == Context.URL) && (value instanceof NodeValue)) {
valueContext = Context.URL_PATH;
}
attribute.valueContext = valueContext;
// Add to list
Attribute tail = this.attributesHead;
while(tail != null) {
if(tail.nextAttribute == null) { break; }
tail = tail.nextAttribute;
}
if(tail == null) {
this.attributesHead = attribute;
} else {
tail.nextAttribute = attribute;
}
}
public void setAttributesDictionary(Parser parser, Node value) throws ParseException {
if(this.attributeDictionaryValue != null) {
parser.error("Tag can only have one attribute dictionary");
}
if(!value.nodeRepresentsValueFromView()) {
parser.error("Attribute dictionary for tag must be a value");
}
this.attributeDictionaryValue = value;
}
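    // Attribute value quotes may only be omitted for non-empty values made up
    // entirely of ASCII letters and digits.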
private boolean canOmitQuotesForValue(CharSequence value) {
int len = value.length();
if(len == 0) { return false; }
for(int i = 0; i < len; ++i) {
char c = value.charAt(i);
if(!(
((c >= 'a') && (c <= 'z')) ||
((c >= 'A') && (c <= 'Z')) ||
                    ((c >= '0') && (c <= '9'))
)) { return false; }
}
return true;
}
private static class Attribute {
public Attribute nextAttribute;
public String name;
public String preparedNameEquals; // " name=\"" for rendering
public Node value;
public Context valueContext;
}
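    // A tag with no dynamic attributes and no attribute dictionary can be
    // folded into a plain literal node.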
protected Node orSimplifiedNode() {
if(this.attributesHead == null && this.attributeDictionaryValue == null) {
return new NodeLiteral(this.start+">");
}
return this;
}
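    // Renders the opening tag: each attribute value is evaluated against the
    // view (attributes that render nothing are dropped entirely), then any
    // attribute dictionary is expanded with name checks and escaping.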
public void render(StringBuilder builder, Driver driver, Object view, Context context) throws RenderException {
builder.append(this.start);
Attribute attribute = this.attributesHead;
while(attribute != null) {
int attributeStart = builder.length();
builder.append(attribute.preparedNameEquals);
int valueStart = builder.length();
attribute.value.render(builder, driver, view, attribute.valueContext);
// If nothing was rendered, remove the attribute
if(valueStart == builder.length()) {
builder.setLength(attributeStart);
} else {
builder.append('"');
}
attribute = attribute.nextAttribute;
}
if(this.attributeDictionaryValue != null) {
driver.iterateOverValueAsDictionary(this.attributeDictionaryValue.value(driver, view), (key, value) -> {
// Check the key (attribute name) in the dictionary isn't a special attribute or a value which isn't allowed
if(!(HTML.validTagAttributeNameAndNoSpecialHandlingRequired(this.name, key))) {
throw new RenderException(driver, "Bad attribute name for tag attribute dictionary expansion: '"+key+"'");
}
String valueString = driver.valueToStringRepresentation(value);
if((valueString != null) && (valueString.length() > 0)) {
builder.append(' ').
                        append(key). // safely checked above
append("=\"");
Escape.escape(valueString, builder, Context.ATTRIBUTE_VALUE);
builder.append('"');
}
});
}
builder.append('>');
}
public void dumpToBuilder(StringBuilder builder, String linePrefix) {
builder.append(linePrefix).append("TAG ").append(this.start);
if(this.attributesHead == null) {
builder.append(">\n"); // although this case should be simplified to a literal
} else {
int count = 0;
StringBuilder attributesBuilder = new StringBuilder(256);
Attribute attribute = this.attributesHead;
while(attribute != null) {
count++;
attributesBuilder.append(linePrefix+" ").append(attribute.name).append("\n");
attribute.value.dumpToBuilder(attributesBuilder, linePrefix+" ");
attribute = attribute.nextAttribute;
}
builder.append("> with "+count+" attributes:\n").
append(attributesBuilder);
}
}
protected void interateOverAttributes(AttributeIterator i) {
Attribute attribute = this.attributesHead;
while(attribute != null) {
i.attribute(attribute.name, attribute.value, attribute.valueContext);
attribute = attribute.nextAttribute;
}
}
protected interface AttributeIterator {
void attribute(String name, Node value, Context context);
}
}
| haplo-org/haplo-safe-view-templates | src/main/java/org/haplo/template/html/NodeTag.java | Java | mpl-2.0 | 6,747 |
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
package org.royaldev.royalcommands.rcommands;
import org.bukkit.Material;
import org.bukkit.command.Command;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.event.inventory.InventoryClickEvent;
import org.bukkit.event.inventory.InventoryCloseEvent;
import org.bukkit.event.inventory.InventoryType;
import org.bukkit.inventory.FurnaceRecipe;
import org.bukkit.inventory.Inventory;
import org.bukkit.inventory.InventoryHolder;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.Recipe;
import org.bukkit.inventory.ShapedRecipe;
import org.bukkit.inventory.ShapelessRecipe;
import org.royaldev.royalcommands.MessageColor;
import org.royaldev.royalcommands.RUtils;
import org.royaldev.royalcommands.RoyalCommands;
import org.royaldev.royalcommands.exceptions.InvalidItemNameException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
// TODO: Use inventory gui (3 rows, with forward/back buttons)
@ReflectCommand
public class CmdUses extends TabCommand {
private final Map<String, Integer> tasks = new HashMap<>();
public CmdUses(final RoyalCommands instance, final String name) {
super(instance, name, true, new Short[]{CompletionType.ITEM_ALIAS.getShort()});
this.plugin.getServer().getPluginManager().registerEvents(new WorkbenchCloseListener(), this.plugin);
}
private void cancelTask(final Player p) {
if (!this.tasks.containsKey(p.getName())) return;
final int taskID = this.tasks.get(p.getName());
if (taskID != -1) this.plugin.getServer().getScheduler().cancelTask(taskID);
this.tasks.remove(p.getName());
}
private boolean containsItemStack(final Collection<? extends ItemStack> collection, final ItemStack b) {
for (final ItemStack a : collection) {
if (this.itemStackEquals(a, b)) return true;
}
return false;
}
private boolean itemStackEquals(final ItemStack a, final ItemStack b) {
return !(a == null || b == null) && a.getType() == b.getType() && (a.getDurability() == -1 || a.getDurability() == Short.MAX_VALUE || a.getDurability() == b.getDurability());
}
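    // Builds a read-only workbench/furnace inventory for every recipe that uses
    // the given item and schedules a repeating task that cycles the player
    // through them, cancelling any task previously scheduled for that player.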
private void scheduleUsesTask(final Player p, final ItemStack is) {
final List<Inventory> workbenches = new ArrayList<>();
final Iterator<Recipe> recipeIterator = this.plugin.getServer().recipeIterator();
while (recipeIterator.hasNext()) {
final Recipe r = recipeIterator.next();
final Inventory i;
if (r instanceof ShapedRecipe) {
final ShapedRecipe sr = (ShapedRecipe) r;
if (!this.containsItemStack(sr.getIngredientMap().values(), is)) continue;
i = this.plugin.getServer().createInventory(new UsesHolder(), InventoryType.WORKBENCH);
final Map<Character, ItemStack> im = sr.getIngredientMap();
final String[] lines = sr.getShape();
for (int lineNum = 0; lineNum < lines.length; lineNum++) {
final String line = lines[lineNum];
for (int slot = 1; slot <= 3; slot++) {
if (slot > line.length()) continue;
final ItemStack slotItem = im.get(line.charAt(slot - 1));
if (slotItem == null) continue;
i.setItem(slot + (lineNum * 3), this.syncDurabilities(slotItem, is));
}
}
i.setItem(0, sr.getResult());
} else if (r instanceof ShapelessRecipe) {
final ShapelessRecipe sr = (ShapelessRecipe) r;
if (!this.containsItemStack(sr.getIngredientList(), is)) continue;
i = this.plugin.getServer().createInventory(new UsesHolder(), InventoryType.WORKBENCH);
final List<ItemStack> ingredients = sr.getIngredientList();
for (int slot = 1; slot <= ingredients.size(); slot++) {
if (slot > ingredients.size()) continue;
i.setItem(slot, this.syncDurabilities(ingredients.get(slot - 1), is));
}
i.setItem(0, sr.getResult());
} else if (r instanceof FurnaceRecipe) {
final FurnaceRecipe fr = (FurnaceRecipe) r;
if (!this.itemStackEquals(fr.getInput(), is)) continue;
i = this.plugin.getServer().createInventory(new UsesHolder(), InventoryType.FURNACE);
i.setItem(0, this.syncDurabilities(fr.getInput(), is));
i.setItem(2, fr.getResult());
} else continue;
workbenches.add(i);
}
if (workbenches.size() < 1) {
p.sendMessage(MessageColor.NEGATIVE + "No uses for that item!");
return;
}
final Runnable r = new Runnable() {
private int currentRecipe = 0;
private boolean display = true;
private void setClosing(final boolean closing) {
final InventoryHolder ih = p.getOpenInventory().getTopInventory().getHolder();
if (!(ih instanceof UsesHolder)) return;
final UsesHolder uh = (UsesHolder) ih;
uh.setClosing(closing);
}
@Override
public void run() {
// let's not open new workbenches, as that can cause the items to disappear
if (!this.display) return;
if (!CmdUses.this.tasks.containsKey(p.getName())) return;
if (this.currentRecipe >= workbenches.size()) this.currentRecipe = 0;
this.setClosing(true);
p.openInventory(workbenches.get(this.currentRecipe));
this.setClosing(false);
this.currentRecipe++;
if (workbenches.size() == 1) this.display = false;
}
};
final int taskID = this.plugin.getServer().getScheduler().scheduleSyncRepeatingTask(this.plugin, r, 0L, 30L);
if (taskID == -1) {
p.sendMessage(MessageColor.NEGATIVE + "Could not schedule task!");
return;
}
this.cancelTask(p);
this.tasks.put(p.getName(), taskID);
}
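    // When a recipe ingredient matches any durability (a wildcard value), copy
    // the durability of the item the player asked about so the preview shows a
    // concrete item.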
private ItemStack syncDurabilities(final ItemStack base, final ItemStack copyDurability) {
if (base.getType() != copyDurability.getType()) return base;
if (base.getDurability() != -1 && base.getDurability() != Short.MAX_VALUE) return base;
base.setDurability(copyDurability.getDurability());
return base;
}
@Override
protected boolean runCommand(final CommandSender cs, final Command cmd, final String label, final String[] eargs, final CommandArguments ca) {
if (eargs.length < 1) {
cs.sendMessage(cmd.getDescription());
return false;
}
if (!(cs instanceof Player)) {
cs.sendMessage(MessageColor.NEGATIVE + "This command is only available to players!");
return true;
}
final Player p = (Player) cs;
ItemStack is;
if (eargs[0].equalsIgnoreCase("hand")) {
is = p.getItemInHand();
} else {
try {
is = RUtils.getItemFromAlias(eargs[0], 1);
} catch (InvalidItemNameException e) {
is = RUtils.getItem(eargs[0], 1);
} catch (NullPointerException e) {
cs.sendMessage(MessageColor.NEGATIVE + "ItemNameManager was not loaded. Let an administrator know.");
return true;
}
}
if (is == null) {
cs.sendMessage(MessageColor.NEGATIVE + "Invalid item name!");
return true;
}
this.scheduleUsesTask(p, is);
return true;
}
private class WorkbenchCloseListener implements Listener {
@EventHandler(ignoreCancelled = true)
public void workbenchClick(final InventoryClickEvent e) {
if (!(e.getWhoClicked() instanceof Player)) return;
final ItemStack is = e.getCurrentItem();
if (is == null || is.getType() == Material.AIR) return;
final InventoryType it = e.getInventory().getType();
if (it != InventoryType.WORKBENCH && it != InventoryType.FURNACE) return;
if (!(e.getInventory().getHolder() instanceof UsesHolder)) return;
e.setCancelled(true);
if (!(e.getWhoClicked() instanceof Player)) return;
final Player p = (Player) e.getWhoClicked();
CmdUses.this.scheduleUsesTask(p, is);
}
@EventHandler(priority = EventPriority.MONITOR, ignoreCancelled = true)
public void workbenchClose(final InventoryCloseEvent e) {
if (!(e.getPlayer() instanceof Player)) return;
final Player p = (Player) e.getPlayer();
final InventoryType it = e.getInventory().getType();
if (it != InventoryType.WORKBENCH && it != InventoryType.FURNACE) return;
if (!CmdUses.this.tasks.containsKey(p.getName())) return;
if (!(e.getInventory().getHolder() instanceof UsesHolder)) return;
final UsesHolder uh = (UsesHolder) e.getInventory().getHolder();
if (uh.isClosing()) return;
CmdUses.this.cancelTask(p);
}
}
private class UsesHolder implements InventoryHolder {
private boolean closing = false;
private boolean isClosing() {
return this.closing;
}
private void setClosing(final boolean closing) {
this.closing = closing;
}
@Override
public Inventory getInventory() {
return null;
}
}
}
| joansmith/RoyalCommands | modules/RoyalCommands/src/main/java/org/royaldev/royalcommands/rcommands/CmdUses.java | Java | mpl-2.0 | 10,138 |
/*----------------------------------------------------------
This Source Code Form is subject to the terms of the
Mozilla Public License, v.2.0. If a copy of the MPL
was not distributed with this file, You can obtain one
at http://mozilla.org/MPL/2.0/.
----------------------------------------------------------*/
using ScriptEngine.Machine;
using ScriptEngine.Machine.Contexts;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace ScriptEngine.HostedScript.Library
{
[GlobalContext(Category = "Работа с переменными окружения")]
public class EnvironmentVariablesImpl : GlobalContextBase<EnvironmentVariablesImpl>
{
        /// <summary>
        /// Returns a map of environment variables, where the key is the variable name and the value is the variable's value
        /// </summary>
        /// <param name="target">Scope of the environment variable</param>
        /// <example>
        /// Для Каждого Переменная Из ПеременныеСреды() Цикл
        /// Сообщить(Переменная.Ключ + " = " + Переменная.Значение);
        /// КонецЦикла;
        /// </example>
        /// <returns>Map</returns>
[ContextMethod("ПеременныеСреды", "EnvironmentVariables")]
public MapImpl EnvironmentVariables(EnvironmentVariableTargetEnum target = EnvironmentVariableTargetEnum.Process)
{
EnvironmentVariableTarget targetParam = GetSystemEnvVariableTarget(target);
var varsMap = new MapImpl();
var allVars = System.Environment.GetEnvironmentVariables(targetParam);
foreach (DictionaryEntry item in allVars)
{
varsMap.Insert(
ValueFactory.Create((string)item.Key),
ValueFactory.Create((string)item.Value));
}
return varsMap;
}
        /// <summary>
        /// Sets an environment variable.
        /// By default the variable is set in the scope of the current process and is cleared when the process ends.
        /// </summary>
        /// <param name="varName">Variable name</param>
        /// <param name="value">Variable value</param>
        /// <param name="target">Scope of the environment variable</param>
[ContextMethod("УстановитьПеременнуюСреды", "SetEnvironmentVariable")]
public void SetEnvironmentVariable(string varName, string value, EnvironmentVariableTargetEnum target = EnvironmentVariableTargetEnum.Process)
{
EnvironmentVariableTarget targetParam = GetSystemEnvVariableTarget(target);
System.Environment.SetEnvironmentVariable(varName, value, targetParam);
}
        /// <summary>
        /// Gets the value of an environment variable.
        /// </summary>
        /// <param name="varName">Variable name</param>
        /// <param name="target">Scope of the environment variable</param>
        /// <returns>String. The variable's value</returns>
[ContextMethod("ПолучитьПеременнуюСреды", "GetEnvironmentVariable")]
public IValue GetEnvironmentVariable(string varName, EnvironmentVariableTargetEnum target = EnvironmentVariableTargetEnum.Process)
{
EnvironmentVariableTarget targetParam = GetSystemEnvVariableTarget(target);
string value = System.Environment.GetEnvironmentVariable(varName, targetParam);
if (value == null)
return ValueFactory.Create();
else
return ValueFactory.Create(value);
}
public static IAttachableContext CreateInstance()
{
return new EnvironmentVariablesImpl();
}
private static EnvironmentVariableTarget GetSystemEnvVariableTarget(EnvironmentVariableTargetEnum target)
{
EnvironmentVariableTarget targetParam = EnvironmentVariableTarget.Process;
switch (target)
{
case EnvironmentVariableTargetEnum.Process:
targetParam = EnvironmentVariableTarget.Process;
break;
case EnvironmentVariableTargetEnum.User:
targetParam = EnvironmentVariableTarget.User;
break;
case EnvironmentVariableTargetEnum.Machine:
targetParam = EnvironmentVariableTarget.Machine;
break;
}
return targetParam;
}
}
}
| Faithfinder/OneScript | src/ScriptEngine.HostedScript/Library/EnvironmentVariablesImpl.cs | C# | mpl-2.0 | 5,140 |
import React from 'react';
import { OverlayTrigger, Tooltip, NavItem, Glyphicon } from 'react-bootstrap';
// This authenticates to Auth0 by opening a new Window where Auth0 will do its
// thing, then closing that window when login is complete.
export default class Auth0LoginMenuItem extends React.PureComponent {
static handleSelect() {
const loginView = new URL('/login', window.location);
window.open(loginView, '_blank');
}
render() {
const tooltip = (
<Tooltip id="auth0-signin">
Sign in with the LDAP account you use to push to version control, or
with email if you do not have version control access.
</Tooltip>
);
return (
<OverlayTrigger placement="bottom" delay={600} overlay={tooltip}>
<NavItem onSelect={Auth0LoginMenuItem.handleSelect}>
<Glyphicon glyph="log-in" /> Sign In
</NavItem>
</OverlayTrigger>
);
}
}
| lundjordan/services | src/shipit/frontend/src/components/auth/Auth0LoginMenuItem.js | JavaScript | mpl-2.0 | 925 |
/*
PICCANTE
The hottest HDR imaging library!
http://vcg.isti.cnr.it/piccante
Copyright (C) 2014
Visual Computing Laboratory - ISTI CNR
http://vcg.isti.cnr.it
First author: Francesco Banterle
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
#ifndef PIC_FILTERING_FILTER_LOG_2D_HPP
#define PIC_FILTERING_FILTER_LOG_2D_HPP
#include "../filtering/filter_diff_gauss_2d.hpp"
namespace pic {
/**
* @brief The FilterLoG2D class
*/
class FilterLoG2D: public FilterDiffGauss
{
public:
float sigma;
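    // The Laplacian of Gaussian is approximated as a difference of Gaussians
    // with standard deviations sigma * sqrt(2) and sigma / sqrt(2).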
/**
* @brief FilterLoG2D
* @param sigma
*/
FilterLoG2D(float sigma) : FilterDiffGauss(sigma * sqrtf(2.0f), sigma / sqrtf(2.0f))
{
this->sigma = sigma;
}
    /**
     * @brief execute
     * @param imgIn
     * @param imgOut
     * @param sigma
     * @return
     */
static Image *execute(Image *imgIn, Image *imgOut, float sigma)
{
FilterLoG2D filter(sigma);
return filter.Process(Single(imgIn), imgOut);
}
};
} // end namespace pic
#endif /* PIC_FILTERING_FILTER_LOG_2D_HPP */
| banterle/piccante | include/filtering/filter_log_2d.hpp | C++ | mpl-2.0 | 1,225 |
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/*
* This file is part of the libvisio project.
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
#include "VSDStencils.h"
#include "libvisio_utils.h"
libvisio::VSDShape::VSDShape()
: m_geometries(), m_shapeList(), m_fields(), m_foreign(0), m_parent(0), m_masterPage(MINUS_ONE),
m_masterShape(MINUS_ONE), m_shapeId(MINUS_ONE), m_lineStyleId(MINUS_ONE), m_fillStyleId(MINUS_ONE),
m_textStyleId(MINUS_ONE), m_lineStyle(), m_fillStyle(), m_textBlockStyle(), m_charStyle(),
m_themeRef(), m_charList(), m_paraStyle(), m_paraList(), m_text(), m_names(),
m_textFormat(libvisio::VSD_TEXT_UTF16), m_nurbsData(), m_polylineData(), m_xform(), m_txtxform(0),
m_misc()
{
}
libvisio::VSDShape::VSDShape(const libvisio::VSDShape &shape)
: m_geometries(shape.m_geometries), m_shapeList(shape.m_shapeList), m_fields(shape.m_fields),
m_foreign(shape.m_foreign ? new ForeignData(*(shape.m_foreign)) : 0), m_parent(shape.m_parent),
m_masterPage(shape.m_masterPage), m_masterShape(shape.m_masterShape), m_shapeId(shape.m_shapeId),
m_lineStyleId(shape.m_lineStyleId), m_fillStyleId(shape.m_fillStyleId), m_textStyleId(shape.m_textStyleId),
m_lineStyle(shape.m_lineStyle), m_fillStyle(shape.m_fillStyle), m_textBlockStyle(shape.m_textBlockStyle),
m_charStyle(shape.m_charStyle), m_themeRef(shape.m_themeRef), m_charList(shape.m_charList),
m_paraStyle(shape.m_paraStyle), m_paraList(shape.m_paraList), m_text(shape.m_text), m_names(shape.m_names),
m_textFormat(shape.m_textFormat), m_nurbsData(shape.m_nurbsData), m_polylineData(shape.m_polylineData),
m_xform(shape.m_xform), m_txtxform(shape.m_txtxform ? new XForm(*(shape.m_txtxform)) : 0), m_misc(shape.m_misc)
{
}
libvisio::VSDShape::~VSDShape()
{
clear();
}
libvisio::VSDShape &libvisio::VSDShape::operator=(const libvisio::VSDShape &shape)
{
if (this != &shape)
{
m_geometries = shape.m_geometries;
m_shapeList = shape.m_shapeList;
m_fields = shape.m_fields;
if (m_foreign)
delete m_foreign;
m_foreign = shape.m_foreign ? new ForeignData(*(shape.m_foreign)) : 0;
m_parent = shape.m_parent;
m_masterPage = shape.m_masterPage;
m_masterShape = shape.m_masterShape;
m_shapeId = shape.m_shapeId;
m_lineStyleId = shape.m_lineStyleId;
m_fillStyleId = shape.m_fillStyleId;
m_textStyleId = shape.m_textStyleId;
m_lineStyle = shape.m_lineStyle;
m_fillStyle = shape.m_fillStyle;
m_textBlockStyle = shape.m_textBlockStyle;
m_charStyle = shape.m_charStyle;
m_themeRef = shape.m_themeRef;
m_charList = shape.m_charList;
m_paraStyle = shape.m_paraStyle;
m_paraList = shape.m_paraList;
m_text = shape.m_text;
m_names = shape.m_names;
m_textFormat = shape.m_textFormat;
m_nurbsData = shape.m_nurbsData;
m_polylineData = shape.m_polylineData;
m_xform = shape.m_xform;
if (m_txtxform)
delete m_txtxform;
m_txtxform = shape.m_txtxform ? new XForm(*(shape.m_txtxform)) : 0;
m_misc = shape.m_misc;
}
return *this;
}
void libvisio::VSDShape::clear()
{
if (m_foreign)
delete m_foreign;
m_foreign = 0;
if (m_txtxform)
delete m_txtxform;
m_txtxform = 0;
m_geometries.clear();
m_shapeList.clear();
m_fields.clear();
m_lineStyle = VSDOptionalLineStyle();
m_fillStyle = VSDOptionalFillStyle();
m_textBlockStyle = VSDOptionalTextBlockStyle();
m_charStyle = VSDOptionalCharStyle();
m_themeRef = VSDOptionalThemeReference();
m_charList.clear();
m_paraStyle = VSDOptionalParaStyle();
m_paraList.clear();
m_text.clear();
m_names.clear();
m_nurbsData.clear();
m_polylineData.clear();
m_xform = XForm();
m_parent = 0;
m_masterPage = MINUS_ONE;
m_masterShape = MINUS_ONE;
m_shapeId = MINUS_ONE;
m_lineStyleId = MINUS_ONE;
m_fillStyleId = MINUS_ONE;
m_textStyleId = MINUS_ONE;
m_textFormat = libvisio::VSD_TEXT_UTF16;
m_misc = VSDMisc();
}
libvisio::VSDStencil::VSDStencil()
: m_shapes(), m_shadowOffsetX(0.0), m_shadowOffsetY(0.0), m_firstShapeId(MINUS_ONE)
{
}
libvisio::VSDStencil::VSDStencil(const libvisio::VSDStencil &stencil)
: m_shapes(stencil.m_shapes), m_shadowOffsetX(stencil.m_shadowOffsetX),
m_shadowOffsetY(stencil.m_shadowOffsetY), m_firstShapeId(stencil.m_firstShapeId)
{
}
libvisio::VSDStencil::~VSDStencil()
{
}
libvisio::VSDStencil &libvisio::VSDStencil::operator=(const libvisio::VSDStencil &stencil)
{
if (this != &stencil)
{
m_shapes = stencil.m_shapes;
m_shadowOffsetX = stencil.m_shadowOffsetX;
m_shadowOffsetY = stencil.m_shadowOffsetY;
m_firstShapeId = stencil.m_firstShapeId;
}
return *this;
}
void libvisio::VSDStencil::addStencilShape(unsigned id, const VSDShape &shape)
{
m_shapes[id] = shape;
}
void libvisio::VSDStencil::setFirstShape(unsigned id)
{
if (m_firstShapeId == MINUS_ONE)
m_firstShapeId = id;
}
const libvisio::VSDShape *libvisio::VSDStencil::getStencilShape(unsigned id) const
{
std::map<unsigned, VSDShape>::const_iterator iter = m_shapes.find(id);
if (iter != m_shapes.end())
return &(iter->second);
else
return 0;
}
libvisio::VSDStencils::VSDStencils() :
m_stencils()
{
}
libvisio::VSDStencils::~VSDStencils()
{
}
void libvisio::VSDStencils::addStencil(unsigned idx, const libvisio::VSDStencil &stencil)
{
m_stencils[idx] = stencil;
}
const libvisio::VSDStencil *libvisio::VSDStencils::getStencil(unsigned idx) const
{
std::map<unsigned, VSDStencil>::const_iterator iter = m_stencils.find(idx);
if (iter != m_stencils.end())
return &(iter->second);
else
return 0;
}
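// Looks up a stencil shape by (master page id, shape id); when the shape id is
// unspecified (MINUS_ONE), the stencil's first shape is used instead.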
const libvisio::VSDShape *libvisio::VSDStencils::getStencilShape(unsigned pageId, unsigned shapeId) const
{
if (MINUS_ONE == pageId)
return 0;
const libvisio::VSDStencil *tmpStencil = getStencil(pageId);
if (!tmpStencil)
return 0;
if (MINUS_ONE == shapeId)
shapeId = tmpStencil->m_firstShapeId;
return tmpStencil->getStencilShape(shapeId);
}
/* vim:set shiftwidth=2 softtabstop=2 expandtab: */
| sdteffen/libvisio | src/lib/VSDStencils.cpp | C++ | mpl-2.0 | 6,217 |
//// THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF
//// ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO
//// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A
//// PARTICULAR PURPOSE.
////
//// Copyright (c) Microsoft Corporation. All rights reserved
(function () {
"use strict";
var page = WinJS.UI.Pages.define("/html/scenario9_imageProtocols.html", {
ready: function (element, options) {
document.getElementById("imageProtocolSelector").addEventListener("change", imageProtocolSelector, false);
document.getElementById("openPicker").addEventListener("click", openPicker, false);
document.getElementById("sendTileNotification").addEventListener("click", sendTileNotification, false);
document.getElementById("imageProtocolSelector").selectedIndex = 0;
}
});
function imageProtocolSelector() {
var protocol = document.getElementById("imageProtocolSelector").selectedIndex;
if (protocol === 0) {
document.getElementById("appdataURLDiv").style.display = "none";
document.getElementById("httpURLDiv").style.display = "none";
} else if (protocol === 1) {
document.getElementById("appdataURLDiv").style.display = "block";
document.getElementById("httpURLDiv").style.display = "none";
} else if (protocol === 2) {
document.getElementById("appdataURLDiv").style.display = "none";
document.getElementById("httpURLDiv").style.display = "block";
}
}
var imageRelativePath;
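    // Copies a user-picked image into the app's local application data folder so
    // it can later be referenced in a tile via the ms-appdata:///local/ protocol.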
function openPicker() {
var picker = new Windows.Storage.Pickers.FileOpenPicker();
picker.fileTypeFilter.replaceAll([".jpg", ".jpeg", ".png", ".gif"]);
picker.commitButtonText = "Copy";
picker.pickSingleFileAsync().then(function (file) {
return file.copyAsync(Windows.Storage.ApplicationData.current.localFolder, file.name, Windows.Storage.NameCollisionOption.generateUniqueName);
}).done(function (newFile) {
var imageAbsolutePath = newFile.path;
//change image to relative path
imageRelativePath = imageAbsolutePath.substring(imageAbsolutePath.lastIndexOf("\\") + 1);
document.getElementById("notificationXmlContent").innerText = "";
WinJS.log && WinJS.log("Image copied to application data local storage: " + newFile.path, "sample", "status");
}, function (e) {
document.getElementById("notificationXmlContent").innerText = "";
WinJS.log && WinJS.log(e, "sample", "error");
});
}
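    // Builds and sends a wide tile notification whose image is referenced via the
    // currently selected protocol: ms-appx (in-package), ms-appdata (local app
    // data), or http using a baseUri plus several image URLs.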
function sendTileNotification() {
var protocol = document.getElementById("imageProtocolSelector").selectedIndex;
var wide310x150TileContent;
if (protocol === 0) { //using the ms-appx: protocol
wide310x150TileContent = NotificationsExtensions.TileContent.TileContentFactory.createTileWide310x150ImageAndText01();
wide310x150TileContent.textCaptionWrap.text = "The image is in the appx package";
wide310x150TileContent.image.src = "ms-appx:///images/redWide310x150.png";
} else if (protocol === 1) { //using the ms-appdata:/// protocol
wide310x150TileContent = NotificationsExtensions.TileContent.TileContentFactory.createTileWide310x150Image();
wide310x150TileContent.image.src = "ms-appdata:///local/" + imageRelativePath; // make sure you are providing a relative path!
} else if (protocol === 2) { //using http:// protocol
// Important - The Internet (Client) capability must be checked in the manifest in the Capabilities tab
wide310x150TileContent = NotificationsExtensions.TileContent.TileContentFactory.createTileWide310x150PeekImageCollection04();
wide310x150TileContent.textBodyWrap.text = "The baseUri is " + document.getElementById("baseUri").value;
wide310x150TileContent.imageMain.src = document.getElementById("image" + 0).value;
wide310x150TileContent.imageSmallColumn1Row1.src = document.getElementById("image" + 1).value;
wide310x150TileContent.imageSmallColumn1Row2.src = document.getElementById("image" + 2).value;
wide310x150TileContent.imageSmallColumn2Row1.src = document.getElementById("image" + 3).value;
wide310x150TileContent.imageSmallColumn2Row2.src = document.getElementById("image" + 4).value;
// set the baseUri
try {
wide310x150TileContent.baseUri = document.getElementById("baseUri").value;
} catch (e) {
document.getElementById("notificationXmlContent").innerText = "";
WinJS.log && WinJS.log(e.message, "sample", "error");
return;
}
}
wide310x150TileContent.requireSquare150x150Content = false;
Windows.UI.Notifications.TileUpdateManager.createTileUpdaterForApplication().update(wide310x150TileContent.createNotification());
document.getElementById("notificationXmlContent").innerText = wide310x150TileContent.getContent();
WinJS.log && WinJS.log("Tile notification sent", "sample", "status");
}
})(); | SoftwareFactoryUPC/ProjectTemplates | Mobile/Windows Phone/Ejemplos Windows Phone 8.1/App tiles and badges sample/Shared/js/scenario9_imageProtocols.js | JavaScript | mpl-2.0 | 5,266 |
import React from 'react'
import { expect } from 'chai'
import { shallow } from 'enzyme'
import { AccountApp } from '../../app/js/account/AccountApp'
describe('AccountApp', () => {
let props
let wrapper
before(() => {
props = {
children: {},
storageConnected: true,
location: {
pathname: '/not-account'
}
}
wrapper = shallow(<AccountApp {...props} />)
})
it('renders the NavBar', () => {
expect(wrapper.find('Navbar').length).to.equal(1)
})
it('renders the SecondaryNavBar', () => {
expect(wrapper.find('SecondaryNavBar').length).to.equal(1)
})
})
| blockstack/blockstack-portal | test/account/AccountApp.test.js | JavaScript | mpl-2.0 | 621 |
import DS from 'ember-data';
import Ember from 'ember';
const { decamelize } = Ember.String;
export default DS.RESTSerializer.extend({
primaryKey: 'name',
keyForAttribute: function(attr) {
return decamelize(attr);
},
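  // LIST responses put key names under data.keys; single-key responses return
  // the key's attributes directly, so flatten data onto the payload.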
normalizeSecrets(payload) {
if (payload.data.keys && Array.isArray(payload.data.keys)) {
const secrets = payload.data.keys.map(secret => ({ name: secret }));
return secrets;
}
Ember.assign(payload, payload.data);
delete payload.data;
return [payload];
},
normalizeResponse(store, primaryModelClass, payload, id, requestType) {
const nullResponses = ['updateRecord', 'createRecord', 'deleteRecord'];
const secrets = nullResponses.includes(requestType) ? { name: id } : this.normalizeSecrets(payload);
const { modelName } = primaryModelClass;
let transformedPayload = { [modelName]: secrets };
// just return the single object because ember is picky
if (requestType === 'queryRecord') {
transformedPayload = { [modelName]: secrets[0] };
}
return this._super(store, primaryModelClass, transformedPayload, id, requestType);
},
serialize(snapshot, requestType) {
if (requestType === 'update') {
const min_decryption_version = snapshot.attr('minDecryptionVersion');
const min_encryption_version = snapshot.attr('minEncryptionVersion');
const deletion_allowed = snapshot.attr('deletionAllowed');
return {
min_decryption_version,
min_encryption_version,
deletion_allowed,
};
} else {
return this._super(...arguments);
}
},
});
| Aloomaio/vault | ui/app/serializers/transit-key.js | JavaScript | mpl-2.0 | 1,606 |
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#endregion
using System;
using System.Collections.Generic;
namespace Google.Protobuf.Reflection
{
/// <summary>
/// Descriptor for an enum type in a .proto file.
/// </summary>
public sealed class EnumDescriptor : DescriptorBase
{
private readonly EnumDescriptorProto proto;
private readonly MessageDescriptor containingType;
private readonly IList<EnumValueDescriptor> values;
private readonly Type generatedType;
internal EnumDescriptor(EnumDescriptorProto proto, FileDescriptor file, MessageDescriptor parent, int index, Type generatedType)
: base(file, file.ComputeFullName(parent, proto.Name), index)
{
this.proto = proto;
this.generatedType = generatedType;
containingType = parent;
if (proto.Value.Count == 0)
{
// We cannot allow enums with no values because this would mean there
// would be no valid default value for fields of this type.
throw new DescriptorValidationException(this, "Enums must contain at least one value.");
}
values = DescriptorUtil.ConvertAndMakeReadOnly(proto.Value,
(value, i) => new EnumValueDescriptor(value, file, this, i));
File.DescriptorPool.AddSymbol(this);
}
internal EnumDescriptorProto Proto { get { return proto; } }
/// <summary>
/// The brief name of the descriptor's target.
/// </summary>
public override string Name { get { return proto.Name; } }
/// <summary>
/// The generated type for this enum, or <c>null</c> if the descriptor does not represent a generated type.
/// </summary>
public Type GeneratedType { get { return generatedType; } }
/// <value>
/// If this is a nested type, get the outer descriptor, otherwise null.
/// </value>
public MessageDescriptor ContainingType
{
get { return containingType; }
}
/// <value>
/// An unmodifiable list of defined value descriptors for this enum.
/// </value>
public IList<EnumValueDescriptor> Values
{
get { return values; }
}
/// <summary>
/// Finds an enum value by number. If multiple enum values have the
/// same number, this returns the first defined value with that number.
/// If there is no value for the given number, this returns <c>null</c>.
/// </summary>
public EnumValueDescriptor FindValueByNumber(int number)
{
return File.DescriptorPool.FindEnumValueByNumber(this, number);
}
/// <summary>
/// Finds an enum value by name.
/// </summary>
/// <param name="name">The unqualified name of the value (e.g. "FOO").</param>
/// <returns>The value's descriptor, or null if not found.</returns>
public EnumValueDescriptor FindValueByName(string name)
{
return File.DescriptorPool.FindSymbol<EnumValueDescriptor>(FullName + "." + name);
}
}
} | ouyangpeter/WinFormFileSystem_Client | Google.Protobuf/Reflection/EnumDescriptor.cs | C# | mpl-2.0 | 4,899 |
;
; *WARNING*
;
; DO NOT PUT THIS FILE IN YOUR WEBROOT DIRECTORY.
;
; *WARNING*
;
; Anyone can view your database password if you do!
;
debug = FALSE
;
;Database
;
db_type = "mysql"
db_host = "localhost"
db_user = "root"
db_password = ""
db_name = "openevsys"
db_table_prefix = "gacl_"
;
;Caching
;
caching = FALSE
force_cache_expire = TRUE
cache_dir = "/tmp/phpgacl_cache"
cache_expire_time = 600
;
;Admin interface
;
items_per_page = 100
max_select_box_items = 100
max_search_return_items = 200
;NO Trailing slashes
smarty_dir = "smarty/libs"
smarty_template_dir = "templates"
smarty_compile_dir = "templates_c"
| huridocs/OpenEvSys | 3rd/phpgacl/gacl.ini.php | PHP | agpl-3.0 | 644 |
package org.ow2.proactive.resourcemanager.nodesource.infrastructure;/*
* ################################################################
*
* ProActive Parallel Suite(TM): The Java(TM) library for
* Parallel, Distributed, Multi-Core Computing for
* Enterprise Grids & Clouds
*
* Copyright (C) 1997-2011 INRIA/University of
* Nice-Sophia Antipolis/ActiveEon
* Contact: proactive@ow2.org or contact@activeeon.com
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Affero General Public License
* as published by the Free Software Foundation; version 3 of
* the License.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
* USA
*
* If needed, contact us to obtain a release under GPL Version 2 or 3
* or a different license than the AGPL.
*
* Initial developer(s): The ProActive Team
* http://proactive.inria.fr/team_members.htm
* Contributor(s):
*
* ################################################################
* $$PROACTIVE_INITIAL_DEV$$
*/
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.objectweb.proactive.core.ProActiveException;
import com.xerox.amazonws.ec2.EC2Exception;
import com.xerox.amazonws.ec2.ImageDescription;
import com.xerox.amazonws.ec2.InstanceType;
import com.xerox.amazonws.ec2.Jec2;
import com.xerox.amazonws.ec2.ReservationDescription;
import com.xerox.amazonws.ec2.ReservationDescription.Instance;
import org.apache.log4j.Logger;
/**
*
* Amazon EC2 Node deployer backend
* <p>
* Contains a Java wrapper for EC2 operations ; Requires proper Amazon credentials
*
*
* @author The ProActive Team
* @since ProActive Scheduling 1.0
*
*/
public class EC2Deployer implements java.io.Serializable {
/** logger */
protected static Logger logger = Logger.getLogger(EC2Deployer.class);
/** Access Key */
private String AWS_AKEY;
/** Secret Key */
private String AWS_SKEY;
/** Amazon username */
private String AWS_USER;
/** KeyPair container */
private String AWS_KPINFO;
/** KeyPair name */
private String keyName;
/** Deployed instances */
private List<String> instanceIds;
/** Activity checker */
private boolean active;
/** Minimum instances to deploy */
private int minInstances;
/** Maximum instances to deploy */
private int maxInstances;
/** Current number of deployed instances */
private int currentInstances;
/** instance type: smaller is cheaper; bigger is faster;
     * x86_64 AMIs require extra large, or will fail to deploy */
private InstanceType instanceType;
/**
* Once an image descriptor is retrieved, cache it
*/
private Map<String, ImageDescription> cachedImageDescriptors =
Collections.synchronizedMap(new HashMap<String, ImageDescription>());
/**
* EC2 server URL - the EC2 zone used depends on this url
* Leave null for ec2 default behavior
*/
private String ec2RegionHost = null;
public String getEc2RegionHost() {
return ec2RegionHost;
}
public void setEc2RegionHost(String ec2ServerURL) {
this.ec2RegionHost = ec2ServerURL;
}
/**
* Constructs a new node deployer for Amazon EC2
*/
public EC2Deployer() {
this.instanceIds = new ArrayList<>();
this.active = false;
this.minInstances = 1;
this.maxInstances = 1;
this.instanceType = InstanceType.DEFAULT;
}
/**
* Constructs a new node deployer/killer for Amazon EC2
*
* @param aws_accesskey
* Amazon access key
* @param aws_secretkey
* Amazon secret key
* @param aws_user
* Amazon user name
*/
public EC2Deployer(String aws_accesskey, String aws_secretkey, String aws_user) {
this();
this.resetKeys(aws_accesskey, aws_secretkey, aws_user);
}
/**
* Reset amazon deployment
*
* @param aws_accesskey
* Amazon access key
* @param aws_secretkey
* Amazon secret key
* @param aws_user
* Amazon user name
* @return a Java EC2 Wrapper with the new credentials
*/
public Jec2 resetKeys(String aws_accesskey, String aws_secretkey, String aws_user) {
Jec2 EC2Requester;
this.AWS_AKEY = aws_accesskey;
this.AWS_SKEY = aws_secretkey;
this.AWS_USER = aws_user;
EC2Requester = new Jec2(this.AWS_AKEY, this.AWS_SKEY);
keyName = AWS_USER + "-" + AWS_AKEY.charAt(0) + AWS_SKEY.charAt(0);
try {
// if (terminateAllInstances(true)) {
// EC2Requester.deleteKeyPair(keyName);
this.AWS_KPINFO = EC2Requester.createKeyPair(keyName).getKeyName();
// }
} catch (EC2Exception e) {
// this should happen frequently,
// as keys can't be generated more than once without being deleted,
logger.warn("Can't regen keypair ", e);
}
this.active = true;
return EC2Requester;
}
private Jec2 getEC2Wrapper() {
Jec2 jec2 = resetKeys(this.AWS_AKEY, this.AWS_SKEY, this.AWS_USER);
if (ec2RegionHost != null) {
jec2.setRegionUrl(ec2RegionHost);
}
return jec2;
}
/**
     * Retrieves all available images on Amazon S3
     *
     * @param all
     *            if true, get all AMIs; if false, get only the user's AMIs
     * @return the user's or all AMIs from Amazon S3
*/
public List<ImageDescription> getAvailableImages(boolean all) {
Jec2 ec2req = getEC2Wrapper();
if (ec2req == null)
return null;
List<String> params = new ArrayList<>();
if (!all)
params.add(AWS_USER);
List<ImageDescription> images = null;
try {
images = ec2req.describeImagesByOwner(params);
} catch (EC2Exception e) {
logger.error("Unable to get image description", e);
}
return images;
}
/**
     * Retrieves the description of a single image (AMI), caching the descriptor once retrieved
     *
     * @param amiId
     *            a unique AMI id
     * @param all
     *            if true, search all AMIs; if false, only the user's AMIs
     * @return the first AMI from Amazon S3 matching the given id, or null if none is found
*/
public ImageDescription getAvailableImages(String amiId, boolean all) {
synchronized (cachedImageDescriptors) {
if (cachedImageDescriptors.containsKey(amiId))
return cachedImageDescriptors.get(amiId);
}
Jec2 ec2req = getEC2Wrapper();
if (ec2req == null)
return null;
List<ImageDescription> imgs = this.getAvailableImages(all);
for (ImageDescription img : imgs) {
if (img.getImageId().equals(amiId)) {
//cache it
cachedImageDescriptors.put(amiId, img);
return img;
}
}
        logger.error("Could not find AMI: " + amiId);
return null;
}
/**
* Gets a set of instances
*
* @return a set of instances
*/
public List<Instance> getInstances() {
Jec2 ec2req = getEC2Wrapper();
if (ec2req == null)
return null;
List<String> params = new ArrayList<>();
List<ReservationDescription> res = null;
List<Instance> instances = new ArrayList<>();
try {
res = ec2req.describeInstances(params);
} catch (EC2Exception e) {
logger.error("Unable to get instances list", e);
return null;
}
for (ReservationDescription rdesc : res) {
instances.addAll(rdesc.getInstances());
}
return instances;
}
/**
* Returns the hostname of a running instance
* If the instance is not running, will return an empty string
*
* @param id the unique id of the instance
* @return the hostname of the running instance corresponding to the id,
* or an empty string
*/
public String getInstanceHostname(String id) {
Jec2 ec2req = getEC2Wrapper();
if (ec2req == null)
return "";
try {
for (ReservationDescription desc : ec2req.describeInstances(new String[] {})) {
for (Instance inst : desc.getInstances()) {
if (id.equals(inst.getInstanceId())) {
return inst.getDnsName();
}
}
}
} catch (EC2Exception e) {
return "";
}
return "";
}
/**
* Attempts to terminate all instances deployed by this EC2Deployer
*
* @return the number of terminated instances
*/
public int terminateAll() {
Jec2 ec2req = getEC2Wrapper();
int t = 0;
for (String id : this.instanceIds) {
try {
ec2req.terminateInstances(new String[] { id });
logger.debug("Successfully terminated orphan EC2 node: " + id);
t++;
} catch (EC2Exception e) {
logger.error("Cannot terminate instance " + id + " with IP " + this.getInstanceHostname(id) +
". Do it manually.");
}
}
return t;
}
/**
* Launch a new instance with the provided AMI id
*
* @param imageId
     *            a unique AMI id
* @param userData
* the user data to use for this deployment
* @return the Reservation's id
* @throws ProActiveException
* acquisition failed
*/
public List<Instance> runInstances(String imageId, String userData) throws ProActiveException {
return this.runInstances(this.minInstances, this.maxInstances, imageId, userData);
}
/**
* Launch a new instance with the provided AMI id
*
* @param minNumber
* minimal number of instances to deploy
* @param maxNumber
* maximal number of instances to deploy
* @param imageId
     *            a unique AMI id
* @param userData
* the user data to use for this deployment
* @return the Reservation's id
* @throws ProActiveException
* acquisition failed
*/
public List<Instance> runInstances(int minNumber, int maxNumber, String imageId, String userData)
throws ProActiveException {
ImageDescription imgd = getAvailableImages(imageId, true);
if (imgd == null) {
throw new ProActiveException("Could not find AMI : " + imageId);
}
return this.runInstances(minNumber, maxNumber, imgd, userData);
}
/**
* Launch a new instance with provided AMI
*
* @param min
* minimal number of instances to deploy
* @param max
* maximal number of instances to deploy
* @param imgd
* an image description containing AMI id
* @param userData
* the user data to use for this deployment
* @return the Reservation's id
* @throws ProActiveException
* acquisition failed
*/
public List<Instance> runInstances(int min, int max, ImageDescription imgd, String userData)
throws ProActiveException {
Jec2 ec2req = getEC2Wrapper();
if (ec2req == null) {
throw new ProActiveException();
}
if (this.currentInstances + min > this.maxInstances) {
max = this.maxInstances - this.currentInstances;
}
if (min > max) {
min = max;
}
if (imgd == null) {
imgd = this.getAvailableImages(false).get(0);
}
try {
            //Do not force large instance, small works fine on windows. Let the user choose.
if (imgd.getArchitecture().equals("x86_64")) {
if (instanceType != InstanceType.XLARGE && instanceType != InstanceType.XLARGE_HCPU &&
instanceType != InstanceType.LARGE) {
logger.warn("AMI " + imgd.getImageId() + " is " + imgd.getPlatform() + " x86_64 Arch," +
" it might not be compatible with the chosen Instance Type " +
instanceType.getTypeId());
//instanceType = InstanceType.LARGE;
}
}
ReservationDescription rdesc = ec2req.runInstances(imgd.getImageId(), min, max,
new ArrayList<String>(), userData, this.AWS_KPINFO, instanceType);
int number = rdesc.getInstances().size();
for (Instance inst : rdesc.getInstances()) {
this.instanceIds.add(inst.getInstanceId());
}
currentInstances += number;
logger.debug("Created " + number + " instance" + ((number != 1) ? "s" : ""));
return rdesc.getInstances();
} catch (EC2Exception e) {
throw new ProActiveException(e);
}
}
/**
* Terminate a running instance
*
* @param inst the instance to terminate
* @return true upon success, or false
*/
public boolean terminateInstance(Instance inst) {
Jec2 ec2req = getEC2Wrapper();
if (ec2req == null)
return false;
try {
ec2req.terminateInstances(new String[] { inst.getInstanceId() });
this.currentInstances--;
return true;
} catch (EC2Exception e) {
logger.error("Failed to terminate instance: " + inst, e);
return false;
}
}
/**
* Try to terminate an instance from EC2 with IP/Host addr
*
     * @param addr
     *            resolved IP/host address of the node
*
* @return True on success, false otherwise
*/
public boolean terminateInstanceByAddr(InetAddress addr) {
Jec2 ec2req = getEC2Wrapper();
if (ec2req == null)
return false;
List<Instance> instances = this.getInstances();
for (Instance i : instances) {
try {
InetAddress inetAddr = InetAddress.getByName(i.getDnsName());
if (inetAddr.equals(addr)) {
                    return terminateInstance(i);
}
} catch (UnknownHostException e1) {
logger.error("Unable to resolve instance Inet Address: " + i.getDnsName(), e1);
}
}
return false;
}
/**
*
* @return the number of instances currently running
*/
public int getCurrentInstances() {
return currentInstances;
}
/**
*
* @return the maximum number of instances to attempt to reserve
*/
public int getMaxInstances() {
return maxInstances;
}
/**
* Sets the number of instances to request
*
* @param min
* Minimum number of instance to attempt to reserve
* @param max
* Maximum number of instance to attempt to reserve
*/
public void setNumInstances(int min, int max) {
this.minInstances = Math.max(min, 1);
this.maxInstances = Math.max(max, minInstances);
}
/**
*
* @return <code>true</code> if this infrastructure is allowed to acquire more nodes
*/
public boolean canGetMoreNodes() {
return (currentInstances < maxInstances);
}
/**
* Sets the instance type
*
* the smaller the cheaper;
* the larger the faster;
     * 64bit AMIs need to be run on xlarge instances
*
* @param it The type of hardware on which nodes will be deployed
* @throws IllegalArgumentException when the provided String does not match any
* existing instance type
*/
public void setInstanceType(String it) {
this.instanceType = InstanceType.getTypeFromString(it);
if (instanceType == null) {
throw new IllegalArgumentException("Invalid instance type: " + it);
}
}
/**
* {@inheritDoc}
*/
@Override
public String toString() {
return "EC2Deployer :: " + "User[" + this.AWS_USER + "] " + "Status[" +
               ((this.active) ? "active" : "inactive") + "] ";
// "Instances[" + this.getInstances(true).size() + "]";
}
}
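
// Illustrative usage sketch (not part of the original source): a node-source infrastructure
// would typically drive this class roughly as below. The credentials, AMI id and instance
// type strings are placeholders, not values taken from this repository.
//
//   EC2Deployer deployer = new EC2Deployer("ACCESS_KEY", "SECRET_KEY", "aws-user");
//   deployer.setNumInstances(1, 4);
//   deployer.setInstanceType("m1.large");
//   List<Instance> started = deployer.runInstances("ami-12345678", userData); // throws ProActiveException
//   // ... later, when the node source shuts down:
//   deployer.terminateAll();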
| sandrineBeauche/scheduling | rm/rm-infrastructure/rm-infrastructure-ec2/src/main/java/org/ow2/proactive/resourcemanager/nodesource/infrastructure/EC2Deployer.java | Java | agpl-3.0 | 17,047 |
<?php
/******* BEGIN LICENSE BLOCK *****
* BilboPlanet - An Open Source RSS feed aggregator written in PHP
* Copyright (C) 2010 By French Dev Team : Dev BilboPlanet
* Contact : dev@bilboplanet.com
* Website : www.bilboplanet.com
* Tracker : http://chili.kiwais.com/projects/bilboplanet
* Blog : www.bilboplanet.com
*
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
***** END LICENSE BLOCK *****/
?>
<?php
require_once(dirname(__FILE__).'/../inc/admin/prepend.php');
if ($core->auth->sessionExists()):
if (!$core->auth->superUser()){
__error(T_("Permission denied"),
T_('You are not allowed to see this page.')
.' '.T_('You can delete your session if you logout : ').'<a href="?logout">Logout</a>');
exit;
}
include_once(dirname(__FILE__).'/head.php');
include_once(dirname(__FILE__).'/sidebar.php');
?>
<div id="BP_page" class="page">
<div class="inpage">
<div id="flash-log" style="display:none;">
<div id="flash-msg"><!-- spanner --></div>
</div>
<fieldset><legend><?php echo T_('Manage user permissions');?></legend>
<div class="message">
<p><?php echo T_('Check user statuses and configure their permissions');?></p>
</div>
<div id="users-list"></div>
</fieldset>
<script type="text/javascript" src="meta/js/manage-permissions.js"></script>
<script type="text/javascript" src="meta/js/jquery.boxy.js"></script>
<?php
include(dirname(__FILE__).'/footer.php');
else:
$page_url = urlencode(http::getHost().$_SERVER['REQUEST_URI']);
http::redirect('../auth.php?came_from='.$page_url);
endif;
?>
| theclimber/Bilboplanet | admin/manage-permissions.php | PHP | agpl-3.0 | 2,136 |
/*
YUI 3.10.0 (build a03ce0e)
Copyright 2013 Yahoo! Inc. All rights reserved.
Licensed under the BSD License.
http://yuilibrary.com/license/
*/
YUI.add('uploader', function (Y, NAME) {
/**
* Provides UI for selecting multiple files and functionality for
* uploading multiple files to the server with support for either
* html5 or Flash transport mechanisms, automatic queue management,
* upload progress monitoring, and error events.
* @module uploader
* @main uploader
* @since 3.5.0
*/
/**
* `Y.Uploader` serves as an alias for either <a href="UploaderFlash.html">`Y.UploaderFlash`</a>
* or <a href="UploaderHTML5.html">`Y.UploaderHTML5`</a>, depending on the feature set available
* in a specific browser. If neither HTML5 nor Flash transport layers are available, `Y.Uploader.TYPE`
* static property is set to `"none"`.
*
* @class Uploader
*/
/**
* The static property reflecting the type of uploader that `Y.Uploader`
* aliases. The possible values are:
* <ul>
* <li><strong>`"html5"`</strong>: Y.Uploader is an alias for <a href="UploaderHTML5.html">Y.UploaderHTML5</a></li>
* <li><strong>`"flash"`</strong>: Y.Uploader is an alias for <a href="UploaderFlash.html">Y.UploaderFlash</a></li>
 * <li><strong>`"none"`</strong>: Neither Flash nor HTML5 is available, and Y.Uploader does
* not reference an actual implementation.</li>
* </ul>
*
* @property TYPE
* @type {String}
* @static
*/
var Win = Y.config.win;
if (Win && Win.File && Win.FormData && Win.XMLHttpRequest) {
Y.Uploader = Y.UploaderHTML5;
}
else if (Y.SWFDetect.isFlashVersionAtLeast(10,0,45)) {
Y.Uploader = Y.UploaderFlash;
}
else {
Y.namespace("Uploader");
Y.Uploader.TYPE = "none";
}
}, '3.10.0', {"requires": ["uploader-html5", "uploader-flash"]});
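
// Illustrative usage sketch (assumption, not shipped with YUI): check the selected transport
// before instantiating an uploader, and keep a plain <input type="file"> as the fallback.
//
//   YUI().use('uploader', function (Y) {
//       if (Y.Uploader.TYPE !== "none" && !Y.UA.ios) {
//           var uploader = new Y.Uploader({width: "250px", height: "35px"});
//           uploader.render("#selectFilesButtonContainer");
//       } else {
//           // no HTML5 or Flash transport available: leave the regular file input in place
//       }
//   });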
| miing/mci_migo | identityprovider/static/yui/3.10.0/uploader/uploader.js | JavaScript | agpl-3.0 | 1,747 |
class CourseColumn < ActiveRecord::Base
belongs_to :course
attr_accessible :slug, :name, :content, :position
before_save :save_show
def save_show
self.show = self.content.present?
true
end
named_scope :active, { :conditions => ["show = ?", true] }
acts_as_list :scope => :course
COLUMN_INTRO = "intro"
COLUMN_GUIDE = "guide"
COLUMN_SYLLABUS = "syllabus"
COLUMN_CALENDAR = "calendar"
COLUMN_MATERIALS = "materials"
COLUMN_REQUIRMENTS = "requirment"
COLUMN_FACULTY = "faculty"
COLUMN_PRODUCE_TEAM = "prodteam"
def self.columns_for_college
{
COLUMN_INTRO => t("intro" , 'Introduction') ,
COLUMN_GUIDE => t("guide" , 'Guides') ,
COLUMN_SYLLABUS => t("syllabus" , 'Syllabus') ,
COLUMN_CALENDAR => t("calendar" , 'Calendar') ,
COLUMN_MATERIALS => t("materials" , 'Materials') ,
      COLUMN_REQUIRMENTS  =>  t("requirments"    ,  'Requirements')   ,
COLUMN_FACULTY => t("faculty" , 'Faculty') ,
COLUMN_PRODUCE_TEAM => t("produce_team" , 'Produce Team') ,
}.freeze
end
end
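
# Illustrative usage sketch (assumption, not part of the original source): Course is assumed
# to declare `has_many :course_columns`; `course` below is a hypothetical record.
#
#   CourseColumn.columns_for_college.each do |slug, name|
#     course.course_columns.create(:slug => slug, :name => name, :content => nil)
#   end
#   course.course_columns.active  # only columns whose content is present (show == true)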
| rup/Tarrax-1 | app/models/course_column.rb | Ruby | agpl-3.0 | 1,171 |
//#############################################################################
//# #
//# Copyright (C) <2015> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//# IMS MAXIMS provides absolutely NO GUARANTEE OF THE CLINICAL SAFTEY of #
//# this program. Users of this software do so entirely at their own risk. #
//# IMS MAXIMS only ensures the Clinical Safety of unaltered run-time #
//# software that it builds, deploys and maintains. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5589.25814)
// Copyright (C) 1995-2015 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.therapies.treatment.vo;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import ims.framework.enumerations.SortOrder;
/**
* Linked to therapies.treatment.DeepFrictionMassage business object (ID: 1019100020).
*/
public class DeepFrictionMassageRefVoCollection extends ims.vo.ValueObjectCollection implements ims.domain.IDomainCollectionGetter, ims.vo.ImsCloneable, Iterable<DeepFrictionMassageRefVo>
{
private static final long serialVersionUID = 1L;
private ArrayList<DeepFrictionMassageRefVo> col = new ArrayList<DeepFrictionMassageRefVo>();
public final String getBoClassName()
{
return "ims.therapies.treatment.domain.objects.DeepFrictionMassage";
}
public ims.domain.IDomainGetter[] getIDomainGetterItems()
{
ims.domain.IDomainGetter[] result = new ims.domain.IDomainGetter[col.size()];
col.toArray(result);
return result;
}
public boolean add(DeepFrictionMassageRefVo value)
{
if(value == null)
return false;
if(this.col.indexOf(value) < 0)
{
return this.col.add(value);
}
return false;
}
public boolean add(int index, DeepFrictionMassageRefVo value)
{
if(value == null)
return false;
if(this.col.indexOf(value) < 0)
{
this.col.add(index, value);
return true;
}
return false;
}
public void clear()
{
this.col.clear();
}
public void remove(int index)
{
this.col.remove(index);
}
public int size()
{
return this.col.size();
}
public int indexOf(DeepFrictionMassageRefVo instance)
{
return col.indexOf(instance);
}
public DeepFrictionMassageRefVo get(int index)
{
return this.col.get(index);
}
public boolean set(int index, DeepFrictionMassageRefVo value)
{
if(value == null)
return false;
this.col.set(index, value);
return true;
}
public void remove(DeepFrictionMassageRefVo instance)
{
if(instance != null)
{
int index = indexOf(instance);
if(index >= 0)
remove(index);
}
}
public boolean contains(DeepFrictionMassageRefVo instance)
{
return indexOf(instance) >= 0;
}
public Object clone()
{
DeepFrictionMassageRefVoCollection clone = new DeepFrictionMassageRefVoCollection();
for(int x = 0; x < this.col.size(); x++)
{
if(this.col.get(x) != null)
clone.col.add((DeepFrictionMassageRefVo)this.col.get(x).clone());
else
clone.col.add(null);
}
return clone;
}
public boolean isValidated()
{
return true;
}
public String[] validate()
{
return null;
}
public DeepFrictionMassageRefVo[] toArray()
{
DeepFrictionMassageRefVo[] arr = new DeepFrictionMassageRefVo[col.size()];
col.toArray(arr);
return arr;
}
public DeepFrictionMassageRefVoCollection sort()
{
return sort(SortOrder.ASCENDING);
}
public DeepFrictionMassageRefVoCollection sort(SortOrder order)
{
return sort(new DeepFrictionMassageRefVoComparator(order));
}
@SuppressWarnings("unchecked")
public DeepFrictionMassageRefVoCollection sort(Comparator comparator)
{
Collections.sort(this.col, comparator);
return this;
}
public Iterator<DeepFrictionMassageRefVo> iterator()
{
return col.iterator();
}
@Override
protected ArrayList getTypedCollection()
{
return col;
}
private class DeepFrictionMassageRefVoComparator implements Comparator
{
private int direction = 1;
public DeepFrictionMassageRefVoComparator()
{
this(SortOrder.ASCENDING);
}
public DeepFrictionMassageRefVoComparator(SortOrder order)
{
if (order == SortOrder.DESCENDING)
{
this.direction = -1;
}
}
public int compare(Object obj1, Object obj2)
{
DeepFrictionMassageRefVo voObj1 = (DeepFrictionMassageRefVo)obj1;
DeepFrictionMassageRefVo voObj2 = (DeepFrictionMassageRefVo)obj2;
return direction*(voObj1.compareTo(voObj2));
}
}
}
| IMS-MAXIMS/openMAXIMS | Source Library/openmaxims_workspace/ValueObjects/src/ims/therapies/treatment/vo/DeepFrictionMassageRefVoCollection.java | Java | agpl-3.0 | 6,037 |
<?php
/**
* @author Joas Schilling <nickvergessen@owncloud.com>
*
* @copyright Copyright (c) 2016, Joas Schilling <nickvergessen@owncloud.com>
* @license AGPL-3.0
*
* This code is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License, version 3,
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License, version 3,
* along with this program. If not, see <http://www.gnu.org/licenses/>
*
*/
return [
'routes' => [
['name' => 'page#index', 'url' => '/', 'verb' => 'GET'],
['name' => 'page#get', 'url' => '/announcement', 'verb' => 'GET'],
['name' => 'page#add', 'url' => '/announcement', 'verb' => 'POST'],
['name' => 'page#delete', 'url' => '/announcement/{id}', 'verb' => 'DELETE'],
]
];
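
// Illustrative sketch (assumption, not part of this app): with the ownCloud app framework,
// each route above resolves to a method on PageController, URL placeholders being passed as
// arguments. The DELETE route, for example, would roughly map to:
//
//   public function delete($id) {
//       // remove the announcement with the given id and return a DataResponse
//   }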
| owncloud/announcementcenter | appinfo/routes.php | PHP | agpl-3.0 | 1,097 |
import SoftwareLibraries, { renderName } from "./software-libraries";
const COLUMNS = [
{
width: "60%",
title: "Library",
key: "library",
dataIndex: "name",
render: renderName,
},
{
width: "20%",
title: "R (systemwide)",
key: "r",
dataIndex: "r",
},
{
width: "20%",
title: "SageMath R",
key: "sage_r",
dataIndex: "sage_r",
},
];
export default function RLibraries() {
return <SoftwareLibraries prog="R" maxWidth={15} columns={COLUMNS} />;
}
| DrXyzzy/cocalc | src/packages/next/components/landing/r-libraries.tsx | TypeScript | agpl-3.0 | 509 |
/* This file is part of the db4o object database http://www.db4o.com
Copyright (C) 2004 - 2011 Versant Corporation http://www.versant.com
db4o is free software; you can redistribute it and/or modify it under
the terms of version 3 of the GNU General Public License as published
by the Free Software Foundation.
db4o is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License along
with this program. If not, see http://www.gnu.org/licenses/. */
package com.db4o.nativequery.expr;
public class AndExpression extends BinaryExpression {
public AndExpression(Expression left, Expression right) {
super(left, right);
}
public String toString() {
return "("+_left+")&&("+_right+")";
}
public void accept(ExpressionVisitor visitor) {
visitor.visit(this);
}
}
| xionghuiCoder/db4o | src/main/java/com/db4o/nativequery/expr/AndExpression.java | Java | agpl-3.0 | 1,002 |
# -*- coding: utf-8 -*-
# © 2016 Antiun Ingenieria S.L. - Antonio Espinosa
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from . import models
from .hooks import post_init_hook
| open-synergy/contract | contract_payment_mode/__init__.py | Python | agpl-3.0 | 201 |
import { Alert, Input } from "antd";
import { useEffect, useState } from "react";
import apiPost from "lib/api/post";
import { capitalize } from "@cocalc/util/misc";
export function EditableTitle({
license_id,
title,
onChange,
}: {
license_id: string;
title: string;
onChange?: () => void;
}) {
return (
<EditableTextField
license_id={license_id}
field="title"
value={title}
onChange={onChange}
/>
);
}
export function EditableDescription({
license_id,
description,
onChange,
}: {
license_id: string;
description: string;
onChange?: () => void;
}) {
return (
<EditableTextField
license_id={license_id}
field="description"
value={description}
rows={3}
onChange={onChange}
/>
);
}
function EditableTextField({
license_id,
field,
value,
rows,
onChange,
}: {
license_id: string;
field: "title" | "description";
value?: string;
rows?: number;
onChange?: () => void;
}) {
const [edit, setEdit] = useState<boolean>(false);
const [value2, setValue] = useState<string>(value ?? "");
const [error, setError] = useState<string>("");
useEffect(() => {
setValue(value ?? "");
setEdit(false);
setError("");
}, [value]);
async function save(value: string): Promise<void> {
setEdit(false);
setError("");
const query = { manager_site_licenses: { id: license_id, [field]: value } };
try {
await apiPost("/user-query", { query });
onChange?.();
} catch (err) {
setError(err.message);
}
}
return (
<div style={{ cursor: "pointer" }} onClick={() => setEdit(true)}>
{error && (
<Alert type="error" message={`Error saving ${field} - ${error}`} />
)}
{capitalize(field)}:{" "}
{edit &&
(rows ? (
<Input.TextArea
autoFocus
value={value2}
onChange={(e) => setValue(e.target.value)}
onBlur={() => save(value2)}
rows={rows}
/>
) : (
<Input
autoFocus
value={value2}
onChange={(e) => setValue(e.target.value)}
onBlur={() => save(value2)}
onPressEnter={() => save(value2)}
/>
))}
{!edit && <>{value2.trim() ? value2 : `(set ${field}...)`}</>}
</div>
);
}
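
// Illustrative usage sketch (assumption, not part of the original source): `license` and
// `reloadLicenses` are hypothetical names from a parent component that lists licenses.
//
//   <EditableTitle
//     license_id={license.id}
//     title={license.title}
//     onChange={() => reloadLicenses()}
//   />
//   <EditableDescription
//     license_id={license.id}
//     description={license.description}
//     onChange={() => reloadLicenses()}
//   />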
| DrXyzzy/cocalc | src/packages/next/components/licenses/editable-license.tsx | TypeScript | agpl-3.0 | 2,354 |
// An option represents a named game setting and its current value.
define(["data"], function (data) {
var Option = function () {
// The name of the option setting
this.name = "";
// The value of the option setting
this.value = false;
};
return data.define("option", Option);
}); | topiacloud/topia-online | src/plugins/core/data/option.js | JavaScript | agpl-3.0 | 349 |
#!/usr/bin/env python
import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'alert.settings'
import sys
# append these to the path to make the dev machines and the server happy (respectively)
execfile('/etc/courtlistener')
sys.path.append(INSTALL_ROOT)
from django import db
from django.core.exceptions import MultipleObjectsReturned
from django.utils.text import slugify
from alert.search.models import Court, Document
from alert.lib.parse_dates import parse_dates
from juriscraper.lib.string_utils import trunc
from alert.lib.scrape_tools import hasDuplicate
from lxml.html import fromstring, tostring
from urlparse import urljoin
import datetime
import re
import subprocess
import time
import urllib2
def load_fix_files():
"""Loads the fix files into memory so they can be accessed efficiently."""
court_fix_file = open('../logs/f2_court_fix_file.txt', 'r')
date_fix_file = open('../logs/f2_date_fix_file.txt', 'r')
case_name_short_fix_file = open('../logs/f2_short_case_name_fix_file.txt', 'r')
court_fix_dict = {}
date_fix_dict = {}
case_name_short_dict = {}
for line in court_fix_file:
key, value = line.split('|')
court_fix_dict[key] = value
for line in date_fix_file:
key, value = line.split('|')
date_fix_dict[key] = value
for line in case_name_short_fix_file:
key, value = line.split('|')
case_name_short_dict[key] = value
court_fix_file.close()
date_fix_file.close()
case_name_short_fix_file.close()
return court_fix_dict, date_fix_dict, case_name_short_dict
def check_fix_list(sha1, fix_dict):
""" Given a sha1, return the correction for a case. Return false if no values.
Corrections are strings that the parser can interpret as needed. Items are
written to this file the first time the cases are imported, and this file
can be used to import F2 into later systems.
"""
try:
return fix_dict[sha1].strip()
except KeyError:
return False
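
# Illustrative sketch: the fix files loaded above hold one "sha1|correction" pair per line,
# the same format written back below via e.g. court_fix_file.write("%s|%s\n" % (sha1Hash, court)).
# The pairings shown here are made up for illustration; only the format comes from the code:
#
#   f0da421f117ef16223d7e61d1e4e5526036776e6|ca5
#   8cc192eaacd1c544b5e8ffbd751d9be84c311932|August 16, 1985
#
# check_fix_list("f0da421f...", court_fix_dict) would then return "ca5".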
def exceptional_cleaner(caseName):
"""Cleans common Resource.org special cases off of case names, and
sets the precedential_status for a document.
Returns caseName, precedential_status
"""
caseName = caseName.lower()
ca1regex = re.compile('(unpublished disposition )?notice: first circuit local rule 36.2\(b\)6 states unpublished opinions may be cited only in related cases.?')
ca2regex = re.compile('(unpublished disposition )?notice: second circuit local rule 0.23 states unreported opinions shall not be cited or otherwise used in unrelated cases.?')
ca3regex = re.compile('(unpublished disposition )?notice: third circuit rule 21\(i\) states citations to federal decisions which have not been formally reported should identify the court, docket number and date.?')
ca4regex = re.compile('(unpublished disposition )?notice: fourth circuit (local rule 36\(c\)|i.o.p. 36.6) states that citation of unpublished dispositions is disfavored except for establishing res judicata, estoppel, or the law of the case and requires service of copies of cited unpublished dispositions of the fourth circuit.?')
ca5regex = re.compile('(unpublished disposition )?notice: fifth circuit local rule 47.5.3 states that unpublished opinions should normally be cited only when they establish the law of the case, are relied upon as a basis for res judicata or collateral estoppel, or involve related facts. if an unpublished opinion is cited, a copy shall be attached to each copy of the brief.?')
ca6regex = re.compile('(unpublished disposition )?notice: sixth circuit rule 24\(c\) states that citation of unpublished dispositions is disfavored except for establishing res judicata, estoppel, or the law of the case and requires service of copies of cited unpublished dispositions of the sixth circuit.?')
ca7regex = re.compile('(unpublished disposition )?notice: seventh circuit rule 53\(b\)\(2\) states unpublished orders shall not be cited or used as precedent except to support a claim of res judicata, collateral estoppel or law of the case in any federal court within the circuit.?')
ca8regex = re.compile('(unpublished disposition )?notice: eighth circuit rule 28a\(k\) governs citation of unpublished opinions and provides that (no party may cite an opinion not intended for publication unless the cases are related by identity between the parties or the causes of action|they are not precedent and generally should not be cited unless relevant to establishing the doctrines of res judicata, collateral estoppel, the law of the case, or if the opinion has persuasive value on a material issue and no published opinion would serve as well).?')
ca9regex = re.compile('(unpublished disposition )?notice: ninth circuit rule 36-3 provides that dispositions other than opinions or orders designated for publication are not precedential and should not be cited except when relevant under the doctrines of law of the case, res judicata, or collateral estoppel.?')
ca10regex = re.compile('(unpublished disposition )?notice: tenth circuit rule 36.3 states that unpublished opinions and orders and judgments have no precedential value and shall not be cited except for purposes of establishing the doctrines of the law of the case, res judicata, or collateral estoppel.?')
cadcregex = re.compile('(unpublished disposition )?notice: d.c. circuit local rule 11\(c\) states that unpublished orders, judgments, and explanatory memoranda may not be cited as precedents, but counsel may refer to unpublished dispositions when the binding or preclusive effect of the disposition, rather than its quality as precedent, is relevant.?')
cafcregex = re.compile('(unpublished disposition )?notice: federal circuit local rule 47.(6|8)\(b\) states that opinions and orders which are designated as not citable as precedent shall not be employed or cited as precedent. this does not preclude assertion of issues of claim preclusion, issue preclusion, judicial estoppel, law of the case or the like based on a decision of the court rendered in a nonprecedential opinion or order.?')
# Clean off special cases
if 'first circuit' in caseName:
caseName = re.sub(ca1regex, '', caseName)
precedential_status = 'Unpublished'
elif 'second circuit' in caseName:
caseName = re.sub(ca2regex, '', caseName)
precedential_status = 'Unpublished'
elif 'third circuit' in caseName:
caseName = re.sub(ca3regex, '', caseName)
precedential_status = 'Unpublished'
elif 'fourth circuit' in caseName:
caseName = re.sub(ca4regex, '', caseName)
precedential_status = 'Unpublished'
elif 'fifth circuit' in caseName:
caseName = re.sub(ca5regex, '', caseName)
precedential_status = 'Unpublished'
elif 'sixth circuit' in caseName:
caseName = re.sub(ca6regex, '', caseName)
precedential_status = 'Unpublished'
elif 'seventh circuit' in caseName:
caseName = re.sub(ca7regex, '', caseName)
precedential_status = 'Unpublished'
elif 'eighth circuit' in caseName:
caseName = re.sub(ca8regex, '', caseName)
precedential_status = 'Unpublished'
elif 'ninth circuit' in caseName:
caseName = re.sub(ca9regex, '', caseName)
precedential_status = 'Unpublished'
elif 'tenth circuit' in caseName:
caseName = re.sub(ca10regex, '', caseName)
precedential_status = 'Unpublished'
elif 'd.c. circuit' in caseName:
caseName = re.sub(cadcregex, '', caseName)
precedential_status = 'Unpublished'
elif 'federal circuit' in caseName:
caseName = re.sub(cafcregex, '', caseName)
precedential_status = 'Unpublished'
else:
precedential_status = 'Published'
return caseName, precedential_status
def scrape_and_parse():
"""Traverses the bulk data from public.resource.org, and puts them in the
DB.
Probably lots of ways to go about this, but I think the easiest will be the following:
- look at the index page of all volumes, and follow all the links it has.
- for each volume, look at its index page, and follow the link to all cases
- for each case, collect information wisely.
- put it all in the DB
"""
# begin by loading up the fix files into memory
court_fix_dict, date_fix_dict, case_name_short_dict = load_fix_files()
results = []
DEBUG = 4
# Set to False to disable automatic browser usage. Else, set to the
# command you want to run, e.g. 'firefox'
BROWSER = False
court_fix_file = open('../logs/f2_court_fix_file.txt', 'a')
date_fix_file = open('../logs/f2_date_fix_file.txt', 'a')
case_name_short_fix_file = open('../logs/f2_short_case_name_fix_file.txt', 'a')
vol_file = open('../logs/vol_file.txt', 'r+')
case_file = open('../logs/case_file.txt', 'r+')
url = "file://%s/Resource.org/F2/index.html" % INSTALL_ROOT
openedURL = urllib2.urlopen(url)
content = openedURL.read()
openedURL.close()
tree = fromstring(content)
volumeLinks = tree.xpath('//table/tbody/tr/td[1]/a')
try:
i = int(vol_file.readline())
except ValueError:
        # the volume file is empty or otherwise failing.
i = 0
vol_file.close()
if DEBUG >= 1:
print "Number of remaining volumes is: %d" % (len(volumeLinks) - i)
# used later, needs a default value.
saved_caseDate = None
saved_court = None
while i < len(volumeLinks):
# we iterate over every case in the volume
volumeURL = volumeLinks[i].text + "/index.html"
volumeURL = urljoin(url, volumeURL)
if DEBUG >= 1:
print "Current volumeURL is: %s" % volumeURL
openedVolumeURL = urllib2.urlopen(volumeURL)
content = openedVolumeURL.read()
volumeTree = fromstring(content)
openedVolumeURL.close()
caseLinks = volumeTree.xpath('//table/tbody/tr/td[1]/a')
caseDates = volumeTree.xpath('//table/tbody/tr/td[2]')
sha1Hashes = volumeTree.xpath('//table/tbody/tr/td[3]/a')
# The following loads a serialized placeholder from disk.
try:
j = int(case_file.readline())
except ValueError:
j = 0
case_file.close()
while j < len(caseLinks):
# iterate over each case, throwing it in the DB
if DEBUG >= 1:
print ''
# like the scraper, we begin with the caseLink field (relative for
# now, not absolute)
caseLink = caseLinks[j].get('href')
# sha1 is easy
sha1Hash = sha1Hashes[j].text
if DEBUG >= 4:
print "SHA1 is: %s" % sha1Hash
# using the caselink from above, and the volumeURL, we can get the
# html
absCaseLink = urljoin(volumeURL, caseLink)
html = urllib2.urlopen(absCaseLink).read()
htmlTree = fromstring(html)
bodyContents = htmlTree.xpath('//body/*[not(@id="footer")]')
body = ""
bodyText = ""
for element in bodyContents:
body += tostring(element)
try:
bodyText += tostring(element, method='text')
except UnicodeEncodeError:
# Happens with odd characters. Simply pass this iteration.
pass
if DEBUG >= 5:
print body
print bodyText
# need to figure out the court ID
try:
courtPs = htmlTree.xpath('//p[@class = "court"]')
# Often the court ends up in the parties field.
partiesPs = htmlTree.xpath("//p[@class= 'parties']")
court = ""
for courtP in courtPs:
court += tostring(courtP).lower()
for party in partiesPs:
court += tostring(party).lower()
except IndexError:
court = check_fix_list(sha1Hash, court_fix_dict)
if not court:
print absCaseLink
if BROWSER:
subprocess.Popen([BROWSER, absCaseLink], shell=False).communicate()
court = raw_input("Please input court name (e.g. \"First Circuit of Appeals\"): ").lower()
court_fix_file.write("%s|%s\n" % (sha1Hash, court))
if ('first' in court) or ('ca1' == court):
court = 'ca1'
elif ('second' in court) or ('ca2' == court):
court = 'ca2'
elif ('third' in court) or ('ca3' == court):
court = 'ca3'
elif ('fourth' in court) or ('ca4' == court):
court = 'ca4'
elif ('fifth' in court) or ('ca5' == court):
court = 'ca5'
elif ('sixth' in court) or ('ca6' == court):
court = 'ca6'
elif ('seventh' in court) or ('ca7' == court):
court = 'ca7'
elif ('eighth' in court) or ('ca8' == court):
court = 'ca8'
elif ('ninth' in court) or ('ca9' == court):
court = 'ca9'
elif ("tenth" in court) or ('ca10' == court):
court = 'ca10'
elif ("eleventh" in court) or ('ca11' == court):
court = 'ca11'
elif ('columbia' in court) or ('cadc' == court):
court = 'cadc'
elif ('federal' in court) or ('cafc' == court):
court = 'cafc'
elif ('patent' in court) or ('ccpa' == court):
court = 'ccpa'
elif (('emergency' in court) and ('temporary' not in court)) or ('eca' == court):
court = 'eca'
elif ('claims' in court) or ('uscfc' == court):
court = 'uscfc'
else:
# No luck extracting the court name. Try the fix file.
court = check_fix_list(sha1Hash, court_fix_dict)
if not court:
# Not yet in the fix file. Check if it's a crazy ca5 case
court = ''
ca5courtPs = htmlTree.xpath('//p[@class = "center"]')
for ca5courtP in ca5courtPs:
court += tostring(ca5courtP).lower()
if 'fifth circuit' in court:
court = 'ca5'
else:
court = False
if not court:
# Still no luck. Ask for input, then append it to
# the fix file.
print absCaseLink
if BROWSER:
subprocess.Popen([BROWSER, absCaseLink], shell=False).communicate()
court = raw_input("Unknown court. Input the court code to proceed successfully [%s]: " % saved_court)
court = court or saved_court
court_fix_file.write("%s|%s\n" % (sha1Hash, court))
saved_court = court
court = Court.objects.get(pk=court)
if DEBUG >= 4:
print "Court is: %s" % court
# next: west_cite, docket_number and caseName. Full casename is gotten later.
west_cite = caseLinks[j].text
docket_number = absCaseLink.split('.')[-2]
caseName = caseLinks[j].get('title')
caseName, precedential_status = exceptional_cleaner(caseName)
cite, new = hasDuplicate(caseName, west_cite, docket_number)
if cite.caseNameShort == '':
# No luck getting the case name
savedCaseNameShort = check_fix_list(sha1Hash, case_name_short_dict)
if not savedCaseNameShort:
print absCaseLink
if BROWSER:
subprocess.Popen([BROWSER, absCaseLink], shell=False).communicate()
caseName = raw_input("Short casename: ")
cite.caseNameShort = trunc(caseName, 100)
cite.caseNameFull = caseName
case_name_short_fix_file.write("%s|%s\n" % (sha1Hash, caseName))
else:
# We got both the values from the save files. Use 'em.
cite.caseNameShort = trunc(savedCaseNameShort, 100)
cite.caseNameFull = savedCaseNameShort
# The slug needs to be done here, b/c it is only done automatically
# the first time the citation is saved, and this will be
# at least the second.
cite.slug = trunc(slugify(cite.caseNameShort), 50)
cite.save()
if DEBUG >= 4:
print "precedential_status: " + precedential_status
print "west_cite: " + cite.west_cite
print "docket_number: " + cite.docket_number
print "caseName: " + cite.caseNameFull
# date is kinda tricky...details here:
# http://pleac.sourceforge.net/pleac_python/datesandtimes.html
rawDate = caseDates[j].find('a')
try:
if rawDate is not None:
# Special cases
if sha1Hash == 'f0da421f117ef16223d7e61d1e4e5526036776e6':
date_text = 'August 28, 1980'
elif sha1Hash == '8cc192eaacd1c544b5e8ffbd751d9be84c311932':
date_text = 'August 16, 1985'
elif sha1Hash == 'd19bce155f72a9f981a12efabd760a35e1e7dbe7':
date_text = 'October 12, 1979'
elif sha1Hash == '9f7583cf0d46ddc9cad4e7943dd775f9e9ea99ff':
date_text = 'July 30, 1980'
elif sha1Hash == '211ea81a4ab4132483c483698d2a40f4366f5640':
date_text = 'November 3, 1981'
elif sha1Hash == 'eefb344034461e9c6912689677a32cd18381d5c2':
date_text = 'July 28, 1983'
else:
date_text = rawDate.text
try:
caseDate = datetime.datetime(*time.strptime(date_text, "%B, %Y")[0:5])
                    except (ValueError, TypeError):
caseDate = datetime.datetime(*time.strptime(date_text, "%B %d, %Y")[0:5])
else:
# No value was found. Throw an exception.
raise ValueError
except:
# No date provided.
try:
# Try to get it from the saved list
caseDate = datetime.datetime(*time.strptime(check_fix_list(sha1Hash, date_fix_dict), "%B %d, %Y")[0:5])
except:
caseDate = False
if not caseDate:
# Parse out the dates with debug set to false.
try:
dates = parse_dates(bodyText, False)
except OverflowError:
# Happens when we try to make a date from a very large number
dates = []
try:
first_date_found = dates[0]
except IndexError:
# No dates found.
first_date_found = False
if first_date_found == saved_caseDate:
# High likelihood of date being correct. Use it.
caseDate = saved_caseDate
else:
print absCaseLink
if BROWSER:
subprocess.Popen([BROWSER, absCaseLink], shell=False).communicate()
print "Unknown date. Possible options are:"
try:
print " 1) %s" % saved_caseDate.strftime("%B %d, %Y")
except AttributeError:
# Happens on first iteration when saved_caseDate has no strftime attribute.
try:
saved_caseDate = dates[0]
print " 1) %s" % saved_caseDate.strftime(
"%B %d, %Y")
except IndexError:
# Happens when dates has no values.
print " No options available."
for k, date in enumerate(dates[0:4]):
if date.year >= 1900:
# strftime can't handle dates before 1900.
print " %s) %s" % (k + 2,
date.strftime("%B %d, %Y"))
choice = raw_input("Enter the date or an option to proceed [1]: ")
choice = choice or 1
if str(choice) == '1':
# The user chose the default. Use the saved value from the last case
caseDate = saved_caseDate
elif choice in ['2', '3', '4', '5']:
# The user chose an option between 2 and 5. Use it.
caseDate = dates[int(choice) - 2]
else:
# The user typed a new date. Use it.
caseDate = datetime.datetime(*time.strptime(choice, "%B %d, %Y")[0:5])
date_fix_file.write("%s|%s\n" % (sha1Hash, caseDate.strftime("%B %d, %Y")))
# Used during the next iteration as the default value
saved_caseDate = caseDate
if DEBUG >= 3:
print "caseDate is: %s" % caseDate
try:
doc, created = Document.objects.get_or_create(
sha1=sha1Hash, court=court)
except MultipleObjectsReturned:
# this shouldn't happen now that we're using SHA1 as the dup
# check, but the old data is problematic, so we must catch this.
created = False
if created:
# we only do this if it's new
doc.html = body
doc.sha1 = sha1Hash
doc.download_url = "http://bulk.resource.org/courts.gov/c/F2/"\
+ str(i + 178) + "/" + caseLink
doc.date_filed = caseDate
doc.source = "R"
doc.precedential_status = precedential_status
doc.citation = cite
doc.save()
if not created:
# something is afoot. Throw a big error.
print "Duplicate found at volume " + str(i + 1) + \
" and row " + str(j + 1) + "!!!!"
print "Found document %s in the database with doc id of %d!" % (doc, doc.pk)
exit(1)
# save our location within the volume.
j += 1
case_file = open('../logs/case_file.txt', 'w')
case_file.write(str(j))
case_file.close()
# save the last volume completed.
i += 1
vol_file = open('../logs/vol_file.txt', 'w')
vol_file.write(str(i))
vol_file.close()
# Clear query cache, as it presents a memory leak
db.reset_queries()
return 0
def main():
print scrape_and_parse()
print "Completed all volumes successfully. Exiting."
exit(0)
if __name__ == '__main__':
main()
| shashi792/courtlistener | alert/corpus_importer/resource_org/import_f2.py | Python | agpl-3.0 | 23,715 |
// Copyright 2011-2012 Paulo Augusto Peccin. See licence.txt distributed with this file.
package org.javatari.general.m6502.instructions;
import org.javatari.general.m6502.Instruction;
import org.javatari.general.m6502.M6502;
import org.javatari.general.m6502.OperandType;
public final class BIT extends Instruction {
public BIT(M6502 cpu, int type) {
super(cpu);
this.type = type;
}
@Override
public int fetch() {
if (type == OperandType.Z_PAGE) { ea = cpu.fetchZeroPageAddress(); return 3; }
if (type == OperandType.ABS) { ea = cpu.fetchAbsoluteAddress(); return 4; }
throw new IllegalStateException("BIT Invalid Operand Type: " + type);
}
@Override
public void execute() {
final byte val = cpu.bus.readByte(ea);
cpu.ZERO = (val & cpu.A) == 0;
cpu.OVERFLOW = (val & 0x40) != 0; // value of bit 6 from memory
cpu.NEGATIVE = (val & 0x80) != 0; // value of bit 7 from memory
}
private final int type;
private int ea;
public static final long serialVersionUID = 1L;
}
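
// Worked example (illustrative, not part of the original source): executing BIT on a memory
// byte of 0xC0 while A = 0x0F leaves A untouched, sets ZERO because 0xC0 & 0x0F == 0, and
// copies bits 6 and 7 of the operand into OVERFLOW and NEGATIVE, so both flags end up set.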
| admazzola/javatari-deeplearn | javatari/src/org/javatari/general/m6502/instructions/BIT.java | Java | agpl-3.0 | 1,049 |
//#############################################################################
//# #
//# Copyright (C) <2015> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//# IMS MAXIMS provides absolutely NO GUARANTEE OF THE CLINICAL SAFTEY of #
//# this program. Users of this software do so entirely at their own risk. #
//# IMS MAXIMS only ensures the Clinical Safety of unaltered run-time #
//# software that it builds, deploys and maintains. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5589.25814)
// Copyright (C) 1995-2015 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.clinicaladmin.forms.dailypatternandshifts;
import ims.framework.delegates.*;
abstract public class Handlers implements ims.framework.UILogic, IFormUILogicCode, ims.framework.interfaces.IClearInfo
{
abstract protected void onFormModeChanged();
abstract protected void onFormOpen() throws ims.framework.exceptions.PresentationLogicException;
abstract protected void onBtnSaveClick() throws ims.framework.exceptions.PresentationLogicException;
abstract protected void onBtnCancelClick() throws ims.framework.exceptions.PresentationLogicException;
abstract protected void onBtnUpdateClick() throws ims.framework.exceptions.PresentationLogicException;
abstract protected void onBtnNewClick() throws ims.framework.exceptions.PresentationLogicException;
abstract protected void onCmbNumberOfShiftsValueChanged() throws ims.framework.exceptions.PresentationLogicException;
abstract protected void onChkUsePeriodsValueChanged() throws ims.framework.exceptions.PresentationLogicException;
abstract protected void onTimStartValueChanged() throws ims.framework.exceptions.PresentationLogicException;
abstract protected void onGrdTypeSelectionChanged() throws ims.framework.exceptions.PresentationLogicException;
public final void setContext(ims.framework.UIEngine engine, GenForm form)
{
this.engine = engine;
this.form = form;
this.form.setFormModeChangedEvent(new FormModeChanged()
{
private static final long serialVersionUID = 1L;
public void handle()
{
onFormModeChanged();
}
});
this.form.setFormOpenEvent(new FormOpen()
{
private static final long serialVersionUID = 1L;
public void handle(Object[] args) throws ims.framework.exceptions.PresentationLogicException
{
onFormOpen();
}
});
this.form.btnSave().setClickEvent(new Click()
{
private static final long serialVersionUID = 1L;
public void handle() throws ims.framework.exceptions.PresentationLogicException
{
onBtnSaveClick();
}
});
this.form.btnCancel().setClickEvent(new Click()
{
private static final long serialVersionUID = 1L;
public void handle() throws ims.framework.exceptions.PresentationLogicException
{
onBtnCancelClick();
}
});
this.form.btnUpdate().setClickEvent(new Click()
{
private static final long serialVersionUID = 1L;
public void handle() throws ims.framework.exceptions.PresentationLogicException
{
onBtnUpdateClick();
}
});
this.form.btnNew().setClickEvent(new Click()
{
private static final long serialVersionUID = 1L;
public void handle() throws ims.framework.exceptions.PresentationLogicException
{
onBtnNewClick();
}
});
this.form.cmbNumberOfShifts().setValueChangedEvent(new ValueChanged()
{
private static final long serialVersionUID = 1L;
public void handle() throws ims.framework.exceptions.PresentationLogicException
{
onCmbNumberOfShiftsValueChanged();
}
});
this.form.chkUsePeriods().setValueChangedEvent(new ValueChanged()
{
private static final long serialVersionUID = 1L;
public void handle() throws ims.framework.exceptions.PresentationLogicException
{
onChkUsePeriodsValueChanged();
}
});
this.form.timStart().setValueChangedEvent(new ValueChanged()
{
private static final long serialVersionUID = 1L;
public void handle() throws ims.framework.exceptions.PresentationLogicException
{
onTimStartValueChanged();
}
});
this.form.grdType().setSelectionChangedEvent(new GridSelectionChanged()
{
private static final long serialVersionUID = 1L;
public void handle(ims.framework.enumerations.MouseButton mouseButton) throws ims.framework.exceptions.PresentationLogicException
{
onGrdTypeSelectionChanged();
}
});
}
public void free()
{
this.engine = null;
this.form = null;
}
public abstract void clearContextInformation();
protected ims.framework.UIEngine engine;
protected GenForm form;
}
| IMS-MAXIMS/openMAXIMS | Source Library/openmaxims_workspace/ClinicalAdmin/src/ims/clinicaladmin/forms/dailypatternandshifts/Handlers.java | Java | agpl-3.0 | 6,150 |
#!env/python3
# coding: utf-8
import ipdb
import os
import json
import datetime
import uuid
import psycopg2
import hashlib
import asyncio
import ped_parser
from config import *
from core.framework.common import *
from core.model import *
# =====================================================================================================================
# FILTER ENGINE
# =====================================================================================================================
class FilterEngine:
op_map = {'AND': ' AND ', 'OR': ' OR ', '==': '=', '!=': '<>', '>': '>', '<': '<', '>=': '>=', '<=': '<=', '~': ' LIKE ', '!~': ' NOT LIKE ',
# As a left join will be done on the chr+pos or chr+pos+ref+alt according to the type of the set operation (by site or by variant)
# We just need to test if one of the "joined" field is set or not
'IN': '{0}.chr is not null',
'NOTIN': '{0}.chr is null'}
    sql_type_map = {'int': 'integer', 'string': 'text', 'float': 'real', 'percent': 'real', 'enum': 'integer', 'range': 'int8range', 'bool': 'boolean',
                    'list_i': 'text', 'list_s': 'text', 'list_f': 'text', 'list_pb': 'text'}
def __init__(self):
run_until_complete(self.load_annotation_metadata())
async def load_annotation_metadata(self):
"""
Init Annso Filtering engine.
Init mapping collection for annotations databases and fields
"""
refname = 'hg19' # execute("SELECT table_suffix FROM reference WHERE id="+str(reference)).first()["table_suffix"]
self.reference = 2
self.fields_map = {}
self.db_map = {}
self.variant_table = "sample_variant_{0}".format(refname)
query = "SELECT d.uid AS duid, d.name AS dname, d.name_ui AS dname_ui, d.jointure, d.reference_id, d.type AS dtype, d.db_pk_field_uid, a.uid AS fuid, a.name AS fname, a.type, a.wt_default FROM annotation_field a LEFT JOIN annotation_database d ON a.database_uid=d.uid"
result = await execute_aio(query)
for row in result:
if row.duid not in self.db_map:
self.db_map[row.duid] = {"name": row.dname, "join": row.jointure, "fields": {}, "reference_id": row.reference_id, "type": row.dtype, "db_pk_field_uid" : row.db_pk_field_uid}
self.db_map[row.duid]["fields"][row.fuid] = {"name": row.fname, "type": row.type}
self.fields_map[row.fuid] = {"name": row.fname, "type": row.type, "db_uid": row.duid, "db_name_ui": row.dname_ui, "db_name": row.dname, "db_type": row.dtype, "join": row.jointure, "wt_default": row.wt_default}
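
    # Illustrative sketch (assumption): after load_annotation_metadata() the two maps are keyed
    # by annotation database/field uids, roughly along these lines (names are made up):
    #
    #   self.db_map     = {"<db_uid>": {"name": "refgene", "join": "...", "fields": {...},
    #                                   "reference_id": 2, "type": "transcript", "db_pk_field_uid": "..."}}
    #   self.fields_map = {"<field_uid>": {"name": "pos", "type": "int", "db_uid": "<db_uid>",
    #                                      "db_name_ui": "RefGene", "join": "...", "wt_default": None}}
    #
    # create_working_table() below uses fields_map to translate field uids into wt_<analysis_id> columns.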
def create_working_table(self, analysis_id, sample_ids, field_uids, dbs_uids, filter_ids=[], attributes={}):
"""
        Create a working sql table for the analysis to improve the speed of filtering/annotation.
A Working table contains all variants used by the analysis, with all annotations used by filters or displayed
"""
from core.core import core
if len(sample_ids) == 0: raise RegovarException("No sample... so not able to retrieve data")
db_ref_suffix= "hg19" # execute("SELECT table_suffix FROM reference WHERE id={}".format(reference_id)).first().table_suffix
progress = {"msg": "wt_processing", "start": datetime.datetime.now().ctime(), "analysis_id": analysis_id, "step": 1}
core.notify_all(progress)
# Create schema
w_table = 'wt_{}'.format(analysis_id)
query = "DROP TABLE IF EXISTS {0} CASCADE; CREATE TABLE {0} (\
is_variant boolean DEFAULT False, \
annotated boolean DEFAULT False, \
variant_id bigint, \
bin integer, \
chr bigint, \
pos integer, \
ref text, \
alt text,\
transcript_pk_field_uid character varying(32), \
transcript_pk_value character varying(100), \
is_transition boolean, \
sample_tlist integer[], \
sample_tcount integer, \
sample_alist integer[], \
sample_acount integer, \
depth integer, "
query += ", ".join(["s{}_gt integer".format(i) for i in sample_ids]) + ", "
query += ", ".join(["s{}_dp integer".format(i) for i in sample_ids])
query += ", CONSTRAINT {0}_ukey UNIQUE (variant_id, transcript_pk_field_uid, transcript_pk_value));"
execute(query.format(w_table))
# Insert variant without annotation first
query = "INSERT INTO {0} (variant_id, bin, chr, pos, ref, alt, is_transition, sample_tlist) \
SELECT DISTINCT sample_variant_{1}.variant_id, sample_variant_{1}.bin, sample_variant_{1}.chr, sample_variant_{1}.pos, sample_variant_{1}.ref, sample_variant_{1}.alt, \
variant_{1}.is_transition, \
variant_{1}.sample_list \
FROM sample_variant_{1} INNER JOIN variant_{1} ON sample_variant_{1}.variant_id=variant_{1}.id \
WHERE sample_variant_{1}.sample_id IN ({2}) \
ON CONFLICT (variant_id, transcript_pk_field_uid, transcript_pk_value) DO NOTHING;"
execute(query.format(w_table, db_ref_suffix, ','.join([str(i) for i in sample_ids])))
# Complete sample-variant's associations
for sid in sample_ids:
execute("UPDATE {0} SET s{2}_gt=_sub.genotype, s{2}_dp=_sub.depth FROM (SELECT variant_id, genotype, depth FROM sample_variant_{1} WHERE sample_id={2}) AS _sub WHERE {0}.variant_id=_sub.variant_id".format(w_table, db_ref_suffix, sid))
query = "UPDATE {0} SET \
is_variant=(CASE WHEN ref<>alt THEN True ELSE False END), \
sample_tcount=array_length(sample_tlist,1), \
sample_alist=array_intersect(sample_tlist, array[{1}]), \
sample_acount=array_length(array_intersect(sample_tlist, array[{1}]),1), \
depth=GREATEST({2})"
execute(query.format(w_table, ",".join([str(i) for i in sample_ids]), ", ".join(["s{}_dp".format(i) for i in sample_ids])))
# Create indexes
# FIXME : do we need to create index on boolean fields ? Is partition a better way to do for low cardinality fields : http://www.postgresql.org/docs/9.1/static/ddl-partitioning.html
# query = "CREATE INDEX {0}_idx_ann ON {0} USING btree (annotated);".format(w_table)
query = "CREATE INDEX {0}_idx_vid ON {0} USING btree (variant_id);".format(w_table)
query += "CREATE INDEX {0}_idx_var ON {0} USING btree (bin, chr, pos, transcript_pk_field_uid, transcript_pk_value);".format(w_table)
query += "CREATE INDEX {0}_idx_trx ON {0} USING btree (transcript_pk_field_uid, transcript_pk_value);".format(w_table)
query += "".join(["CREATE INDEX {0}_idx_s{1}_gt ON {0} USING btree (s{1}_gt);".format(w_table, i) for i in sample_ids])
query += "".join(["CREATE INDEX {0}_idx_s{1}_dp ON {0} USING btree (s{1}_dp);".format(w_table, i) for i in sample_ids])
execute(query)
# Update count stat of the analysis
query = "UPDATE analysis SET total_variants=(SELECT COUNT(*) FROM {} WHERE is_variant), status='ANNOTATING' WHERE id={}".format(w_table, analysis_id)
execute(query)
# Update working table by computing annotation
self.update_working_table(analysis_id, sample_ids, field_uids, dbs_uids, filter_ids, attributes)
def update_working_table(self, analysis_id, sample_ids, field_uids, dbs_uids, filter_ids=[], attributes={}):
"""
            Update the annotation of the working table of an analysis. The working table must already exist.
"""
from core.core import core
# Get list of fields to add in the wt
analysis = Analysis.from_id(analysis_id)
total = analysis.total_variants
diff_fields = []
diff_dbs = []
progress = {"msg": "wt_processing", "start": datetime.datetime.now().ctime(), "analysis_id": analysis_id, "step": 2, "progress_total": total, "progress_current": 0}
core.notify_all(progress)
try:
query = "SELECT column_name FROM information_schema.columns WHERE table_name='wt_{}'".format(analysis_id)
current_fields = [row.column_name if row.column_name[0] != '_' else row.column_name[1:] for row in execute(query)]
current_dbs = []
for f_uid in current_fields:
if f_uid in self.fields_map and self.fields_map[f_uid]['db_uid'] not in current_dbs:
current_dbs.append(self.fields_map[f_uid]['db_uid'])
for f_uid in field_uids:
if f_uid not in current_fields and self.fields_map[f_uid]['db_name_ui'] != 'Variant':
diff_fields.append('_{}'.format(f_uid))
if self.fields_map[f_uid]['db_uid'] not in diff_dbs and self.fields_map[f_uid]['db_uid'] not in current_dbs:
diff_dbs.append(self.fields_map[f_uid]['db_uid'])
except:
# working table doesn't exist
return False
# Alter working table to add new fields
pattern = "ALTER TABLE wt_{0} ADD COLUMN {1}{2} {3};"
query = ""
update_queries = []
for f_uid in diff_fields:
if f_uid[0] == '_':
f_uid = f_uid[1:]
query += pattern.format(analysis_id, '_', f_uid, self.sql_type_map[self.fields_map[f_uid]['type']])
for a_name in attributes.keys():
att_checked = []
for sid, att in attributes[a_name].items():
if 'attr_{}_{}'.format(a_name.lower(), att.lower()) in current_fields:
                    # We consider that if the first key_value for the attribute is defined, the whole attribute's columns are defined,
                    # so break and switch to the next attribute.
                    # That's why, before updating an attribute-value, we first need to drop all former columns in the wt.
break;
else:
if att not in att_checked:
att_checked.append(att)
query += pattern.format(analysis_id, 'attr_', "{}_{}".format(a_name.lower(), att.lower()), 'boolean DEFAULT False')
update_queries.append("UPDATE wt_{} SET attr_{}_{}=True WHERE s{}_gt IS NOT NULL; ".format(analysis_id, a_name.lower(), att.lower(), sid))
for f_id in filter_ids:
if 'filter_{}'.format(f_id) not in current_fields:
query += pattern.format(analysis_id, 'filter_', f_id, 'boolean DEFAULT False')
f_filter = json.loads(execute("SELECT filter FROM filter WHERE id={}".format(f_id)).first().filter)
q = self.build_query(analysis_id, analysis.reference_id, 'table', f_filter, [], None, None)
queries = q[0]
if len(queries) > 0:
                    # add all queries needed to create the temp tables used by the filter, if they do not exist yet
for q in queries[:-1]:
query += q
# add the query to update wt with the filter
                    # Note : As transcript_pk_field_uid and transcript_pk_value may be null, we cannot use the '=' operator and must use 'IS NOT DISTINCT FROM',
                    # as two expressions that return 'null' are not considered equal in SQL.
update_queries.append("UPDATE wt_{0} SET filter_{1}=True FROM ({2}) AS _sub WHERE wt_{0}.variant_id=_sub.variant_id AND wt_{0}.transcript_pk_field_uid IS NOT DISTINCT FROM _sub.transcript_pk_field_uid AND wt_{0}.transcript_pk_value IS NOT DISTINCT FROM _sub.transcript_pk_value ; ".format(analysis_id, f_id, queries[-1].strip()[:-1]))
if query != "":
# Add new annotation columns to the working table
execute(query)
progress.update({"step": 3})
core.notify_all(progress)
            # Loop over the new annotation databases: for new transcript databases, their transcripts must be added to the working table
fields_to_copy_from_variant = ["variant_id","bin","chr","pos","ref","alt","is_transition","sample_tlist","sample_tcount","sample_alist","sample_acount","depth"]
fields_to_copy_from_variant.extend(['s{}_gt'.format(s) for s in sample_ids])
fields_to_copy_from_variant.extend(['s{}_dp'.format(s) for s in sample_ids])
fields_to_copy_from_variant.extend(['attr_{}'.format(a.lower()) for a in attributes.keys()])
fields_to_copy_from_variant.extend(['filter_{}'.format(f) for f in filter_ids])
pattern = "INSERT INTO wt_{0} (annotated, transcript_pk_field_uid, transcript_pk_value, {1}) \
SELECT False, '{2}', {4}.transcript_id, {3} \
FROM (SELECT {1} FROM wt_{0} WHERE transcript_pk_field_uid IS NULL) AS _var \
INNER JOIN {4} ON _var.variant_id={4}.variant_id" # TODO : check if more optim to select with JOIN ON bin/chr/pos/ref/alt
for uid in diff_dbs:
if self.db_map[uid]["type"] == "transcript":
query = pattern.format(analysis_id,
', '.join(fields_to_copy_from_variant),
self.db_map[uid]["db_pk_field_uid"],
', '.join(["_var.{}".format(f) for f in fields_to_copy_from_variant]),
self.db_map[uid]["name"])
execute(query)
progress.update({"step": 4})
core.notify_all(progress)
# Create update query to retrieve annotation
UPDATE_LOOP_RANGE = 1000
to_update = {}
for f_uid in diff_fields:
if self.fields_map[f_uid[1:]]['db_uid'] not in to_update.keys():
to_update[self.fields_map[f_uid[1:]]['db_uid']] = []
to_update[self.fields_map[f_uid[1:]]['db_uid']].append({
"name": self.fields_map[f_uid[1:]]['name'],
"uid":f_uid[1:],
"db_name": self.fields_map[f_uid[1:]]['db_name']})
            # Loop to update the working table annotations (the requested fields are "packed" by annotation database to limit the number of queries)
for db_uid in to_update.keys():
if self.db_map[db_uid]["type"] == "transcript":
qset_ann = ', '.join(['_{0}=_ann._{0}'.format(f["uid"]) for f in to_update[db_uid]])
qslt_ann = ','.join(['{0}.{1} AS _{2}'.format(f['db_name'], f["name"], f["uid"]) for f in to_update[db_uid]])
qslt_var = "SELECT variant_id, bin, chr, pos, ref, alt, transcript_pk_value FROM wt_{0} WHERE annotated=False AND transcript_pk_field_uid='{1}' LIMIT {2}".format(analysis_id, self.db_map[self.fields_map[f_uid[1:]]['db_uid']]['db_pk_field_uid'], UPDATE_LOOP_RANGE)
qjoin = 'LEFT JOIN {0} '.format(self.db_map[db_uid]['join'].format('_var'))
query = "UPDATE wt_{0} SET annotated=True, {1} FROM (SELECT _var.variant_id, _var.transcript_pk_value, {2} FROM ({3}) AS _var {4}) AS _ann \
WHERE wt_{0}.variant_id=_ann.variant_id AND wt_{0}.transcript_pk_field_uid='{5}' AND wt_{0}.transcript_pk_value=_ann.transcript_pk_value".format(
analysis_id,
qset_ann,
qslt_ann,
qslt_var,
qjoin,
                        self.db_map[db_uid]['db_pk_field_uid'])
else:
qset_ann = ', '.join(['{0}=_ann._{0}'.format(f_uid) for f_uid in diff_fields])
qslt_ann = ','.join(['{0}.{1} AS _{2}'.format(self.fields_map[f_uid[1:]]['db_name'], self.fields_map[f_uid[1:]]['name'], f_uid) for f_uid in diff_fields])
qslt_var = 'SELECT variant_id, bin, chr, pos, ref, alt FROM wt_{0} WHERE annotated=False AND transcript_pk_field_uid IS NULL LIMIT {1}'.format(analysis_id, UPDATE_LOOP_RANGE)
qjoin = ' '.join(['LEFT JOIN {0} '.format(self.db_map[db_uid]['join'].format('_var'), self.db_map[db_uid]) for db_uid in diff_dbs])
query = "UPDATE wt_{0} SET annotated=True, {1} FROM (SELECT _var.variant_id, {2} FROM ({3}) AS _var {4}) AS _ann WHERE wt_{0}.variant_id=_ann.variant_id".format(analysis_id, qset_ann, qslt_ann, qslt_var, qjoin)
if qset_ann != "":
                    # Mark all variants as not annotated (to be able to do a "resumable" update)
execute("UPDATE wt_{} SET annotated=False".format(analysis_id))
for page in range(0, total, UPDATE_LOOP_RANGE):
execute(query)
progress.update({"progress_current": page})
core.notify_all(progress)
progress.update({"step": 5, "progress_current": total})
core.notify_all(progress)
# Apply queries to update attributes and filters columns in the wt
if len(update_queries) > 0:
execute("".join(update_queries))
progress.update({"step": 6})
core.notify_all(progress)
        # Update the analysis status
query = "UPDATE analysis SET status='READY' WHERE id={}".format(analysis_id)
execute(query)
def request(self, analysis_id, mode, filter_json, fields=None, order=None, limit=100, offset=0, count=False):
"""
"""
        # Check parameters: if no fields are provided, select by default the first available field to avoid errors
if fields is None:
fields = [next(iter(self.fields_map.keys()))]
if type(analysis_id) != int or analysis_id <= 0:
analysis_id = None
if mode not in ["table", "list"]:
mode = "table"
# Get analysis data and check status if ok to do filtering
analysis = Analysis.from_id(analysis_id)
if analysis is None:
raise RegovarException("Not able to retrieve analysis with provided id: {}".format(analysis_id))
# Parse data to generate sql query and retrieve list of needed annotations databases/fields
query, field_uids, dbs_uids, sample_ids, filter_ids, attributes = self.build_query(analysis_id, analysis.reference_id, mode, filter_json, fields, order, limit, offset, count)
# Prepare database working table
if analysis.status is None or analysis.status == '':
self.create_working_table(analysis_id, sample_ids, field_uids, dbs_uids, filter_ids, attributes)
else:
self.update_working_table(analysis_id, sample_ids, field_uids, dbs_uids, filter_ids, attributes)
# Execute query
sql_result = None
with Timer() as t:
sql_result = execute(' '.join(query))
log("---\nFields:\n{0}\nFilter:\n{1}\nQuery:\n{2}\nRequest query: {3}".format(fields, filter_json, '\n'.join(query), t))
# Save filter in analysis settings
if not count and analysis_id > 0:
settings = {}
try:
settings = json.loads(execute("SELECT settings FROM analysis WHERE id={}".format(analysis_id)).first().settings)
settings["filter"] = filter_json
settings["fields"] = fields
settings["order"] = [] if order is None else order
execute("UPDATE analysis SET {0}update_date=CURRENT_TIMESTAMP WHERE id={1}".format("settings='{0}', ".format(json.dumps(settings)), analysis_id))
except:
# TODO: log error
err("Not able to save current filter")
# Get result
if count:
result = sql_result.first()[0]
else:
result = []
with Timer() as t:
if sql_result is not None:
for row in sql_result:
entry = {"id" : "{}_{}_{}".format(row.variant_id, row.transcript_pk_field_uid, row.transcript_pk_value )}
for f_uid in fields:
                            # Manage special case for fields split by sample
if self.fields_map[f_uid]['name'].startswith('s{}_'):
pattern = "row." + self.fields_map[f_uid]['name']
r = {}
for sid in sample_ids:
r[sid] = FilterEngine.parse_result(eval(pattern.format(sid)))
entry[f_uid] = r
else:
if self.fields_map[f_uid]['db_name_ui'] == 'Variant':
entry[f_uid] = FilterEngine.parse_result(eval("row.{}".format(self.fields_map[f_uid]['name'])))
else:
entry[f_uid] = FilterEngine.parse_result(eval("row._{}".format(f_uid)))
result.append(entry)
log("Result processing: {0}\nTotal result: {1}".format(t, "-"))
return result
def build_query(self, analysis_id, reference_id, mode, filter, fields, order=None, limit=100, offset=0, count=False):
"""
            This method builds the sql query according to the provided parameters, and also builds several lists with the ids of
            fields, databases, samples, etc.: all the information that the analysis needs to work.
"""
# Data that will be computed and returned by this method !
query = [] # sql queries that correspond to the provided parameters (we will have several queries if need to create temp tables)
field_uids = [] # list of annotation field's uids that need to be present in the analysis working table
db_uids = [] # list of annotation databases uids used for the analysis
sample_ids = [] # list of sample's ids used for the analysis
filter_ids = [] # list of saved filter's ids for this analysis
attributes = {} # list of attributes (and their values by sample) defined for this analysis
# Retrieve sample ids of the analysis
for row in execute("select sample_id from analysis_sample where analysis_id={0}".format(analysis_id)):
sample_ids.append(str(row.sample_id))
# Retrieve attributes of the analysis
for row in execute("select sample_id, value, name from attribute where analysis_id={0}".format(analysis_id)):
if row.name not in attributes.keys():
attributes[row.name] = {row.sample_id: row.value}
else:
attributes[row.name].update({row.sample_id: row.value})
# Init fields uid and db uids with the defaults annotations fields according to the reference (hg19 by example)
# for row in execute("SELECT d.uid AS duid, f.uid FROM annotation_database d INNER JOIN annotation_field f ON d.uid=f.database_uid WHERE d.reference_id={} AND d.type='variant' AND f.wt_default=True".format(reference_id)):
# if row.duid not in db_uids:
# db_uids.append(row.duid)
# field_uids.append(row.uid)
# Retrieve saved filter's ids of the analysis - and parse their filter to get list of dbs/fields used by filters
for row in execute("select id, filter from filter where analysis_id={0} ORDER BY id ASC".format(analysis_id)): # ORDER BY is important as a filter can "called" an oldest filter to be build.
filter_ids.append(row.id)
q, f, d = self.parse_filter(analysis_id, mode, sample_ids, row.filter, fields, None, None)
field_uids = array_merge(field_uids, f)
db_uids = array_merge(db_uids, d)
# Parse the current filter
query, f, d = self.parse_filter(analysis_id, mode, sample_ids, filter, fields, order, limit, offset, count)
field_uids = array_merge(field_uids, f)
db_uids = array_merge(db_uids, d)
        # return the query and all useful data about the annotations needed to execute the query
return query, field_uids, db_uids, sample_ids, filter_ids, attributes
def parse_filter(self, analysis_id, mode, sample_ids, filters, fields=[], order=None, limit=100, offset=0, count=False):
"""
            This method parses the json filter and returns the corresponding PostgreSQL query, together with the lists of field and database uids used by the query
            (those databases/fields must be present in the working table for the query to run successfully).
"""
# Init some global variables
wt = 'wt_{}'.format(analysis_id)
query = ""
field_uids = []
db_uids = []
with_trx = False
# Build SELECT
fields_names = []
for f_uid in fields:
if self.fields_map[f_uid]["db_uid"] not in db_uids:
db_uids.append(self.fields_map[f_uid]["db_uid"])
field_uids.append(f_uid)
if self.fields_map[f_uid]['db_name_ui'] == 'Variant':
                # Manage special case for fields split by sample
if self.fields_map[f_uid]['name'].startswith('s{}_'):
fields_names.extend(['{}.'.format(wt) + self.fields_map[f_uid]['name'].format(s) for s in sample_ids])
else:
fields_names.append('{}.{}'.format(wt, self.fields_map[f_uid]["name"]))
else:
with_trx = with_trx or self.fields_map[f_uid]["db_type"] == "transcript"
fields_names.append('{}._{}'.format(wt, f_uid))
q_select = 'variant_id, transcript_pk_field_uid, transcript_pk_value{} {}'.format(',' if len(fields_names) > 0 else '', ', '.join(fields_names))
# Build FROM/JOIN
q_from = wt
# Build WHERE
temporary_to_import = {}
def check_field_uid(data):
if data[0] == 'field':
if self.fields_map[data[1]]["db_uid"] not in db_uids:
db_uids.append(self.fields_map[data[1]]["db_uid"])
field_uids.append(data[1])
def build_filter(data):
"""
                Recursive method that builds the query from the filter json data at operator level
"""
operator = data[0]
if operator in ['AND', 'OR']:
if len(data[1]) == 0:
return ''
return ' (' + FilterEngine.op_map[operator].join([build_filter(f) for f in data[1]]) + ') '
elif operator in ['==', '!=', '>', '<', '>=', '<=']:
                # If comparing with a field, the field MUST BE the first operand
if data[1][0] == 'field':
metadata = self.fields_map[data[1][1]]
else:
metadata = {"type": "string", "name":""}
check_field_uid(data[1])
check_field_uid(data[2])
                # Manage special case for fields split by sample
if metadata['name'].startswith('s{}_'):
                    # With these special fields, we don't allow field-to-field comparison.
                    # The first operand shall always be the special field, and the second shall be anything except another special field.
return ' (' + ' OR '.join(['{0}{1}{2}'.format(metadata['name'].format(s), FilterEngine.op_map[operator], parse_value(metadata["type"], data[2])) for s in sample_ids]) + ') '
else:
return '{0}{1}{2}'.format(parse_value(metadata["type"], data[1]), FilterEngine.op_map[operator], parse_value(metadata["type"], data[2]))
elif operator in ['~', '!~']:
check_field_uid(data[1])
check_field_uid(data[2])
return '{0}{1}{2}'.format(parse_value('string', data[1]), FilterEngine.op_map[operator], parse_value('string%', data[2]))
elif operator in ['IN', 'NOTIN']:
tmp_table = get_tmp_table(data[1], data[2])
temporary_to_import[tmp_table]['where'] = FilterEngine.op_map[operator].format(tmp_table, wt)
if data[1] == 'site':
temporary_to_import[tmp_table]['from'] = " LEFT JOIN {1} ON {0}.bin={1}.bin AND {0}.chr={1}.chr AND {0}.pos={1}.pos".format(wt, tmp_table)
else: # if data[1] == 'variant':
temporary_to_import[tmp_table]['from'] = " LEFT JOIN {1} ON {0}.bin={1}.bin AND {0}.chr={1}.chr AND {0}.pos={1}.pos AND {0}.ref={1}.ref AND {0}.alt={1}.alt".format(wt, tmp_table)
return temporary_to_import[tmp_table]['where']
def get_tmp_table(mode, data):
"""
                Parse json data to build the temp table used for the set operations IN/NOTIN
mode: site or variant
data: json data about the temp table to create
"""
ttable_quer_map = "CREATE TABLE IF NOT EXISTS {0} AS {1}; "
if data[0] == 'sample':
tmp_table_name = "tmp_sample_{0}_{1}".format(data[1], mode)
if mode == 'site':
tmp_table_query = ttable_quer_map.format(tmp_table_name, "SELECT DISTINCT {0}.bin, {0}.chr, {0}.pos FROM {0} WHERE {0}.s{1}_gt IS NOT NULL".format(wt, data[1]))
else: # if mode = 'variant':
tmp_table_query = ttable_quer_map.format(tmp_table_name, "SELECT DISTINCT {0}.bin, {0}.chr, {0}.pos, {0}.ref, {0}.alt FROM {0} WHERE {0}.s{1}_gt IS NOT NULL".format(wt, data[1]))
elif data[0] == 'filter':
tmp_table_name = "tmp_filter_{0}".format(data[1])
if mode == 'site':
tmp_table_query = ttable_quer_map.format(tmp_table_name, "SELECT DISTINCT {0}.bin, {0}.chr, {0}.pos FROM {0} WHERE {0}.filter_{1}=True".format(wt, data[1]))
else: # if mode = 'variant':
tmp_table_query = ttable_quer_map.format(tmp_table_name, "SELECT DISTINCT {0}.bin, {0}.chr, {0}.pos, {0}.ref, {0}.alt FROM {0} WHERE {0}.filter_{1}=True".format(wt, data[1]))
elif data[0] == 'attribute':
key, value = data[1].split(':')
tmp_table_name = "tmp_attribute_{0}_{1}_{2}_{3}".format(analysis_id, key, value, mode)
if mode == 'site':
tmp_table_query = ttable_quer_map.format(tmp_table_name, "SELECT DISTINCT {0}.bin, {0}.chr, {0}.pos FROM {0} WHERE {0}.attr_{1}='{2}'".format(wt, key, value))
else: # if mode = 'variant':
tmp_table_query = ttable_quer_map.format(tmp_table_name, "SELECT DISTINCT {0}.bin, {0}.chr, {0}.pos, {0}.ref, {0}.alt FROM {0} WHERE {0}.attr_{1}='{2}'".format(wt, key, value))
temporary_to_import[tmp_table_name] = {'query': tmp_table_query + "CREATE INDEX IF NOT EXISTS {0}_idx_var ON {0} USING btree (bin, chr, pos);".format(tmp_table_name)}
return tmp_table_name
def parse_value(ftype, data):
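            """
                Convert an operand (['field', <field_uid>] or ['value', <value>]) into its SQL
                representation (column name or literal) according to the expected field type.
            """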
if data[0] == 'field':
if self.fields_map[data[1]]["type"] == ftype:
if self.fields_map[data[1]]['db_name_ui'] == 'Variant':
return "{0}".format(self.fields_map[data[1]]["name"])
else:
return "_{0}".format(data[1])
if data[0] == 'value':
if ftype in ['int', 'float', 'enum', 'percent']:
return str(data[1])
elif ftype == 'string':
return "'{0}'".format(data[1])
elif ftype == 'string%':
return "'%%{0}%%'".format(data[1])
elif ftype == 'range' and len(data) == 3:
return 'int8range({0}, {1})'.format(data[1], data[2])
raise RegovarException("FilterEngine.request.parse_value - Unknow type: {0} ({1})".format(ftype, data))
# q_where = ""
# if len(sample_ids) == 1:
# q_where = "{0}.sample_id={1}".format(wt, sample_ids[0])
# elif len(sample_ids) > 1:
# q_where = "{0}.sample_id IN ({1})".format(wt, ','.join(sample_ids))
q_where = build_filter(filters)
if q_where is not None and len(q_where.strip()) > 0:
q_where = "WHERE " + q_where
# Build FROM/JOIN according to the list of used annotations databases
q_from += " ".join([t['from'] for t in temporary_to_import.values()])
# Build ORDER BY
        # TODO : currently, it's not possible to do "order by" on special fields (GT and DP) because they are split by sample
q_order = ""
if order is not None and len(order) > 0:
orders = []
for f_uid in order:
asc = 'ASC'
if f_uid[0] == '-':
f_uid = f_uid[1:]
asc = 'DESC'
if self.fields_map[f_uid]['db_name_ui'] == 'Variant':
                    # Manage special case for fields split by sample
if self.fields_map[f_uid]['name'].startswith('s{}_'):
pass
else:
orders.append('{} {}'.format(self.fields_map[f_uid]["name"], asc))
else:
orders.append('_{} {}'.format(f_uid, asc))
q_order = 'ORDER BY {}'.format(', '.join(orders))
# build final query
query_tpm = [t['query'] for t in temporary_to_import.values()]
if count:
query_req = "SELECT DISTINCT {0} FROM {1} {2}".format(q_select, q_from, q_where)
query = query_tpm + ['SELECT COUNT(*) FROM ({0}) AS sub;'.format(query_req)]
else:
query_req = "SELECT DISTINCT {0} FROM {1} {2} {3} {4} {5};".format(q_select, q_from, q_where, q_order, 'LIMIT {}'.format(limit) if limit is not None else '', 'OFFSET {}'.format(offset) if offset is not None else '')
query = query_tpm + [query_req]
return query, field_uids, db_uids
@staticmethod
def get_hasname(analysis_id, mode, fields, filter_json):
# clean and sort fields list
        clean_fields = list(set(fields))
        clean_fields.sort()
string_id = "{0}{1}{2}{3}".format(analysis_id, mode, clean_fields, json.dumps(filter_json))
return hashlib.md5(string_id.encode()).hexdigest()
@staticmethod
def parse_result(value):
"""
            Parse a value returned by sqlAlchemy and cast it, if needed, into "simple" python types
"""
# if value is None:
# return ""
if type(value) == psycopg2._range.NumericRange:
return (value.lower, value.upper)
return value
| REGOVAR/Annso | annso/core/annso/filter_manager.py | Python | agpl-3.0 | 34,605 |
<?php
namespace hierarchy;
class Controller_Table extends \AbstractController {
public $class_name;
public $child_ref;
public $parent_ref;
function init(){
parent::init();
$this->owner->hierarchy_controller=$this;
}
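    // Use $field as the parent reference of the hierarchy: register the hasOne/hasMany
    // relations on the owner model (if not already defined) and add a '<child>_cnt' count column.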
function useField($field){
if(!$this->class_name)$this->class_name=preg_replace('/^Model_/', '', get_class($this->owner)); // remove "Model_" from class
if(!$this->child_ref)$this->child_ref=$this->class_name;
$this->parent_ref=$field;
if(!$this->owner->hasElement($this->parent_ref))$this->owner->hasOne($this->class_name,$field)
->display(array('form'=>'hierarchy/drilldown'));
if(!$this->owner->hasElement($this->child_ref))$this->owner->hasMany($this->child_ref,$field);
$this->addCountColumn(strtolower($this->child_ref).'_cnt');
}
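    // Add a calculated column $f which counts, for each record, the child records referencing it.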
function addCountColumn($f){
$self=$this;
$this->owner->addExpression($f)->set(function($m)use($self,$f){
$m=$self->owner->newInstance();
            //$m->table_alias=$f; // Imants: This still doesn't work as expected and creates Exception_DB when we use Models where the model name is not the same as the table name or table alias.
$ref=$self->owner->getElement($self->child_ref);
$m->addCondition($ref->their_field,$self->owner->getElement($ref->our_field));
return $m->count();
});
}
}
| xavoctechnocratspvtltd/property | atk4-addons/hierarchy/lib/Controller/Table.php | PHP | agpl-3.0 | 1,421 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 ADHOC SA (http://www.adhoc.com.ar)
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Report Configurator - Stock',
'version': '8.0.1.0.0',
'category': 'Reporting Subsystem',
'sequence': 14,
'summary': '',
'description': """
Report Configurator - Stock
=============================
""",
'author': 'ADHOC SA',
'website': 'www.adhoc.com.ar',
'images': [
],
'depends': [
'report_extended',
'stock_voucher',
],
'data': [
'views/report_view.xml',
],
'demo': [
],
'test': [
],
'installable': True,
'auto_install': True,
'application': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| HBEE/odoo-addons | report_extended_stock/__openerp__.py | Python | agpl-3.0 | 1,603 |
//#############################################################################
//# #
//# Copyright (C) <2015> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//# IMS MAXIMS provides absolutely NO GUARANTEE OF THE CLINICAL SAFTEY of #
//# this program. Users of this software do so entirely at their own risk. #
//# IMS MAXIMS only ensures the Clinical Safety of unaltered run-time #
//# software that it builds, deploys and maintains. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5589.25814)
// Copyright (C) 1995-2015 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.clinical.forms.outpatientstreatmentplan;
import java.io.Serializable;
import ims.framework.Context;
import ims.framework.FormName;
import ims.framework.FormAccessLogic;
public class BaseAccessLogic extends FormAccessLogic implements Serializable
{
private static final long serialVersionUID = 1L;
public final void setContext(Context context, FormName formName)
{
form = new CurrentForm(new GlobalContext(context), new CurrentForms());
engine = new CurrentEngine(formName);
}
public boolean isAccessible()
{
if(!form.getGlobalContext().Core.getPatientShortIsNotNull())
return false;
if(!form.getGlobalContext().Core.getCurrentCareContextIsNotNull())
return false;
return true;
}
public boolean isReadOnly()
{
return false;
}
public CurrentEngine engine;
public CurrentForm form;
public final static class CurrentForm implements Serializable
{
private static final long serialVersionUID = 1L;
CurrentForm(GlobalContext globalcontext, CurrentForms forms)
{
this.globalcontext = globalcontext;
this.forms = forms;
}
public final GlobalContext getGlobalContext()
{
return globalcontext;
}
public final CurrentForms getForms()
{
return forms;
}
private GlobalContext globalcontext;
private CurrentForms forms;
}
public final static class CurrentEngine implements Serializable
{
private static final long serialVersionUID = 1L;
CurrentEngine(FormName formName)
{
this.formName = formName;
}
public final FormName getFormName()
{
return formName;
}
private FormName formName;
}
public static final class CurrentForms implements Serializable
{
private static final long serialVersionUID = 1L;
protected final class LocalFormName extends FormName
{
private static final long serialVersionUID = 1L;
protected LocalFormName(int value)
{
super(value);
}
}
private CurrentForms()
{
Clinical = new ClinicalForms();
OCRR = new OCRRForms();
}
public final class ClinicalForms implements Serializable
{
private static final long serialVersionUID = 1L;
private ClinicalForms()
{
OutPatientsTreatmentPlan = new LocalFormName(122120);
MedicationOnAdmission = new LocalFormName(122126);
DocumentGeneration = new LocalFormName(122140);
DiagnosisComplications = new LocalFormName(123117);
PatientProblems = new LocalFormName(123120);
Procedures = new LocalFormName(123119);
}
public final FormName OutPatientsTreatmentPlan;
public final FormName MedicationOnAdmission;
public final FormName DocumentGeneration;
public final FormName DiagnosisComplications;
public final FormName PatientProblems;
public final FormName Procedures;
}
public final class OCRRForms implements Serializable
{
private static final long serialVersionUID = 1L;
private OCRRForms()
{
MyOrder = new LocalFormName(116111);
}
public final FormName MyOrder;
}
public ClinicalForms Clinical;
public OCRRForms OCRR;
}
}
| FreudianNM/openMAXIMS | Source Library/openmaxims_workspace/Clinical/src/ims/clinical/forms/outpatientstreatmentplan/BaseAccessLogic.java | Java | agpl-3.0 | 5,236 |
/*
* RapidMiner
*
* Copyright (C) 2001-2013 by Rapid-I and the contributors
*
* Complete list of developers available at our web site:
*
* http://rapid-i.com
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see http://www.gnu.org/licenses/.
*/
package com.rapidminer.operator.preprocessing.transformation.aggregation;
import com.rapidminer.example.Attribute;
import com.rapidminer.example.table.DoubleArrayDataRow;
import com.rapidminer.tools.Ontology;
/**
* This class implements the Log Product Aggregation function. This will calculate the
 * logarithm of the product of a source attribute for each group. This can help in situations where
* the normal product would exceed the numerical range and should be used as an intermediate result.
*
 * This obviously only works on values that are all greater than 0.
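 * (Since log(x1 * x2 * ... * xn) = log(x1) + log(x2) + ... + log(xn), the product can be
 * accumulated as a sum of logarithms without leaving the numerical range.)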
*
* @author Sebastian Land
*/
public class LogProductAggregationFunction extends NumericalAggregationFunction {
public static final String FUNCTION_LOG_PRODUCT = "logProduct";
public LogProductAggregationFunction(Attribute sourceAttribute, boolean ignoreMissings, boolean countOnlyDisctinct) {
super(sourceAttribute, ignoreMissings, countOnlyDisctinct, FUNCTION_LOG_PRODUCT, FUNCTION_SEPARATOR_OPEN, FUNCTION_SEPARATOR_CLOSE);
}
public LogProductAggregationFunction(Attribute sourceAttribute, boolean ignoreMissings, boolean countOnlyDisctinct, String functionName, String separatorOpen, String separatorClose) {
super(sourceAttribute, ignoreMissings, countOnlyDisctinct, functionName, separatorOpen, separatorClose);
}
@Override
public Aggregator createAggregator() {
return new LogProductAggregator(this);
}
@Override
public void setDefault(Attribute attribute, DoubleArrayDataRow row) {
row.set(attribute, 0);
}
@Override
protected int getTargetValueType(int sourceValueType) {
return Ontology.REAL;
}
@Override
public boolean isCompatible() {
return getSourceAttribute().isNumerical();
}
}
| aborg0/RapidMiner-Unuk | src/com/rapidminer/operator/preprocessing/transformation/aggregation/LogProductAggregationFunction.java | Java | agpl-3.0 | 2,633 |
//-----------------------------------------------------------------------------
// Platform-dependent functionality.
//
// Copyright 2017 whitequark
//-----------------------------------------------------------------------------
#if defined(__APPLE__)
// Include Apple headers before solvespace.h to avoid identifier clashes.
# include <CoreFoundation/CFString.h>
# include <CoreFoundation/CFURL.h>
# include <CoreFoundation/CFBundle.h>
#endif
#include "solvespace.h"
#include "config.h"
#if defined(WIN32)
// Conversely, include Microsoft headers after solvespace.h to avoid clashes.
# include <windows.h>
#else
# include <unistd.h>
# include <sys/stat.h>
#endif
namespace SolveSpace {
namespace Platform {
//-----------------------------------------------------------------------------
// UTF-8 ⟷ UTF-16 conversion, on Windows.
//-----------------------------------------------------------------------------
#if defined(WIN32)
std::string Narrow(const wchar_t *in)
{
std::string out;
DWORD len = WideCharToMultiByte(CP_UTF8, 0, in, -1, NULL, 0, NULL, NULL);
out.resize(len - 1);
ssassert(WideCharToMultiByte(CP_UTF8, 0, in, -1, &out[0], len, NULL, NULL),
"Invalid UTF-16");
return out;
}
std::string Narrow(const std::wstring &in)
{
if(in == L"") return "";
std::string out;
out.resize(WideCharToMultiByte(CP_UTF8, 0, &in[0], (int)in.length(),
NULL, 0, NULL, NULL));
ssassert(WideCharToMultiByte(CP_UTF8, 0, &in[0], (int)in.length(),
&out[0], (int)out.length(), NULL, NULL),
"Invalid UTF-16");
return out;
}
std::wstring Widen(const char *in)
{
std::wstring out;
DWORD len = MultiByteToWideChar(CP_UTF8, 0, in, -1, NULL, 0);
out.resize(len - 1);
ssassert(MultiByteToWideChar(CP_UTF8, 0, in, -1, &out[0], len),
"Invalid UTF-8");
return out;
}
std::wstring Widen(const std::string &in)
{
if(in == "") return L"";
std::wstring out;
out.resize(MultiByteToWideChar(CP_UTF8, 0, &in[0], (int)in.length(), NULL, 0));
ssassert(MultiByteToWideChar(CP_UTF8, 0, &in[0], (int)in.length(),
&out[0], (int)out.length()),
"Invalid UTF-8");
return out;
}
#endif
//-----------------------------------------------------------------------------
// Path utility functions.
//-----------------------------------------------------------------------------
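// Split a string into components on the given separator character.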
static std::vector<std::string> Split(const std::string &joined, char separator) {
std::vector<std::string> parts;
size_t oldpos = 0, pos = 0;
while(true) {
oldpos = pos;
pos = joined.find(separator, pos);
if(pos == std::string::npos) break;
parts.push_back(joined.substr(oldpos, pos - oldpos));
pos += 1;
}
if(oldpos != joined.length() - 1) {
parts.push_back(joined.substr(oldpos));
}
return parts;
}
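// Join components with the given separator character.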
static std::string Concat(const std::vector<std::string> &parts, char separator) {
std::string joined;
bool first = true;
for(auto &part : parts) {
if(!first) joined += separator;
joined += part;
first = false;
}
return joined;
}
//-----------------------------------------------------------------------------
// Path manipulation.
//-----------------------------------------------------------------------------
#if defined(WIN32)
const char SEPARATOR = '\\';
#else
const char SEPARATOR = '/';
#endif
Path Path::From(std::string raw) {
Path path = { raw };
return path;
}
Path Path::CurrentDirectory() {
#if defined(WIN32)
// On Windows, OpenFile needs an absolute UNC path proper, so get that.
std::wstring rawW;
rawW.resize(GetCurrentDirectoryW(0, NULL));
DWORD length = GetCurrentDirectoryW((int)rawW.length(), &rawW[0]);
ssassert(length > 0 && length == rawW.length() - 1, "Cannot get current directory");
rawW.resize(length);
return From(Narrow(rawW));
#else
char *raw = getcwd(NULL, 0);
ssassert(raw != NULL, "Cannot get current directory");
Path path = From(raw);
free(raw);
return path;
#endif
}
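// Returns the part of the path after the last separator (the file name).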
std::string Path::FileName() const {
std::string fileName = raw;
size_t slash = fileName.rfind(SEPARATOR);
if(slash != std::string::npos) {
fileName = fileName.substr(slash + 1);
}
return fileName;
}
std::string Path::FileStem() const {
std::string baseName = FileName();
size_t dot = baseName.rfind('.');
if(dot != std::string::npos) {
baseName = baseName.substr(0, dot);
}
return baseName;
}
std::string Path::Extension() const {
size_t dot = raw.rfind('.');
if(dot != std::string::npos) {
return raw.substr(dot + 1);
}
return "";
}
bool Path::HasExtension(std::string theirExt) const {
std::string ourExt = Extension();
std::transform(ourExt.begin(), ourExt.end(), ourExt.begin(), ::tolower);
std::transform(theirExt.begin(), theirExt.end(), theirExt.begin(), ::tolower);
return ourExt == theirExt;
}
Path Path::WithExtension(std::string ext) const {
Path withExt = *this;
size_t dot = withExt.raw.rfind('.');
if(dot != std::string::npos) {
withExt.raw.erase(dot);
}
withExt.raw += ".";
withExt.raw += ext;
return withExt;
}
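// Determines where the root prefix of the path ends (drive letter, UNC share, or leading '/');
// *pos is set to npos for relative paths.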
static void FindPrefix(const std::string &raw, size_t *pos) {
*pos = std::string::npos;
#if defined(WIN32)
if(raw.size() >= 7 && raw[2] == '?' && raw[3] == '\\' &&
isalpha(raw[4]) && raw[5] == ':' && raw[6] == '\\') {
*pos = 7;
} else if(raw.size() >= 3 && isalpha(raw[0]) && raw[1] == ':' && raw[2] == '\\') {
*pos = 3;
} else if(raw.size() >= 2 && raw[0] == '\\' && raw[1] == '\\') {
size_t slashAt = raw.find('\\', 2);
if(slashAt != std::string::npos) {
*pos = raw.find('\\', slashAt + 1);
}
}
#else
if(raw.size() >= 1 && raw[0] == '/') {
*pos = 1;
}
#endif
}
bool Path::IsAbsolute() const {
size_t pos;
FindPrefix(raw, &pos);
return pos != std::string::npos;
}
// Removes one component from the end of the path.
// Returns an empty path if the path consists only of a root.
Path Path::Parent() const {
Path parent = { raw };
if(!parent.raw.empty() && parent.raw.back() == SEPARATOR) {
parent.raw.pop_back();
}
size_t slash = parent.raw.rfind(SEPARATOR);
if(slash != std::string::npos) {
parent.raw = parent.raw.substr(0, slash + 1);
} else {
parent.raw.clear();
}
if(IsAbsolute() && !parent.IsAbsolute()) {
return From("");
}
return parent;
}
// Concatenates a component to this path.
// Returns an empty path if this path or the component is empty.
Path Path::Join(const std::string &component) const {
ssassert(component.find(SEPARATOR) == std::string::npos,
"Use the Path::Join(const Path &) overload to append an entire path");
return Join(Path::From(component));
}
// Concatenates a relative path to this path.
// Returns an empty path if either path is empty, or the other path is absolute.
Path Path::Join(const Path &other) const {
if(IsEmpty() || other.IsEmpty() || other.IsAbsolute()) {
return From("");
}
Path joined = { raw };
if(joined.raw.back() != SEPARATOR) {
joined.raw += SEPARATOR;
}
joined.raw += other.raw;
return joined;
}
// Expands the "." and ".." components in this path.
// On Windows, additionally prepends the UNC prefix to absolute paths without one.
// Returns an empty path if a ".." component would escape from the root.
Path Path::Expand(bool fromCurrentDirectory) const {
Path source;
Path expanded;
if(fromCurrentDirectory && !IsAbsolute()) {
source = CurrentDirectory().Join(*this);
} else {
source = *this;
}
size_t splitAt;
FindPrefix(source.raw, &splitAt);
if(splitAt != std::string::npos) {
expanded.raw = source.raw.substr(0, splitAt);
} else {
splitAt = 0;
}
std::vector<std::string> expandedComponents;
for(std::string component : Split(source.raw.substr(splitAt), SEPARATOR)) {
if(component == ".") {
// skip
} else if(component == "..") {
if(!expandedComponents.empty()) {
expandedComponents.pop_back();
} else {
return From("");
}
} else if(!component.empty()) {
expandedComponents.push_back(component);
}
}
if(expanded.IsEmpty()) {
if(expandedComponents.empty()) {
expandedComponents.push_back(".");
}
expanded = From(Concat(expandedComponents, SEPARATOR));
} else if(!expandedComponents.empty()) {
expanded = expanded.Join(From(Concat(expandedComponents, SEPARATOR)));
}
#if defined(WIN32)
if(expanded.IsAbsolute() && expanded.raw.substr(0, 2) != "\\\\") {
expanded.raw = "\\\\?\\" + expanded.raw;
}
#endif
return expanded;
}
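// Normalizes a path string for comparison, lowercasing it on case-insensitive filesystems (Windows, macOS).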
static std::string FilesystemNormalize(const std::string &str) {
#if defined(WIN32)
std::wstring strW = Widen(str);
std::transform(strW.begin(), strW.end(), strW.begin(), towlower);
return Narrow(strW);
#elif defined(__APPLE__)
CFMutableStringRef cfStr =
CFStringCreateMutableCopy(NULL, 0,
CFStringCreateWithBytesNoCopy(NULL, (const UInt8*)str.data(), str.size(),
kCFStringEncodingUTF8, /*isExternalRepresentation=*/false, kCFAllocatorNull));
CFStringLowercase(cfStr, NULL);
std::string normalizedStr;
normalizedStr.resize(CFStringGetMaximumSizeOfFileSystemRepresentation(cfStr));
CFStringGetFileSystemRepresentation(cfStr, &normalizedStr[0], normalizedStr.size());
normalizedStr.erase(normalizedStr.find('\0'));
return normalizedStr;
#else
return str;
#endif
}
bool Path::Equals(const Path &other) const {
return FilesystemNormalize(raw) == FilesystemNormalize(other.raw);
}
// Returns a relative path from a given base path.
// Returns an empty path if any of the paths is not absolute, or
// if they belong to different roots, or
// if they cannot be expanded.
Path Path::RelativeTo(const Path &base) const {
Path expanded = Expand();
Path baseExpanded = base.Expand();
if(!(expanded.IsAbsolute() && baseExpanded.IsAbsolute())){
return From("");
}
size_t splitAt;
FindPrefix(expanded.raw, &splitAt);
size_t baseSplitAt;
FindPrefix(baseExpanded.raw, &baseSplitAt);
if(FilesystemNormalize(expanded.raw.substr(0, splitAt)) !=
FilesystemNormalize(baseExpanded.raw.substr(0, splitAt))) {
return From("");
}
std::vector<std::string> components =
Split(expanded.raw.substr(splitAt), SEPARATOR);
std::vector<std::string> baseComponents =
Split(baseExpanded.raw.substr(baseSplitAt), SEPARATOR);
size_t common;
for(common = 0; common < baseComponents.size() &&
common < components.size(); common++) {
if(FilesystemNormalize(baseComponents[common]) !=
FilesystemNormalize(components[common])) {
break;
}
}
std::vector<std::string> resultComponents;
for(size_t i = common; i < baseComponents.size(); i++) {
resultComponents.push_back("..");
}
resultComponents.insert(resultComponents.end(),
components.begin() + common, components.end());
if(resultComponents.empty()) {
resultComponents.push_back(".");
}
return From(Concat(resultComponents, SEPARATOR));
}
Path Path::FromPortable(const std::string &repr) {
return From(Concat(Split(repr, '/'), SEPARATOR));
}
std::string Path::ToPortable() const {
ssassert(!IsAbsolute(), "absolute paths cannot be made portable");
return Concat(Split(raw, SEPARATOR), '/');
}
//-----------------------------------------------------------------------------
// File manipulation.
//-----------------------------------------------------------------------------
FILE *OpenFile(const Platform::Path &filename, const char *mode) {
ssassert(filename.raw.length() == strlen(filename.raw.c_str()),
"Unexpected null byte in middle of a path");
#if defined(WIN32)
return _wfopen(Widen(filename.Expand().raw).c_str(), Widen(mode).c_str());
#else
return fopen(filename.raw.c_str(), mode);
#endif
}
void RemoveFile(const Platform::Path &filename) {
ssassert(filename.raw.length() == strlen(filename.raw.c_str()),
"Unexpected null byte in middle of a path");
#if defined(WIN32)
_wremove(Widen(filename.Expand().raw).c_str());
#else
remove(filename.raw.c_str());
#endif
}
bool ReadFile(const Platform::Path &filename, std::string *data) {
FILE *f = OpenFile(filename, "rb");
if(f == NULL) return false;
fseek(f, 0, SEEK_END);
data->resize(ftell(f));
fseek(f, 0, SEEK_SET);
fread(&(*data)[0], 1, data->size(), f);
fclose(f);
return true;
}
bool WriteFile(const Platform::Path &filename, const std::string &data) {
FILE *f = OpenFile(filename, "wb");
if(f == NULL) return false;
fwrite(&data[0], 1, data.size(), f);
fclose(f);
return true;
}
//-----------------------------------------------------------------------------
// Loading resources, on Windows.
//-----------------------------------------------------------------------------
#if defined(WIN32) && !defined(LIBRARY)
const void *LoadResource(const std::string &name, size_t *size) {
HRSRC hres = FindResourceW(NULL, Widen(name).c_str(), RT_RCDATA);
ssassert(hres != NULL, "Cannot find resource");
HGLOBAL res = ::LoadResource(NULL, hres);
ssassert(res != NULL, "Cannot load resource");
*size = SizeofResource(NULL, hres);
return LockResource(res);
}
#endif
//-----------------------------------------------------------------------------
// Loading resources, on *nix.
//-----------------------------------------------------------------------------
#if defined(__APPLE__)
static Platform::Path PathFromCFURL(CFURLRef cfUrl) {
Path path;
CFStringRef cfPath = CFURLCopyFileSystemPath(cfUrl, kCFURLPOSIXPathStyle);
path.raw.resize(CFStringGetMaximumSizeOfFileSystemRepresentation(cfPath));
CFStringGetFileSystemRepresentation(cfPath, &path.raw[0], path.raw.size());
path.raw.erase(path.raw.find('\0'));
CFRelease(cfPath);
return path;
}
static Platform::Path ResourcePath(const std::string &name) {
Path path;
// First, try to get the URL from the bundle.
CFStringRef cfName = CFStringCreateWithCString(kCFAllocatorDefault, name.c_str(),
kCFStringEncodingUTF8);
CFURLRef cfUrl = CFBundleCopyResourceURL(CFBundleGetMainBundle(), cfName, NULL, NULL);
if(cfUrl != NULL) {
path = PathFromCFURL(cfUrl);
CFRelease(cfUrl);
}
CFRelease(cfName);
if(!path.IsEmpty()) return path;
// If that failed, it means we aren't running from the bundle.
// Reference off the executable path, then.
cfUrl = CFBundleCopyExecutableURL(CFBundleGetMainBundle());
if(cfUrl != NULL) {
path = PathFromCFURL(cfUrl).Parent().Parent().Join("res");
path = path.Join(Path::FromPortable(name));
CFRelease(cfUrl);
}
return path;
}
#elif !defined(WIN32)
# if defined(__linux__)
static const char *selfSymlink = "/proc/self/exe";
# elif defined(__NetBSD__)
static const char *selfSymlink = "/proc/curproc/exe";
# elif defined(__OpenBSD__) || defined(__FreeBSD__)
static const char *selfSymlink = "/proc/curproc/file";
# else
static const char *selfSymlink = "";
# endif
static Platform::Path FindLocalResourceDir() {
// Find out the path to the running binary.
Platform::Path selfPath;
char *expandedSelfPath = realpath(selfSymlink, NULL);
if(expandedSelfPath != NULL) {
selfPath = Path::From(expandedSelfPath);
}
free(expandedSelfPath);
Platform::Path resourceDir;
if(selfPath.IsEmpty()) {
// We don't know how to find the local resource directory on this platform,
        // so use the global one from the compile-time prefix.
return Path::From(UNIX_DATADIR);
} else {
resourceDir = selfPath.Parent().Parent().Join("res");
}
struct stat st;
if(stat(resourceDir.raw.c_str(), &st) != -1) {
// An executable-adjacent resource directory exists, good.
return resourceDir;
}
// No executable-adjacent resource directory; use the one from compile-time prefix.
return Path::From(UNIX_DATADIR);
}
static Platform::Path ResourcePath(const std::string &name) {
static Platform::Path resourceDir;
if(resourceDir.IsEmpty()) {
resourceDir = FindLocalResourceDir();
}
return resourceDir.Join(Path::FromPortable(name));
}
#endif
#if !defined(WIN32)
const void *LoadResource(const std::string &name, size_t *size) {
static std::map<std::string, std::string> cache;
auto it = cache.find(name);
if(it == cache.end()) {
ssassert(ReadFile(ResourcePath(name), &cache[name]), "Cannot read resource");
it = cache.find(name);
}
const std::string &content = (*it).second;
*size = content.size();
return (const void*)content.data();
}
#endif
}
}
| KmolYuan/python-solvespace | src/platform/platform.cpp | C++ | agpl-3.0 | 17,403 |
<?php decorate_with('layout_1col.php') ?>
<?php slot('title') ?>
<h1 class="multiline">
<?php echo __('Edit %1% - ISDF', array('%1%' => sfConfig::get('app_ui_label_function'))) ?>
<span class="sub"><?php echo render_title($resource->getLabel()) ?></span>
</h1>
<?php end_slot() ?>
<?php slot('content') ?>
<?php echo $form->renderGlobalErrors() ?>
<?php if (isset($sf_request->getAttribute('sf_route')->resource)): ?>
<?php echo $form->renderFormTag(url_for(array($resource, 'module' => 'function', 'action' => 'edit')), array('id' => 'editForm')) ?>
<?php else: ?>
<?php echo $form->renderFormTag(url_for(array('module' => 'function', 'action' => 'add')), array('id' => 'editForm')) ?>
<?php endif; ?>
<?php echo $form->renderHiddenFields() ?>
<section id="content">
<fieldset class="collapsible collapsed" id="identityArea">
<legend><?php echo __('Identity area') ?></legend>
<?php echo $form->type
->help(__('"Specify whether the description is a function or one of its subdivisions." (ISDF 5.1.1) Select the type from the drop-down menu; these values are drawn from the ISDF Function Types taxonomy.'))
->label(__('Type').' <span class="form-required" title="'.__('This is a mandatory element.').'">*</span>')
->renderRow() ?>
<?php echo render_field($form->authorizedFormOfName
->help(__('"Record the authorised name of the function being described. In cases where the name is not enough, add qualifiers to make it unique such as the territorial or administrative scope, or the name of the institution which performs it. This element is to be used in conjunction with the Function description identifier element (5.4.1)." (ISDF 5.1.2)'))
->label(__('Authorized form of name').' <span class="form-required" title="'.__('This is a mandatory element.').'">*</span>'), $resource) ?>
<?php echo $form->parallelName
->help(__('"Purpose: To indicate the various forms in which the authorized form(s) of name occurs in other languages or script forms. Rule: Record the parallel form(s) of name in accordance with any relevant national or international conventions or rules applied by the agency that created the description, including any necessary sub elements and/or qualifiers required by those conventions or rules. Specify in the Rules and/or conventions element (5.4.3.) which rules have been applied." (ISDF 5.1.3)'))
->label(__('Parallel form(s) of name'))
->renderRow() ?>
<?php echo $form->otherName
->help(__('"Record any other names for the function being described." (ISDF 5.1.4)'))
->label(__('Other form(s) of name'))
->renderRow() ?>
<?php echo render_field($form->classification
->help(__('"Record any term and/or code from a classification scheme of functions. Record the classification scheme used in the element Rules and/or conventions used (5.4.3)." (ISDF 5.1.5)')), $resource) ?>
</fieldset>
<fieldset class="collapsible collapsed" id="descriptionArea">
<legend><?php echo __('Context area') ?></legend>
<?php echo render_field($form->dates
->help(__('"Provide a date or date span which covers the dates when the function was started and when it finished. If a function is ongoing, no end date is needed." (ISDF 5.2.1)')), $resource) ?>
<?php echo render_field($form->description
->help(__('"Record a narrative description of the purpose of the function." (ISDF 5.2.2)')), $resource, array('class' => 'resizable')) ?>
<?php echo render_field($form->history
->help(__('"Record in narrative form or as a chronology the main events relating to the function." (ISDF 5.2.3)')), $resource, array('class' => 'resizable')) ?>
<?php echo render_field($form->legislation
->help(__('"Record any law, directive or charter which creates, changes or ends the function." (ISDF 5.2.4)')), $resource, array('class' => 'resizable')) ?>
</fieldset>
<fieldset class="collapsible collapsed" id="relationshipsArea">
<legend><?php echo __('Relationships area') ?></legend>
<?php echo get_partial('relatedFunction', $relatedFunctionComponent->getVarHolder()->getAll()) ?>
<?php echo get_partial('relatedAuthorityRecord', $relatedAuthorityRecordComponent->getVarHolder()->getAll()) ?>
<?php echo get_partial('relatedResource', $relatedResourceComponent->getVarHolder()->getAll()) ?>
</fieldset>
<fieldset class="collapsible collapsed" id="controlArea">
<legend><?php echo __('Control area') ?></legend>
<?php echo render_field($form->descriptionIdentifier
->help(__('"Record a unique description identifier in accordance with local and/or national conventions. If the description is to be used internationally, record the code of the country in which the description was created in accordance with the latest version of ISO 3166 Codes for the representation of names of countries. Where the creator of the description is an international organisation, give the organisational identifier in place of the country code." (ISDF 5.4.1)'))
->label(__('Description identifier').' <span class="form-required" title="'.__('This is a mandatory element.').'">*</span>'), $resource) ?>
<?php echo render_field($form->institutionIdentifier
->help(__('"Record the full authorised form of name(s) of agency(ies) responsible for creating, modifying or disseminating the description or, alternatively, record a recognized code for the agency." (ISDF 5.4.2)'))
->label(__('Institution identifier')), $resource) ?>
<?php echo render_field($form->rules
->help(__('"Purpose: To identify the national or international conventions or rules applied in creating the archival description. Rule: Record the names and where useful the editions or publication dates of the conventions or rules applied." (ISDF 5.4.3)'))
->label(__('Rules and/or conventions used')), $resource, array('class' => 'resizable')) ?>
<?php echo $form->descriptionStatus
->help(__('The purpose of this field is "[t]o indicate the drafting status of the description so that users can understand the current status of the description." (ISDF 5.4.4). Select Final, Revised or Draft from the drop-down menu.'))
->label(__('Status'))
->renderRow() ?>
<?php echo $form->descriptionDetail
->help(__('Select Full, Partial or Minimal from the drop-down menu. "In the absence of national guidelines or rules, minimum records are those that consist only of the three essential elements of an ISDF compliant record (see 4.7), while full records are those that convey information for all relevant ISDF elements of description." (ISDF 5.4.5)'))
->label(__('Level of detail'))
->renderRow() ?>
<?php echo render_field($form->revisionHistory
->help(__('"Record the date the description was created and the dates of any revisions to the description." (ISDF 5.4.6)'))
->label(__('Dates of creation, revision or deletion')), $resource, array('class' => 'resizable')) ?>
<?php echo $form->language
->help(__('Select the language(s) of this record from the drop-down menu; enter the first few letters to narrow the choices. (ISDF 5.4.7)'))
->label(__('Language(s)'))
->renderRow(array('class' => 'form-autocomplete')) ?>
<?php echo $form->script
->help(__('Select the script(s) of this record from the drop-down menu; enter the first few letters to narrow the choices. (ISDF 5.4.7)'))
->label(__('Script(s)'))
->renderRow(array('class' => 'form-autocomplete')) ?>
<?php echo render_field($form->sources
->help(__('"Record the sources consulted in establishing the function description." (ISDF 5.4.8)')), $resource, array('class' => 'resizable')) ?>
<?php echo render_field($form->maintenanceNotes
->help(__('"Record notes pertinent to the creation and maintenance of the description." (ISDF 5.4.9)')), $isdf, array('class' => 'resizable')) ?>
</fieldset>
</section>
<section class="actions">
<ul>
<?php if (isset($sf_request->getAttribute('sf_route')->resource)): ?>
<li><?php echo link_to(__('Cancel'), array($resource, 'module' => 'function'), array('class' => 'c-btn')) ?></li>
<li><input class="c-btn c-btn-submit" type="submit" value="<?php echo __('Save') ?>"/></li>
<?php else: ?>
<li><?php echo link_to(__('Cancel'), array('module' => 'function', 'action' => 'list'), array('class' => 'c-btn')) ?></li>
<li><input class="c-btn c-btn-submit" type="submit" value="<?php echo __('Create') ?>"/></li>
<?php endif; ?>
</ul>
</section>
</form>
<?php end_slot() ?>
| PeaceWorksTechnologySolutions/atom | plugins/sfIsdfPlugin/modules/sfIsdfPlugin/templates/editSuccess.php | PHP | agpl-3.0 | 8,993 |
//#############################################################################
//# #
//# Copyright (C) <2015> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//# IMS MAXIMS provides absolutely NO GUARANTEE OF THE CLINICAL SAFTEY of #
//# this program. Users of this software do so entirely at their own risk. #
//# IMS MAXIMS only ensures the Clinical Safety of unaltered run-time #
//# software that it builds, deploys and maintains. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5589.25814)
// Copyright (C) 1995-2015 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.ocrr.vo;
public class OrdersRequiringAuthorisationSearchCriteriaVo extends ims.vo.ValueObject implements ims.vo.ImsCloneable, Comparable
{
private static final long serialVersionUID = 1L;
public OrdersRequiringAuthorisationSearchCriteriaVo()
{
}
public OrdersRequiringAuthorisationSearchCriteriaVo(ims.ocrr.vo.beans.OrdersRequiringAuthorisationSearchCriteriaVoBean bean)
{
this.fromdate = bean.getFromDate() == null ? null : bean.getFromDate().buildDate();
this.todate = bean.getToDate() == null ? null : bean.getToDate().buildDate();
this.respclinician = bean.getRespClinician() == null ? null : bean.getRespClinician().buildVo();
this.hospital = bean.getHospital() == null ? null : new ims.core.resource.place.vo.LocationRefVo(new Integer(bean.getHospital().getId()), bean.getHospital().getVersion());
this.loctype = bean.getLoctype();
this.patientlocation = bean.getPatientLocation() == null ? null : bean.getPatientLocation().buildVo();
this.patientclinic = bean.getPatientClinic() == null ? null : bean.getPatientClinic().buildVo();
}
public void populate(ims.vo.ValueObjectBeanMap map, ims.ocrr.vo.beans.OrdersRequiringAuthorisationSearchCriteriaVoBean bean)
{
this.fromdate = bean.getFromDate() == null ? null : bean.getFromDate().buildDate();
this.todate = bean.getToDate() == null ? null : bean.getToDate().buildDate();
this.respclinician = bean.getRespClinician() == null ? null : bean.getRespClinician().buildVo(map);
this.hospital = bean.getHospital() == null ? null : new ims.core.resource.place.vo.LocationRefVo(new Integer(bean.getHospital().getId()), bean.getHospital().getVersion());
this.loctype = bean.getLoctype();
this.patientlocation = bean.getPatientLocation() == null ? null : bean.getPatientLocation().buildVo(map);
this.patientclinic = bean.getPatientClinic() == null ? null : bean.getPatientClinic().buildVo(map);
}
public ims.vo.ValueObjectBean getBean()
{
return this.getBean(new ims.vo.ValueObjectBeanMap());
}
public ims.vo.ValueObjectBean getBean(ims.vo.ValueObjectBeanMap map)
{
ims.ocrr.vo.beans.OrdersRequiringAuthorisationSearchCriteriaVoBean bean = null;
if(map != null)
bean = (ims.ocrr.vo.beans.OrdersRequiringAuthorisationSearchCriteriaVoBean)map.getValueObjectBean(this);
if (bean == null)
{
bean = new ims.ocrr.vo.beans.OrdersRequiringAuthorisationSearchCriteriaVoBean();
map.addValueObjectBean(this, bean);
bean.populate(map, this);
}
return bean;
}
public boolean getFromDateIsNotNull()
{
return this.fromdate != null;
}
public ims.framework.utils.Date getFromDate()
{
return this.fromdate;
}
public void setFromDate(ims.framework.utils.Date value)
{
this.isValidated = false;
this.fromdate = value;
}
public boolean getToDateIsNotNull()
{
return this.todate != null;
}
public ims.framework.utils.Date getToDate()
{
return this.todate;
}
public void setToDate(ims.framework.utils.Date value)
{
this.isValidated = false;
this.todate = value;
}
public boolean getRespClinicianIsNotNull()
{
return this.respclinician != null;
}
public ims.core.vo.HcpLiteVo getRespClinician()
{
return this.respclinician;
}
public void setRespClinician(ims.core.vo.HcpLiteVo value)
{
this.isValidated = false;
this.respclinician = value;
}
public boolean getHospitalIsNotNull()
{
return this.hospital != null;
}
public ims.core.resource.place.vo.LocationRefVo getHospital()
{
return this.hospital;
}
public void setHospital(ims.core.resource.place.vo.LocationRefVo value)
{
this.isValidated = false;
this.hospital = value;
}
public boolean getLoctypeIsNotNull()
{
return this.loctype != null;
}
public Integer getLoctype()
{
return this.loctype;
}
public void setLoctype(Integer value)
{
this.isValidated = false;
this.loctype = value;
}
public boolean getPatientLocationIsNotNull()
{
return this.patientlocation != null;
}
public ims.core.vo.LocationLiteVo getPatientLocation()
{
return this.patientlocation;
}
public void setPatientLocation(ims.core.vo.LocationLiteVo value)
{
this.isValidated = false;
this.patientlocation = value;
}
public boolean getPatientClinicIsNotNull()
{
return this.patientclinic != null;
}
public ims.core.vo.ClinicLiteVo getPatientClinic()
{
return this.patientclinic;
}
public void setPatientClinic(ims.core.vo.ClinicLiteVo value)
{
this.isValidated = false;
this.patientclinic = value;
}
public final String getIItemText()
{
return toString();
}
public final Integer getBoId()
{
return null;
}
public final String getBoClassName()
{
return null;
}
public boolean isValidated()
{
if(this.isBusy)
return true;
this.isBusy = true;
if(!this.isValidated)
{
this.isBusy = false;
return false;
}
if(this.respclinician != null)
{
if(!this.respclinician.isValidated())
{
this.isBusy = false;
return false;
}
}
if(this.patientlocation != null)
{
if(!this.patientlocation.isValidated())
{
this.isBusy = false;
return false;
}
}
if(this.patientclinic != null)
{
if(!this.patientclinic.isValidated())
{
this.isBusy = false;
return false;
}
}
this.isBusy = false;
return true;
}
public String[] validate()
{
return validate(null);
}
public String[] validate(String[] existingErrors)
{
if(this.isBusy)
return null;
this.isBusy = true;
java.util.ArrayList<String> listOfErrors = new java.util.ArrayList<String>();
if(existingErrors != null)
{
for(int x = 0; x < existingErrors.length; x++)
{
listOfErrors.add(existingErrors[x]);
}
}
if(this.respclinician != null)
{
String[] listOfOtherErrors = this.respclinician.validate();
if(listOfOtherErrors != null)
{
for(int x = 0; x < listOfOtherErrors.length; x++)
{
listOfErrors.add(listOfOtherErrors[x]);
}
}
}
if(this.patientlocation != null)
{
String[] listOfOtherErrors = this.patientlocation.validate();
if(listOfOtherErrors != null)
{
for(int x = 0; x < listOfOtherErrors.length; x++)
{
listOfErrors.add(listOfOtherErrors[x]);
}
}
}
if(this.patientclinic != null)
{
String[] listOfOtherErrors = this.patientclinic.validate();
if(listOfOtherErrors != null)
{
for(int x = 0; x < listOfOtherErrors.length; x++)
{
listOfErrors.add(listOfOtherErrors[x]);
}
}
}
int errorCount = listOfErrors.size();
if(errorCount == 0)
{
this.isBusy = false;
this.isValidated = true;
return null;
}
String[] result = new String[errorCount];
for(int x = 0; x < errorCount; x++)
result[x] = (String)listOfErrors.get(x);
this.isBusy = false;
this.isValidated = false;
return result;
}
public Object clone()
{
if(this.isBusy)
return this;
this.isBusy = true;
OrdersRequiringAuthorisationSearchCriteriaVo clone = new OrdersRequiringAuthorisationSearchCriteriaVo();
if(this.fromdate == null)
clone.fromdate = null;
else
clone.fromdate = (ims.framework.utils.Date)this.fromdate.clone();
if(this.todate == null)
clone.todate = null;
else
clone.todate = (ims.framework.utils.Date)this.todate.clone();
if(this.respclinician == null)
clone.respclinician = null;
else
clone.respclinician = (ims.core.vo.HcpLiteVo)this.respclinician.clone();
clone.hospital = this.hospital;
clone.loctype = this.loctype;
if(this.patientlocation == null)
clone.patientlocation = null;
else
clone.patientlocation = (ims.core.vo.LocationLiteVo)this.patientlocation.clone();
if(this.patientclinic == null)
clone.patientclinic = null;
else
clone.patientclinic = (ims.core.vo.ClinicLiteVo)this.patientclinic.clone();
clone.isValidated = this.isValidated;
this.isBusy = false;
return clone;
}
public int compareTo(Object obj)
{
return compareTo(obj, true);
}
public int compareTo(Object obj, boolean caseInsensitive)
{
if (obj == null)
{
return -1;
}
if(caseInsensitive); // this is to avoid eclipse warning only.
if (!(OrdersRequiringAuthorisationSearchCriteriaVo.class.isAssignableFrom(obj.getClass())))
{
			throw new ClassCastException("An OrdersRequiringAuthorisationSearchCriteriaVo object cannot be compared to an Object of type " + obj.getClass().getName());
}
OrdersRequiringAuthorisationSearchCriteriaVo compareObj = (OrdersRequiringAuthorisationSearchCriteriaVo)obj;
int retVal = 0;
if (retVal == 0)
{
if(this.getHospital() == null && compareObj.getHospital() != null)
return -1;
if(this.getHospital() != null && compareObj.getHospital() == null)
return 1;
if(this.getHospital() != null && compareObj.getHospital() != null)
retVal = this.getHospital().compareTo(compareObj.getHospital());
}
return retVal;
}
public synchronized static int generateValueObjectUniqueID()
{
return ims.vo.ValueObject.generateUniqueID();
}
public int countFieldsWithValue()
{
int count = 0;
if(this.fromdate != null)
count++;
if(this.todate != null)
count++;
if(this.respclinician != null)
count++;
if(this.hospital != null)
count++;
if(this.loctype != null)
count++;
if(this.patientlocation != null)
count++;
if(this.patientclinic != null)
count++;
return count;
}
public int countValueObjectFields()
{
return 7;
}
protected ims.framework.utils.Date fromdate;
protected ims.framework.utils.Date todate;
protected ims.core.vo.HcpLiteVo respclinician;
protected ims.core.resource.place.vo.LocationRefVo hospital;
protected Integer loctype;
protected ims.core.vo.LocationLiteVo patientlocation;
protected ims.core.vo.ClinicLiteVo patientclinic;
private boolean isValidated = false;
private boolean isBusy = false;
}
| FreudianNM/openMAXIMS | Source Library/openmaxims_workspace/ValueObjects/src/ims/ocrr/vo/OrdersRequiringAuthorisationSearchCriteriaVo.java | Java | agpl-3.0 | 12,173 |
<?php
/**
* @package plugins.caption
* @subpackage model.data
*/
class kCopyCaptionsJobData extends kJobData
{
	/**
	 * entry Id
	 * @var string
	 */
private $entryId;
/**
* the sources start time and duration
* @var array
*/
private $clipsDescriptionArray;
/**
* @var bool
*/
private $fullCopy;
/**
* @return string
*/
public function getEntryId()
{
return $this->entryId;
}
/**
* @param string $entryId
*/
public function setEntryId($entryId)
{
$this->entryId = $entryId;
}
/**
* @return array
*/
public function getClipsDescriptionArray()
{
return $this->clipsDescriptionArray;
}
/**
* @param array $clipsDescriptionArray
*/
public function setClipsDescriptionArray($clipsDescriptionArray)
{
$this->clipsDescriptionArray = $clipsDescriptionArray;
}
/**
* @return bool
*/
public function getFullCopy()
{
return $this->fullCopy;
}
/**
* @param bool $fullCopy
*/
public function setFullCopy($fullCopy)
{
$this->fullCopy = $fullCopy;
}
}
| DBezemer/server | plugins/content/caption/base/lib/model/kCopyCaptionsJobData.php | PHP | agpl-3.0 | 1,140 |
<?php
if(!defined('sugarEntry') || !sugarEntry) die('Not A Valid Entry Point');
/*********************************************************************************
* SugarCRM is a customer relationship management program developed by
* SugarCRM, Inc. Copyright (C) 2004 - 2010 SugarCRM Inc.
*
* This program is free software; you can redistribute it and/or modify it under
* the terms of the GNU General Public License version 3 as published by the
* Free Software Foundation with the addition of the following permission added
* to Section 15 as permitted in Section 7(a): FOR ANY PART OF THE COVERED WORK
* IN WHICH THE COPYRIGHT IS OWNED BY SUGARCRM, SUGARCRM DISCLAIMS THE WARRANTY
* OF NON INFRINGEMENT OF THIRD PARTY RIGHTS.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License along with
* this program; if not, see http://www.gnu.org/licenses or write to the Free
* Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301 USA.
*
* You can contact SugarCRM, Inc. headquarters at 10050 North Wolfe Road,
* SW2-130, Cupertino, CA 95014, USA. or at email address contact@sugarcrm.com.
*
* The interactive user interfaces in modified source and object code versions
* of this program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU General Public License version 3.
*
* In accordance with Section 7(b) of the GNU General Public License version 3,
* these Appropriate Legal Notices must retain the display of the "Powered by
* SugarCRM" logo. If the display of the logo is not reasonably feasible for
* technical reasons, the Appropriate Legal Notices must display the words
* "Powered by SugarCRM".
********************************************************************************/
$module_name = 'gcoop_notificaciones';
$_module_name = 'gcoop_notificaciones';
$popupMeta = array('moduleMain' => $module_name,
'varName' => $module_name,
'orderBy' => $_module_name.'.name',
'whereClauses' =>
array('name' => $_module_name . '.name',
),
'searchInputs'=> array($_module_name. '_number', 'name', 'priority','status'),
);
?>
| gcoop-libre/Advanced-Workflows-For-Sugar-CRM | install_dir/modules/gcoop_notificaciones/metadata/popupdefs.php | PHP | agpl-3.0 | 2,445 |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import flt, cint, cstr
from frappe import _
from frappe.model.mapper import get_mapped_doc
from frappe.model.document import Document
from six import iteritems
class SalaryStructure(Document):
def validate(self):
self.set_missing_values()
self.validate_amount()
self.strip_condition_and_formula_fields()
self.validate_max_benefits_with_flexi()
self.validate_component_based_on_tax_slab()
def set_missing_values(self):
overwritten_fields = ["depends_on_payment_days", "variable_based_on_taxable_salary", "is_tax_applicable", "is_flexible_benefit"]
overwritten_fields_if_missing = ["amount_based_on_formula", "formula", "amount"]
for table in ["earnings", "deductions"]:
for d in self.get(table):
component_default_value = frappe.db.get_value("Salary Component", cstr(d.salary_component),
overwritten_fields + overwritten_fields_if_missing, as_dict=1)
if component_default_value:
for fieldname in overwritten_fields:
value = component_default_value.get(fieldname)
if d.get(fieldname) != value:
d.set(fieldname, value)
if not (d.get("amount") or d.get("formula")):
for fieldname in overwritten_fields_if_missing:
d.set(fieldname, component_default_value.get(fieldname))
def validate_component_based_on_tax_slab(self):
for row in self.deductions:
if row.variable_based_on_taxable_salary and (row.amount or row.formula):
frappe.throw(_("Row #{0}: Cannot set amount or formula for Salary Component {1} with Variable Based On Taxable Salary")
.format(row.idx, row.salary_component))
def validate_amount(self):
if flt(self.net_pay) < 0 and self.salary_slip_based_on_timesheet:
frappe.throw(_("Net pay cannot be negative"))
def strip_condition_and_formula_fields(self):
# remove whitespaces from condition and formula fields
for row in self.earnings:
row.condition = row.condition.strip() if row.condition else ""
row.formula = row.formula.strip() if row.formula else ""
for row in self.deductions:
row.condition = row.condition.strip() if row.condition else ""
row.formula = row.formula.strip() if row.formula else ""
def validate_max_benefits_with_flexi(self):
have_a_flexi = False
if self.earnings:
flexi_amount = 0
for earning_component in self.earnings:
if earning_component.is_flexible_benefit == 1:
have_a_flexi = True
max_of_component = frappe.db.get_value("Salary Component", earning_component.salary_component, "max_benefit_amount")
flexi_amount += max_of_component
if have_a_flexi and flt(self.max_benefits) == 0:
frappe.throw(_("Max benefits should be greater than zero to dispense benefits"))
if have_a_flexi and flexi_amount and flt(self.max_benefits) > flexi_amount:
frappe.throw(_("Total flexible benefit component amount {0} should not be less than max benefits {1}")
.format(flexi_amount, self.max_benefits))
if not have_a_flexi and flt(self.max_benefits) > 0:
frappe.throw(_("Salary Structure should have flexible benefit component(s) to dispense benefit amount"))
def get_employees(self, **kwargs):
conditions, values = [], []
for field, value in kwargs.items():
if value:
conditions.append("{0}=%s".format(field))
values.append(value)
condition_str = " and " + " and ".join(conditions) if conditions else ""
employees = frappe.db.sql_list("select name from tabEmployee where status='Active' {condition}"
.format(condition=condition_str), tuple(values))
return employees
@frappe.whitelist()
	def assign_salary_structure(self, company=None, grade=None, department=None, designation=None, employee=None,
			from_date=None, base=None, variable=None, income_tax_slab=None):
		employees = self.get_employees(company=company, grade=grade, department=department, designation=designation, name=employee)
if employees:
if len(employees) > 20:
frappe.enqueue(assign_salary_structure_for_employees, timeout=600,
employees=employees, salary_structure=self,from_date=from_date,
base=base, variable=variable, income_tax_slab=income_tax_slab)
else:
assign_salary_structure_for_employees(employees, self, from_date=from_date,
base=base, variable=variable, income_tax_slab=income_tax_slab)
else:
frappe.msgprint(_("No Employee Found"))
def assign_salary_structure_for_employees(employees, salary_structure, from_date=None, base=None, variable=None, income_tax_slab=None):
salary_structures_assignments = []
existing_assignments_for = get_existing_assignments(employees, salary_structure, from_date)
count=0
for employee in employees:
if employee in existing_assignments_for:
continue
count +=1
salary_structures_assignment = create_salary_structures_assignment(employee,
salary_structure, from_date, base, variable, income_tax_slab)
salary_structures_assignments.append(salary_structures_assignment)
frappe.publish_progress(count*100/len(set(employees) - set(existing_assignments_for)), title = _("Assigning Structures..."))
if salary_structures_assignments:
frappe.msgprint(_("Structures have been assigned successfully"))
def create_salary_structures_assignment(employee, salary_structure, from_date, base, variable, income_tax_slab=None):
assignment = frappe.new_doc("Salary Structure Assignment")
assignment.employee = employee
assignment.salary_structure = salary_structure.name
assignment.company = salary_structure.company
assignment.from_date = from_date
assignment.base = base
assignment.variable = variable
assignment.income_tax_slab = income_tax_slab
assignment.save(ignore_permissions = True)
assignment.submit()
return assignment.name
def get_existing_assignments(employees, salary_structure, from_date):
salary_structures_assignments = frappe.db.sql_list("""
select distinct employee from `tabSalary Structure Assignment`
where salary_structure=%s and employee in (%s)
and from_date=%s and company= %s and docstatus=1
""" % ('%s', ', '.join(['%s']*len(employees)),'%s', '%s'), [salary_structure.name] + employees+[from_date]+[salary_structure.company])
if salary_structures_assignments:
frappe.msgprint(_("Skipping Salary Structure Assignment for the following employees, as Salary Structure Assignment records already exists against them. {0}")
.format("\n".join(salary_structures_assignments)))
return salary_structures_assignments
@frappe.whitelist()
def make_salary_slip(source_name, target_doc = None, employee = None, as_print = False, print_format = None, for_preview=0, ignore_permissions=False):
def postprocess(source, target):
if employee:
employee_details = frappe.db.get_value("Employee", employee,
["employee_name", "branch", "designation", "department"], as_dict=1)
target.employee = employee
target.employee_name = employee_details.employee_name
target.branch = employee_details.branch
target.designation = employee_details.designation
target.department = employee_details.department
target.run_method('process_salary_structure', for_preview=for_preview)
doc = get_mapped_doc("Salary Structure", source_name, {
"Salary Structure": {
"doctype": "Salary Slip",
"field_map": {
"total_earning": "gross_pay",
"name": "salary_structure"
}
}
}, target_doc, postprocess, ignore_child_tables=True, ignore_permissions=ignore_permissions)
if cint(as_print):
doc.name = 'Preview for {0}'.format(employee)
return frappe.get_print(doc.doctype, doc.name, doc = doc, print_format = print_format)
else:
return doc
@frappe.whitelist()
def get_employees(salary_structure):
employees = frappe.get_list('Salary Structure Assignment',
filters={'salary_structure': salary_structure, 'docstatus': 1}, fields=['employee'])
if not employees:
frappe.throw(_("There's no Employee with Salary Structure: {0}. \
Assign {1} to an Employee to preview Salary Slip").format(salary_structure, salary_structure))
return list(set([d.employee for d in employees]))
| gsnbng/erpnext | erpnext/hr/doctype/salary_structure/salary_structure.py | Python | agpl-3.0 | 8,133 |
var status = 0;
var request;
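// Descriptive note (added; not part of the original script): MapleStory NPC
// conversation scripts are driven by action(mode, type, selection), where
// 'mode' is the player's response (1 = next/ok, 0 = decline, -1 = dialogue
// closed) and 'status' tracks the current dialogue step. start() bumps status
// from -1 to 0, which immediately warps the party to map 980030000 and
// cancels the CPQ lobby; the map and portal ids are taken as-is from the
// original source.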
function start() {
status = -1;
action(1, 0, 0);
}
function action(mode, type, selection) {
if (mode == -1) {
cm.dispose();
} else {
if (mode == 0 && status == 0) {
cm.dispose();
return;
}
if (mode == 1)
status++;
else
status--;
if (status == 0) {
cm.warpParty(980030000, 4);
cm.cancelCPQLobby();
cm.dispose();
}
}
}
| ronancpl/MapleSolaxiaV2 | scripts/npc/2042009.js | JavaScript | agpl-3.0 | 511 |
//#############################################################################
//# #
//# Copyright (C) <2015> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//# IMS MAXIMS provides absolutely NO GUARANTEE OF THE CLINICAL SAFETY of     #
//# this program. Users of this software do so entirely at their own risk. #
//# IMS MAXIMS only ensures the Clinical Safety of unaltered run-time #
//# software that it builds, deploys and maintains. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5589.25814)
// Copyright (C) 1995-2015 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.emergency.vo;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import ims.framework.enumerations.SortOrder;
public class AttendanceHistoryDialogParamsVoCollection extends ims.vo.ValueObjectCollection implements ims.vo.ImsCloneable, Iterable<AttendanceHistoryDialogParamsVo>
{
private static final long serialVersionUID = 1L;
private ArrayList<AttendanceHistoryDialogParamsVo> col = new ArrayList<AttendanceHistoryDialogParamsVo>();
public String getBoClassName()
{
return null;
}
public boolean add(AttendanceHistoryDialogParamsVo value)
{
if(value == null)
return false;
if(this.col.indexOf(value) < 0)
{
return this.col.add(value);
}
return false;
}
public boolean add(int index, AttendanceHistoryDialogParamsVo value)
{
if(value == null)
return false;
if(this.col.indexOf(value) < 0)
{
this.col.add(index, value);
return true;
}
return false;
}
public void clear()
{
this.col.clear();
}
public void remove(int index)
{
this.col.remove(index);
}
public int size()
{
return this.col.size();
}
public int indexOf(AttendanceHistoryDialogParamsVo instance)
{
return col.indexOf(instance);
}
public AttendanceHistoryDialogParamsVo get(int index)
{
return this.col.get(index);
}
public boolean set(int index, AttendanceHistoryDialogParamsVo value)
{
if(value == null)
return false;
this.col.set(index, value);
return true;
}
public void remove(AttendanceHistoryDialogParamsVo instance)
{
if(instance != null)
{
int index = indexOf(instance);
if(index >= 0)
remove(index);
}
}
public boolean contains(AttendanceHistoryDialogParamsVo instance)
{
return indexOf(instance) >= 0;
}
public Object clone()
{
AttendanceHistoryDialogParamsVoCollection clone = new AttendanceHistoryDialogParamsVoCollection();
for(int x = 0; x < this.col.size(); x++)
{
if(this.col.get(x) != null)
clone.col.add((AttendanceHistoryDialogParamsVo)this.col.get(x).clone());
else
clone.col.add(null);
}
return clone;
}
public boolean isValidated()
{
for(int x = 0; x < col.size(); x++)
if(!this.col.get(x).isValidated())
return false;
return true;
}
public String[] validate()
{
return validate(null);
}
public String[] validate(String[] existingErrors)
{
if(col.size() == 0)
return null;
java.util.ArrayList<String> listOfErrors = new java.util.ArrayList<String>();
if(existingErrors != null)
{
for(int x = 0; x < existingErrors.length; x++)
{
listOfErrors.add(existingErrors[x]);
}
}
for(int x = 0; x < col.size(); x++)
{
String[] listOfOtherErrors = this.col.get(x).validate();
if(listOfOtherErrors != null)
{
for(int y = 0; y < listOfOtherErrors.length; y++)
{
listOfErrors.add(listOfOtherErrors[y]);
}
}
}
int errorCount = listOfErrors.size();
if(errorCount == 0)
return null;
String[] result = new String[errorCount];
for(int x = 0; x < errorCount; x++)
result[x] = (String)listOfErrors.get(x);
return result;
}
public AttendanceHistoryDialogParamsVoCollection sort()
{
return sort(SortOrder.ASCENDING);
}
public AttendanceHistoryDialogParamsVoCollection sort(boolean caseInsensitive)
{
return sort(SortOrder.ASCENDING, caseInsensitive);
}
public AttendanceHistoryDialogParamsVoCollection sort(SortOrder order)
{
return sort(new AttendanceHistoryDialogParamsVoComparator(order));
}
public AttendanceHistoryDialogParamsVoCollection sort(SortOrder order, boolean caseInsensitive)
{
return sort(new AttendanceHistoryDialogParamsVoComparator(order, caseInsensitive));
}
@SuppressWarnings("unchecked")
public AttendanceHistoryDialogParamsVoCollection sort(Comparator comparator)
{
Collections.sort(col, comparator);
return this;
}
public AttendanceHistoryDialogParamsVo[] toArray()
{
AttendanceHistoryDialogParamsVo[] arr = new AttendanceHistoryDialogParamsVo[col.size()];
col.toArray(arr);
return arr;
}
public Iterator<AttendanceHistoryDialogParamsVo> iterator()
{
return col.iterator();
}
@Override
protected ArrayList getTypedCollection()
{
return col;
}
private class AttendanceHistoryDialogParamsVoComparator implements Comparator
{
private int direction = 1;
private boolean caseInsensitive = true;
public AttendanceHistoryDialogParamsVoComparator()
{
this(SortOrder.ASCENDING);
}
public AttendanceHistoryDialogParamsVoComparator(SortOrder order)
{
if (order == SortOrder.DESCENDING)
{
direction = -1;
}
}
public AttendanceHistoryDialogParamsVoComparator(SortOrder order, boolean caseInsensitive)
{
if (order == SortOrder.DESCENDING)
{
direction = -1;
}
this.caseInsensitive = caseInsensitive;
}
public int compare(Object obj1, Object obj2)
{
AttendanceHistoryDialogParamsVo voObj1 = (AttendanceHistoryDialogParamsVo)obj1;
AttendanceHistoryDialogParamsVo voObj2 = (AttendanceHistoryDialogParamsVo)obj2;
return direction*(voObj1.compareTo(voObj2, this.caseInsensitive));
}
public boolean equals(Object obj)
{
return false;
}
}
public ims.emergency.vo.beans.AttendanceHistoryDialogParamsVoBean[] getBeanCollection()
{
return getBeanCollectionArray();
}
public ims.emergency.vo.beans.AttendanceHistoryDialogParamsVoBean[] getBeanCollectionArray()
{
ims.emergency.vo.beans.AttendanceHistoryDialogParamsVoBean[] result = new ims.emergency.vo.beans.AttendanceHistoryDialogParamsVoBean[col.size()];
for(int i = 0; i < col.size(); i++)
{
AttendanceHistoryDialogParamsVo vo = ((AttendanceHistoryDialogParamsVo)col.get(i));
result[i] = (ims.emergency.vo.beans.AttendanceHistoryDialogParamsVoBean)vo.getBean();
}
return result;
}
public static AttendanceHistoryDialogParamsVoCollection buildFromBeanCollection(java.util.Collection beans)
{
AttendanceHistoryDialogParamsVoCollection coll = new AttendanceHistoryDialogParamsVoCollection();
if(beans == null)
return coll;
java.util.Iterator iter = beans.iterator();
while (iter.hasNext())
{
coll.add(((ims.emergency.vo.beans.AttendanceHistoryDialogParamsVoBean)iter.next()).buildVo());
}
return coll;
}
public static AttendanceHistoryDialogParamsVoCollection buildFromBeanCollection(ims.emergency.vo.beans.AttendanceHistoryDialogParamsVoBean[] beans)
{
AttendanceHistoryDialogParamsVoCollection coll = new AttendanceHistoryDialogParamsVoCollection();
if(beans == null)
return coll;
for(int x = 0; x < beans.length; x++)
{
coll.add(beans[x].buildVo());
}
return coll;
}
}
| FreudianNM/openMAXIMS | Source Library/openmaxims_workspace/ValueObjects/src/ims/emergency/vo/AttendanceHistoryDialogParamsVoCollection.java | Java | agpl-3.0 | 8,947 |
//////////////////////////////////////////////////////////////////////////
// //
// This is a generated file. You can view the original //
// source in your browser if your browser supports source maps. //
// Source maps are supported by all recent versions of Chrome, Safari, //
// and Firefox, and by Internet Explorer 11. //
// //
//////////////////////////////////////////////////////////////////////////
(function () {
/* Imports */
var Meteor = Package.meteor.Meteor;
var global = Package.meteor.global;
var meteorEnv = Package.meteor.meteorEnv;
var _ = Package.underscore._;
var Random = Package.random.Random;
var check = Package.check.check;
var Match = Package.check.Match;
var Accounts = Package['accounts-base'].Accounts;
var OAuth = Package['clinical:oauth'].OAuth;
var WebApp = Package.webapp.WebApp;
var Log = Package.logging.Log;
var Tracker = Package.deps.Tracker;
var Deps = Package.deps.Deps;
var Session = Package.session.Session;
var DDP = Package['ddp-client'].DDP;
var Mongo = Package.mongo.Mongo;
var Blaze = Package.ui.Blaze;
var UI = Package.ui.UI;
var Handlebars = Package.ui.Handlebars;
var Spacebars = Package.spacebars.Spacebars;
var Template = Package['templating-runtime'].Template;
var $ = Package.jquery.$;
var jQuery = Package.jquery.jQuery;
var EJSON = Package.ejson.EJSON;
var FastClick = Package.fastclick.FastClick;
var LaunchScreen = Package['launch-screen'].LaunchScreen;
var meteorInstall = Package.modules.meteorInstall;
var meteorBabelHelpers = Package['babel-runtime'].meteorBabelHelpers;
var Promise = Package.promise.Promise;
var HTML = Package.htmljs.HTML;
var Symbol = Package['ecmascript-runtime-client'].Symbol;
var Map = Package['ecmascript-runtime-client'].Map;
var Set = Package['ecmascript-runtime-client'].Set;
var require = meteorInstall({"node_modules":{"meteor":{"clinical:accounts-oauth":{"oauth_common.js":function(require,exports,module){
//////////////////////////////////////////////////////////////////////////////////////////////////
// //
// packages/clinical_accounts-oauth/oauth_common.js //
// //
//////////////////////////////////////////////////////////////////////////////////////////////////
//
var get;
module.link("lodash", {
get: function (v) {
get = v;
}
}, 0);
Accounts.oauth = {};
var services = {}; // Helper for registering OAuth based accounts packages.
// On the server, adds an index to the user collection.
Accounts.oauth.registerService = function (name) {
if (get(Meteor, 'settings.public.logging') === "debug") {
console.log('C3 Registering OAuth service in active server memory: ', name);
}
if (_.has(services, name)) throw new Error("Duplicate service: " + name);
services[name] = true;
if (Meteor.server) {
// Accounts.updateOrCreateUserFromExternalService does a lookup by this id,
// so this should be a unique index. You might want to add indexes for other
// fields returned by your service (eg services.github.login) but you can do
// that in your app.
Meteor.users._ensureIndex('services.' + name + '.id', {
unique: 1,
sparse: 1
});
}
}; // Removes a previously registered service.
// This will disable logging in with this service, and serviceNames() will not
// contain it.
// It's worth noting that already logged in users will remain logged in unless
// you manually expire their sessions.
Accounts.oauth.unregisterService = function (name) {
if (!_.has(services, name)) throw new Error("Service not found: " + name);
delete services[name];
};
Accounts.oauth.serviceNames = function () {
return _.keys(services);
};
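// Usage sketch (added for illustration only; 'myservice' is a made-up name,
// not a service shipped with this package):
//   Accounts.oauth.registerService('myservice');
//   Accounts.oauth.serviceNames();          // -> ['myservice']
//   Accounts.oauth.unregisterService('myservice');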
//////////////////////////////////////////////////////////////////////////////////////////////////
},"oauth_client.js":function(require,exports,module){
//////////////////////////////////////////////////////////////////////////////////////////////////
// //
// packages/clinical_accounts-oauth/oauth_client.js //
// //
//////////////////////////////////////////////////////////////////////////////////////////////////
//
var get;
module.link("lodash", {
get: function (v) {
get = v;
}
}, 0);
// Documentation for Meteor.loginWithExternalService
/**
* @name loginWith<ExternalService>
* @memberOf Meteor
* @function
* @summary Log the user in using an external service.
* @locus Client
* @param {Object} [options]
* @param {String[]} options.requestPermissions A list of permissions to request from the user.
* @param {Boolean} options.requestOfflineToken If true, asks the user for permission to act on their behalf when offline. This stores an additional offline token in the `services` field of the user document. Currently only supported with Google.
* @param {Object} options.loginUrlParameters Provide additional parameters to the authentication URI. Currently only supported with Google. See [Google Identity Platform documentation](https://developers.google.com/identity/protocols/OpenIDConnect#authenticationuriparameters).
* @param {String} options.loginHint An email address that the external service will use to pre-fill the login prompt. Currently only supported with Meteor developer accounts and Google accounts. If used with Google, the Google User ID can also be passed.
* @param {String} options.loginStyle Login style ("popup" or "redirect", defaults to the login service configuration). The "popup" style opens the login page in a separate popup window, which is generally preferred because the Meteor application doesn't need to be reloaded. The "redirect" style redirects the Meteor application's window to the login page, and the login service provider redirects back to the Meteor application which is then reloaded. The "redirect" style can be used in situations where a popup window can't be opened, such as in a mobile UIWebView. The "redirect" style however relies on session storage which isn't available in Safari private mode, so the "popup" style will be forced if session storage can't be used.
* @param {String} options.redirectUrl If using "redirect" login style, the user will be returned to this URL after authorisation has been completed.
* @param {Function} [callback] Optional callback. Called with no arguments on success, or with a single `Error` argument on failure. The callback cannot be called if you are using the "redirect" `loginStyle`, because the app will have reloaded in the meantime; try using [client-side login hooks](#accounts_onlogin) instead.
* @importFromPackage meteor
*/
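// Illustrative usage (added; not part of the original file). 'google' stands
// in for any external service registered with Accounts.oauth.registerService,
// and the option values below are assumptions — see the parameter list above:
//
//   Meteor.loginWithGoogle({
//     requestPermissions: ['email'],
//     loginStyle: 'popup'
//   }, function (err) {
//     if (err) {
//       console.error('Login failed:', err);
//     }
//   });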
// Allow server to specify a specific subclass of errors. We should come
// up with a more generic way to do this!
var convertError = function (err) {
if (err && err instanceof Meteor.Error && err.error === Accounts.LoginCancelledError.numericError) return new Accounts.LoginCancelledError(err.reason);else return err;
}; // For the redirect login flow, the final step is that we're
// redirected back to the application. The credentialToken for this
// login attempt is stored in the reload migration data, and the
// credentialSecret for a successful login is stored in session
// storage.
Meteor.startup(function () {
var oauth = OAuth.getDataAfterRedirect();
if (!oauth) return; // We'll only have the credentialSecret if the login completed
// successfully. However we still call the login method anyway to
// retrieve the error if the login was unsuccessful.
var methodName = 'login';
var methodArguments = [{
oauth: _.pick(oauth, 'credentialToken', 'credentialSecret')
}];
if (get(Meteor, 'settings.public.logging') === "debug") {
console.log('Meteor.startup()');
}
var newLoginMethod = {
methodArguments: methodArguments,
userCallback: function (err) {
// The redirect login flow is complete. Construct an
// `attemptInfo` object with the login result, and report back
// to the code which initiated the login attempt
// (e.g. accounts-ui, when that package is being used).
err = convertError(err);
Accounts._pageLoadLogin({
type: oauth.loginService,
allowed: !err,
error: err,
methodName: methodName,
methodArguments: methodArguments
});
}
};
if (get(Meteor, 'settings.public.logging') === "debug") {
console.log('Meteor.startup().newLoginMethod', newLoginMethod);
}
Accounts.callLoginMethod(newLoginMethod);
}); // Send an OAuth login method to the server. If the user authorized
// access in the popup this should log the user in, otherwise
// nothing should happen.
Accounts.oauth.tryLoginAfterPopupClosed = function (credentialToken, callback) {
if (get(Meteor, 'settings.public.logging') === "debug") {
console.log('C9. Trying login now that the popup is closed.', credentialToken);
}
var credentialSecret = OAuth._retrieveCredentialSecret(credentialToken) || null;
Accounts.callLoginMethod({
methodArguments: [{
oauth: {
credentialToken: credentialToken,
credentialSecret: credentialSecret
}
}],
userCallback: callback && function (err) {
callback(convertError(err));
}
});
};
Accounts.oauth.credentialRequestCompleteHandler = function (callback) {
if (get(Meteor, 'settings.public.logging') === "debug") {
    console.log('C4. Attempting to handle credential request completion.');
}
return function (credentialTokenOrError) {
if (credentialTokenOrError && credentialTokenOrError instanceof Error) {
callback && callback(credentialTokenOrError);
} else {
Accounts.oauth.tryLoginAfterPopupClosed(credentialTokenOrError, callback);
}
};
};
//////////////////////////////////////////////////////////////////////////////////////////////////
}}}}},{
"extensions": [
".js",
".json"
]
});
require("/node_modules/meteor/clinical:accounts-oauth/oauth_common.js");
require("/node_modules/meteor/clinical:accounts-oauth/oauth_client.js");
/* Exports */
Package._define("clinical:accounts-oauth");
})();
| clinical-meteor/meteor-on-fhir | cordova-build-release/www/application/packages/clinical_accounts-oauth.js | JavaScript | agpl-3.0 | 10,784 |
# frozen_string_literal: true
# This migration comes from decidim (originally 20210310120640)
class AddFollowableCounterCacheToUsers < ActiveRecord::Migration[5.2]
def change
add_column :decidim_users, :follows_count, :integer, null: false, default: 0, index: true
reversible do |dir|
dir.up do
Decidim::User.reset_column_information
Decidim::User.find_each do |record|
record.class.reset_counters(record.id, :follows)
end
end
end
end
end
| diputacioBCN/decidim-diba | db/migrate/20220203073236_add_followable_counter_cache_to_users.decidim.rb | Ruby | agpl-3.0 | 503 |
//#############################################################################
//# #
//# Copyright (C) <2015> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//# IMS MAXIMS provides absolutely NO GUARANTEE OF THE CLINICAL SAFETY of     #
//# this program. Users of this software do so entirely at their own risk. #
//# IMS MAXIMS only ensures the Clinical Safety of unaltered run-time #
//# software that it builds, deploys and maintains. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5589.25814)
// Copyright (C) 1995-2015 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.RefMan.forms.booktheatreslot;
public abstract class BaseLogic extends Handlers
{
public final Class getDomainInterface() throws ClassNotFoundException
{
return ims.RefMan.domain.BookTheatreSlot.class;
}
public final void setContext(ims.framework.UIEngine engine, GenForm form, ims.RefMan.domain.BookTheatreSlot domain)
{
setContext(engine, form);
this.domain = domain;
}
protected final void oncmbAnaesTypeValueSet(Object value)
{
java.util.ArrayList listOfValues = this.form.lyr1().tabSearch().cmbAnaesType().getValues();
if(value == null)
{
if(listOfValues != null && listOfValues.size() > 0)
{
for(int x = 0; x < listOfValues.size(); x++)
{
ims.clinical.vo.lookups.AnaestheticType existingInstance = (ims.clinical.vo.lookups.AnaestheticType)listOfValues.get(x);
if(!existingInstance.isActive())
{
bindcmbAnaesTypeLookup();
return;
}
}
}
}
else if(value instanceof ims.clinical.vo.lookups.AnaestheticType)
{
ims.clinical.vo.lookups.AnaestheticType instance = (ims.clinical.vo.lookups.AnaestheticType)value;
if(listOfValues != null)
{
if(listOfValues.size() == 0)
bindcmbAnaesTypeLookup();
for(int x = 0; x < listOfValues.size(); x++)
{
ims.clinical.vo.lookups.AnaestheticType existingInstance = (ims.clinical.vo.lookups.AnaestheticType)listOfValues.get(x);
if(existingInstance.equals(instance))
return;
}
}
this.form.lyr1().tabSearch().cmbAnaesType().newRow(instance, instance.getText(), instance.getImage(), instance.getTextColor());
}
}
protected final void bindcmbAnaesTypeLookup()
{
this.form.lyr1().tabSearch().cmbAnaesType().clear();
ims.clinical.vo.lookups.AnaestheticTypeCollection lookupCollection = ims.clinical.vo.lookups.LookupHelper.getAnaestheticType(this.domain.getLookupService());
for(int x = 0; x < lookupCollection.size(); x++)
{
this.form.lyr1().tabSearch().cmbAnaesType().newRow(lookupCollection.get(x), lookupCollection.get(x).getText(), lookupCollection.get(x).getImage(), lookupCollection.get(x).getTextColor());
}
}
protected final void setcmbAnaesTypeLookupValue(int id)
{
ims.clinical.vo.lookups.AnaestheticType instance = ims.clinical.vo.lookups.LookupHelper.getAnaestheticTypeInstance(this.domain.getLookupService(), id);
if(instance != null)
this.form.lyr1().tabSearch().cmbAnaesType().setValue(instance);
}
protected final void defaultcmbAnaesTypeLookupValue()
{
this.form.lyr1().tabSearch().cmbAnaesType().setValue((ims.clinical.vo.lookups.AnaestheticType)domain.getLookupService().getDefaultInstance(ims.clinical.vo.lookups.AnaestheticType.class, engine.getFormName().getID(), ims.clinical.vo.lookups.AnaestheticType.TYPE_ID));
}
protected final void oncmbListTypeValueSet(Object value)
{
java.util.ArrayList listOfValues = this.form.lyr1().tabSearch().cmbListType().getValues();
if(value == null)
{
if(listOfValues != null && listOfValues.size() > 0)
{
for(int x = 0; x < listOfValues.size(); x++)
{
ims.scheduling.vo.lookups.ProfileListType existingInstance = (ims.scheduling.vo.lookups.ProfileListType)listOfValues.get(x);
if(!existingInstance.isActive())
{
bindcmbListTypeLookup();
return;
}
}
}
}
else if(value instanceof ims.scheduling.vo.lookups.ProfileListType)
{
ims.scheduling.vo.lookups.ProfileListType instance = (ims.scheduling.vo.lookups.ProfileListType)value;
if(listOfValues != null)
{
if(listOfValues.size() == 0)
bindcmbListTypeLookup();
for(int x = 0; x < listOfValues.size(); x++)
{
ims.scheduling.vo.lookups.ProfileListType existingInstance = (ims.scheduling.vo.lookups.ProfileListType)listOfValues.get(x);
if(existingInstance.equals(instance))
return;
}
}
this.form.lyr1().tabSearch().cmbListType().newRow(instance, instance.getText(), instance.getImage(), instance.getTextColor());
}
}
protected final void bindcmbListTypeLookup()
{
this.form.lyr1().tabSearch().cmbListType().clear();
ims.scheduling.vo.lookups.ProfileListTypeCollection lookupCollection = ims.scheduling.vo.lookups.LookupHelper.getProfileListType(this.domain.getLookupService());
for(int x = 0; x < lookupCollection.size(); x++)
{
this.form.lyr1().tabSearch().cmbListType().newRow(lookupCollection.get(x), lookupCollection.get(x).getText(), lookupCollection.get(x).getImage(), lookupCollection.get(x).getTextColor());
}
}
protected final void setcmbListTypeLookupValue(int id)
{
ims.scheduling.vo.lookups.ProfileListType instance = ims.scheduling.vo.lookups.LookupHelper.getProfileListTypeInstance(this.domain.getLookupService(), id);
if(instance != null)
this.form.lyr1().tabSearch().cmbListType().setValue(instance);
}
protected final void defaultcmbListTypeLookupValue()
{
this.form.lyr1().tabSearch().cmbListType().setValue((ims.scheduling.vo.lookups.ProfileListType)domain.getLookupService().getDefaultInstance(ims.scheduling.vo.lookups.ProfileListType.class, engine.getFormName().getID(), ims.scheduling.vo.lookups.ProfileListType.TYPE_ID));
}
protected final void oncmbTheatreTypeValueSet(Object value)
{
java.util.ArrayList listOfValues = this.form.lyr1().tabSearch().cmbTheatreType().getValues();
if(value == null)
{
if(listOfValues != null && listOfValues.size() > 0)
{
for(int x = 0; x < listOfValues.size(); x++)
{
ims.scheduling.vo.lookups.TheatreType existingInstance = (ims.scheduling.vo.lookups.TheatreType)listOfValues.get(x);
if(!existingInstance.isActive())
{
bindcmbTheatreTypeLookup();
return;
}
}
}
}
else if(value instanceof ims.scheduling.vo.lookups.TheatreType)
{
ims.scheduling.vo.lookups.TheatreType instance = (ims.scheduling.vo.lookups.TheatreType)value;
if(listOfValues != null)
{
if(listOfValues.size() == 0)
bindcmbTheatreTypeLookup();
for(int x = 0; x < listOfValues.size(); x++)
{
ims.scheduling.vo.lookups.TheatreType existingInstance = (ims.scheduling.vo.lookups.TheatreType)listOfValues.get(x);
if(existingInstance.equals(instance))
return;
}
}
this.form.lyr1().tabSearch().cmbTheatreType().newRow(instance, instance.getText(), instance.getImage(), instance.getTextColor());
}
}
protected final void bindcmbTheatreTypeLookup()
{
this.form.lyr1().tabSearch().cmbTheatreType().clear();
ims.scheduling.vo.lookups.TheatreTypeCollection lookupCollection = ims.scheduling.vo.lookups.LookupHelper.getTheatreType(this.domain.getLookupService());
for(int x = 0; x < lookupCollection.size(); x++)
{
this.form.lyr1().tabSearch().cmbTheatreType().newRow(lookupCollection.get(x), lookupCollection.get(x).getText(), lookupCollection.get(x).getImage(), lookupCollection.get(x).getTextColor());
}
}
protected final void setcmbTheatreTypeLookupValue(int id)
{
ims.scheduling.vo.lookups.TheatreType instance = ims.scheduling.vo.lookups.LookupHelper.getTheatreTypeInstance(this.domain.getLookupService(), id);
if(instance != null)
this.form.lyr1().tabSearch().cmbTheatreType().setValue(instance);
}
protected final void defaultcmbTheatreTypeLookupValue()
{
this.form.lyr1().tabSearch().cmbTheatreType().setValue((ims.scheduling.vo.lookups.TheatreType)domain.getLookupService().getDefaultInstance(ims.scheduling.vo.lookups.TheatreType.class, engine.getFormName().getID(), ims.scheduling.vo.lookups.TheatreType.TYPE_ID));
}
public final void free()
{
super.free();
domain = null;
}
protected ims.RefMan.domain.BookTheatreSlot domain;
}
| FreudianNM/openMAXIMS | Source Library/openmaxims_workspace/RefMan/src/ims/RefMan/forms/booktheatreslot/BaseLogic.java | Java | agpl-3.0 | 9,878 |
/*File: tagsService.js
*
* Copyright (c) 2013-2016
* Centre National d’Enseignement à Distance (Cned), Boulevard Nicephore Niepce, 86360 CHASSENEUIL-DU-POITOU, France
* (direction-innovation@cned.fr)
*
* GNU Affero General Public License (AGPL) version 3.0 or later version
*
* This file is part of a program which is free software: you can
* redistribute it and/or modify it under the terms of the
* GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public
* License along with this program.
* If not, see <http://www.gnu.org/licenses/>.
*
*/
'use strict';
angular.module('cnedApp').service('tagsService', function ($http, $uibModal, CacheProvider, configuration) {
this.getTags = function () {
return $http.get(configuration.BASE_URL + '/readTags').then(function (result) {
return CacheProvider.setItem(result.data, 'listTags').then(function () {
return result.data;
});
}, function () {
return CacheProvider.getItem('listTags');
});
};
this.openEditModal = function (mode, tag) {
return $uibModal.open({
templateUrl: 'views/tag/edit-tag.modal.html',
controller: 'EditTagModalCtrl',
size: 'lg',
resolve: {
mode: function () {
return mode;
},
tag: function () {
return tag;
}
}
}).result;
};
});
| cnedDI/AccessiDys | app/scripts/services/tagsService.js | JavaScript | agpl-3.0 | 1,918 |
/*
* Copyright 2011 Witoslaw Koczewsi <wi@koczewski.de>, Artjom Kochtchi
*
* This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero
* General Public License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the
* implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public
* License for more details.
*
* You should have received a copy of the GNU General Public License along with this program. If not, see
* <http://www.gnu.org/licenses/>.
*/
package scrum.server.css;
import ilarkesto.core.logging.Log;
import ilarkesto.io.DynamicClassLoader;
import ilarkesto.ui.web.CssBuilder;
import ilarkesto.webapp.RequestWrapper;
import java.io.IOException;
import scrum.server.ScrumWebApplication;
import scrum.server.WebSession;
import scrum.server.common.AKunagiServlet;
public class CssServlet extends AKunagiServlet {
private static final Log LOG = Log.get(CssServlet.class);
private static final long serialVersionUID = 1;
private transient final KunagiCssBuilder screenCssBuilder = new KunagiCssBuilder();
@Override
protected void onRequest(RequestWrapper<WebSession> req) throws IOException {
req.setContentTypeCss();
CssBuilder css = new CssBuilder(req.getWriter());
ICssBuilder builder = getCssBuilder();
builder.buildCss(css);
css.flush();
// LOG.debug(builder);
}
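	// Note (added): in development mode getCssBuilder() loads KunagiCssBuilder
	// through a fresh DynamicClassLoader on every request, so CSS changes are
	// picked up without restarting the webapp; in production the cached
	// screenCssBuilder instance is reused.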
private ICssBuilder getCssBuilder() {
if (ScrumWebApplication.get().isDevelopmentMode()) {
ClassLoader loader = new DynamicClassLoader(getClass().getClassLoader(), KunagiCssBuilder.class.getName());
Class<? extends ICssBuilder> type;
try {
type = (Class<? extends ICssBuilder>) loader.loadClass(KunagiCssBuilder.class.getName());
return type.newInstance();
} catch (Throwable ex) {
LOG.fatal(ex);
throw new RuntimeException(ex);
}
} else {
return screenCssBuilder;
}
}
}
| JavierPeris/kunagi | src/main/java/scrum/server/css/CssServlet.java | Java | agpl-3.0 | 2,099 |
#include "InformationParser.h"
InformationParser::InformationParser()
{
}
bool InformationParser::readJSON()
{
//Place Function Code Here
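    // A minimal sketch of what a real implementation might do, assuming the
    // project can use Qt's JSON classes (an assumption, not confirmed here;
    // the file name is purely illustrative):
    //   QFile file("information.json");
    //   if (!file.open(QIODevice::ReadOnly))
    //       return false;
    //   QJsonDocument doc = QJsonDocument::fromJson(file.readAll());
    //   return !doc.isNull();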
return true;
}
| UCSolarCarTeam/Recruit-Resources | Recruit-Training/Advanced-Recruit-Training/Viscomm-Teaser/Viscomm-Teaser-Training/src/InformationParser.cpp | C++ | agpl-3.0 | 164 |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package qa.qcri.aidr.dbmanager.ejb.remote.facade.imp;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.Date;
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.Persistence;
import org.apache.log4j.Logger;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import qa.qcri.aidr.common.exception.PropertyNotSetException;
import qa.qcri.aidr.dbmanager.dto.CollectionDTO;
import qa.qcri.aidr.dbmanager.dto.CrisisTypeDTO;
import qa.qcri.aidr.dbmanager.dto.DocumentDTO;
import qa.qcri.aidr.dbmanager.dto.DocumentNominalLabelDTO;
import qa.qcri.aidr.dbmanager.dto.DocumentNominalLabelIdDTO;
import qa.qcri.aidr.dbmanager.dto.UsersDTO;
/**
*
* @author nalemadi
*/
public class TestDocumentNominalLabelResourceFacadeImp {
static DocumentNominalLabelResourceFacadeImp documentNominalLabelResourceFacadeImp;
static DocumentResourceFacadeImp documentResourceFacadeImp;
static EntityManager entityManager;
static CrisisTypeResourceFacadeImp crisisTypeResourceFacadeImp;
static UsersResourceFacadeImp userResourceFacadeImp;
static CollectionResourceFacadeImp crisisResourceFacadeImp;
static DocumentDTO document;
static CollectionDTO crisis;
static UsersDTO user;
static DocumentNominalLabelDTO documentNominalLabel;
static NominalLabelResourceFacadeImp nominalLabelResourceFacadeImp;
private static Logger logger = Logger.getLogger("db-manager-log");
@BeforeClass
public static void setUpClass() {
documentNominalLabelResourceFacadeImp = new DocumentNominalLabelResourceFacadeImp();
documentResourceFacadeImp = new DocumentResourceFacadeImp();
entityManager = Persistence.createEntityManagerFactory(
"ProjectDBManagerTest-ejbPU").createEntityManager();
documentNominalLabelResourceFacadeImp.setEntityManager(entityManager);
documentResourceFacadeImp.setEntityManager(entityManager);
crisisTypeResourceFacadeImp = new CrisisTypeResourceFacadeImp();
userResourceFacadeImp = new UsersResourceFacadeImp();
crisisResourceFacadeImp = new CollectionResourceFacadeImp();
crisisResourceFacadeImp.setEntityManager(entityManager);
crisisTypeResourceFacadeImp.setEntityManager(entityManager);
userResourceFacadeImp.setEntityManager(entityManager);
nominalLabelResourceFacadeImp = new NominalLabelResourceFacadeImp();
nominalLabelResourceFacadeImp.setEntityManager(entityManager);
document = addDocument();
}
@AfterClass
public static void tearDownClass() {
if (document != null) {
entityManager.getTransaction().begin();
documentResourceFacadeImp.deleteDocument(document);
entityManager.getTransaction().commit();
if (crisis != null) {
entityManager.getTransaction().begin();
try {
crisisResourceFacadeImp.deleteCrisis(crisis);
} catch (PropertyNotSetException e) {
e.printStackTrace();
}
entityManager.getTransaction().commit();
}
}
try {
if (user != null) {
entityManager.getTransaction().begin();
user = userResourceFacadeImp.getUserByName(user.getName());
userResourceFacadeImp.deleteUser(user.getUserID());
entityManager.getTransaction().commit();
}
}catch (PropertyNotSetException e) {
logger.error("PropertyNotSetException while deleting user "+e.getMessage());
}
documentNominalLabelResourceFacadeImp.getEntityManager().close();
}
@Before
public void setUp() {
try {
documentNominalLabel = getDocumentNominalLabel();
documentNominalLabel = documentNominalLabelResourceFacadeImp
.addDocument(documentNominalLabel);
} catch (PropertyNotSetException e) {
logger.error("PropertyNotSetException while adding document nominal label "+e.getMessage());
}
}
@After
public void tearDown() {
if (documentNominalLabel != null) {
documentNominalLabelResourceFacadeImp
.deleteDocument(documentNominalLabel);
}
}
private static DocumentNominalLabelDTO getDocumentNominalLabel() {
DocumentNominalLabelDTO documentNominalLabel = new DocumentNominalLabelDTO();
DocumentNominalLabelIdDTO idDTO = new DocumentNominalLabelIdDTO();
idDTO.setUserId(1L);
idDTO.setDocumentId(document.getDocumentID());
idDTO.setNominalLabelId(1L);
documentNominalLabel.setIdDTO(idDTO);
return documentNominalLabel;
}
private static DocumentDTO addDocument() {
DocumentDTO documentDTO = new DocumentDTO();
CrisisTypeDTO crisisTypeDTO = crisisTypeResourceFacadeImp.findCrisisTypeByID(1100L);
user = new UsersDTO("userDBTest"+new Date(), "normal"+new Date());
entityManager.getTransaction().begin();
user = userResourceFacadeImp.addUser(user);
entityManager.getTransaction().commit();
CollectionDTO crisisDTO = new CollectionDTO("testCrisisName"+new Date(), "testCrisisCode"+new Date(), false, false, crisisTypeDTO, user, user);
entityManager.getTransaction().begin();
crisis = crisisResourceFacadeImp.addCrisis(crisisDTO);
entityManager.getTransaction().commit();
String tweet = "\"filter_level\":\"medium\",\"retweeted\":false,\"in_reply_to_screen_name\":null,\"possibly_sensitive\":false,\"truncated\":false,\"lang\":\"en\",\"in_reply_to_status_id_str\":null,"
+ "\"id\":445125937915387905,\"in_reply_to_user_id_str\":null,\"in_reply_to_status_id\":null,\"created_at\":\"Sun Mar 16 09:14:28 +0000 2014\",\"favorite_count\":0,\"place\":null,\"coordinates\":null,"
+ "\"text\":\"'Those in the #cockpit' behind #missing #flight? http://t.co/OYHvM1t0CT\",\"contributors\":null,\"geo\":null,\"entities\":{\"hashtags\":[{\"text\":\"cockpit\",\"indices\":[14,22]},{\"text\":\"missing\","
+ "\"indices\":[31,39]},{\"text\":\"flight\",\"indices\":[40,47]}],\"symbols\":[],\"urls\":[{\"expanded_url\":\"http://www.cnn.com/2014/03/15/world/asia/malaysia-airlines-plane/index.html\""
+ ",\"indices\":[49,71],\"display_url\":\"cnn.com/2014/03/15/wor\u2026\",\"url\":\"http://t.co/OYHvM1t0CT\"}],\"user_mentions\":[]},\"aidr\":{\"crisis_code\":\"2014-03-mh370\""
+ ",\"doctype\":\"twitter\",\"crisis_name\":\"Malaysia Airlines flight #MH370\"},\"source\":\"\",\"favorited\":false,"
+ "\"retweet_count\":0,\"in_reply_to_user_id\":null,\"id_str\":\"445125937915387905\",\"user\":{\"location\":\"Mexico, Distrito Federal. \",\"default_profile\":true,\"statuses_count\":1033,"
+ "\"profile_background_tile\":false,\"lang\":\"en\",\"profile_link_color\":\"0084B4\",\"profile_banner_url\":\"https://pbs.twimg.com/profile_banners/135306436/1394809176\",\"id\":135306436,\"following\":null,"
+ "\"favourites_count\":6,\"protected\":false,\"profile_text_color\":\"333333\",\"description\":\"Licenciado en derecho, he ocupado cargos dentro de la industria privada as\u00ED como dentro de la Administraci\u00F3n P\u00FAblica, tanto local (GDF), como Federal.\","
+ "\"verified\":false,\"contributors_enabled\":false,\"profile_sidebar_border_color\":\"C0DEED\",\"name\":\"Leonardo Larraga\",\"profile_background_color\":\"C0DEED\",\"created_at\":\"Tue Apr 20 23:12:25 +0000 2010\","
+ "\"is_translation_enabled\":false,\"default_profile_image\":false,\"followers_count\":726,\"profile_image_url_https\":\"https://pbs.twimg.com/profile_images/440767007290429441/GkHsYcJj_normal.jpeg\","
+ "\"geo_enabled\":false,\"profile_background_image_url\":\"http://abs.twimg.com/images/themes/theme1/bg.png\",\"profile_background_image_url_https\":\"https://abs.twimg.com/images/themes/theme1/bg.png\","
+ "\"follow_request_sent\":null,\"url\":\"http://instagram.com/larraga_ld\",\"utc_offset\":-21600,\"time_zone\":\"Mexico City\",\"notifications\":null,\"friends_count\":150,\"profile_use_background_image\":true,"
+ "\"profile_sidebar_fill_color\":\"DDEEF6\",\"screen_name\":\"larraga_ld\",\"id_str\":\"135306436\",\"profile_image_url\":\"http://pbs.twimg.com/profile_images/440767007290429441/GkHsYcJj_normal.jpeg\","
+ "\"is_translator\":false,\"listed_count\":0}}";
String word = "{\"words\":[\"#prayformh370\"]}";
documentDTO.setCrisisDTO(crisis);
documentDTO.setHasHumanLabels(false);
documentDTO.setIsEvaluationSet(true);
documentDTO.setReceivedAt(new Date());
documentDTO.setLanguage("en");
documentDTO.setDoctype("Tweet");
documentDTO.setData(tweet);
documentDTO.setWordFeatures(word);
documentDTO.setValueAsTrainingSample(0.5);
entityManager.getTransaction().begin();
documentDTO = documentResourceFacadeImp.addDocument(documentDTO);
entityManager.getTransaction().commit();
return documentDTO;
}
/**
* Test of saveDocumentNominalLabel method, of class
* DocumentNominalLabelResourceFacadeImp.
*/
/*
* @Test public void testSaveDocumentNominalLabel() throws Exception {
* DocumentNominalLabelDTO
* documentNominalLabel = new DocumentNominalLabelDTO();
* documentNominalLabel.setDocumentDTO(document);
*
* DocumentNominalLabelIdDTO documentNominalLabelIdDTO = new
* DocumentNominalLabelIdDTO(); documentNominalLabelIdDTO.setUserId(1L);
* documentNominalLabelIdDTO.setDocumentId(document.getDocumentID());
* documentNominalLabelIdDTO.setNominalLabelId(1L);
* documentNominalLabel.setNominalLabelDTO
* (nominalLabelResourceFacadeImp.getNominalLabelByID(1L));
* documentNominalLabelResourceFacadeImp
* .saveDocumentNominalLabel(documentNominalLabel); }
*/
/**
* Test of foundDuplicate method, of class
* DocumentNominalLabelResourceFacadeImp.
*/
/*
* @Test public void testFoundDuplicate() { try {
* DocumentNominalLabelDTO
* documentNominalLabel =
* documentNominalLabelResourceFacadeImp.getAllDocuments().get(0); boolean
* result =
* documentNominalLabelResourceFacadeImp.foundDuplicate(documentNominalLabel
* ); assertEquals(false, result); } catch (PropertyNotSetException ex) {
* fail("foundDuplicate failed");
* //Logger.getLogger(DocumentNominalLabelResourceFacadeImpTest
* .class.getName()).log(Level.SEVERE, null, ex); } }
*/
/**
* Test of addDocument method, of class
* DocumentNominalLabelResourceFacadeImp.
*/
@Test
public void testAddDocument() throws Exception {
assertEquals(document.getDocumentID(), documentNominalLabel.getIdDTO()
.getDocumentId());
}
/**
* Test of editDocument method, of class
* DocumentNominalLabelResourceFacadeImp.
*/
/*
* @Test public void testEditDocument() throws Exception {
* documentNominalLabel =
* getDocumentNominalLabel(); entityManager.getTransaction().begin();
* documentNominalLabel =
* documentNominalLabelResourceFacadeImp.addDocument(documentNominalLabel);
* entityManager.getTransaction().commit(); Date date = new Date();
* documentNominalLabel.setTimestamp(date);
* entityManager.getTransaction().begin(); documentNominalLabel =
* documentNominalLabelResourceFacadeImp.editDocument(documentNominalLabel);
* entityManager.getTransaction().commit(); assertEquals(date,
* documentNominalLabel.getTimestamp()); }
*/
/**
* Test of deleteDocument method, of class
* DocumentNominalLabelResourceFacadeImp.
*/
@Test
public void testDeleteDocument() throws Exception {
Integer result = documentNominalLabelResourceFacadeImp
.deleteDocument(documentNominalLabel);
assertEquals(Integer.valueOf(1), result);
documentNominalLabel = null;
}
/**
* Test of findByCriteria method, of class
* DocumentNominalLabelResourceFacadeImp.
*/
@Test
public void testFindByCriteria() {
try {
String columnName = "id.documentId";
Long value = documentNominalLabel.getIdDTO().getDocumentId();
List<DocumentNominalLabelDTO> result = documentNominalLabelResourceFacadeImp
.findByCriteria(columnName, value);
assertNotNull(result);
assertEquals(value, result.get(0).getIdDTO().getDocumentId());
} catch (PropertyNotSetException ex) {
logger.error("PropertyNotSetException while finding document nominal label by criteria "+ex.getMessage());
fail("findByCriteria failed");
}
}
/**
* Test of findDocumentByPrimaryKey method, of class
* DocumentNominalLabelResourceFacadeImp.
*/
@Test
public void testFindDocumentByPrimaryKey() {
try {
DocumentNominalLabelDTO result = documentNominalLabelResourceFacadeImp
.findDocumentByPrimaryKey(documentNominalLabel.getIdDTO());
assertEquals(documentNominalLabel.getIdDTO().getDocumentId(),
result.getDocumentDTO().getDocumentID());
} catch (PropertyNotSetException ex) {
logger.error("PropertyNotSetException while finding document nominal label by primary key "+ex.getMessage());
fail("findDocumentByPrimaryKey failed");
}
}
/**
* Test of isDocumentExists method, of class
* DocumentNominalLabelResourceFacadeImp.
*/
@Test
public void testIsDocumentExists_DocumentNominalLabelIdDTO()
throws Exception {
boolean result = documentNominalLabelResourceFacadeImp
.isDocumentExists(documentNominalLabel.getIdDTO());
assertEquals(true, result);
}
/**
* Test of isDocumentExists method, of class
* DocumentNominalLabelResourceFacadeImp.
*/
@Test
public void testIsDocumentExists_Long() throws Exception {
boolean result = documentNominalLabelResourceFacadeImp
.isDocumentExists(documentNominalLabel.getIdDTO());
assertEquals(true, result);
boolean result2 = documentNominalLabelResourceFacadeImp
.isDocumentExists(documentNominalLabel.getIdDTO()
.getDocumentId());
assertEquals(true, result2);
}
/**
* Test of getAllDocuments method, of class
* DocumentNominalLabelResourceFacadeImp.
*/
@Test
public void testGetAllDocuments() {
try {
List<DocumentNominalLabelDTO> result = documentNominalLabelResourceFacadeImp
.getAllDocuments();
assertNotNull(result);
assertTrue(result.size() >= 1);
} catch (PropertyNotSetException ex) {
logger.error("PropertyNotSetException while fetching all document nominal label "+ex.getMessage());
fail("getAllDocuments failed");
}
}
/**
* Test of findLabeledDocumentByID method, of class
* DocumentNominalLabelResourceFacadeImp.
*/
@Test
public void testFindLabeledDocumentByID() {
try {
DocumentNominalLabelDTO result = documentNominalLabelResourceFacadeImp
.findLabeledDocumentByID(documentNominalLabel.getIdDTO()
.getDocumentId());
assertEquals(documentNominalLabel.getIdDTO().getDocumentId(),
result.getDocumentDTO().getDocumentID());
} catch (PropertyNotSetException ex) {
logger.error("PropertyNotSetException while finding labelled documents by id "+ex.getMessage());
fail("findLabeledDocumentByID failed");
}
}
/**
* Test of getLabeledDocumentCollectionForNominalLabel method, of class
* DocumentNominalLabelResourceFacadeImp.
*/
@Test
public void testGetLabeledDocumentCollectionForNominalLabel()
throws Exception {
List<DocumentNominalLabelDTO> result = documentNominalLabelResourceFacadeImp
.getLabeledDocumentCollectionForNominalLabel(documentNominalLabel
.getIdDTO().getNominalLabelId().intValue());
assertNotNull(result);
assertTrue(result.size() >= 1);
}
}
| qcri-social/Crisis-Computing | aidr-db-manager/src/test/java/qa/qcri/aidr/dbmanager/ejb/remote/facade/imp/TestDocumentNominalLabelResourceFacadeImp.java | Java | agpl-3.0 | 15,577 |
#!/usr/bin/env python
import argparse
import os
import sys
from loomengine import server
from loomengine import verify_has_connection_settings, \
get_server_url, verify_server_is_running, get_token
from loomengine_utils.connection import Connection
from loomengine_utils.exceptions import LoomengineUtilsError
class RunLabelAdd(object):
"""Add a new run labels
"""
def __init__(self, args=None, silent=False):
# Args may be given as an input argument for testing purposes
# or from the main parser.
# Otherwise get them from the parser.
if args is None:
args = self._get_args()
self.args = args
self.silent = silent
verify_has_connection_settings()
server_url = get_server_url()
verify_server_is_running(url=server_url)
self.connection = Connection(server_url, token=get_token())
def _get_args(self):
self.parser = self.get_parser()
return self.parser.parse_args()
@classmethod
def get_parser(cls, parser=None):
# If called from main, use the subparser provided.
# Otherwise create a top-level parser here.
if parser is None:
parser = argparse.ArgumentParser(__file__)
parser.add_argument(
'target',
metavar='TARGET',
help='identifier for run to be labeled')
parser.add_argument(
'label',
metavar='LABEL', help='label name to be added')
return parser
def run(self):
try:
runs = self.connection.get_run_index(
min=1, max=1,
query_string=self.args.target)
except LoomengineUtilsError as e:
raise SystemExit("ERROR! Failed to get run list: '%s'" % e)
label_data = {'label': self.args.label}
try:
label = self.connection.post_run_label(runs[0]['uuid'], label_data)
except LoomengineUtilsError as e:
raise SystemExit("ERROR! Failed to create label: '%s'" % e)
if not self.silent:
print 'Target "%s@%s" has been labeled as "%s"' % \
(runs[0].get('name'),
runs[0].get('uuid'),
label.get('label'))
class RunLabelRemove(object):
"""Remove a run label
"""
def __init__(self, args=None, silent=False):
if args is None:
args = self._get_args()
self.args = args
self.silent = silent
verify_has_connection_settings()
server_url = get_server_url()
verify_server_is_running(url=server_url)
self.connection = Connection(server_url, token=get_token())
def _get_args(self):
self.parser = self.get_parser()
return self.parser.parse_args()
@classmethod
def get_parser(cls, parser=None):
# If called from main, use the subparser provided.
# Otherwise create a top-level parser here.
if parser is None:
parser = argparse.ArgumentParser(__file__)
parser.add_argument(
'target',
metavar='TARGET',
help='identifier for run to be unlabeled')
parser.add_argument(
'label',
metavar='LABEL', help='label name to be removed')
return parser
def run(self):
try:
runs = self.connection.get_run_index(
min=1, max=1,
query_string=self.args.target)
except LoomengineUtilsError as e:
raise SystemExit("ERROR! Failed to get run list: '%s'" % e)
label_data = {'label': self.args.label}
try:
label = self.connection.remove_run_label(
runs[0]['uuid'], label_data)
except LoomengineUtilsError as e:
raise SystemExit("ERROR! Failed to remove label: '%s'" % e)
if not self.silent:
print 'Label %s has been removed from run "%s@%s"' % \
(label.get('label'),
runs[0].get('name'),
runs[0].get('uuid'))
class RunLabelList(object):
def __init__(self, args=None, silent=False):
if args is None:
args = self._get_args()
self.args = args
self.silent = silent
verify_has_connection_settings()
server_url = get_server_url()
verify_server_is_running(url=server_url)
self.connection = Connection(server_url, token=get_token())
def _get_args(self):
self.parser = self.get_parser()
return self.parser.parse_args()
@classmethod
def get_parser(cls, parser=None):
# If called from main, use the subparser provided.
# Otherwise create a top-level parser here.
if parser is None:
parser = argparse.ArgumentParser(__file__)
parser.add_argument(
'target',
metavar='TARGET',
nargs='?',
help='show labels only for the specified run')
return parser
def run(self):
if self.args.target:
try:
runs = self.connection.get_run_index(
min=1, max=1,
query_string=self.args.target)
except LoomengineUtilsError as e:
raise SystemExit("ERROR! Failed to get run list: '%s'" % e)
try:
label_data = self.connection.list_run_labels(runs[0]['uuid'])
except LoomengineUtilsError as e:
raise SystemExit("ERROR! Failed to get label list: '%s'" % e)
labels = label_data.get('labels', [])
if not self.silent:
print '[showing %s labels]' % len(labels)
for label in labels:
print label
else:
try:
label_list = self.connection.get_run_label_index()
except LoomengineUtilsError as e:
raise SystemExit("ERROR! Failed to get label list: '%s'" % e)
label_counts = {}
for item in label_list:
label_counts.setdefault(item.get('label'), 0)
label_counts[item.get('label')] += 1
if not self.silent:
print '[showing %s labels]' % len(label_counts)
for key in label_counts:
print "%s (%s)" % (key, label_counts[key])
class RunLabel(object):
"""Configures and executes subcommands under "label" on the parent parser.
"""
def __init__(self, args=None, silent=False):
if args is None:
args = self._get_args()
self.args = args
self.silent = silent
def _get_args(self):
parser = self.get_parser()
return parser.parse_args()
@classmethod
def get_parser(cls, parser=None):
# If called from main, a subparser should be provided.
# Otherwise we create a top-level parser here.
if parser is None:
parser = argparse.ArgumentParser(__file__)
subparsers = parser.add_subparsers()
add_subparser = subparsers.add_parser(
'add', help='add a run label')
RunLabelAdd.get_parser(add_subparser)
add_subparser.set_defaults(SubSubSubcommandClass=RunLabelAdd)
remove_subparser = subparsers.add_parser(
'remove', help='remove a run label')
RunLabelRemove.get_parser(remove_subparser)
remove_subparser.set_defaults(SubSubSubcommandClass=RunLabelRemove)
list_subparser = subparsers.add_parser(
'list', help='list run labels')
RunLabelList.get_parser(list_subparser)
list_subparser.set_defaults(SubSubSubcommandClass=RunLabelList)
return parser
def run(self):
return self.args.SubSubSubcommandClass(
self.args, silent=self.silent).run()
if __name__ == '__main__':
response = RunLabel().run()
| StanfordBioinformatics/loom | client/loomengine/run_label.py | Python | agpl-3.0 | 7,876 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Addons modules by CLEARCORP S.A.
# Copyright (C) 2009-TODAY CLEARCORP S.A. (<http://clearcorp.co.cr>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
import openerp.addons.decimal_precision as dp
class stock_picking(osv.osv):
_inherit = 'stock.picking'
def action_invoice_create(self, cr, uid, ids, journal_id=False,
group=False, type='out_invoice', context=None):
obj_bud_mov = self.pool.get('budget.move')
obj_bud_line = self.pool.get('budget.move.line')
purchase_line_obj = self.pool.get('purchase.order.line')
invoice_obj = self.pool.get('account.invoice')
purchase_obj = self.pool.get('purchase.order')
invoice_line_obj = self.pool.get('account.invoice.line')
invoices= super(stock_picking, self).action_invoice_create(cr, uid, ids, journal_id=journal_id, group=group, type=type, context=context)
        res = {'res': invoices}
for picking in res.keys():
invoice_id = res[picking]
invoice = invoice_obj.browse(cr, uid, invoice_id, context=context)
for invoice_line in invoice.invoice_line:
#purchase_order_line_invoice_rel
cr.execute('''SELECT order_line_id FROM purchase_order_line_invoice_rel \
WHERE invoice_id = %s''',(invoice_line.id,))
count = cr.fetchall()
for po_line_id in count:
po_line = purchase_line_obj.browse(cr, uid, [po_line_id[0]], context=context)
asoc_bud_line_id = obj_bud_line.search(cr, uid, [('po_line_id','=',po_line.id), ])[0]
obj_bud_line.write(cr, uid, [asoc_bud_line_id],{'inv_line_id': invoice_line.id}, context=context)
move_id = po_line.order_id.budget_move_id.id
invoice_obj.write(cr, uid, invoice_id, {'budget_move_id': move_id, 'from_order':True}, context=context)
obj_bud_mov.signal_workflow(cr, uid, [move_id], 'button_execute', context=context)
return invoices
| ClearCorp/odoo-clearcorp | TODO-9.0/budget/stock.py | Python | agpl-3.0 | 3,050 |
<?php
/**
* @file classes/file/wrappers/HTTPFileWrapper.inc.php
*
* Copyright (c) 2000-2012 John Willinsky
* Distributed under the GNU GPL v2. For full terms see the file docs/COPYING.
*
* @package file.wrappers
* @ingroup file_wrappers
*
* Class providing a wrapper for the HTTP protocol.
* (for when allow_url_fopen is disabled).
*
* $Id$
*/
class HTTPFileWrapper extends FileWrapper {
var $headers;
var $defaultPort;
var $defaultHost;
var $defaultPath;
var $redirects;
var $proxyHost;
var $proxyPort;
var $proxyUsername;
var $proxyPassword;
function HTTPFileWrapper($url, &$info, $redirects = 5) {
parent::FileWrapper($url, $info);
$this->setDefaultPort(80);
$this->setDefaultHost('localhost');
$this->setDefaultPath('/');
$this->redirects = 5;
$this->proxyHost = Config::getVar('proxy', 'http_host');
$this->proxyPort = Config::getVar('proxy', 'http_port');
$this->proxyUsername = Config::getVar('proxy', 'proxy_username');
$this->proxyPassword = Config::getVar('proxy', 'proxy_password');
}
function setDefaultPort($port) {
$this->defaultPort = $port;
}
function setDefaultHost($host) {
$this->defaultHost = $host;
}
function setDefaultPath($path) {
$this->defaultPath = $path;
}
function addHeader($name, $value) {
if (!isset($this->headers)) {
$this->headers = array();
}
$this->headers[$name] = $value;
}
function open($mode = 'r') {
$realHost = $host = isset($this->info['host']) ? $this->info['host'] : $this->defaultHost;
$port = isset($this->info['port']) ? (int)$this->info['port'] : $this->defaultPort;
$path = isset($this->info['path']) ? $this->info['path'] : $this->defaultPath;
if (isset($this->info['query'])) $path .= '?' . $this->info['query'];
if (!empty($this->proxyHost)) {
$realHost = $host;
$host = $this->proxyHost;
$port = $this->proxyPort;
if (!empty($this->proxyUsername)) {
$this->headers['Proxy-Authorization'] = 'Basic ' . base64_encode($this->proxyUsername . ':' . $this->proxyPassword);
}
}
if (!($this->fp = fsockopen($host, $port, $errno, $errstr)))
return false;
$additionalHeadersString = '';
if (is_array($this->headers)) foreach ($this->headers as $name => $value) {
$additionalHeadersString .= "$name: $value\r\n";
}
$requestHost = preg_replace("!^.*://!", "", $realHost);
$request = 'GET ' . (empty($this->proxyHost)?$path:$this->url) . " HTTP/1.0\r\n" .
"Host: $requestHost\r\n" .
$additionalHeadersString .
"Connection: Close\r\n\r\n";
fwrite($this->fp, $request);
$response = fgets($this->fp, 4096);
$rc = 0;
sscanf($response, "HTTP/%*s %u %*[^\r\n]\r\n", $rc);
if ($rc == 200) {
while(fgets($this->fp, 4096) !== "\r\n");
return true;
}
if(preg_match('!^3\d\d$!', $rc) && $this->redirects >= 1) {
for($response = '', $time = time(); !feof($this->fp) && $time >= time() - 15; ) $response .= fgets($this->fp, 128);
if (preg_match('!^(?:(?:Location)|(?:URI)|(?:location)): ([^\s]+)[\r\n]!m', $response, $matches)) {
$this->close();
$location = $matches[1];
if (preg_match('!^[a-z]+://!', $location)) {
$this->url = $location;
} else {
$newPath = ($this->info['path'] !== '' && strpos($location, '/') !== 0 ? dirname($this->info['path']) . '/' : (strpos($location, '/') === 0 ? '' : '/')) . $location;
$this->info['path'] = $newPath;
$this->url = $this->glue_url($this->info);
}
$returner =& FileWrapper::wrapper($this->url);
$returner->redirects = $this->redirects - 1;
return $returner;
}
}
$this->close();
return false;
}
function glue_url ($parsed) {
// Thanks to php dot net at NOSPAM dot juamei dot com
// See http://www.php.net/manual/en/function.parse-url.php
if (! is_array($parsed)) return false;
$uri = isset($parsed['scheme']) ? $parsed['scheme'].':'.((strtolower($parsed['scheme']) == 'mailto') ? '':'//'): '';
$uri .= isset($parsed['user']) ? $parsed['user'].($parsed['pass']? ':'.$parsed['pass']:'').'@':'';
$uri .= isset($parsed['host']) ? $parsed['host'] : '';
$uri .= isset($parsed['port']) ? ':'.$parsed['port'] : '';
$uri .= isset($parsed['path']) ? $parsed['path'] : '';
$uri .= isset($parsed['query']) ? '?'.$parsed['query'] : '';
$uri .= isset($parsed['fragment']) ? '#'.$parsed['fragment'] : '';
return $uri;
}
}
?>
| ingmarschuster/MindResearchRepository | ojs-2.3.8/lib/pkp/classes/file/wrappers/HTTPFileWrapper.inc.php | PHP | agpl-3.0 | 4,336 |
describe('Markdown plugin', function() {
const Vue = require('vue');
Vue.use(require('plugins/markdown'));
Vue.config.async = false;
afterEach(function() {
fixture.cleanup();
});
/**
* Remove invisible nodes generated by Vue.js
*/
function strip(el) {
[...el.childNodes].forEach(function(node) {
const is_comment = node.nodeType === Node.COMMENT_NODE;
const is_empty_text = node.nodeType === Node.TEXT_NODE && !/\S/.test(node.nodeValue);
if (is_comment || is_empty_text) {
node.parentNode.removeChild(node);
}
});
return el;
}
describe('markdown filter', function() {
function el(text) {
const vm = new Vue({
el: fixture.set('<div>{{{text | markdown}}}</div>')[0],
replace: false,
data: {
text: text
}
});
return strip(vm.$el);
}
it('should render empty string as ""', function() {
      expect(el('').childNodes).to.be.empty;
});
it('should render null value as ""', function() {
expect(el(null).childNodes).to.be.empty;
});
it('should render undefined value as ""', function() {
expect(el(undefined).childNodes).to.be.empty;
});
it('should markdown content', function() {
expect(el('**aaa**')).to.have.html('<p><strong>aaa</strong></p>');
});
});
describe('markdown directive', function() {
function el(text) {
const vm = new Vue({
el: fixture.set('<div v-markdown="text"></div>')[0],
data: {
'text': text
}
});
return strip(vm.$el);
}
it('should render empty string as ""', function() {
      expect(el('').childNodes).to.be.empty;
});
it('should render null value as ""', function() {
expect(el(null).childNodes).to.be.empty;
});
it('should render undefined value as ""', function() {
expect(el(undefined).childNodes).to.be.empty;
});
it('should markdown content', function() {
expect(el('**aaa**')).to.have.html('<p><strong>aaa</strong></p>');
});
});
});
describe('Markdown backend compliance', function() {
const markdown = require('helpers/markdown').default;
/**
* An expect wrapper rendering the markdown
* and then allowing to perform chai-dom expectation on it
*/
function md(source) {
const div = document.createElement('div');
div.innerHTML = markdown(source);
return div;
}
it('should transform urls to anchors', function() {
const source = 'http://example.net/';
expect(md(source)).to.have.html('<p><a href="http://example.net/">http://example.net/</a></p>');
});
  it('should handle autolink', function() {
const source = '<http://example.net/>';
expect(md(source)).to.have.html('<p><a href="http://example.net/">http://example.net/</a></p>');
});
it('should not transform emails to anchors', function() {
const source = 'coucou@cmoi.fr';
expect(md(source)).to.have.html('<p>coucou@cmoi.fr</p>');
});
it('should not transform links within pre', function() {
const source = '<pre>http://example.net/</pre>';
expect(md(source)).to.have.html('<pre>http://example.net/</pre>');
});
it('should sanitize evil code', function() {
const source = 'an <script>evil()</script>';
expect(md(source)).to.have.html('<p>an <script>evil()</script></p>');
});
it('should handle soft breaks as <br/>', function() {
const source = 'line 1\nline 2';
expect(md(source)).to.have.html('<p>line 1<br>\nline 2</p>');
});
it('should properly render markdown tags not in allowed tags', function() {
const source = '### titre';
expect(md(source)).to.have.html('<h3>titre</h3>');
});
it('should render GFM tables (extension)', function() {
const source = [
'| first | second |',
'|-------|--------|',
'| value | value |',
].join('\n');
const expected = [
'<table>',
'<thead>',
'<tr>',
'<th>first</th>',
'<th>second</th>',
'</tr>',
'</thead>',
'<tbody>',
'<tr>',
'<td>value</td>',
'<td>value</td>',
'</tr>',
'</tbody>',
'</table>'
].join('\n');
expect(md(source)).to.have.html(expected);
});
it('should render GFM strikethrough (extension)', function() {
const source = 'Yay ~~Hi~~ Hello, world!';
const expected = '<p>Yay <del>Hi</del> Hello, world!</p>';
expect(md(source)).to.have.html(expected);
});
it('should handle GFM tagfilter extension', function() {
// Test extracted from https://github.github.com/gfm/#disallowed-raw-html-extension-
const source = [
'<strong> <title>My Title</title></strong>',
'<blockquote>',
' <xmp> is disallowed.</xmp> <XMP> is also disallowed.</XMP>',
'</blockquote>',
].join('\n')
const expected = [
'<p><strong> <title>My Title</title></strong></p>',
'<blockquote>',
' <xmp> is disallowed.</xmp> <XMP> is also disallowed.</XMP>',
'</blockquote>',
].join('\n')
expect(md(source)).to.have.html(expected)
});
it('should not filter legit markup', function() {
const source = [
'> This is a blockquote',
'> with <script>evil()</script> inside',
].join('\n');
const expected = [
'<blockquote>',
'<p>This is a blockquote<br>',
'with <script>evil()</script> inside</p>',
'</blockquote>',
].join('\n');
expect(md(source)).to.have.html(expected);
});
});
| opendatateam/udata | specs/plugins/markdown.specs.js | JavaScript | agpl-3.0 | 6,254 |
# -*- encoding : utf-8 -*-
module GrabName
  protected

  # Looks up only the name column of the record with the given id and returns
  # it, or '' if the lookup returns nil.
  def self.grab_name(id)
    e = self.find(id, :select => 'name')
    e.nil? ? '' : e.name
  end
end
| codeforeurope/samenspel | lib/grab_name.rb | Ruby | agpl-3.0 | 167 |
//#############################################################################
//# #
//# Copyright (C) <2015> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//# IMS MAXIMS provides absolutely NO GUARANTEE OF THE CLINICAL SAFTEY of #
//# this program. Users of this software do so entirely at their own risk. #
//# IMS MAXIMS only ensures the Clinical Safety of unaltered run-time #
//# software that it builds, deploys and maintains. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5589.25814)
// Copyright (C) 1995-2015 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.core.forms.nokdetails;
import java.io.Serializable;
import ims.framework.Context;
import ims.framework.FormName;
import ims.framework.FormAccessLogic;
public class BaseAccessLogic extends FormAccessLogic implements Serializable
{
private static final long serialVersionUID = 1L;
public final void setContext(Context context, FormName formName)
{
form = new CurrentForm(new GlobalContext(context), new CurrentForms());
engine = new CurrentEngine(formName);
}
public boolean isAccessible()
{
if(!form.getGlobalContext().Core.getParentFormModeIsNotNull())
return false;
return true;
}
public boolean isReadOnly()
{
return false;
}
public CurrentEngine engine;
public CurrentForm form;
public final static class CurrentForm implements Serializable
{
private static final long serialVersionUID = 1L;
CurrentForm(GlobalContext globalcontext, CurrentForms forms)
{
this.globalcontext = globalcontext;
this.forms = forms;
}
public final GlobalContext getGlobalContext()
{
return globalcontext;
}
public final CurrentForms getForms()
{
return forms;
}
private GlobalContext globalcontext;
private CurrentForms forms;
}
public final static class CurrentEngine implements Serializable
{
private static final long serialVersionUID = 1L;
CurrentEngine(FormName formName)
{
this.formName = formName;
}
public final FormName getFormName()
{
return formName;
}
private FormName formName;
}
public static final class CurrentForms implements Serializable
{
private static final long serialVersionUID = 1L;
protected final class LocalFormName extends FormName
{
private static final long serialVersionUID = 1L;
protected LocalFormName(int value)
{
super(value);
}
}
private CurrentForms()
{
}
}
}
| FreudianNM/openMAXIMS | Source Library/openmaxims_workspace/Core/src/ims/core/forms/nokdetails/BaseAccessLogic.java | Java | agpl-3.0 | 4,011 |
require "uri"
module Noosfero
module API
class Session < Grape::API
# Login to get token
#
# Parameters:
      #   login (required) - user login or email
# password (required) - user password
#
# Example Request:
# POST http://localhost:3000/api/v1/login?login=adminuser&password=admin
post "/login" do
begin
user ||= User.authenticate(params[:login], params[:password], environment)
rescue NoosferoExceptions::UserNotActivated => e
render_api_error!(e.message, 401)
end
return unauthorized! unless user
@current_user = user
present user, :with => Entities::UserLogin, :current_person => current_person
end
post "/login_from_cookie" do
return unauthorized! if cookies[:auth_token].blank?
user = User.where(remember_token: cookies[:auth_token]).first
return unauthorized! unless user && user.activated?
@current_user = user
present user, :with => Entities::UserLogin, :current_person => current_person
end
# Create user.
#
# Parameters:
# email (required) - Email
# password (required) - Password
# login - login
# Example Request:
# POST /register?email=some@mail.com&password=pas&password_confirmation=pas&login=some
params do
requires :email, type: String, desc: _("Email")
requires :login, type: String, desc: _("Login")
requires :password, type: String, desc: _("Password")
end
post "/register" do
attrs = attributes_for_keys [:email, :login, :password, :password_confirmation] + environment.signup_person_fields
name = params[:name].present? ? params[:name] : attrs[:email]
attrs[:password_confirmation] = attrs[:password] if !attrs.has_key?(:password_confirmation)
user = User.new(attrs.merge(:name => name))
begin
user.signup!
user.generate_private_token! if user.activated?
present user, :with => Entities::UserLogin, :current_person => user.person
rescue ActiveRecord::RecordInvalid
message = user.errors.as_json.merge((user.person.present? ? user.person.errors : {}).as_json).to_json
render_api_error!(message, 400)
end
end
params do
requires :activation_code, type: String, desc: _("Activation token")
end
# Activate a user.
#
# Parameter:
# activation_code (required) - Activation token
# Example Request:
# PATCH /activate?activation_code=28259abd12cc6a64ef9399cf3286cb998b96aeaf
patch "/activate" do
user = User.find_by activation_code: params[:activation_code]
if user
unless user.environment.enabled?('admin_must_approve_new_users')
if user.activate
user.generate_private_token!
present user, :with => Entities::UserLogin, :current_person => current_person
end
else
if user.create_moderate_task
user.activation_code = nil
user.save!
# Waiting for admin moderate user registration
status 202
body({
:message => 'Waiting for admin moderate user registration'
})
end
end
else
# Token not found in database
render_api_error!(_('Token is invalid'), 412)
end
end
# Request a new password.
#
# Parameters:
# value (required) - Email or login
# Example Request:
# POST /forgot_password?value=some@mail.com
post "/forgot_password" do
requestors = fetch_requestors(params[:value])
not_found! if requestors.blank?
remote_ip = (request.respond_to?(:remote_ip) && request.remote_ip) || (env && env['REMOTE_ADDR'])
requestors.each do |requestor|
ChangePassword.create!(:requestor => requestor)
end
end
# Resend activation code.
#
# Parameters:
# value (required) - Email or login
# Example Request:
# POST /resend_activation_code?value=some@mail.com
post "/resend_activation_code" do
requestors = fetch_requestors(params[:value])
not_found! if requestors.blank?
remote_ip = (request.respond_to?(:remote_ip) && request.remote_ip) || (env && env['REMOTE_ADDR'])
requestors.each do |requestor|
requestor.user.resend_activation_code
end
present requestors.map(&:user), :with => Entities::UserLogin
end
params do
requires :code, type: String, desc: _("Forgot password code")
end
# Change password
#
# Parameters:
# code (required) - Change password code
# password (required)
# password_confirmation (required)
# Example Request:
# PATCH /new_password?code=xxxx&password=secret&password_confirmation=secret
patch "/new_password" do
change_password = ChangePassword.find_by code: params[:code]
not_found! if change_password.nil?
if change_password.update_attributes(:password => params[:password], :password_confirmation => params[:password_confirmation])
change_password.finish
present change_password.requestor.user, :with => Entities::UserLogin, :current_person => current_person
else
something_wrong!
end
end
end
end
end
| tallysmartins/noosfero | lib/noosfero/api/session.rb | Ruby | agpl-3.0 | 5,667 |
using System;
using System.Collections.Generic;
using System.Text;
namespace Hybrasyl.Plugins
{
// TODO: interface
public class Message
{
public string Sender = string.Empty;
public string Recipient = string.Empty;
public Xml.MessageType Type { get; set; }
public string Text = string.Empty;
public string Subject = string.Empty;
public Message(Xml.MessageType type, string sender, string recipient, string subject, string body)
{
Type = type;
Sender = sender;
Recipient = recipient;
Text = body;
Subject = subject;
}
public Message() { }
}
}
| hybrasyl/server | hybrasyl/Plugins/Message.cs | C# | agpl-3.0 | 697 |
/* This file is part of VoltDB.
* Copyright (C) 2008-2014 VoltDB Inc.
*
* This file contains original code and/or modifications of original code.
* Any modifications made by VoltDB Inc. are licensed under the following
* terms and conditions:
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with VoltDB. If not, see <http://www.gnu.org/licenses/>.
*/
/* Copyright (C) 2008 by H-Store Project
* Brown University
* Massachusetts Institute of Technology
* Yale University
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
#include "parametervalueexpression.h"
#include "common/debuglog.h"
#include "common/valuevector.h"
#include "common/executorcontext.hpp"
#include "execution/VoltDBEngine.h"
namespace voltdb {
ParameterValueExpression::ParameterValueExpression(int value_idx)
: AbstractExpression(EXPRESSION_TYPE_VALUE_PARAMETER),
m_valueIdx(value_idx), m_paramValue()
{
VOLT_TRACE("ParameterValueExpression %d", value_idx);
ExecutorContext* context = ExecutorContext::getExecutorContext();
VoltDBEngine* engine = context->getEngine();
assert(engine != NULL);
NValueArray& params = engine->getParameterContainer();
assert(value_idx < params.size());
m_paramValue = ¶ms[value_idx];
};
}
| zheguang/voltdb | src/ee/expressions/parametervalueexpression.cpp | C++ | agpl-3.0 | 2,859 |
<?php
$TRANSLATIONS = array(
"Unable to load list from App Store" => "앱 스토어에서 목록을 가져올 수 없습니다",
"Authentication error" => "인증 오류",
"Group already exists" => "그룹이 이미 존재함",
"Unable to add group" => "그룹을 추가할 수 없음",
"Email saved" => "이메일 저장됨",
"Invalid email" => "잘못된 이메일 주소",
"Unable to delete group" => "그룹을 삭제할 수 없음",
"Unable to delete user" => "사용자를 삭제할 수 없음.",
"Language changed" => "언어가 변경됨",
"Invalid request" => "잘못된 요청",
"Admins can't remove themself from the admin group" => "관리자 자신을 관리자 그룹에서 삭제할 수 없음",
"Unable to add user to group %s" => "그룹 %s에 사용자를 추가할 수 없음",
"Unable to remove user from group %s" => "그룹 %s에서 사용자를 삭제할 수 없음",
"Couldn't update app." => "앱을 업데이트할 수 없습니다.",
"Wrong password" => "잘못된 비밀번호",
"Unable to change password" => "비밀번호를 변경하실수 없습니다",
"Update to {appversion}" => "버전 {appversion}(으)로 업데이트",
"Disable" => "비활성화",
"Enable" => "사용함",
"Please wait...." => "기다려 주십시오....",
"Error while disabling app" => "앱을 사용중지하는 도중 에러발생",
"Error while enabling app" => "앱을 사용토록 하는 중에 에러발생",
"Updating...." => "업데이트 중....",
"Error while updating app" => "앱을 업데이트하는 중 오류 발생",
"Error" => "오류",
"Update" => "업데이트",
"Updated" => "업데이트됨",
"Select a profile picture" => "프로필 사진 선택",
"Decrypting files... Please wait, this can take some time." => "파일 해독중... 잠시만 기다려주세요, 시간이 걸릴수 있습니다.",
"Saving..." => "저장 중...",
"deleted" => "삭제됨",
"undo" => "실행 취소",
"Unable to remove user" => "사용자를 삭제할 수 없음",
"Groups" => "그룹",
"Group Admin" => "그룹 관리자",
"Delete" => "삭제",
"add group" => "그룹 추가",
"A valid username must be provided" => "올바른 사용자 이름을 입력해야 함",
"Error creating user" => "사용자 생성 오류",
"A valid password must be provided" => "올바른 암호를 입력해야 함",
"__language_name__" => "한국어",
"Security Warning" => "보안 경고",
"Setup Warning" => "설정 경고",
"Your web server is not yet properly setup to allow files synchronization because the WebDAV interface seems to be broken." => "WebDAV 인터페이스가 제대로 작동하지 않습니다. 웹 서버에서 파일 동기화를 사용할 수 있도록 설정이 제대로 되지 않은 것 같습니다.",
"Module 'fileinfo' missing" => "모듈 'fileinfo'가 없음",
"The PHP module 'fileinfo' is missing. We strongly recommend to enable this module to get best results with mime-type detection." => "PHP 모듈 'fileinfo'가 존재하지 않습니다. MIME 형식 감지 결과를 향상시키기 위하여 이 모듈을 활성화하는 것을 추천합니다.",
"Locale not working" => "로캘이 작동하지 않음",
"Internet connection not working" => "인터넷에 연결할 수 없음",
"Cron" => "크론",
"Execute one task with each page loaded" => "개별 페이지를 불러올 때마다 실행",
"Sharing" => "공유",
"Enable Share API" => "공유 API 사용하기",
"Allow apps to use the Share API" => "앱에서 공유 API를 사용할 수 있도록 허용",
"Allow links" => "링크 허용",
"Allow users to share items to the public with links" => "사용자가 개별 항목의 링크를 공유할 수 있도록 허용",
"Allow public uploads" => "퍼블릭 업로드 허용",
"Allow resharing" => "재공유 허용",
"Allow users to share items shared with them again" => "사용자에게 공유된 항목을 다시 공유할 수 있도록 허용",
"Allow users to share with anyone" => "누구나와 공유할 수 있도록 허용",
"Allow users to only share with users in their groups" => "사용자가 속해 있는 그룹의 사용자와만 공유할 수 있도록 허용",
"Allow mail notification" => "메일 알림을 허용",
"Allow user to send mail notification for shared files" => "사용자에게 공유 파일에 대한 메일 알림을 허용합니다",
"Security" => "보안",
"Enforce HTTPS" => "HTTPS 강제 사용",
"Log" => "로그",
"Log level" => "로그 단계",
"More" => "더 중요함",
"Less" => "덜 중요함",
"Version" => "버전",
"Developed by the <a href=\"http://ownCloud.org/contact\" target=\"_blank\">ownCloud community</a>, the <a href=\"https://github.com/owncloud\" target=\"_blank\">source code</a> is licensed under the <a href=\"http://www.gnu.org/licenses/agpl-3.0.html\" target=\"_blank\"><abbr title=\"Affero General Public License\">AGPL</abbr></a>." => "<a href=\"http://ownCloud.org/contact\" target=\"_blank\">ownCloud 커뮤니티</a>에 의해서 개발되었습니다. <a href=\"https://github.com/owncloud\" target=\"_blank\">원본 코드</a>는 <a href=\"http://www.gnu.org/licenses/agpl-3.0.html\" target=\"_blank\"><abbr title=\"Affero General Public License\">AGPL</abbr></a>에 따라 사용이 허가됩니다.",
"Add your App" => "내 앱 추가",
"More Apps" => "더 많은 앱",
"Select an App" => "앱 선택",
"See application page at apps.owncloud.com" => "apps.owncloud.com에 있는 앱 페이지를 참고하십시오",
"<span class=\"licence\"></span>-licensed by <span class=\"author\"></span>" => "<span class=\"licence\"></span>-라이선스됨: <span class=\"author\"></span>",
"User Documentation" => "사용자 문서",
"Administrator Documentation" => "관리자 문서",
"Online Documentation" => "온라인 문서",
"Forum" => "포럼",
"Bugtracker" => "버그 트래커",
"Commercial Support" => "상업용 지원",
"Get the apps to sync your files" => "파일 동기화 앱 가져오기",
"Show First Run Wizard again" => "첫 실행 마법사 다시 보이기",
"You have used <strong>%s</strong> of the available <strong>%s</strong>" => "현재 공간 중 <strong>%s</strong>/<strong>%s</strong>을(를) 사용 중입니다",
"Password" => "암호",
"Your password was changed" => "암호가 변경되었습니다",
"Unable to change your password" => "암호를 변경할 수 없음",
"Current password" => "현재 암호",
"New password" => "새 암호",
"Change password" => "암호 변경",
"Email" => "이메일",
"Your email address" => "이메일 주소",
"Fill in an email address to enable password recovery" => "암호 찾기 기능을 사용하려면 이메일 주소를 입력하십시오",
"Profile picture" => "프로필 사진",
"Upload new" => "새로이 업로드하기",
"Select new from Files" => "파일에서 선택",
"Remove image" => "그림 삭제",
"Abort" => "대하여",
"Choose as profile image" => "프로필 사진을 선택해주세요",
"Language" => "언어",
"Help translate" => "번역 돕기",
"WebDAV" => "WebDAV",
"Encryption" => "암호화",
"Log-in password" => "로그인 비밀번호",
"Decrypt all Files" => "모든 파일 해독",
"Login Name" => "로그인 이름",
"Create" => "만들기",
"Admin Recovery Password" => "관리자 복구 암호",
"Enter the recovery password in order to recover the users files during password change" => "암호 변경 시 변경된 사용자 파일을 복구하려면 복구 암호를 입력하십시오",
"Default Storage" => "기본 저장소",
"Unlimited" => "무제한",
"Other" => "기타",
"Username" => "사용자 이름",
"Storage" => "저장소",
"set new password" => "새 암호 설정",
"Default" => "기본값"
);
$PLURAL_FORMS = "nplurals=1; plural=0;";
| ipit-international/learning-environment | owncloud/settings/l10n/ko.php | PHP | agpl-3.0 | 7,680 |
package io.scrollback.neighborhoods.bundle;
import android.support.annotation.NonNull;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.math.BigInteger;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
public class Checksum {
public static String MD5(@NonNull InputStream stream) throws IOException, NoSuchAlgorithmException {
MessageDigest md = MessageDigest.getInstance("MD5");
byte[] buffer = new byte[8192];
int numOfBytesRead;
while ((numOfBytesRead = stream.read(buffer)) > 0) {
md.update(buffer, 0, numOfBytesRead);
}
byte[] hash = md.digest();
return String.format("%032x", new BigInteger(1, hash));
}
public static String MD5(@NonNull File file) throws IOException, NoSuchAlgorithmException {
InputStream stream = new FileInputStream(file);
try {
return MD5(stream);
} finally {
stream.close();
}
}
}
| wesley1001/io.scrollback.neighborhoods | android/app/src/main/java/io/scrollback/neighborhoods/bundle/Checksum.java | Java | agpl-3.0 | 1,065 |
/*!
* CanJS - 2.2.9
* http://canjs.com/
* Copyright (c) 2015 Bitovi
* Fri, 11 Sep 2015 23:12:43 GMT
* Licensed MIT
*/
/*can@2.2.9#util/hashchange*/
define(['can/util/can'], function (can) {
(function () {
var addEvent = function (el, ev, fn) {
if (el.addEventListener) {
el.addEventListener(ev, fn, false);
} else if (el.attachEvent) {
el.attachEvent('on' + ev, fn);
} else {
el['on' + ev] = fn;
}
}, onHashchange = function () {
can.trigger(window, 'hashchange');
};
addEvent(window, 'hashchange', onHashchange);
}());
});
| et304383/passbolt_api | app/webroot/js/lib/can/dist/amd/can/hashchange.js | JavaScript | agpl-3.0 | 719 |
<?php
if(!defined('sugarEntry') || !sugarEntry) die('Not A Valid Entry Point');
/*********************************************************************************
* SugarCRM Community Edition is a customer relationship management program developed by
* SugarCRM, Inc. Copyright (C) 2004-2011 SugarCRM Inc.
*
* This program is free software; you can redistribute it and/or modify it under
* the terms of the GNU Affero General Public License version 3 as published by the
* Free Software Foundation with the addition of the following permission added
* to Section 15 as permitted in Section 7(a): FOR ANY PART OF THE COVERED WORK
* IN WHICH THE COPYRIGHT IS OWNED BY SUGARCRM, SUGARCRM DISCLAIMS THE WARRANTY
* OF NON INFRINGEMENT OF THIRD PARTY RIGHTS.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
* details.
*
* You should have received a copy of the GNU Affero General Public License along with
* this program; if not, see http://www.gnu.org/licenses or write to the Free
* Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301 USA.
*
* You can contact SugarCRM, Inc. headquarters at 10050 North Wolfe Road,
* SW2-130, Cupertino, CA 95014, USA. or at email address contact@sugarcrm.com.
*
* The interactive user interfaces in modified source and object code versions
* of this program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU Affero General Public License version 3.
*
* In accordance with Section 7(b) of the GNU Affero General Public License version 3,
* these Appropriate Legal Notices must retain the display of the "Powered by
* SugarCRM" logo. If the display of the logo is not reasonably feasible for
* technical reasons, the Appropriate Legal Notices must display the words
* "Powered by SugarCRM".
********************************************************************************/
/*********************************************************************************
 * Description: This file handles the database functionality for the application.
 * It acts as the DB abstraction layer for the application. It depends on helper classes
 * which generate the necessary SQL. This SQL is then passed to PEAR DB classes.
* The helper class is chosen in DBManagerFactory, which is driven by 'db_type' in 'dbconfig' under config.php.
*
* All the functions in this class will work with any bean which implements the meta interface.
* The passed bean is passed to helper class which uses these functions to generate correct sql.
*
* The meta interface has the following functions:
* getTableName() Returns table name of the object.
* getFieldDefinitions() Returns a collection of field definitions in order.
* getFieldDefintion(name) Return field definition for the field.
* getFieldValue(name) Returns the value of the field identified by name.
* If the field is not set, the function will return boolean FALSE.
* getPrimaryFieldDefinition() Returns the field definition for primary key
*
* The field definition is an array with the following keys:
*
* name This represents name of the field. This is a required field.
* type This represents type of the field. This is a required field and valid values are:
* int
* long
* varchar
* text
* date
* datetime
* double
* float
* uint
* ulong
* time
* short
* enum
* length This is used only when the type is varchar and denotes the length of the string.
* The max value is 255.
* enumvals This is a list of valid values for an enum separated by "|".
 *           It is used only if the type is 'enum'.
 * required  This field dictates whether it is a required value.
 *           The default value is 'FALSE'.
 * isPrimary This field identifies the primary key of the table.
 *           If none of the fields have this flag set to 'TRUE',
 *           the first field definition is assumed to be the primary key.
 *           Default value for this field is 'FALSE'.
* default This field sets the default value for the field definition.
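 *
 * For illustration, a field definition collection of this shape (the field
 * names, lengths and enum values below are hypothetical) might look like:
 *
 *   array(
 *       array('name' => 'id',     'type' => 'varchar', 'length' => 36,  'isPrimary' => true, 'required' => true),
 *       array('name' => 'name',   'type' => 'varchar', 'length' => 255, 'required' => true),
 *       array('name' => 'status', 'type' => 'enum',    'enumvals' => 'new|closed', 'default' => 'new'),
 *       array('name' => 'date_entered', 'type' => 'datetime'),
 *   )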
*
*
* Portions created by SugarCRM are Copyright (C) SugarCRM, Inc.
* All Rights Reserved.
* Contributor(s): ______________________________________..
********************************************************************************/
class MssqlManager extends DBManager
{
/**
* @see DBManager::$dbType
*/
public $dbType = 'mssql';
/**
* @see DBManager::$backendFunctions
*/
protected $backendFunctions = array(
'free_result' => 'mssql_free_result',
'close' => 'mssql_close',
'row_count' => 'mssql_num_rows'
);
/**
* @see DBManager::connect()
*/
public function connect(
array $configOptions = null,
$dieOnError = false
)
{
global $sugar_config;
if (is_null($configOptions))
$configOptions = $sugar_config['dbconfig'];
//SET DATEFORMAT to 'YYYY-MM-DD''
ini_set('mssql.datetimeconvert', '0');
//set the text size and textlimit to max number so that blob columns are not truncated
ini_set('mssql.textlimit','2147483647');
ini_set('mssql.textsize','2147483647');
//set the connections parameters
$connect_param = '';
$configOptions['db_host_instance'] = trim($configOptions['db_host_instance']);
if (empty($configOptions['db_host_instance']))
$connect_param = $configOptions['db_host_name'];
else
$connect_param = $configOptions['db_host_name']."\\".$configOptions['db_host_instance'];
//create persistent connection
if ($sugar_config['dbconfigoption']['persistent'] == true) {
$this->database =@mssql_pconnect(
$connect_param ,
$configOptions['db_user_name'],
$configOptions['db_password']
);
}
//if no persistent connection created, then create regular connection
if(!$this->database){
$this->database = mssql_connect(
$connect_param ,
$configOptions['db_user_name'],
$configOptions['db_password']
);
if(!$this->database){
$GLOBALS['log']->fatal("Could not connect to server ".$configOptions['db_host_name'].
" as ".$configOptions['db_user_name'].".");
sugar_die($GLOBALS['app_strings']['ERR_NO_DB']);
}
if($this->database && $sugar_config['dbconfigoption']['persistent'] == true){
$_SESSION['administrator_error'] = "<B>Severe Performance Degradation: Persistent Database Connections "
. "not working. Please set \$sugar_config['dbconfigoption']['persistent'] to false in your "
. "config.php file</B>";
}
}
//make sure connection exists
if(!$this->database){
sugar_die($GLOBALS['app_strings']['ERR_NO_DB']);
}
//select database
        //Adding sleep and retry for mssql connection. We have come across scenarios when
        //an error is thrown: 'Unable to select database'. The following will try to connect to
        //the mssql db a maximum of 5 times at intervals of .2 seconds. If it cannot connect,
        //it will throw an 'Unable to select database' message.
if(!@mssql_select_db($configOptions['db_name'], $this->database)){
$connected = false;
for($i=0;$i<5;$i++){
usleep(200000);
if(@mssql_select_db($configOptions['db_name'], $this->database)){
$connected = true;
break;
}
}
if(!$connected){
$GLOBALS['log']->fatal( "Unable to select database {$configOptions['db_name']}");
sugar_die($GLOBALS['app_strings']['ERR_NO_DB']);
}
}
if($this->checkError('Could Not Connect', $dieOnError))
$GLOBALS['log']->info("connected to db");
$GLOBALS['log']->info("Connect:".$this->database);
}
/**
* @see DBManager::version()
*/
public function version()
{
return $this->getOne("SELECT @@VERSION as version");
}
/**
* @see DBManager::checkError()
*/
public function checkError(
$msg = '',
$dieOnError = false
)
{
if (parent::checkError($msg, $dieOnError))
return true;
$sqlmsg = mssql_get_last_message();
$sqlpos = strpos($sqlmsg, 'Changed database context to');
$sqlpos2 = strpos($sqlmsg, 'Warning:');
$sqlpos3 = strpos($sqlmsg, 'Checking identity information:');
if ( $sqlpos !== false || $sqlpos2 !== false || $sqlpos3 !== false )
$sqlmsg = ''; // empty out sqlmsg if its either of the two error messages described above
else {
global $app_strings;
//ERR_MSSQL_DB_CONTEXT: localized version of 'Changed database context to' message
if (empty($app_strings) or !isset($app_strings['ERR_MSSQL_DB_CONTEXT'])) {
//ignore the message from sql-server if $app_strings array is empty. This will happen
//only if connection if made before languge is set.
$GLOBALS['log']->debug("Ignoring this database message: " . $sqlmsg);
$sqlmsg = '';
}
else {
$sqlpos = strpos($sqlmsg, $app_strings['ERR_MSSQL_DB_CONTEXT']);
if ( $sqlpos !== false )
$sqlmsg = '';
}
}
if ( strlen($sqlmsg) > 2 ) {
$GLOBALS['log']->fatal("$msg: SQL Server error: " . $sqlmsg);
return true;
}
return false;
}
/**
* @see DBManager::query()
*/
public function query(
$sql,
$dieOnError = false,
$msg = '',
$suppress = false
)
{
// Flag if there are odd number of single quotes
if ((substr_count($sql, "'") & 1))
$GLOBALS['log']->error("SQL statement[" . $sql . "] has odd number of single quotes.");
$this->countQuery($sql);
$GLOBALS['log']->info('Query:' . $sql);
$this->checkConnection();
$this->query_time = microtime(true);
// Bug 34892 - Clear out previous error message by checking the @@ERROR global variable
$errorNumberHandle = mssql_query("SELECT @@ERROR",$this->database);
$errorNumber = array_shift(mssql_fetch_row($errorNumberHandle));
if ($suppress) {
}
else {
$result = @mssql_query($sql, $this->database);
}
if (!$result) {
// awu Bug 10657: ignoring mssql error message 'Changed database context to' - an intermittent
// and difficult to reproduce error. The message is only a warning, and does
// not affect the functionality of the query
$sqlmsg = mssql_get_last_message();
$sqlpos = strpos($sqlmsg, 'Changed database context to');
$sqlpos2 = strpos($sqlmsg, 'Warning:');
$sqlpos3 = strpos($sqlmsg, 'Checking identity information:');
if ($sqlpos !== false || $sqlpos2 !== false || $sqlpos3 !== false) // if sqlmsg has 'Changed database context to', just log it
$GLOBALS['log']->debug($sqlmsg . ": " . $sql );
else {
$GLOBALS['log']->fatal($sqlmsg . ": " . $sql );
if($dieOnError)
sugar_die('SQL Error : ' . $sqlmsg);
else
echo 'SQL Error : ' . $sqlmsg;
}
}
$this->lastmysqlrow = -1;
$this->query_time = microtime(true) - $this->query_time;
$GLOBALS['log']->info('Query Execution Time:'.$this->query_time);
$this->checkError($msg.' Query Failed: ' . $sql, $dieOnError);
return $result;
}
/**
* This function take in the sql for a union query, the start and offset,
* and wraps it around an "mssql friendly" limit query
*
* @param string $sql
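     * For instance (hypothetical tables and columns), with $start = 20 and
     * $count = 10, a statement such as
     *   SELECT id, name FROM a UNION SELECT id, name FROM b ORDER BY name
     * is rewritten along the lines of
     *   select top 10 * from( select ROW_NUMBER() OVER ( order by name) AS row_number, *
     *   from (select id, name from a union select id, name from b) As numbered)
     *   As top_count_limit WHERE row_number > 20 order by name
     *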
* @param int $start record to start at
* @param int $count number of records to retrieve
* @return string SQL statement
*/
private function handleUnionLimitQuery(
$sql,
$start,
$count
)
{
//set the start to 0, no negs
if ($start < 0)
$start=0;
$GLOBALS['log']->debug(print_r(func_get_args(),true));
$this->lastsql = $sql;
//change the casing to lower for easier string comparison, and trim whitespaces
$sql = strtolower(trim($sql)) ;
//set default sql
$limitUnionSQL = $sql;
$order_by_str = 'order by';
//make array of order by's. substring approach was proving too inconsistent
$orderByArray = explode($order_by_str, $sql);
$unionOrderBy = '';
$rowNumOrderBy = '';
//count the number of array elements
$unionOrderByCount = count($orderByArray);
$arr_count = 0;
//process if there are elements
if ($unionOrderByCount){
            //we really want the last order by, so reconstruct string
            //adding a 1 to count, as we don't wish to process the last element
$unionsql = '';
while ($unionOrderByCount>$arr_count+1) {
$unionsql .= $orderByArray[$arr_count];
$arr_count = $arr_count+1;
//add an "order by" string back if we are coming into loop again
//remember they were taken out when array was created
if ($unionOrderByCount>$arr_count+1) {
$unionsql .= "order by";
}
}
            //grab the last order by element and set both order by strings
$unionOrderBy = $orderByArray[$arr_count];
$rowNumOrderBy = $unionOrderBy;
//if last element contains a "select", then this is part of the union query,
//and there is no order by to use
if (strpos($unionOrderBy, "select")) {
$unionsql = $sql;
                //with no guidance on what to use for required order by in rownumber function,
                //resort to using the id column.
                $rowNumOrderBy = 'id';
$unionOrderBy = "";
}
}
else {
//there are no order by elements, so just pass back string
$unionsql = $sql;
            //with no guidance on what to use for required order by in rownumber function,
            //resort to using the id column.
            $rowNumOrderBy = 'id';
$unionOrderBy = '';
}
        //Unions need the column name being sorted on to match across all queries in the Union statement
//so we do not want to strip the alias like in other queries. Just add the "order by" string and
//pass column name as is
if ($unionOrderBy != '') {
$unionOrderBy = ' order by ' . $unionOrderBy;
}
//if start is 0, then just use a top query
if($start == 0) {
$limitUnionSQL = "select top $count * from (" .$unionsql .") as top_count ".$unionOrderBy;
}
else {
//if start is more than 0, then use top query in conjunction
//with rownumber() function to create limit query.
$limitUnionSQL = "select top $count * from( select ROW_NUMBER() OVER ( order by "
.$rowNumOrderBy.") AS row_number, * from ("
.$unionsql .") As numbered) "
. "As top_count_limit WHERE row_number > $start "
.$unionOrderBy;
}
return $limitUnionSQL;
}
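    /*
     * Illustrative sketch (editor's addition, not part of the original API): given a
     * UNION query and paging values, handleUnionLimitQuery() above wraps the statement
     * roughly as follows. The table and column names are placeholders.
     *
     *   $sql   = "SELECT id, name FROM accounts UNION SELECT id, name FROM leads ORDER BY name";
     *   $start = 20; $count = 10;
     *
     *   // produces approximately:
     *   //   select top 10 * from( select ROW_NUMBER() OVER ( order by name) AS row_number, *
     *   //     from (select id, name from accounts union select id, name from leads) As numbered)
     *   //   As top_count_limit WHERE row_number > 20 order by name
     */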
/**
* @see DBManager::limitQuery()
*/
public function limitQuery(
$sql,
$start,
$count,
$dieOnError = false,
$msg = '')
{
$newSQL = $sql;
$distinctSQLARRAY = array();
if (strpos($sql, "UNION") && !preg_match("/(\')(UNION).?(\')/i", $sql))
$newSQL = $this->handleUnionLimitQuery($sql,$start,$count);
else {
if ($start < 0)
$start = 0;
$GLOBALS['log']->debug(print_r(func_get_args(),true));
$this->lastsql = $sql;
$matches = array();
preg_match('/^(.*SELECT )(.*?FROM.*WHERE)(.*)$/isxU',$sql, $matches);
if (!empty($matches[3])) {
if ($start == 0) {
$match_two = strtolower($matches[2]);
if (!strpos($match_two, "distinct")> 0 && strpos($match_two, "distinct") !==0) {
//proceed as normal
$newSQL = $matches[1] . " TOP $count " . $matches[2] . $matches[3];
}
else {
$distinct_o = strpos($match_two, "distinct");
$up_to_distinct_str = substr($match_two, 0, $distinct_o);
//check to see if the distinct is within a function, if so, then proceed as normal
if (strpos($up_to_distinct_str,"(")) {
//proceed as normal
$newSQL = $matches[1] . " TOP $count " . $matches[2] . $matches[3];
}
else {
                        //if distinct is not within a function, then parse the string;
                        //it contains a distinct clause, and TOP needs to come after DISTINCT
//get position of distinct
$match_zero = strtolower($matches[0]);
$distinct_pos = strpos($match_zero , "distinct");
//get position of where
$where_pos = strpos($match_zero, "where");
//parse through string
$beg = substr($matches[0], 0, $distinct_pos+9 );
$mid = substr($matches[0], strlen($beg), ($where_pos+5) - (strlen($beg)));
$end = substr($matches[0], strlen($beg) + strlen($mid) );
//repopulate matches array
$matches[1] = $beg; $matches[2] = $mid; $matches[3] = $end;
$newSQL = $matches[1] . " TOP $count " . $matches[2] . $matches[3];
}
}
}
else {
$orderByMatch = array();
preg_match('/^(.*)(ORDER BY)(.*)$/is',$matches[3], $orderByMatch);
//if there is a distinct clause, parse sql string as we will have to insert the rownumber
//for paging, AFTER the distinct clause
$hasDistinct = strpos(strtolower($matches[0]), "distinct");
if ($hasDistinct) {
$matches_sql = strtolower($matches[0]);
//remove reference to distinct and select keywords, as we will use a group by instead
//we need to use group by because we are introducing rownumber column which would make every row unique
//take out the select and distinct from string so we can reuse in group by
$dist_str = ' distinct ';
$distinct_pos = strpos($matches_sql, $dist_str);
$matches_sql = substr($matches_sql,$distinct_pos+ strlen($dist_str));
//get the position of where and from for further processing
$from_pos = strpos($matches_sql , " from ");
$where_pos = strpos($matches_sql, "where");
//split the sql into a string before and after the from clause
//we will use the columns being selected to construct the group by clause
if ($from_pos>0 ) {
$distinctSQLARRAY[0] = substr($matches_sql,0, $from_pos+1);
$distinctSQLARRAY[1] = substr($matches_sql,$from_pos+1);
//get position of order by (if it exists) so we can strip it from the string
$ob_pos = strpos($distinctSQLARRAY[1], "order by");
if ($ob_pos) {
$distinctSQLARRAY[1] = substr($distinctSQLARRAY[1],0,$ob_pos);
}
                        // strip off last closing parenthesis from the where clause
$distinctSQLARRAY[1] = preg_replace("/\)\s$/"," ",$distinctSQLARRAY[1]);
}
//place group by string into array
$grpByArr = explode(',', $distinctSQLARRAY[0]);
$grpByStr = '';
$first = true;
                    //remove the aliases for each group by element, sql server doesn't like these in group by.
foreach ($grpByArr as $gb) {
$gb = trim($gb);
                        //clean out the extra stuff added if we are concatenating first_name and last_name together
                        //this way both fields are added in correctly to the group by
$gb = str_replace("isnull(","",$gb);
$gb = str_replace("'') + ' ' + ","",$gb);
//remove outer reference if they exist
if (strpos($gb,"'")!==false){
continue;
}
//if there is a space, then an alias exists, remove alias
if (strpos($gb,' ')){
$gb = substr( $gb, 0,strpos($gb,' '));
}
//if resulting string is not empty then add to new group by string
if (!empty($gb)) {
if ($first) {
$grpByStr .= " $gb";
$first = false;
}
else {
$grpByStr .= ", $gb";
}
}
}
}
if (!empty($orderByMatch[3])) {
//if there is a distinct clause, form query with rownumber after distinct
if ($hasDistinct) {
$newSQL = "SELECT TOP $count * FROM
(
SELECT ROW_NUMBER()
OVER (ORDER BY ".$this->returnOrderBy($sql, $orderByMatch[3]).") AS row_number,
count(*) counter, " . $distinctSQLARRAY[0] . "
" . $distinctSQLARRAY[1] . "
group by " . $grpByStr . "
) AS a
WHERE row_number > $start";
}
else {
$newSQL = "SELECT TOP $count * FROM
(
" . $matches[1] . " ROW_NUMBER()
OVER (ORDER BY " . $this->returnOrderBy($sql, $orderByMatch[3]) . ") AS row_number,
" . $matches[2] . $orderByMatch[1]. "
) AS a
WHERE row_number > $start";
}
}else{
//bug: 22231 Records in campaigns' subpanel may not come from
//table of $_REQUEST['module']. Get it directly from query
$upperQuery = strtoupper($matches[2]);
if (!strpos($upperQuery,"JOIN")){
$from_pos = strpos($upperQuery , "FROM") + 4;
$where_pos = strpos($upperQuery, "WHERE");
$tablename = trim(substr($upperQuery,$from_pos, $where_pos - $from_pos));
}else{
$tablename = $this->getTableNameFromModuleName($_REQUEST['module'],$sql);
}
//if there is a distinct clause, form query with rownumber after distinct
if ($hasDistinct) {
$newSQL = "SELECT TOP $count * FROM
(
SELECT ROW_NUMBER() OVER (ORDER BY ".$tablename.".id) AS row_number, count(*) counter, " . $distinctSQLARRAY[0] . "
" . $distinctSQLARRAY[1] . "
group by " . $grpByStr . "
)
AS a
WHERE row_number > $start";
}
else {
$newSQL = "SELECT TOP $count * FROM
(
" . $matches[1] . " ROW_NUMBER() OVER (ORDER BY ".$tablename.".id) AS row_number, " . $matches[2] . $matches[3]. "
)
AS a
WHERE row_number > $start";
}
}
}
}
}
$GLOBALS['log']->debug('Limit Query: ' . $newSQL);
$result = $this->query($newSQL, $dieOnError, $msg);
$this->dump_slow_queries($newSQL);
return $result;
}
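    /*
     * Illustrative sketch (editor's addition): a simple paged list query passed to
     * limitQuery() above is rewritten around ROW_NUMBER(). Table and column names
     * are placeholders and the output is approximate.
     *
     *   $db->limitQuery("SELECT contacts.id, contacts.last_name FROM contacts WHERE deleted = 0 ORDER BY last_name asc", 20, 10);
     *
     *   // executes something close to:
     *   //   SELECT TOP 10 * FROM (
     *   //       SELECT ROW_NUMBER() OVER (ORDER BY contacts.last_name) AS row_number,
     *   //              contacts.id, contacts.last_name FROM contacts WHERE deleted = 0
     *   //   ) AS a
     *   //   WHERE row_number > 20
     */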
/**
     * Searches for beginning and ending characters. It places the matched contents into
     * an array and replaces them in the original string with placeholders. This is used to account for
     * nested functions while aliasing column names
     *
     * @param string $p_sql SQL statement
     * @param string $strip_beg Beginning character
     * @param string $strip_end Ending character
     * @param string $patt Optional, prefix used to key the extracted patterns
     * @return array Extracted patterns, plus the modified SQL under the 'sql_string' key
*/
private function removePatternFromSQL(
$p_sql,
$strip_beg,
$strip_end,
$patt = 'patt')
{
//strip all single quotes out
$beg_sin = 0;
$sec_sin = 0;
$count = substr_count ( $p_sql, $strip_beg);
$increment = 1;
if ($strip_beg != $strip_end)
$increment = 2;
$i=0;
$offset = 0;
$strip_array = array();
while ($i<$count && $offset<strlen($p_sql)) {
if ($offset > strlen($p_sql))
{
break;
}
$beg_sin = strpos($p_sql, $strip_beg, $offset);
if (!$beg_sin)
{
break;
}
$sec_sin = strpos($p_sql, $strip_end, $beg_sin+1);
$strip_array[$patt.$i] = substr($p_sql, $beg_sin, $sec_sin - $beg_sin +1);
if ($increment > 1) {
//we are in here because beginning and end patterns are not identical, so search for nesting
$exists = strpos($strip_array[$patt.$i], $strip_beg );
if ($exists>=0) {
$nested_pos = (strrpos($strip_array[$patt.$i], $strip_beg ));
$strip_array[$patt.$i] = substr($p_sql,$nested_pos+$beg_sin,$sec_sin - ($nested_pos+$beg_sin)+1);
$p_sql = substr($p_sql, 0, $nested_pos+$beg_sin) . " ##". $patt.$i."## " . substr($p_sql, $sec_sin+1);
$i = $i + 1;
$beg_sin = $nested_pos;
continue;
}
}
$p_sql = substr($p_sql, 0, $beg_sin) . " ##". $patt.$i."## " . substr($p_sql, $sec_sin+1);
//move the marker up
$offset = $sec_sin+1;
$i = $i + 1;
}
$strip_array['sql_string'] = $p_sql;
return $strip_array;
}
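    /*
     * Illustrative sketch (editor's addition): removePatternFromSQL() above swaps
     * delimited fragments for ##-style placeholders so the statement can be split
     * safely. The values below are approximate.
     *
     *   $res = $this->removePatternFromSQL("isnull(accounts.name,'') as name", "(", ")", "par_");
     *   // $res['par_0']      is roughly "(accounts.name,'')"
     *   // $res['sql_string'] is roughly "isnull ##par_0##  as name"
     */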
/**
     * Re-inserts previously stripped patterns back into the given SQL token
*
* @param string $token
* @param array $pattern_array
* @return string
*/
private function addPatternToSQL(
$token,
array $pattern_array
)
{
        //restore the stripped patterns, most recently extracted first
$pattern_array = array_reverse($pattern_array);
foreach ($pattern_array as $key => $replace) {
$token = str_replace( "##".$key."##", $replace,$token);
}
return $token;
}
/**
* gets an alias from the sql statement
*
* @param string $sql
* @param string $alias
* @return string
*/
private function getAliasFromSQL(
$sql,
$alias
)
{
$matches = array();
preg_match('/^(.*SELECT)(.*?FROM.*WHERE)(.*)$/isU',$sql, $matches);
//parse all single and double quotes out of array
$sin_array = $this->removePatternFromSQL($matches[2], "'", "'","sin_");
$new_sql = array_pop($sin_array);
$dub_array = $this->removePatternFromSQL($new_sql, "\"", "\"","dub_");
$new_sql = array_pop($dub_array);
//search for parenthesis
$paren_array = $this->removePatternFromSQL($new_sql, "(", ")", "par_");
$new_sql = array_pop($paren_array);
        //all functions should be removed now, so split the string on commas
$mstr_sql_array = explode(",", $new_sql);
foreach($mstr_sql_array as $token ) {
if (strpos($token, $alias)) {
                //found token, add back the stripped patterns
$token = $this->addPatternToSQL($token, $paren_array);
$token = $this->addPatternToSQL($token, $dub_array);
$token = $this->addPatternToSQL($token, $sin_array);
//log and break out of this function
return $token;
}
}
return null;
}
/**
     * Finds the position of the order by column alias within the SQL statement
     *
     * @param string $sql
     * @param string $orderMatch
     * @return int|false Offset of the alias in the string, or false when no alias is found
*/
private function findColumnByAlias(
$sql,
$orderMatch
)
{
//change case to lowercase
$sql = strtolower($sql);
$patt = '/\s+'.trim($orderMatch).'\s*,/';
//check for the alias, it should contain comma, may contain space, \n, or \t
$matches = array();
preg_match($patt, $sql, $matches, PREG_OFFSET_CAPTURE);
$found_in_sql = isset($matches[0][1]) ? $matches[0][1] : false;
//set default for found variable
$found = $found_in_sql;
//if still no match found, then we need to parse through the string
if (!$found_in_sql){
//get count of how many times the match exists in string
$found_count = substr_count($sql, $orderMatch);
$i = 0;
$first_ = 0;
$len = strlen($orderMatch);
//loop through string as many times as there is a match
while ($found_count > $i) {
//get the first match
$found_in_sql = strpos($sql, $orderMatch,$first_);
//make sure there was a match
if($found_in_sql){
//grab the next 2 individual characters
$str_plusone = substr($sql,$found_in_sql + $len,1);
$str_plustwo = substr($sql,$found_in_sql + $len+1,1);
//if one of those characters is a comma, then we have our alias
if ($str_plusone === "," || $str_plustwo === ","){
//keep track of this position
$found = $found_in_sql;
}
}
//set the offset and increase the iteration counter
$first_ = $found_in_sql+$len;
$i = $i+1;
}
}
        //return $found; defaults have been set, so if no match was found it will be false
return $found;
}
/**
* Return the order by string to use in case the column has been aliased
*
* @param string $sql
* @param string $orig_order_match
* @return string
*/
private function returnOrderBy(
$sql,
$orig_order_match
)
{
$sql = strtolower($sql);
$orig_order_match = trim($orig_order_match);
if (strpos($orig_order_match, ".") != 0)
//this has a tablename defined, pass in the order match
return $orig_order_match;
//grab first space in order by
$firstSpace = strpos($orig_order_match, " ");
//split order by into column name and ascending/descending
$orderMatch = " " . strtolower(substr($orig_order_match, 0, $firstSpace));
$asc_desc = substr($orig_order_match,$firstSpace);
//look for column name as an alias in sql string
$found_in_sql = $this->findColumnByAlias($sql, $orderMatch);
if (!$found_in_sql) {
//check if this column needs the tablename prefixed to it
$orderMatch = ".".trim($orderMatch);
$colMatchPos = strpos($sql, $orderMatch);
if ($colMatchPos !== false) {
//grab sub string up to column name
$containsColStr = substr($sql,0, $colMatchPos);
//get position of first space, so we can grab table name
$lastSpacePos = strrpos($containsColStr, " ");
//use positions of column name, space before name, and length of column to find the correct column name
$col_name = substr($sql, $lastSpacePos, $colMatchPos-$lastSpacePos+strlen($orderMatch));
//bug 25485. When sorting by a custom field in Account List and then pressing NEXT >, system gives an error
$containsCommaPos = strpos($col_name, ",");
if($containsCommaPos !== false) {
$col_name = substr($col_name, $containsCommaPos+1);
}
//return column name
return $col_name;
}
//break out of here, log this
$GLOBALS['log']->debug("No match was found for order by, pass string back untouched as: $orig_order_match");
return $orig_order_match;
}
else {
//if found, then parse and return
//grab string up to the aliased column
$GLOBALS['log']->debug("order by found, process sql string");
$psql = (trim($this->getAliasFromSQL($sql, $orderMatch )));
if (empty($psql))
$psql = trim(substr($sql, 0, $found_in_sql));
            //grab the last space before the alias
            $comma_pos = strrpos($psql, " ");
            //substring up to that space to find the joined_table alias and column name
$col_name = substr($psql,0, $comma_pos);
//make sure the string does not have an end parenthesis
//and is not part of a function (i.e. "ISNULL(leads.last_name,'') as name" )
//this is especially true for unified search from home screen
$alias_beg_pos = 0;
if(strpos($psql, " as "))
$alias_beg_pos = strpos($psql, " as ");
// Bug # 44923 - This breaks the query and does not properly filter isnull
// as there are other functions such as ltrim and rtrim.
/* else if (strncasecmp($psql, 'isnull', 6) != 0)
$alias_beg_pos = strpos($psql, " "); */
if ($alias_beg_pos > 0) {
$col_name = substr($psql,0, $alias_beg_pos );
}
//add the "asc/desc" order back
$col_name = $col_name. " ". $asc_desc;
//pass in new order by
$GLOBALS['log']->debug("order by being returned is " . $col_name);
return $col_name;
}
}
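    /*
     * Illustrative sketch (editor's addition, names are placeholders): when the ORDER BY
     * column is an alias, returnOrderBy() above resolves it back to the underlying
     * expression so it can be used inside ROW_NUMBER().
     *
     *   $sql = "SELECT isnull(leads.last_name,'') as name, leads.id FROM leads WHERE deleted = 0 ORDER BY name asc";
     *   $this->returnOrderBy($sql, " name asc");
     *   // returns roughly "isnull (leads.last_name, '' )  asc"
     */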
/**
* Take in a string of the module and retrieve the correspondent table name
*
* @param string $module_str module name
* @param string $sql SQL statement
* @return string table name
*/
private function getTableNameFromModuleName(
$module_str,
$sql
)
{
global $beanList, $beanFiles;
$GLOBALS['log']->debug("Module being processed is " . $module_str);
//get the right module files
        //if the module string exists in the bean list, then process the bean for the correct table name
//note that we exempt the reports module from this, as queries from reporting module should be parsed for
//correct table name.
if (($module_str != 'Reports' && $module_str != 'SavedReport') && isset($beanList[$module_str]) && isset($beanFiles[$beanList[$module_str]])){
//if the class is not already loaded, then load files
if (!class_exists($beanList[$module_str]))
require_once($beanFiles[$beanList[$module_str]]);
//instantiate new bean
$module_bean = new $beanList[$module_str]();
//get table name from bean
$tbl_name = $module_bean->table_name;
//make sure table name is not just a blank space, or empty
$tbl_name = trim($tbl_name);
if(empty($tbl_name)){
$GLOBALS['log']->debug("Could not find table name for module $module_str. ");
$tbl_name = $module_str;
}
}
else {
//since the module does NOT exist in beanlist, then we have to parse the string
//and grab the table name from the passed in sql
$GLOBALS['log']->debug("Could not find table name from module in request, retrieve from passed in sql");
$tbl_name = $module_str;
$sql = strtolower($sql);
//look for the location of the "from" in sql string
$fromLoc = strpos($sql," from " );
if ($fromLoc>0){
//found from, substring from the " FROM " string in sql to end
$tableEnd = substr($sql, $fromLoc+6);
//We know that tablename will be next parameter after from, so
//grab the next space after table name.
// MFH BUG #14009: Also check to see if there are any carriage returns before the next space so that we don't grab any arbitrary joins or other tables.
$carriage_ret = strpos($tableEnd,"\n");
$next_space = strpos($tableEnd," " );
if ($carriage_ret < $next_space)
$next_space = $carriage_ret;
if ($next_space > 0) {
$tbl_name= substr($tableEnd,0, $next_space);
if(empty($tbl_name)){
                        $GLOBALS['log']->debug("Could not find table name in sql either, return $module_str. ");
$tbl_name = $module_str;
}
}
//grab the table, to see if it is aliased
$aliasTableEnd = trim(substr($tableEnd, $next_space));
$alias_space = strpos ($aliasTableEnd, " " );
if ($alias_space > 0){
$alias_tbl_name= substr($aliasTableEnd,0, $alias_space);
                    $alias_tbl_name = strtolower($alias_tbl_name);
if(empty($alias_tbl_name)
|| $alias_tbl_name == "where"
|| $alias_tbl_name == "inner"
|| $alias_tbl_name == "left"
|| $alias_tbl_name == "join"
|| $alias_tbl_name == "outer"
|| $alias_tbl_name == "right") {
//not aliased, do nothing
}
elseif ($alias_tbl_name == "as") {
//the next word is the table name
$aliasTableEnd = trim(substr($aliasTableEnd, $alias_space));
$alias_space = strpos ($aliasTableEnd, " " );
if ($alias_space > 0) {
$alias_tbl_name= trim(substr($aliasTableEnd,0, $alias_space));
if (!empty($alias_tbl_name))
$tbl_name = $alias_tbl_name;
}
}
else {
//this is table alias
$tbl_name = $alias_tbl_name;
}
}
}
}
//return table name
$GLOBALS['log']->debug("Table name for module $module_str is: ".$tbl_name);
return $tbl_name;
}
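    /*
     * Illustrative sketch (editor's addition, values are made up):
     *
     *   // 'Contacts' exists in $beanList, so the bean's table_name is used
     *   $this->getTableNameFromModuleName('Contacts', $sql);   // e.g. "contacts"
     *
     *   // unknown module: the name is parsed out of the FROM clause instead
     *   $this->getTableNameFromModuleName('Foo', "select id from my_table where deleted = 0");   // roughly "my_table"
     */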
/**
* @see DBManager::getFieldsArray()
*/
public function getFieldsArray(
&$result,
$make_lower_case = false
)
{
$field_array = array();
if(! isset($result) || empty($result))
return 0;
$i = 0;
while ($i < mssql_num_fields($result)) {
$meta = mssql_fetch_field($result, $i);
if (!$meta)
return 0;
if($make_lower_case==true)
$meta->name = strtolower($meta->name);
$field_array[] = $meta->name;
$i++;
}
return $field_array;
}
/**
* @see DBManager::getAffectedRowCount()
*/
public function getAffectedRowCount()
{
return $this->getOne("SELECT @@ROWCOUNT");
}
/**
* @see DBManager::describeField()
*/
protected function describeField(
$name,
$tablename
)
{
global $table_descriptions;
if(isset($table_descriptions[$tablename]) && isset($table_descriptions[$tablename][$name])){
return $table_descriptions[$tablename][$name];
}
$table_descriptions[$tablename] = array();
$sql = sprintf( "SELECT COLUMN_NAME AS Field
, DATA_TYPE + CASE WHEN CHARACTER_MAXIMUM_LENGTH IS NOT NULL
THEN '(' + RTRIM(CAST(CHARACTER_MAXIMUM_LENGTH AS CHAR)) + ')'
ELSE '' END as 'Type'
, CHARACTER_MAXIMUM_LENGTH
, IS_NULLABLE AS 'Null'
, CASE WHEN COLUMN_DEFAULT LIKE '((0))' THEN '(''0'')' ELSE COLUMN_DEFAULT END as 'Default'
FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_NAME = '%s'",
$tablename
);
$result = $this->query($sql);
while ($row = $this->fetchByAssoc($result) )
$table_descriptions[$tablename][$row['Field']] = $row;
if (isset($table_descriptions[$tablename][$name]))
return $table_descriptions[$tablename][$name];
return array();
}
/**
* @see DBManager::fetchByAssoc()
*/
public function fetchByAssoc(
&$result,
$rowNum = -1,
$encode = true
)
{
if (!$result)
return false;
if ($result && $rowNum < 0) {
$row = mssql_fetch_assoc($result);
//MSSQL returns a space " " when a varchar column is empty ("") and not null.
//We need to iterate through the returned row array and strip empty spaces
if(!empty($row)){
foreach($row as $key => $column) {
//notice we only strip if one space is returned. we do not want to strip
//strings with intentional spaces (" foo ")
if (!empty($column) && $column ==" ") {
$row[$key] = '';
}
}
}
        if($encode && $this->encode && is_array($row))
return array_map('to_html', $row);
return $row;
}
if ($this->getRowCount($result) > $rowNum) {
if ( $rowNum == -1 )
$rowNum = 0;
@mssql_data_seek($result, $rowNum);
}
$this->lastmysqlrow = $rowNum;
$row = @mssql_fetch_assoc($result);
if($encode && $this->encode && is_array($row))
return array_map('to_html', $row);
return $row;
}
/**
* @see DBManager::quote()
*/
public function quote(
$string,
$isLike = true
)
{
return $string = str_replace("'","''", parent::quote($string));
}
/**
* @see DBManager::quoteForEmail()
*/
public function quoteForEmail(
$string,
$isLike = true
)
{
return str_replace("'","''", $string);
}
/**
* @see DBManager::tableExists()
*/
public function tableExists(
$tableName
)
{
$GLOBALS['log']->info("tableExists: $tableName");
$this->checkConnection();
$result = $this->query(
"SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE='BASE TABLE' AND TABLE_NAME='".$tableName."'");
$rowCount = $this->getRowCount($result);
$this->freeResult($result);
return ($rowCount == 0) ? false : true;
}
/**
* @see DBManager::addIndexes()
*/
public function addIndexes(
$tablename,
$indexes,
$execute = true
)
{
$alters = $this->helper->indexSQL($tablename,array(),$indexes);
if ($execute)
$this->query($alters);
return $alters;
}
/**
* @see DBManager::dropIndexes()
*/
public function dropIndexes(
$tablename,
$indexes,
$execute = true
)
{
$sql = '';
foreach ($indexes as $index) {
if ( !empty($sql) ) $sql .= ";";
$name = $index['name'];
if($execute)
unset($GLOBALS['table_descriptions'][$tablename]['indexes'][$name]);
if ($index['type'] == 'primary')
$sql .= "ALTER TABLE $tablename DROP CONSTRAINT $name";
else
$sql .= "DROP INDEX $name on $tablename";
}
if (!empty($sql))
if($execute)
$this->query($sql);
return $sql;
}
/**
* @see DBManager::checkQuery()
*/
protected function checkQuery(
$sql
)
{
return true;
}
/**
* @see DBManager::getTablesArray()
*/
public function getTablesArray()
{
$GLOBALS['log']->debug('MSSQL fetching table list');
if($this->getDatabase()) {
$tables = array();
$r = $this->query('SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES');
if (is_resource($r)) {
while ($a = $this->fetchByAssoc($r))
$tables[] = $a['TABLE_NAME'];
return $tables;
}
}
return false; // no database available
}
/**
* This call is meant to be used during install, when Full Text Search is enabled
* Indexing would always occur after a fresh sql server install, so this code creates
* a catalog and table with full text index.
*/
public function wakeupFTS()
{
$GLOBALS['log']->debug('MSSQL about to wakeup FTS');
if($this->getDatabase()) {
            //create wakeup catalog
$FTSqry[] = "if not exists( select * from sys.fulltext_catalogs where name ='wakeup_catalog' )
CREATE FULLTEXT CATALOG wakeup_catalog
";
//drop wakeup table if it exists
$FTSqry[] = "IF EXISTS(SELECT 'fts_wakeup' FROM sysobjects WHERE name = 'fts_wakeup' AND xtype='U')
DROP TABLE fts_wakeup
";
//create wakeup table
$FTSqry[] = "CREATE TABLE fts_wakeup(
id varchar(36) NOT NULL CONSTRAINT pk_fts_wakeup_id PRIMARY KEY CLUSTERED (id ASC ),
body text NULL,
kb_index int IDENTITY(1,1) NOT NULL CONSTRAINT wakeup_fts_unique_idx UNIQUE NONCLUSTERED
)
";
//create full text index
$FTSqry[] = "CREATE FULLTEXT INDEX ON fts_wakeup
(
body
Language 0X0
)
KEY INDEX wakeup_fts_unique_idx ON wakeup_catalog
WITH CHANGE_TRACKING AUTO
";
//insert dummy data
$FTSqry[] = "INSERT INTO fts_wakeup (id ,body)
VALUES ('".create_guid()."', 'SugarCRM Rocks' )";
//create queries to stop and restart indexing
$FTSqry[] = 'ALTER FULLTEXT INDEX ON fts_wakeup STOP POPULATION';
$FTSqry[] = 'ALTER FULLTEXT INDEX ON fts_wakeup DISABLE';
$FTSqry[] = 'ALTER FULLTEXT INDEX ON fts_wakeup ENABLE';
$FTSqry[] = 'ALTER FULLTEXT INDEX ON fts_wakeup SET CHANGE_TRACKING MANUAL';
$FTSqry[] = 'ALTER FULLTEXT INDEX ON fts_wakeup START FULL POPULATION';
$FTSqry[] = 'ALTER FULLTEXT INDEX ON fts_wakeup SET CHANGE_TRACKING AUTO';
foreach($FTSqry as $q){
sleep(3);
$this->query($q);
}
}
return false; // no database available
}
/**
* @see DBManager::convert()
*/
public function convert(
$string,
$type,
array $additional_parameters = array(),
array $additional_parameters_oracle_only = array()
)
{
// convert the parameters array into a comma delimited string
$additional_parameters_string = '';
if (!empty($additional_parameters))
$additional_parameters_string = ','.implode(',',$additional_parameters);
switch ($type) {
case 'today': return "GETDATE()";
case 'left': return "LEFT($string".$additional_parameters_string.")";
case 'date_format':
if(!empty($additional_parameters) && in_array("'%Y-%m'", $additional_parameters))
return "CONVERT(varchar(7),". $string . ",120)";
else
return "CONVERT(varchar(10),". $string . ",120)";
case 'IFNULL': return "ISNULL($string".$additional_parameters_string.")";
case 'CONCAT': return "$string+".implode("+",$additional_parameters);
case 'text2char': return "CAST($string AS varchar(8000))";
}
return "$string";
}
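    /*
     * Illustrative sketch (editor's addition): a few conversions produced by convert()
     * above. The column names are placeholders.
     *
     *   $this->convert('opportunities.date_closed', 'date_format', array("'%Y-%m'"));
     *   // "CONVERT(varchar(7),opportunities.date_closed,120)"
     *
     *   $this->convert('accounts.name', 'IFNULL', array("''"));
     *   // "ISNULL(accounts.name,'')"
     *
     *   $this->convert('description', 'text2char');
     *   // "CAST(description AS varchar(8000))"
     */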
/**
* @see DBManager::concat()
*/
public function concat(
$table,
array $fields
)
{
$ret = '';
foreach ( $fields as $index => $field )
if (empty($ret))
$ret = db_convert($table.".".$field,'IFNULL', array("''"));
else
$ret .= " + ' ' + ".db_convert($table.".".$field,'IFNULL', array("''"));
return empty($ret)?$ret:"LTRIM(RTRIM($ret))";
}
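    /*
     * Illustrative sketch (editor's addition): concat() above builds an ISNULL-based
     * concatenation. The table and field names are placeholders.
     *
     *   $this->concat('contacts', array('first_name', 'last_name'));
     *   // roughly: LTRIM(RTRIM(ISNULL(contacts.first_name,'') + ' ' + ISNULL(contacts.last_name,'')))
     */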
/**
* @see DBManager::fromConvert()
*/
public function fromConvert(
$string,
$type)
{
switch($type) {
case 'datetime': return substr($string, 0,19);
case 'date': return substr($string, 0,11);
case 'time': return substr($string, 11);
}
return $string;
}
}
| shahrooz33ce/sugarcrm | include/database/MssqlManager.php | PHP | agpl-3.0 | 51,509 |
# -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
from bson import ObjectId
from superdesk import get_resource_service
from test_factory import SuperdeskTestCase
from eve.utils import date_to_str
from superdesk.utc import get_expiry_date, utcnow
from apps.archive.commands import get_overdue_scheduled_items
from apps.archive.archive import SOURCE as ARCHIVE
from superdesk.errors import SuperdeskApiError
from datetime import timedelta, datetime
from pytz import timezone
from apps.archive.common import validate_schedule, remove_media_files, \
format_dateline_to_locmmmddsrc, convert_task_attributes_to_objectId, \
is_genre, BROADCAST_GENRE
from settings import ORGANIZATION_NAME_ABBREVIATION
class RemoveSpikedContentTestCase(SuperdeskTestCase):
articles = [{'guid': 'tag:localhost:2015:69b961ab-2816-4b8a-a584-a7b402fed4f9',
'_id': '1',
'type': 'text',
'last_version': 3,
'_current_version': 4,
'body_html': 'Test body',
'urgency': 4,
'headline': 'Two students missing',
'pubstatus': 'usable',
'firstcreated': utcnow(),
'byline': 'By Alan Karben',
'ednote': 'Andrew Marwood contributed to this article',
'keywords': ['Student', 'Crime', 'Police', 'Missing'],
'subject':[{'qcode': '17004000', 'name': 'Statistics'},
{'qcode': '04001002', 'name': 'Weather'}],
'state': 'draft',
'expiry': utcnow() + timedelta(minutes=20),
'unique_name': '#1'},
{'guid': 'tag:localhost:2015:69b961ab-2816-4b8a-a974-xy4532fe33f9',
'_id': '2',
'last_version': 3,
'_current_version': 4,
'body_html': 'Test body of the second article',
'urgency': 4,
'headline': 'Another two students missing',
'pubstatus': 'usable',
'firstcreated': utcnow(),
'byline': 'By Alan Karben',
'ednote': 'Andrew Marwood contributed to this article',
'keywords': ['Student', 'Crime', 'Police', 'Missing'],
'subject':[{'qcode': '17004000', 'name': 'Statistics'},
{'qcode': '04001002', 'name': 'Weather'}],
'expiry': utcnow() + timedelta(minutes=20),
'state': 'draft',
'type': 'text',
'unique_name': '#2'},
{'guid': 'tag:localhost:2015:69b961ab-2816-4b8a-a584-a7b402fed4fa',
'_id': '3',
'_current_version': 4,
'body_html': 'Test body',
'urgency': 4,
'headline': 'Two students missing killed',
'pubstatus': 'usable',
'firstcreated': utcnow(),
'byline': 'By Alan Karben',
'ednote': 'Andrew Marwood contributed to this article killed',
'keywords': ['Student', 'Crime', 'Police', 'Missing'],
'subject':[{'qcode': '17004000', 'name': 'Statistics'},
{'qcode': '04001002', 'name': 'Weather'}],
'state': 'draft',
'expiry': utcnow() + timedelta(minutes=20),
'type': 'text',
'unique_name': '#3'},
{'guid': 'tag:localhost:2015:69b961ab-2816-4b8a-a584-a7b402fed4fc',
'_id': '4',
'_current_version': 3,
'state': 'draft',
'type': 'composite',
'groups': [{'id': 'root', 'refs': [{'idRef': 'main'}], 'role': 'grpRole:NEP'},
{
'id': 'main',
'refs': [
{
'location': 'archive',
'guid': '1',
'residRef': '1',
'type': 'text'
},
{
'location': 'archive',
'residRef': '2',
'guid': '2',
'type': 'text'
}
],
'role': 'grpRole:main'}],
'firstcreated': utcnow(),
'expiry': utcnow() + timedelta(minutes=20),
'unique_name': '#4'},
{'guid': 'tag:localhost:2015:69b961ab-4b8a-a584-2816-a7b402fed4fc',
'_id': '5',
'_current_version': 3,
'state': 'draft',
'type': 'composite',
'groups': [{'id': 'root', 'refs': [{'idRef': 'main'}, {'idRef': 'story'}], 'role': 'grpRole:NEP'},
{
'id': 'main',
'refs': [
{
'location': 'archive',
'guid': '1',
'residRef': '1',
'type': 'text'
}
],
'role': 'grpRole:main'},
{
'id': 'story',
'refs': [
{
'location': 'archive',
'guid': '4',
'residRef': '4',
'type': 'composite'
}
],
'role': 'grpRole:story'}],
'firstcreated': utcnow(),
'expiry': utcnow() + timedelta(minutes=20),
'unique_name': '#5'}]
media = {
'viewImage': {
'media': '1592730d582080f4e9fcc2fcf43aa357bda0ed19ffe314ee3248624cd4d4bc54',
'mimetype': 'image/jpeg',
'href': 'http://192.168.220.209/api/upload/abc/raw?_schema=http',
'height': 452,
'width': 640
},
'thumbnail': {
'media': '52250b4f37da50ee663fdbff057a5f064479f8a8bbd24fb8fdc06135d3f807bb',
'mimetype': 'image/jpeg',
'href': 'http://192.168.220.209/api/upload/abc/raw?_schema=http',
'height': 120,
'width': 169
},
'baseImage': {
'media': '7a608aa8f51432483918027dd06d0ef385b90702bfeba84ac4aec38ed1660b18',
'mimetype': 'image/jpeg',
'href': 'http://192.168.220.209/api/upload/abc/raw?_schema=http',
'height': 990,
'width': 1400
},
'original': {
'media': 'stub.jpeg',
'mimetype': 'image/jpeg',
'href': 'http://192.168.220.209/api/upload/stub.jpeg/raw?_schema=http',
'height': 2475,
'width': 3500
}
}
def setUp(self):
super().setUp()
def test_query_getting_expired_content(self):
self.app.data.insert(ARCHIVE, [{'expiry': get_expiry_date(-10), 'state': 'spiked'}])
self.app.data.insert(ARCHIVE, [{'expiry': get_expiry_date(0), 'state': 'spiked'}])
self.app.data.insert(ARCHIVE, [{'expiry': get_expiry_date(10), 'state': 'spiked'}])
self.app.data.insert(ARCHIVE, [{'expiry': get_expiry_date(20), 'state': 'spiked'}])
self.app.data.insert(ARCHIVE, [{'expiry': get_expiry_date(30), 'state': 'spiked'}])
self.app.data.insert(ARCHIVE, [{'expiry': None, 'state': 'spiked'}])
self.app.data.insert(ARCHIVE, [{'unique_id': 97, 'state': 'spiked'}])
now = utcnow()
expired_items = get_resource_service(ARCHIVE).get_expired_items(now)
self.assertEquals(2, expired_items.count())
def test_query_removing_media_files_keeps(self):
self.app.data.insert(ARCHIVE, [{'state': 'spiked',
'expiry': get_expiry_date(-10),
'type': 'picture',
'renditions': self.media}])
self.app.data.insert('ingest', [{'type': 'picture', 'renditions': self.media}])
self.app.data.insert('archive_versions', [{'type': 'picture', 'renditions': self.media}])
self.app.data.insert('legal_archive', [{'_id': 1, 'type': 'picture', 'renditions': self.media}])
self.app.data.insert('legal_archive_versions', [{'_id': 1, 'type': 'picture', 'renditions': self.media}])
archive_items = self.app.data.find_all('archive', None)
self.assertEqual(archive_items.count(), 1)
deleted = remove_media_files(archive_items[0])
self.assertFalse(deleted)
def test_query_getting_overdue_scheduled_content(self):
self.app.data.insert(ARCHIVE, [{'publish_schedule': get_expiry_date(-10), 'state': 'published'}])
self.app.data.insert(ARCHIVE, [{'publish_schedule': get_expiry_date(-10), 'state': 'scheduled'}])
self.app.data.insert(ARCHIVE, [{'publish_schedule': get_expiry_date(0), 'state': 'spiked'}])
self.app.data.insert(ARCHIVE, [{'publish_schedule': get_expiry_date(10), 'state': 'scheduled'}])
self.app.data.insert(ARCHIVE, [{'unique_id': 97, 'state': 'spiked'}])
now = date_to_str(utcnow())
overdueItems = get_overdue_scheduled_items(now, 'archive')
self.assertEquals(1, overdueItems.count())
class ArchiveTestCase(SuperdeskTestCase):
def setUp(self):
super().setUp()
def test_validate_schedule(self):
validate_schedule(utcnow() + timedelta(hours=2))
def test_validate_schedule_date_with_datetime_as_string_raises_superdeskApiError(self):
self.assertRaises(SuperdeskApiError, validate_schedule, "2015-04-27T10:53:48+00:00")
def test_validate_schedule_date_with_datetime_in_past_raises_superdeskApiError(self):
self.assertRaises(SuperdeskApiError, validate_schedule, utcnow() + timedelta(hours=-2))
def _get_located_and_current_utc_ts(self):
current_ts = utcnow()
located = {"dateline": "city", "city_code": "Sydney", "state": "NSW", "city": "Sydney", "state_code": "NSW",
"country_code": "AU", "tz": "Australia/Sydney", "country": "Australia"}
current_timestamp = datetime.fromtimestamp(current_ts.timestamp(), tz=timezone(located['tz']))
if current_timestamp.month == 9:
formatted_date = 'Sept {}'.format(current_timestamp.strftime('%d'))
elif 3 <= current_timestamp.month <= 7:
formatted_date = current_timestamp.strftime('%B %d')
else:
formatted_date = current_timestamp.strftime('%b %d')
return located, formatted_date, current_ts
def test_format_dateline_to_format_when_only_city_is_present(self):
located, formatted_date, current_ts = self._get_located_and_current_utc_ts()
formatted_dateline = format_dateline_to_locmmmddsrc(located, current_ts)
self.assertEqual(formatted_dateline, 'SYDNEY %s %s -' % (formatted_date, ORGANIZATION_NAME_ABBREVIATION))
def test_format_dateline_to_format_when_only_city_and_state_are_present(self):
located, formatted_date, current_ts = self._get_located_and_current_utc_ts()
located['dateline'] = "city,state"
formatted_dateline = format_dateline_to_locmmmddsrc(located, current_ts)
self.assertEqual(formatted_dateline, 'SYDNEY, NSW %s %s -' % (formatted_date, ORGANIZATION_NAME_ABBREVIATION))
def test_format_dateline_to_format_when_only_city_and_country_are_present(self):
located, formatted_date, current_ts = self._get_located_and_current_utc_ts()
located['dateline'] = "city,country"
formatted_dateline = format_dateline_to_locmmmddsrc(located, current_ts)
self.assertEqual(formatted_dateline, 'SYDNEY, AU %s %s -' % (formatted_date, ORGANIZATION_NAME_ABBREVIATION))
def test_format_dateline_to_format_when_city_state_and_country_are_present(self):
located, formatted_date, current_ts = self._get_located_and_current_utc_ts()
located['dateline'] = "city,state,country"
formatted_dateline = format_dateline_to_locmmmddsrc(located, current_ts)
self.assertEqual(formatted_dateline, 'SYDNEY, NSW, AU %s %s -' % (formatted_date,
ORGANIZATION_NAME_ABBREVIATION))
def test_if_task_attributes_converted_to_objectid(self):
doc = {
'task': {
'user': '562435231d41c835d7b5fb55',
'desk': ObjectId("562435241d41c835d7b5fb5d"),
'stage': 'test',
'last_authoring_desk': 3245,
'last_production_desk': None
}
}
convert_task_attributes_to_objectId(doc)
self.assertIsInstance(doc['task']['user'], ObjectId)
self.assertEqual(doc['task']['desk'], ObjectId("562435241d41c835d7b5fb5d"))
self.assertEqual(doc['task']['stage'], 'test')
self.assertEqual(doc['task']['last_authoring_desk'], 3245)
self.assertIsNone(doc['task']['last_production_desk'])
class ArchiveCommonTestCase(SuperdeskTestCase):
def setUp(self):
super().setUp()
def test_broadcast_content(self):
content = {
'genre': [{'name': 'Broadcast Script', 'value': 'Broadcast Script'}]
}
self.assertTrue(is_genre(content, BROADCAST_GENRE))
def test_broadcast_content_if_genre_is_none(self):
content = {
'genre': None
}
self.assertFalse(is_genre(content, BROADCAST_GENRE))
def test_broadcast_content_if_genre_is_empty_list(self):
content = {
'genre': []
}
self.assertFalse(is_genre(content, BROADCAST_GENRE))
def test_broadcast_content_if_genre_is_other_than_broadcast(self):
content = {
'genre': [{'name': 'Article', 'value': 'Article'}]
}
self.assertFalse(is_genre(content, BROADCAST_GENRE))
self.assertTrue(is_genre(content, 'Article'))
| sivakuna-aap/superdesk | server/apps/archive/archive_test.py | Python | agpl-3.0 | 14,852 |
require File.expand_path(File.dirname(__FILE__) + '/../../qti_helper')
if Qti.migration_executable
describe "QTI 1.2 zip with id prepender value" do
before(:all) do
@archive_file_path = File.join(BASE_FIXTURE_DIR, 'qti', 'plain_qti.zip')
unzipped_file_path = File.join(File.dirname(@archive_file_path), "qti_#{File.basename(@archive_file_path, '.zip')}", 'oi')
@dir = File.join(File.dirname(@archive_file_path), "qti_plain_qti")
@course = Course.create!(:name => 'tester')
@migration = ContentMigration.create(:context => @course)
@converter = Qti::Converter.new(:export_archive_path=>@archive_file_path, :base_download_dir=>unzipped_file_path, :id_prepender=>'prepend_test', :content_migration => @migration)
@converter.export
@course_data = @converter.course.with_indifferent_access
@course_data['all_files_export'] ||= {}
@course_data['all_files_export']['file_path'] = @course_data['all_files_zip']
@migration.migration_settings[:migration_ids_to_import] = {:copy=>{}}
@migration.migration_settings[:files_import_root_path] = @course_data[:files_import_root_path]
@course.import_from_migration(@course_data, nil, @migration)
end
after(:all) do
truncate_all_tables
@converter.delete_unzipped_archive
if File.exists?(@dir)
FileUtils::rm_rf(@dir)
end
end
it "should convert the assessments" do
@converter.course[:assessments].should == QTI_EXPORT_ASSESSMENT
@course.quizzes.count.should == 1
quiz = @course.quizzes.first
quiz.title.should == 'Quiz'
quiz.quiz_questions.count.should == 10
end
it "should convert the questions" do
@course_data[:assessment_questions][:assessment_questions].length.should == 10
@course.assessment_questions.count.should == 10
end
it "should create an assessment question bank for the quiz" do
@course.assessment_question_banks.count.should == 1
bank = @course.assessment_question_banks.first
bank.title.should == 'Quiz'
bank.assessment_questions.count.should == 10
end
it "should have file paths" do
@course_data[:overview_file_path].index("oi/overview.json").should_not be_nil
@course_data[:export_folder_path].index('spec_canvas/fixtures/qti/qti_plain_qti/oi').should_not be_nil
@course_data[:full_export_file_path].index('spec_canvas/fixtures/qti/qti_plain_qti/oi/course_export.json').should_not be_nil
end
it "should import the included files" do
@course.attachments.count.should == 4
dir = Canvas::Migration::MigratorHelper::QUIZ_FILE_DIRECTORY
@course.attachments.find_by_migration_id("prepend_test_f3e5ead7f6e1b25a46a4145100566821").full_path.should == "course files/#{dir}/#{@migration.id}/exam1/my_files/org1/images/image.png"
@course.attachments.find_by_migration_id("prepend_test_c16566de1661613ef9e5517ec69c25a1").full_path.should == "course files/#{dir}/#{@migration.id}/contact info.png"
@course.attachments.find_by_migration_id("prepend_test_4d348a246af616c7d9a7d403367c1a30").full_path.should == "course files/#{dir}/#{@migration.id}/exam1/my_files/org0/images/image.png"
@course.attachments.find_by_migration_id("prepend_test_d2b5ca33bd970f64a6301fa75ae2eb22").full_path.should == "course files/#{dir}/#{@migration.id}/image.png"
end
it "should use expected file links in questions" do
aq = @course.assessment_questions.find_by_migration_id("prepend_test_QUE_1003")
c_att = @course.attachments.find_by_migration_id("prepend_test_4d348a246af616c7d9a7d403367c1a30")
att = aq.attachments.find_by_migration_id(CC::CCHelper.create_key(c_att))
aq.question_data["question_text"].should =~ %r{files/#{att.id}/download}
aq = @course.assessment_questions.find_by_migration_id("prepend_test_QUE_1007")
c_att = @course.attachments.find_by_migration_id("prepend_test_f3e5ead7f6e1b25a46a4145100566821")
att = aq.attachments.find_by_migration_id(CC::CCHelper.create_key(c_att))
aq.question_data["question_text"].should =~ %r{files/#{att.id}/download}
aq = @course.assessment_questions.find_by_migration_id("prepend_test_QUE_1014")
c_att = @course.attachments.find_by_migration_id("prepend_test_d2b5ca33bd970f64a6301fa75ae2eb22")
att = aq.attachments.find_by_migration_id(CC::CCHelper.create_key(c_att))
aq.question_data["question_text"].should =~ %r{files/#{att.id}/download}
aq = @course.assessment_questions.find_by_migration_id("prepend_test_QUE_1053")
c_att = @course.attachments.find_by_migration_id("prepend_test_c16566de1661613ef9e5517ec69c25a1")
att = aq.attachments.find_by_migration_id(CC::CCHelper.create_key(c_att))
aq.question_data["question_text"].should =~ %r{files/#{att.id}/download}
end
it "should hide the quiz directory" do
folder = @course.folders.find_by_name(Canvas::Migration::MigratorHelper::QUIZ_FILE_DIRECTORY)
folder.hidden?.should be_true
end
it "should use new attachments for imports with same file names" do
# run a second migration and check that there are different attachments on the questions
migration = ContentMigration.create(:context => @course)
converter = Qti::Converter.new(:export_archive_path=>@archive_file_path, :id_prepender=>'test2', :content_migration => migration)
converter.export
course_data = converter.course.with_indifferent_access
course_data['all_files_export'] ||= {}
course_data['all_files_export']['file_path'] = course_data['all_files_zip']
migration.migration_settings[:migration_ids_to_import] = {:copy=>{}}
migration.migration_settings[:files_import_root_path] = course_data[:files_import_root_path]
@course.import_from_migration(course_data, nil, migration)
# Check the first import
aq = @course.assessment_questions.find_by_migration_id("prepend_test_QUE_1003")
c_att = @course.attachments.find_by_migration_id("prepend_test_4d348a246af616c7d9a7d403367c1a30")
att = aq.attachments.find_by_migration_id(CC::CCHelper.create_key(c_att))
aq.question_data["question_text"].should =~ %r{files/#{att.id}/download}
# check the second import
aq = @course.assessment_questions.find_by_migration_id("test2_QUE_1003")
c_att = @course.attachments.find_by_migration_id("test2_4d348a246af616c7d9a7d403367c1a30")
att = aq.attachments.find_by_migration_id(CC::CCHelper.create_key(c_att))
aq.question_data["question_text"].should =~ %r{files/#{att.id}/download}
end
end
QTI_EXPORT_ASSESSMENT = {
:assessments=>
[{:migration_id=>"prepend_test_A1001",
:questions=>
[{:migration_id=>"prepend_test_QUE_1003", :question_type=>"question_reference"},
{:migration_id=>"prepend_test_QUE_1007", :question_type=>"question_reference"},
{:migration_id=>"prepend_test_QUE_1014", :question_type=>"question_reference"},
{:migration_id=>"prepend_test_QUE_1018", :question_type=>"question_reference"},
{:migration_id=>"prepend_test_QUE_1022", :question_type=>"question_reference"},
{:migration_id=>"prepend_test_QUE_1031", :question_type=>"question_reference"},
{:migration_id=>"prepend_test_QUE_1037", :question_type=>"question_reference"},
{:migration_id=>"prepend_test_QUE_1043", :question_type=>"question_reference"},
{:migration_id=>"prepend_test_QUE_1049", :question_type=>"question_reference"},
{:migration_id=>"prepend_test_QUE_1053", :question_type=>"question_reference"}],
:question_count=>10,
:quiz_type=>nil,
:quiz_name=>"Quiz",
:title=>"Quiz"}]}
end
| arrivu/hoodemo | vendor/plugins/qti_exporter/spec_canvas/lib/qti/qti_1_2_zip_spec.rb | Ruby | agpl-3.0 | 8,059 |
<?php
/**
* @copyright Copyright (C) 2010-2022, the Friendica project
*
* @license GNU AGPL version 3 or any later version
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*
*/
namespace Friendica\Module;
use Friendica\App;
use Friendica\BaseModule;
use Friendica\Core\L10n;
use Friendica\Core\System;
use Friendica\Database\Database;
use Friendica\Model\Contact;
use Friendica\Model\User;
use Friendica\Network\HTTPClient\Capability\ICanSendHttpRequests;
use Friendica\Network\HTTPClient\Client\HttpClientOptions;
use Friendica\Util\HTTPSignature;
use Friendica\Util\Profiler;
use Friendica\Util\Strings;
use Psr\Log\LoggerInterface;
/**
* Magic Auth (remote authentication) module.
*
* Ported from Hubzilla: https://framagit.org/hubzilla/core/blob/master/Zotlabs/Module/Magic.php
*/
class Magic extends BaseModule
{
/** @var App */
protected $app;
/** @var Database */
protected $dba;
/** @var ICanSendHttpRequests */
protected $httpClient;
public function __construct(App $app, L10n $l10n, App\BaseURL $baseUrl, App\Arguments $args, LoggerInterface $logger, Profiler $profiler, Response $response, Database $dba, ICanSendHttpRequests $httpClient, array $server, array $parameters = [])
{
parent::__construct($l10n, $baseUrl, $args, $logger, $profiler, $response, $server, $parameters);
$this->app = $app;
$this->dba = $dba;
$this->httpClient = $httpClient;
}
protected function rawContent(array $request = [])
{
$this->logger->info('magic module: invoked');
$this->logger->debug('args', ['request' => $_REQUEST]);
$addr = $_REQUEST['addr'] ?? '';
$dest = $_REQUEST['dest'] ?? '';
$owa = (!empty($_REQUEST['owa']) ? intval($_REQUEST['owa']) : 0);
$cid = 0;
if (!empty($addr)) {
$cid = Contact::getIdForURL($addr);
} elseif (!empty($dest)) {
$cid = Contact::getIdForURL($dest);
}
if (!$cid) {
$this->logger->info('No contact record found', $_REQUEST);
// @TODO Finding a more elegant possibility to redirect to either internal or external URL
$this->app->redirect($dest);
}
$contact = $this->dba->selectFirst('contact', ['id', 'nurl', 'url'], ['id' => $cid]);
// Redirect if the contact is already authenticated on this site.
if ($this->app->getContactId() && strpos($contact['nurl'], Strings::normaliseLink($this->baseUrl->get())) !== false) {
$this->logger->info('Contact is already authenticated');
System::externalRedirect($dest);
}
// OpenWebAuth
if (local_user() && $owa) {
$user = User::getById(local_user());
// Extract the basepath
// NOTE: we need another solution because this does only work
// for friendica contacts :-/ . We should have the basepath
// of a contact also in the contact table.
$exp = explode('/profile/', $contact['url']);
$basepath = $exp[0];
$header = [
'Accept' => ['application/x-dfrn+json', 'application/x-zot+json'],
'X-Open-Web-Auth' => [Strings::getRandomHex()],
];
// Create a header that is signed with the local users private key.
$header = HTTPSignature::createSig(
$header,
$user['prvkey'],
'acct:' . $user['nickname'] . '@' . $this->baseUrl->getHostname() . ($this->baseUrl->getUrlPath() ? '/' . $this->baseUrl->getUrlPath() : '')
);
// Try to get an authentication token from the other instance.
$curlResult = $this->httpClient->get($basepath . '/owa', [HttpClientOptions::HEADERS => $header]);
if ($curlResult->isSuccess()) {
$j = json_decode($curlResult->getBody(), true);
if ($j['success']) {
$token = '';
if ($j['encrypted_token']) {
// The token is encrypted. If the local user is really the one the other instance
// thinks he/she is, the token can be decrypted with the local users public key.
openssl_private_decrypt(Strings::base64UrlDecode($j['encrypted_token']), $token, $user['prvkey']);
} else {
$token = $j['token'];
}
$args = (strpbrk($dest, '?&') ? '&' : '?') . 'owt=' . $token;
$this->logger->info('Redirecting', ['path' => $dest . $args]);
System::externalRedirect($dest . $args);
}
}
System::externalRedirect($dest);
}
// @TODO Finding a more elegant possibility to redirect to either internal or external URL
$this->app->redirect($dest);
}
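	/*
	 * Illustrative example (editor's note, not part of the module): a typical request
	 * to this endpoint looks roughly like
	 *
	 *   https://home.example/magic?owa=1&dest=https://remote.example/profile/bob
	 *
	 * where the host names are placeholders. "dest" is the remote resource the local
	 * user wants to open and "owa" asks the remote instance for an OpenWebAuth token
	 * before redirecting there.
	 */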
}
| annando/friendica | src/Module/Magic.php | PHP | agpl-3.0 | 4,902 |
define([
'collections/catalog_collection',
'test/mock_data/catalogs'
],
function(CatalogCollection,
MockCatalogs) {
'use strict';
var collection,
response = MockCatalogs;
beforeEach(function() {
collection = new CatalogCollection();
});
describe('Catalog collection', function() {
describe('parse', function() {
it('should fetch the next page of results', function() {
spyOn(collection, 'fetch').and.returnValue(null);
response.next = '/api/v2/catalogs/course_catalogs/?page=2';
collection.parse(response);
expect(collection.fetch).toHaveBeenCalledWith(
{remove: false, url: '/api/v2/catalogs/course_catalogs/?page=2'}
);
});
});
});
}
);
| eduNEXT/edunext-ecommerce | ecommerce/static/js/test/specs/collections/catalog_collection_spec.js | JavaScript | agpl-3.0 | 924 |
// Copyright Aleksey Gurtovoy 2000-2004
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// Preprocessed version of "boost/mpl/quote.hpp" header
// -- DO NOT modify by hand!
namespace abt_boost{} namespace boost = abt_boost; namespace abt_boost{ namespace mpl {
template< bool > struct quote_impl
{
template< typename T > struct result_
: T
{
};
};
template<> struct quote_impl<false>
{
template< typename T > struct result_
{
typedef T type;
};
};
template<
template< typename P1 > class F
, typename Tag = void_
>
struct quote1
{
template< typename U1 > struct apply
: quote_impl< aux::has_type< F<U1> >::value >
::template result_< F<U1> >
{
};
};
template<
template< typename P1, typename P2 > class F
, typename Tag = void_
>
struct quote2
{
template< typename U1, typename U2 > struct apply
: quote_impl< aux::has_type< F< U1,U2 > >::value >
::template result_< F< U1,U2 > >
{
};
};
template<
template< typename P1, typename P2, typename P3 > class F
, typename Tag = void_
>
struct quote3
{
template< typename U1, typename U2, typename U3 > struct apply
: quote_impl< aux::has_type< F< U1,U2,U3 > >::value >
::template result_< F< U1,U2,U3 > >
{
};
};
template<
template< typename P1, typename P2, typename P3, typename P4 > class F
, typename Tag = void_
>
struct quote4
{
template<
typename U1, typename U2, typename U3, typename U4
>
struct apply
: quote_impl< aux::has_type< F< U1,U2,U3,U4 > >::value >
::template result_< F< U1,U2,U3,U4 > >
{
};
};
template<
template<
typename P1, typename P2, typename P3, typename P4
, typename P5
>
class F
, typename Tag = void_
>
struct quote5
{
template<
typename U1, typename U2, typename U3, typename U4
, typename U5
>
struct apply
: quote_impl< aux::has_type< F< U1,U2,U3,U4,U5 > >::value >
::template result_< F< U1,U2,U3,U4,U5 > >
{
};
};
}}
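// Illustrative usage sketch (editor's addition, not part of the generated header):
// quote1 wraps a class template (a metafunction) into a metafunction class whose
// nested apply forwards to it. The names below are made up for the example.
//
//   template< typename T > struct make_ptr { typedef T* type; };
//   typedef abt_boost::mpl::quote1<make_ptr> make_ptr_f;
//   // make_ptr_f::apply<int>::type is int*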
| jbruestle/aggregate_btree | tiny_boost/boost/mpl/aux_/preprocessed/no_ctps/quote.hpp | C++ | agpl-3.0 | 2,290 |
@extends('layouts/default')
@section('title0')
@if ((Input::get('company_id')) && ($company))
{{ $company->name }}
@endif
@if (Input::get('status'))
@if (Input::get('status')=='Pending')
{{ trans('general.pending') }}
@elseif (Input::get('status')=='RTD')
{{ trans('general.ready_to_deploy') }}
@elseif (Input::get('status')=='Deployed')
{{ trans('general.deployed') }}
@elseif (Input::get('status')=='Undeployable')
{{ trans('general.undeployable') }}
@elseif (Input::get('status')=='Deployable')
{{ trans('general.deployed') }}
@elseif (Input::get('status')=='Requestable')
{{ trans('admin/hardware/general.requestable') }}
@elseif (Input::get('status')=='Archived')
{{ trans('general.archived') }}
@elseif (Input::get('status')=='Deleted')
{{ trans('general.deleted') }}
@endif
@else
{{ trans('general.all') }}
@endif
{{ trans('general.assets') }}
@if (Input::has('order_number'))
: Order #{{ Input::get('order_number') }}
@endif
@stop
{{-- Page title --}}
@section('title')
@yield('title0') @parent
@stop
@section('header_right')
<a href="{{ route('reports.export.assets', ['status'=> e(Input::get('status'))]) }}" style="margin-right: 5px;" class="btn btn-default"><i class="fa fa-download icon-white"></i>
{{ trans('admin/hardware/table.dl_csv') }}</a>
  <a href="{{ route('hardware.create') }}" class="btn btn-primary pull-right">{{ trans('general.create') }}</a>
@stop
{{-- Page content --}}
@section('content')
<div class="row">
<div class="col-md-12">
<div class="box">
<div class="box-body">
{{ Form::open([
'method' => 'POST',
'route' => ['hardware/bulkedit'],
'class' => 'form-inline',
'id' => 'bulkForm']) }}
<div class="row">
<div class="col-md-12">
@if (Input::get('status')!='Deleted')
<div id="toolbar">
<select name="bulk_actions" class="form-control select2">
<option value="edit">Edit</option>
<option value="delete">Delete</option>
<option value="labels">Generate Labels</option>
</select>
<button class="btn btn-primary" id="bulkEdit" disabled>Go</button>
</div>
@endif
<table
name="assets"
{{-- data-row-style="rowStyle" --}}
data-toolbar="#toolbar"
class="table table-striped snipe-table"
id="table"
data-advanced-search="true"
data-id-table="advancedTable"
data-url="{{ route('api.assets.index',
array('status' => e(Input::get('status')),
'order_number'=>e(Input::get('order_number')),
'company_id'=>e(Input::get('company_id')),
'status_id'=>e(Input::get('status_id'))))}}"
data-click-to-select="true"
data-cookie-id-table="{{ e(Input::get('status')) }}assetTable-{{ config('version.hash_version') }}">
</table>
</div><!-- /.col -->
</div><!-- /.row -->
{{ Form::close() }}
</div><!-- ./box-body -->
</div><!-- /.box -->
</div>
</div>
@stop
@section('moar_scripts')
@include ('partials.bootstrap-table', [
'exportFile' => 'assets-export',
'search' => true,
'showFooter' => true,
'columns' => \App\Presenters\AssetPresenter::dataTableLayout()
])
@stop
| madd15/snipe-it | resources/views/hardware/index.blade.php | PHP | agpl-3.0 | 3,507 |
<?php
$module_name = 'OfficeReportsHistory';
$metafiles[$module_name] = array(
'detailviewdefs' => 'modules/' . $module_name . '/metadata/detailviewdefs.php',
'editviewdefs' => 'modules/' . $module_name . '/metadata/editviewdefs.php',
'listviewdefs' => 'modules/' . $module_name . '/metadata/listviewdefs.php',
'searchdefs' => 'modules/' . $module_name . '/metadata/searchdefs.php',
'popupdefs' => 'modules/' . $module_name . '/metadata/popupdefs.php',
'searchfields' => 'modules/' . $module_name . '/metadata/SearchFields.php',
);
?>
| MarStan/sugar_work | modules/OfficeReportsHistory/metadata/metafiles.php | PHP | agpl-3.0 | 581 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.contrib.gis.db.models.fields
class Migration(migrations.Migration):
dependencies = [
('core', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='AreaSoltura',
fields=[
('id', models.AutoField(verbose_name='ID', auto_created=True, primary_key=True, serialize=False)),
('processo', models.IntegerField()),
('nome', models.CharField(verbose_name='Nome da propriedade', max_length=255)),
('endereco', models.CharField(verbose_name='Endereço', max_length=400)),
('municipio', models.CharField(verbose_name='Município', max_length=255)),
('uf', models.CharField(verbose_name='Unidade da Federação', max_length=2)),
('proprietario', models.CharField(verbose_name='Nome do proprietário', max_length=255)),
('cpf', models.IntegerField(verbose_name='CPF')),
('telefone', models.BigIntegerField()),
('email', models.EmailField(max_length=254)),
('area', models.FloatField(verbose_name='Área da Propriedade (ha)')),
('arl_app', models.FloatField(verbose_name='Área de reserva legal e proteção permanente')),
('bioma', models.CharField(verbose_name='Bioma', max_length=255)),
('fitofisionomia', models.CharField(max_length=255)),
('atividade', models.CharField(verbose_name='Atividade Econômica', max_length=255)),
('viveiro', models.IntegerField(verbose_name='Número de viveiros')),
                ('distancia', models.FloatField(verbose_name='Distância ao CETAS mais próximo')),
('tempo', models.FloatField(verbose_name='Tempo de viagem ao CETAS mais próximo')),
('vistoria', models.DateField()),
('geom', django.contrib.gis.db.models.fields.PolygonField(srid=4674)),
],
),
]
| ibamacsr/casv | casv/core/migrations/0002_areasoltura.py | Python | agpl-3.0 | 2,089 |
//
// FakeEnvironment.cs
//
// Author:
// Giacomo Tesio <giacomo@tesio.it>
//
// Copyright (c) 2010-2013 Giacomo Tesio
//
// This file is part of Epic.NET.
//
// Epic.NET is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Epic.NET is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//
using System;
using Epic.Environment;
namespace Epic.Fakes
{
[Serializable]
public sealed class FakeEnvironment : EnvironmentBase
{
public FakeEnvironment ()
{
int i = Get<int>(new InstanceName<int>("CoverTheMethod"));
++i;
}
#region implemented abstract members of Epic.Environment.EnvironmentBase
public override TObject Get<TObject> (InstanceName<TObject> name)
{
return default(TObject);
}
#endregion
}
}
| Shamar/Epic.NET | Code/UnitTests/Epic.Core.UnitTests/Fakes/FakeEnvironment.cs | C# | agpl-3.0 | 1,300 |
(function (plugin, core, scene) {
var plug = new plugin.GlobalRendering({
name: "Axes",
tooltip: "Show world space axes",
icon: "img/icons/axis.png",
toggle: true,
on: false
});
var DEFAULTS = {
planeSize: 10,
gridSpacing: 10,
gridColor: new THREE.Color(0x000066),
ticksSpacing: 3
};
// Label parameters
var lblParameters = {
fontSize: 24,
borderColor : {r:0, g:0, b:0, a:0},
bgColor : {r:255, g:255, b:255, a:0}
};
var planeWidget, planeOrientationWidget, gridColorWidget, planeSizeWidget, gridSpacingWidget, ticksSpacingWidget;
plug._init = function (guiBuilder) {
planeWidget = guiBuilder.Choice({
label: "Grid Plane",
tooltip: "If on, a grid plane will be drawn",
options: [
{content: "Off", value: "0", selected: true},
{content: "On", value: "1"}
],
bindTo: (function() {
var bindToFun = function() {
plug._onAxesParamChange();
};
bindToFun.toString = function() {
return 'planeWidget';
}
return bindToFun;
}())
});
planeOrientationWidget = guiBuilder.Choice({
label: "Grid Plane Orientation",
tooltip: "Choose which plane to draw",
options: [
{content: "XZ", value: "0", selected: true},
{content: "XY", value: "1"},
{content: "YZ", value: "2"},
],
bindTo: (function() {
var bindToFun = function() {
if(parseInt(planeWidget.getValue()))
plug._onAxesParamChange();
};
bindToFun.toString = function() {
return 'planeOrientationWidget';
}
return bindToFun;
}())
});
gridColorWidget = guiBuilder.Color({
label: "Grid Color",
tooltip: "",
color: "#" + DEFAULTS.gridColor.getHexString(),
bindTo: (function() {
var bindToFun = function() {
if(parseInt(planeWidget.getValue()))
plug._onAxesParamChange();
};
bindToFun.toString = function() {
return 'gridColorWidget';
}
return bindToFun;
}())
});
planeSizeWidget = guiBuilder.Integer({
label: "Plane Size",
tooltip: "Defines the size of the plane",
step: 1,
defval: DEFAULTS.planeSize,
min: 0,
bindTo: (function() {
var bindToFun = function() {
if(planeSizeWidget.getValue() > 0 && parseInt(planeWidget.getValue()))
plug._onAxesParamChange();
};
bindToFun.toString = function() {
return "planeSizeWidget";
};
return bindToFun;
})()
});
gridSpacingWidget = guiBuilder.Integer({
label: "Grid Spacing",
tooltip: "Defines the spacing of the grid",
step: 1,
            defval: DEFAULTS.gridSpacing,
min: 0,
bindTo: (function() {
var bindToFun = function() {
if(gridSpacingWidget.getValue() > 0 && parseInt(planeWidget.getValue()))
plug._onAxesParamChange();
};
bindToFun.toString = function() {
return "gridSpacingWidget";
};
return bindToFun;
})()
});
ticksSpacingWidget = guiBuilder.Float({
label: "Ticks Spacing",
tooltip: "Defines the spacing between the ticks",
step: 0.5,
defval: DEFAULTS.ticksSpacing,
min: 0,
bindTo: (function() {
var bindToFun = function() {
if(ticksSpacingWidget.getValue() > 0)
plug._onAxesParamChange();
};
bindToFun.toString = function() {
return "ticksSpacingWidget";
};
return bindToFun;
})()
});
};
plug._onAxesParamChange = function() {
// var currentLayer = MLJ.core.Scene.getSelectedLayer();
// if (currentLayer.properties.getByKey(plug.getName()) === true) {
if(scene._axes)
{
this._applyTo(false);
this._applyTo(true);
}
};
plug._applyTo = function (on) {
if (on) {
scene._axes = true;
var bbox = scene.getBBox();
var axisLength = bbox.min.distanceTo(bbox.max)/2;
// Creating the Object3D of the axes. The other parts (arrows, labels, ticks) will be added to this object
var axes = new THREE.AxisHelper(axisLength);
// Parameters needed to define the size of the arrows on the axes
var arrowLength = 1;
var headLength = 0.2 * arrowLength;
var headWidth = 0.5 * headLength;
// Array that will contain the colors of each axis
var colors = [];
// X arrow parameters
var arrowDirectionX = new THREE.Vector3(1, 0, 0);
var arrowOriginAxisX = new THREE.Vector3(axisLength, 0, 0);
colors.push(0xff9900);
// Y arrow parameters
var arrowDirectionY = new THREE.Vector3(0, 1, 0);
var arrowOriginAxisY = new THREE.Vector3(0, axisLength, 0);
colors.push(0x99ff00);
// Z arrow parameters
var arrowDirectionZ = new THREE.Vector3(0, 0, 1);
var arrowOriginAxisZ = new THREE.Vector3(0, 0, axisLength);
colors.push(0x0099ff);
var arrowAxisX = new THREE.ArrowHelper(arrowDirectionX, arrowOriginAxisX, arrowLength, colors[0], headLength, headWidth);
var arrowAxisY = new THREE.ArrowHelper(arrowDirectionY, arrowOriginAxisY, arrowLength, colors[1], headLength, headWidth);
var arrowAxisZ = new THREE.ArrowHelper(arrowDirectionZ, arrowOriginAxisZ, arrowLength, colors[2], headLength, headWidth);
axes.add(arrowAxisX);
axes.add(arrowAxisY);
axes.add(arrowAxisZ);
// Now we draw the labels as sprite; first, we compute the distance
var labelDistanceFromOrigin = axisLength + arrowLength + 0.1;
// Creating the sprite with the helper function
var spriteX = makeTextSprite("X", { 'x' : labelDistanceFromOrigin, 'y' : 0, 'z': 0}, lblParameters);
var spriteY = makeTextSprite("Y", { 'x' : 0, 'y' : labelDistanceFromOrigin, 'z': 0}, lblParameters);
var spriteZ = makeTextSprite("Z", { 'x' : 0, 'y' : 0, 'z': labelDistanceFromOrigin}, lblParameters);
axes.add(spriteX);
axes.add(spriteY);
axes.add(spriteZ);
// Now we draw the white ticks on the axes
var origin = new THREE.Vector3(0, 0, 0);
// Computing the distance between the ticks for each axis. Ticks will be displayed between the origin of the axis and the origin of the arrow
var tickDistanceX = ticksSpacingWidget.getValue();
var tickDistanceY = ticksSpacingWidget.getValue();
var tickDistanceZ = ticksSpacingWidget.getValue();
// Total length to consider when drawing the ticks
var totalLength = axisLength + headLength;
// Creating the ticks mesh only if the distance is below the total length (meaning that there is at least 1 tick)
if(tickDistanceX < totalLength)
{
var ticksMeshX = createTicksMesh(origin, arrowOriginAxisX, totalLength, tickDistanceX);
axes.add(ticksMeshX);
}
if(tickDistanceY < totalLength)
{
var ticksMeshY = createTicksMesh(origin, arrowOriginAxisY, totalLength, tickDistanceY);
axes.add(ticksMeshY);
}
if(tickDistanceZ < totalLength)
{
var ticksMeshZ = createTicksMesh(origin, arrowOriginAxisZ, totalLength, tickDistanceZ);
axes.add(ticksMeshZ);
}
// If the grid is enabled, it needs to be created
if(parseInt(planeWidget.getValue()))
{
// Grid size and spacing
var gridSize = planeSizeWidget.getValue();
var gridSpacing = gridSpacingWidget.getValue();
var planeOrientation = parseInt(planeOrientationWidget.getValue());
var grid = createGrid(gridSize, gridSpacing, planeOrientation, colors);
axes.add(grid);
}
scene.addSceneDecorator(plug.getName(), axes);
} else {
scene.removeSceneDecorator(plug.getName());
scene._axes = false;
}
};
/**
* Creates a grid rotated according to the plane orientation
*
* @param {integer} gridSize size of the grid
* @param {integer} gridSpacing spacing in the grid
* @param {integer} plane the orientation of the plane (0 == XZ, 1 == XY, 2 == YZ)
     * @param {Array} colors hex colors of the X, Y and Z axes
* @returns {THREE.GridHelper}
*/
function createGrid(gridSize, gridSpacing, plane, colors)
{
        // Grid mesh and color
var grid = new THREE.GridHelper(gridSize, gridSize/gridSpacing);
grid.setColors(gridColorWidget.getColor(), gridColorWidget.getColor());
// Coordinate vectors and colors for the line to be drawn across the plane axes
var negativeVec1 = new THREE.Vector3(-gridSize, 0, 0);
var positiveVec1 = new THREE.Vector3(gridSize, 0, 0);
var negativeVec2 = new THREE.Vector3(0, 0, -gridSize);
var positiveVec2 = new THREE.Vector3(0, 0, gridSize);
var color1 = colors[0];
var color2 = colors[2];
// Depending on the plane orientation, the grid needs to be rotated around an axis
switch(plane)
{
case 1:
color2 = colors[1];
grid.rotation.x = Math.PI/2;
break;
case 2:
color1 = colors[1];
color2 = colors[2];
grid.rotation.z = Math.PI/2;
break;
}
// Creating the line along the first axis of the plane (for example if the plane is XY it will be the line
// across the X axis; if it's the YZ plane it will be the line across Y)
var geometry1 = new THREE.Geometry();
var material1 = new THREE.LineBasicMaterial({color: color1});
geometry1.vertices.push(negativeVec1, positiveVec1);
var line1 = new THREE.Line(geometry1, material1);
grid.add(line1);
// Second line along the second axis
var geometry2 = new THREE.Geometry();
var material2 = new THREE.LineBasicMaterial({color: color2});
geometry2.vertices.push(negativeVec2, positiveVec2);
var line2 = new THREE.Line(geometry2, material2);
        grid.add(line2);
return grid;
}
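    // Usage sketch (illustrative, not part of the original plugin): createGrid can be
    // exercised on its own once the plugin's _init has run (the grid color is read from
    // gridColorWidget). `demoScene` is a hypothetical THREE.Scene; the color array
    // mirrors the orange/green/blue axis colors built in _applyTo above.
    //
    //   var axisColors = [0xff9900, 0x99ff00, 0x0099ff];
    //   var xyGrid = createGrid(20, 5, 1, axisColors); // 20-unit grid, lines every 5 units, XY plane
    //   demoScene.add(xyGrid);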
/**
* Function that creates "ticks" from one point to another under a given dimension and with a fixed distance between the points
*
     * @param {THREE.Vector3} startPoint starting point
     * @param {THREE.Vector3} endPoint ending point
     * @param {number} dim total size to consider
     * @param {number} tickDistance distance between a tick and the next one
* @returns {THREE.Object3D|THREE.PointCloud}
*/
function createTicksMesh(startPoint, endPoint, dim, tickDistance)
{
// Considering the difference between the starting and ending point
var v = new THREE.Vector3();
v.subVectors(endPoint, startPoint);
// Normalizing without computing square roots and powers
v.divideScalar(dim);
var ticksMesh = new THREE.Object3D();
var ticksGeometry = new THREE.Geometry();
var i;
        // Creating the points. Each tick is separated by "tickDistance" world units. Since the
        // loop starts at tickDistance, no tick is placed at the starting point itself.
for(i = tickDistance; i < dim; i += tickDistance)
ticksGeometry.vertices.push(new THREE.Vector3(startPoint.x + i*v.x, startPoint.y + i*v.y, startPoint.z + i*v.z));
var ticksMaterial = new THREE.PointCloudMaterial({
size: 3,
sizeAttenuation: false
});
// Creating the ticks as a cloud of points
ticksMesh = new THREE.PointCloud(ticksGeometry, ticksMaterial);
return ticksMesh;
}
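    // Usage sketch (illustrative, not part of the original plugin): drawing ticks every
    // 2.5 units along a 10-unit stretch of the X axis. `parentObject` is a hypothetical
    // THREE.Object3D the ticks would be attached to.
    //
    //   var start = new THREE.Vector3(0, 0, 0);
    //   var end = new THREE.Vector3(10, 0, 0);
    //   var xTicks = createTicksMesh(start, end, 10, 2.5);
    //   parentObject.add(xTicks);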
/**
* Make a text texture <code>message</code> with HTML approach
* @param {String} message Message to be applied to texture
* @param {Vector3} position The position of the texture sprite
* @param {Object} parameters The sprite's parameters
     * @memberOf MLJ.plugins.rendering.Axes
* @author Stefano Giammori
*/
function makeTextSprite(message, position, parameters)
{
if ( parameters === undefined ) parameters = {};
//extract label params
var fontface = parameters.hasOwnProperty("fontFace") ?
parameters["fontFace"] : "Arial";
var fontsize = parameters.hasOwnProperty("fontSize") ?
parameters["fontSize"] : 10;
var fontweight = parameters.hasOwnProperty("fontWeight") ?
parameters["fontWeight"] : "normal" //white, visible
var borderThickness = parameters.hasOwnProperty("borderThickness") ?
parameters["borderThickness"] : 4;
var borderColor = parameters.hasOwnProperty("borderColor") ?
parameters["borderColor"] : { r:0, g:0, b:0, a:1.0 }; //black, visible
var backgroundColor = parameters.hasOwnProperty("bgColor") ?
parameters["bgColor"] : {r:255, g:255, b:255, a:1.0} //white, visible
//prepare label
var canvas = document.createElement('canvas');
var context = canvas.getContext('2d');
context.font = fontweight + " " + fontsize + "px " + fontface;
// get size data (height depends only on font size)
var textWidth = context.measureText(message).width;
canvas.width = textWidth + borderThickness * 2;
canvas.height = fontsize + borderThickness * 2;
//set the param font into context
context.font = fontweight + " " + fontsize + "px " + fontface;
//set context background color
context.fillStyle = "rgba(" + backgroundColor.r + "," + backgroundColor.g + ","
+ backgroundColor.b + "," + backgroundColor.a + ")";
//set context border color
context.strokeStyle = "rgba(" + borderColor.r + "," + borderColor.g + ","
+ borderColor.b + "," + borderColor.a + ")";
//set border thickness
context.lineWidth = borderThickness;
        // MEMO: in canvas coordinates (+x) goes right and (+y) goes down.
        // Set the fill color used to draw the label text (opaque white).
context.fillStyle = "rgba(255, 255, 255, 1.0)";
/** Set starting point of text, in which pt(borderThickness, fontsize+borderThickness/2) represent the
top left of the top-left corner of the texture text in the canvas. */
context.fillText(message, borderThickness, fontsize + borderThickness/2);
//canvas contents will be used for create a texture
        var texture = new THREE.Texture(canvas);
texture.needsUpdate = true;
texture.minFilter = THREE.LinearFilter;
var spriteMaterial = new THREE.SpriteMaterial({ map: texture, useScreenCoordinates: false, color: 0xffffff, fog: true } );
var sprite = new THREE.Sprite(spriteMaterial);
sprite.scale.set( textWidth/100, fontsize/100, 1 );
sprite.position.set( position.x , position.y, position.z);
return sprite;
}
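    // Usage sketch (illustrative, not part of the original plugin): building a standalone
    // "A" label one unit along X, reusing the lblParameters declared at the top of this
    // file. `parentObject` is a hypothetical THREE.Object3D.
    //
    //   var labelA = makeTextSprite("A", {x: 1, y: 0, z: 0}, lblParameters);
    //   parentObject.add(labelA);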
plugin.Manager.install(plug);
})(MLJ.core.plugin, MLJ.core, MLJ.core.Scene);
| cignoni/meshlabjs | js/mlj/plugins/rendering/Axes.js | JavaScript | agpl-3.0 | 16,533 |