code (stringlengths 3–1.05M) | repo_name (stringlengths 4–116) | path (stringlengths 4–991) | language (stringclasses, 9 values) | license (stringclasses, 15 values) | size (int32, 3–1.05M)
---|---|---|---|---|---|
import { all, takeEvery } from 'redux-saga/effects';
import actions from '#actions';
import handleShareFormChange from './startAlbumsSharingService/handleShareFormChange';
import handleShareFormSubmit from './startAlbumsSharingService/handleShareFormSubmit';
import handleShareItemsSelect from './startAlbumsSharingService/handleShareItemsSelect';
function* startAlbumsSharingService(apis) {
yield all([
takeEvery(actions.uiShareItemsSelected, handleShareItemsSelect, apis),
takeEvery(actions.uiShareFormSubmited, handleShareFormSubmit, apis),
takeEvery(actions.uiShareFormChanged, handleShareFormChange, apis),
]);
}
export default startAlbumsSharingService;
| pathephone/pathephone-desktop | src/renderer/sagas/startApp/startServices/startAlbumsSharingService.js | JavaScript | mit | 680 |
<?php
include_once('conexao.class.php');
class Disciplina{
public $id_disciplina;
public $nome;
public $professor;
public $curso;
public $carga_horaria;
public function __construct(){
//print "Disciplina instanciada!";
}
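// Inserts this Disciplina record into the database via a prepared statement.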
public function gravar(){
$sql = "insert into disciplina (nome, professor, curso, carga_horaria) values (?,?,?,?)";
$con = new Conexao();
$stm = $con->prepare($sql);
$stm->bindParam(1, $this->nome);
$stm->bindParam(2, $this->professor);
$stm->bindParam(3, $this->curso);
$stm->bindParam(4, $this->carga_horaria);
$stm->execute();
//echo "saved";
}
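// Magic accessors; note that PHP only invokes __get/__set for inaccessible
// properties, so with the public properties declared above these apply only
// to properties that were never declared.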
public function __get($var){
return $this->$var;
}
public function __set($var, $valor){
$this->$var = $valor;
}
public function listar(){
$sql = "select * from disciplina";
$con = new Conexao();
$stm = $con->prepare($sql);
$stm->execute();
return $stm;
}
}
?>
| STRVIRTU/tcc-2017 | less/disciplina.class.php | PHP | mit | 960 |
//
// MIT License
//
// Copyright 2019
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//
#include "controller.hh"
#include "log.hh"
#include <boost/core/ignore_unused.hpp>
#include <boost/assert.hpp>
#include <map>
using namespace jones;
using button = jones::controller::button;
using button_state = jones::controller::button_state;
using button_state_map = std::map<button, button_state>;
namespace {
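// Maps a controller shift-register read position to its button, following the
// standard NES read order: A, B, Select, Start, Up, Down, Left, Right.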
auto position_to_button(const uint8_t position) -> button {
switch (position) {
case 0:
return button::BUTTON_A;
case 1:
return button::BUTTON_B;
case 2:
return button::BUTTON_SELECT;
case 3:
return button::BUTTON_START;
case 4:
return button::BUTTON_UP;
case 5:
return button::BUTTON_DOWN;
case 6:
return button::BUTTON_LEFT;
case 7:
return button::BUTTON_RIGHT;
default:
BOOST_ASSERT_MSG(false, "unexpected button found");
return button::BUTTON_INVALID;
}
}
} // namespace
auto controller::button_to_string(const button button) -> auto {
switch (button) {
case button::BUTTON_A:
return "BUTTON_A";
case button::BUTTON_B:
return "BUTTON_B";
case button::BUTTON_SELECT:
return "BUTTON_SELECT";
case button::BUTTON_START:
return "BUTTON_START";
case button::BUTTON_UP:
return "BUTTON_UP";
case button::BUTTON_DOWN:
return "BUTTON_DOWN";
case button::BUTTON_LEFT:
return "BUTTON_LEFT";
case button::BUTTON_RIGHT:
return "BUTTON_RIGHT";
default:
return "BUTTON_INVALID";
}
}
auto controller::button_state_to_string(const button_state button_state) -> auto {
switch (button_state) {
case button_state::BUTTON_STATE_DOWN:
return "BUTTON_STATE_DOWN";
case button_state::BUTTON_STATE_UP:
return "BUTTON_STATE_UP";
default:
return "BUTTON_STATE_INVALID";
}
}
auto controller::controller_state_to_string(const controller_state controller_state) -> auto {
switch (controller_state) {
case controller_state::CONTROLLER_STATE_CONNECTED:
return "CONTROLLER_STATE_CONNECTED";
case controller_state::CONTROLLER_STATE_DISCONNECTED:
return "CONTROLLER_STATE_DISCONNECTED";
default:
return "CONTROLLER_STATE_INVALID";
}
}
class controller::controller::impl {
public:
explicit impl(const memory &memory)
: memory_(memory), strobe_(0), index_(0), button_states_(), controller_state_(controller_state::CONTROLLER_STATE_DISCONNECTED) {
boost::ignore_unused(memory_);
}
~impl() = default;
auto set_button_state(const button button, const button_state button_state) -> void {
button_states_[button] = button_state;
LOG_DEBUG << "controller::set_button_state : "
<< "button [" << button_to_string(button) << "] "
<< "button_state [" << button_state_to_string(button_state) << "]";
}
auto get_button_state(const button button) -> auto {
return button_states_[button];
}
auto set_controller_state(const controller_state controller_state) -> void {
controller_state_ = controller_state;
LOG_DEBUG << "controller::set_controller_state : "
<< "controller_state [" << controller_state_to_string(controller_state) << "]";
}
auto get_controller_state() -> auto {
return controller_state_;
}
auto peek(uint16_t const address) const -> uint8_t {
boost::ignore_unused(address);
uint8_t data = 0;
if (index_ < 8) {
auto const button_state = button_states_.find(position_to_button(index_));
if (button_state != button_states_.end() && button_state->second == button_state::BUTTON_STATE_DOWN) {
data = 1;
}
}
return data;
}
auto read(const uint16_t address) -> uint8_t {
auto const data = peek(address);
index_++;
update_button_index();
return data;
}
auto write(uint16_t const address, uint8_t const data) -> void {
boost::ignore_unused(address);
strobe_ = data;
update_button_index();
}
private:
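// While the strobe's low bit is set, the read index is reset after every
// access, so reads keep returning button A; once the bit is cleared, reads
// walk through the buttons in order.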
auto update_button_index() -> void {
if ((strobe_ & 0x1U) == 1) {
index_ = 0;
}
}
private:
const memory &memory_;
uint8_t strobe_;
uint8_t index_;
button_state_map button_states_;
controller_state controller_state_;
};
controller::controller::controller(memory const &memory)
: impl_(std::make_unique<impl>(memory)) {
}
controller::controller::~controller() = default;
auto controller::controller::set_button_state(button const button, button_state const state) -> void {
impl_->set_button_state(button, state);
}
auto controller::controller::get_button_state(button const button) const -> button_state {
return impl_->get_button_state(button);
}
auto controller::controller::set_controller_state(controller_state const state) -> void {
impl_->set_controller_state(state);
}
auto controller::controller::get_controller_state() const -> controller_state {
return impl_->get_controller_state();
}
auto controller::controller::peek(uint16_t const address) const -> uint8_t {
return impl_->peek(address);
}
auto controller::controller::read(uint16_t const address) const -> uint8_t {
return impl_->read(address);
}
auto controller::controller::write(uint16_t const address, uint8_t const data) -> void {
impl_->write(address, data);
}
| thejunkjon/jones | source/jones/controller/controller.cc | C++ | mit | 6,251 |
export const Camera = `
<svg viewBox="0 0 28 28">
<g fill="none" fill-rule="evenodd">
<path d="M3 3h22a2 2 0 012 2v18a2 2 0 01-2 2H3a2 2 0 01-2-2V5a2 2 0 012-2z" stroke="currentColor"/>
<circle stroke="currentColor" cx="14" cy="14" r="5"/>
<path d="M22 7h1" stroke="currentColor" stroke-linecap="round" stroke-linejoin="round"/>
</g>
</svg>`;
| clair-design/clair | packages/icons/icons/Camera.ts | TypeScript | mit | 359 |
import datetime
import time
import boto
import redis
import requests
import random
import zlib
from django.shortcuts import get_object_or_404
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.template.loader import render_to_string
from django.db import IntegrityError
from django.db.models import Q
from django.views.decorators.cache import never_cache
from django.core.urlresolvers import reverse
from django.contrib.auth import login as login_user
from django.contrib.auth import logout as logout_user
from django.contrib.auth.models import User
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseForbidden, Http404
from django.conf import settings
from django.core.mail import mail_admins
from django.core.validators import email_re
from django.core.mail import EmailMultiAlternatives
from django.contrib.sites.models import Site
from django.utils import feedgenerator
from mongoengine.queryset import OperationError
from mongoengine.queryset import NotUniqueError
from apps.recommendations.models import RecommendedFeed
from apps.analyzer.models import MClassifierTitle, MClassifierAuthor, MClassifierFeed, MClassifierTag
from apps.analyzer.models import apply_classifier_titles, apply_classifier_feeds
from apps.analyzer.models import apply_classifier_authors, apply_classifier_tags
from apps.analyzer.models import get_classifiers_for_user, sort_classifiers_by_feed
from apps.profile.models import Profile
from apps.reader.models import UserSubscription, UserSubscriptionFolders, RUserStory, Feature
from apps.reader.forms import SignupForm, LoginForm, FeatureForm
from apps.rss_feeds.models import MFeedIcon, MStarredStoryCounts
from apps.search.models import MUserSearch
from apps.statistics.models import MStatistics
# from apps.search.models import SearchStarredStory
try:
from apps.rss_feeds.models import Feed, MFeedPage, DuplicateFeed, MStory, MStarredStory
except:
pass
from apps.social.models import MSharedStory, MSocialProfile, MSocialServices
from apps.social.models import MSocialSubscription, MActivity, MInteraction
from apps.categories.models import MCategory
from apps.social.views import load_social_page
from apps.rss_feeds.tasks import ScheduleImmediateFetches
from utils import json_functions as json
from utils.user_functions import get_user, ajax_login_required
from utils.feed_functions import relative_timesince
from utils.story_functions import format_story_link_date__short
from utils.story_functions import format_story_link_date__long
from utils.story_functions import strip_tags
from utils import log as logging
from utils.view_functions import get_argument_or_404, render_to, is_true
from utils.view_functions import required_params
from utils.ratelimit import ratelimit
from vendor.timezones.utilities import localtime_for_timezone
BANNED_URLS = [
"brentozar.com",
]
@never_cache
@render_to('reader/dashboard.xhtml')
def index(request, **kwargs):
if request.method == "GET" and request.subdomain and request.subdomain not in ['dev', 'www', 'debug']:
username = request.subdomain
try:
if '.' in username:
username = username.split('.')[0]
user = User.objects.get(username__iexact=username)
except User.DoesNotExist:
return HttpResponseRedirect('http://%s%s' % (
Site.objects.get_current().domain,
reverse('index')))
return load_social_page(request, user_id=user.pk, username=request.subdomain, **kwargs)
if request.user.is_anonymous():
return welcome(request, **kwargs)
else:
return dashboard(request, **kwargs)
def dashboard(request, **kwargs):
user = request.user
feed_count = UserSubscription.objects.filter(user=request.user).count()
recommended_feeds = RecommendedFeed.objects.filter(is_public=True,
approved_date__lte=datetime.datetime.now()
).select_related('feed')[:2]
unmoderated_feeds = []
if user.is_staff:
unmoderated_feeds = RecommendedFeed.objects.filter(is_public=False,
declined_date__isnull=True
).select_related('feed')[:2]
statistics = MStatistics.all()
social_profile = MSocialProfile.get_user(user.pk)
start_import_from_google_reader = request.session.get('import_from_google_reader', False)
if start_import_from_google_reader:
del request.session['import_from_google_reader']
if not user.is_active:
url = "https://%s%s" % (Site.objects.get_current().domain,
reverse('stripe-form'))
return HttpResponseRedirect(url)
logging.user(request, "~FBLoading dashboard")
return {
'user_profile' : user.profile,
'feed_count' : feed_count,
'account_images' : range(1, 4),
'recommended_feeds' : recommended_feeds,
'unmoderated_feeds' : unmoderated_feeds,
'statistics' : statistics,
'social_profile' : social_profile,
'start_import_from_google_reader': start_import_from_google_reader,
'debug' : settings.DEBUG,
}, "reader/dashboard.xhtml"
def welcome(request, **kwargs):
user = get_user(request)
statistics = MStatistics.all()
social_profile = MSocialProfile.get_user(user.pk)
if request.method == "POST":
if request.POST.get('submit', '').startswith('log'):
login_form = LoginForm(request.POST, prefix='login')
signup_form = SignupForm(prefix='signup')
else:
login_form = LoginForm(prefix='login')
signup_form = SignupForm(request.POST, prefix='signup')
else:
login_form = LoginForm(prefix='login')
signup_form = SignupForm(prefix='signup')
logging.user(request, "~FBLoading welcome")
return {
'user_profile' : hasattr(user, 'profile') and user.profile,
'login_form' : login_form,
'signup_form' : signup_form,
'statistics' : statistics,
'social_profile' : social_profile,
'post_request' : request.method == 'POST',
}, "reader/welcome.xhtml"
@never_cache
def login(request):
code = -1
message = ""
if request.method == "POST":
form = LoginForm(request.POST, prefix='login')
if form.is_valid():
login_user(request, form.get_user())
if request.POST.get('api'):
logging.user(form.get_user(), "~FG~BB~SKiPhone Login~FW")
code = 1
else:
logging.user(form.get_user(), "~FG~BBLogin~FW")
return HttpResponseRedirect(reverse('index'))
else:
message = form.errors.items()[0][1][0]
if request.POST.get('api'):
return HttpResponse(json.encode(dict(code=code, message=message)), mimetype='application/json')
else:
return index(request)
@never_cache
def signup(request):
if request.method == "POST":
form = SignupForm(prefix='signup', data=request.POST)
if form.is_valid():
new_user = form.save()
login_user(request, new_user)
logging.user(new_user, "~FG~SB~BBNEW SIGNUP: ~FW%s" % new_user.email)
if not new_user.is_active:
url = "https://%s%s" % (Site.objects.get_current().domain,
reverse('stripe-form'))
return HttpResponseRedirect(url)
return index(request)
@never_cache
def logout(request):
logging.user(request, "~FG~BBLogout~FW")
logout_user(request)
if request.GET.get('api'):
return HttpResponse(json.encode(dict(code=1)), mimetype='application/json')
else:
return HttpResponseRedirect(reverse('index'))
def autologin(request, username, secret):
next = request.GET.get('next', '')
if not username or not secret:
return HttpResponseForbidden()
profile = Profile.objects.filter(user__username=username, secret_token=secret)
if not profile:
return HttpResponseForbidden()
user = profile[0].user
user.backend = settings.AUTHENTICATION_BACKENDS[0]
login_user(request, user)
logging.user(user, "~FG~BB~SKAuto-Login. Next stop: %s~FW" % (next if next else 'Homepage',))
if next and not next.startswith('/'):
next = '?next=' + next
return HttpResponseRedirect(reverse('index') + next)
elif next:
return HttpResponseRedirect(next)
else:
return HttpResponseRedirect(reverse('index'))
@ratelimit(minutes=1, requests=24)
@never_cache
@json.json_view
def load_feeds(request):
user = get_user(request)
feeds = {}
include_favicons = request.REQUEST.get('include_favicons', False)
flat = request.REQUEST.get('flat', False)
update_counts = request.REQUEST.get('update_counts', False)
version = int(request.REQUEST.get('v', 1))
if include_favicons == 'false': include_favicons = False
if update_counts == 'false': update_counts = False
if flat == 'false': flat = False
if flat: return load_feeds_flat(request)
try:
folders = UserSubscriptionFolders.objects.get(user=user)
except UserSubscriptionFolders.DoesNotExist:
data = dict(feeds=[], folders=[])
return data
except UserSubscriptionFolders.MultipleObjectsReturned:
UserSubscriptionFolders.objects.filter(user=user)[1:].delete()
folders = UserSubscriptionFolders.objects.get(user=user)
user_subs = UserSubscription.objects.select_related('feed').filter(user=user)
day_ago = datetime.datetime.now() - datetime.timedelta(days=1)
scheduled_feeds = []
for sub in user_subs:
pk = sub.feed_id
if update_counts and sub.needs_unread_recalc:
sub.calculate_feed_scores(silent=True)
feeds[pk] = sub.canonical(include_favicon=include_favicons)
if not sub.active: continue
if not sub.feed.active and not sub.feed.has_feed_exception:
scheduled_feeds.append(sub.feed.pk)
elif sub.feed.active_subscribers <= 0:
scheduled_feeds.append(sub.feed.pk)
elif sub.feed.next_scheduled_update < day_ago:
scheduled_feeds.append(sub.feed.pk)
if len(scheduled_feeds) > 0 and request.user.is_authenticated():
logging.user(request, "~SN~FMTasking the scheduling immediate fetch of ~SB%s~SN feeds..." %
len(scheduled_feeds))
ScheduleImmediateFetches.apply_async(kwargs=dict(feed_ids=scheduled_feeds, user_id=user.pk))
starred_counts, starred_count = MStarredStoryCounts.user_counts(user.pk, include_total=True)
if not starred_count and len(starred_counts):
starred_count = MStarredStory.objects(user_id=user.pk).count()
social_params = {
'user_id': user.pk,
'include_favicon': include_favicons,
'update_counts': update_counts,
}
social_feeds = MSocialSubscription.feeds(**social_params)
social_profile = MSocialProfile.profile(user.pk)
social_services = MSocialServices.profile(user.pk)
categories = None
if not user_subs:
categories = MCategory.serialize()
logging.user(request, "~FB~SBLoading ~FY%s~FB/~FM%s~FB feeds/socials%s" % (
len(feeds.keys()), len(social_feeds), '. ~FCUpdating counts.' if update_counts else ''))
data = {
'feeds': feeds.values() if version == 2 else feeds,
'social_feeds': social_feeds,
'social_profile': social_profile,
'social_services': social_services,
'user_profile': user.profile,
"is_staff": user.is_staff,
'folders': json.decode(folders.folders),
'starred_count': starred_count,
'starred_counts': starred_counts,
'categories': categories
}
return data
@json.json_view
def load_feed_favicons(request):
user = get_user(request)
feed_ids = request.REQUEST.getlist('feed_ids')
if not feed_ids:
user_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=True)
feed_ids = [sub['feed__pk'] for sub in user_subs.values('feed__pk')]
feed_icons = dict([(i.feed_id, i.data) for i in MFeedIcon.objects(feed_id__in=feed_ids)])
return feed_icons
def load_feeds_flat(request):
user = request.user
include_favicons = is_true(request.REQUEST.get('include_favicons', False))
update_counts = is_true(request.REQUEST.get('update_counts', True))
feeds = {}
day_ago = datetime.datetime.now() - datetime.timedelta(days=1)
scheduled_feeds = []
iphone_version = "2.1"
if include_favicons == 'false': include_favicons = False
if update_counts == 'false': update_counts = False
if not user.is_authenticated():
return HttpResponseForbidden()
try:
folders = UserSubscriptionFolders.objects.get(user=user)
except UserSubscriptionFolders.DoesNotExist:
folders = []
user_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=True)
if not user_subs and folders:
folders.auto_activate()
user_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=True)
for sub in user_subs:
if update_counts and sub.needs_unread_recalc:
sub.calculate_feed_scores(silent=True)
feeds[sub.feed_id] = sub.canonical(include_favicon=include_favicons)
if not sub.feed.active and not sub.feed.has_feed_exception:
scheduled_feeds.append(sub.feed.pk)
elif sub.feed.active_subscribers <= 0:
scheduled_feeds.append(sub.feed.pk)
elif sub.feed.next_scheduled_update < day_ago:
scheduled_feeds.append(sub.feed.pk)
if len(scheduled_feeds) > 0 and request.user.is_authenticated():
logging.user(request, "~SN~FMTasking the scheduling immediate fetch of ~SB%s~SN feeds..." %
len(scheduled_feeds))
ScheduleImmediateFetches.apply_async(kwargs=dict(feed_ids=scheduled_feeds, user_id=user.pk))
flat_folders = []
if folders:
flat_folders = folders.flatten_folders(feeds=feeds)
social_params = {
'user_id': user.pk,
'include_favicon': include_favicons,
'update_counts': update_counts,
}
social_feeds = MSocialSubscription.feeds(**social_params)
social_profile = MSocialProfile.profile(user.pk)
social_services = MSocialServices.profile(user.pk)
starred_counts, starred_count = MStarredStoryCounts.user_counts(user.pk, include_total=True)
if not starred_count and len(starred_counts):
starred_count = MStarredStory.objects(user_id=user.pk).count()
categories = None
if not user_subs:
categories = MCategory.serialize()
logging.user(request, "~FB~SBLoading ~FY%s~FB/~FM%s~FB feeds/socials ~FMflat~FB%s" % (
len(feeds.keys()), len(social_feeds), '. ~FCUpdating counts.' if update_counts else ''))
data = {
"flat_folders": flat_folders,
"feeds": feeds,
"social_feeds": social_feeds,
"social_profile": social_profile,
"social_services": social_services,
"user": user.username,
"is_staff": user.is_staff,
"user_profile": user.profile,
"iphone_version": iphone_version,
"categories": categories,
'starred_count': starred_count,
'starred_counts': starred_counts,
}
return data
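# Returns freshly computed unread counts for the requested feeds and social
# feeds, plus favicon data for any feeds whose favicons were still being fetched.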
@ratelimit(minutes=1, requests=10)
@never_cache
@json.json_view
def refresh_feeds(request):
user = get_user(request)
feed_ids = request.REQUEST.getlist('feed_id')
check_fetch_status = request.REQUEST.get('check_fetch_status')
favicons_fetching = request.REQUEST.getlist('favicons_fetching')
social_feed_ids = [feed_id for feed_id in feed_ids if 'social:' in feed_id]
feed_ids = list(set(feed_ids) - set(social_feed_ids))
feeds = {}
if feed_ids or (not social_feed_ids and not feed_ids):
feeds = UserSubscription.feeds_with_updated_counts(user, feed_ids=feed_ids,
check_fetch_status=check_fetch_status)
social_feeds = {}
if social_feed_ids or (not social_feed_ids and not feed_ids):
social_feeds = MSocialSubscription.feeds_with_updated_counts(user, social_feed_ids=social_feed_ids)
favicons_fetching = [int(f) for f in favicons_fetching if f]
feed_icons = {}
if favicons_fetching:
feed_icons = dict([(i.feed_id, i) for i in MFeedIcon.objects(feed_id__in=favicons_fetching)])
for feed_id, feed in feeds.items():
if feed_id in favicons_fetching and feed_id in feed_icons:
feeds[feed_id]['favicon'] = feed_icons[feed_id].data
feeds[feed_id]['favicon_color'] = feed_icons[feed_id].color
feeds[feed_id]['favicon_fetching'] = feed.get('favicon_fetching')
user_subs = UserSubscription.objects.filter(user=user, active=True).only('feed')
sub_feed_ids = [s.feed_id for s in user_subs]
if favicons_fetching:
moved_feed_ids = [f for f in favicons_fetching if f not in sub_feed_ids]
for moved_feed_id in moved_feed_ids:
duplicate_feeds = DuplicateFeed.objects.filter(duplicate_feed_id=moved_feed_id)
if duplicate_feeds and duplicate_feeds[0].feed.pk in feeds:
feeds[moved_feed_id] = feeds[duplicate_feeds[0].feed_id]
feeds[moved_feed_id]['dupe_feed_id'] = duplicate_feeds[0].feed_id
if check_fetch_status:
missing_feed_ids = list(set(feed_ids) - set(sub_feed_ids))
if missing_feed_ids:
duplicate_feeds = DuplicateFeed.objects.filter(duplicate_feed_id__in=missing_feed_ids)
for duplicate_feed in duplicate_feeds:
feeds[duplicate_feed.duplicate_feed_id] = {'id': duplicate_feed.feed_id}
interactions_count = MInteraction.user_unread_count(user.pk)
if True or settings.DEBUG or check_fetch_status:
logging.user(request, "~FBRefreshing %s feeds (%s/%s)" % (
len(feeds.keys()), check_fetch_status, len(favicons_fetching)))
return {
'feeds': feeds,
'social_feeds': social_feeds,
'interactions_count': interactions_count,
}
@json.json_view
def interactions_count(request):
user = get_user(request)
interactions_count = MInteraction.user_unread_count(user.pk)
return {
'interactions_count': interactions_count,
}
@never_cache
@ajax_login_required
@json.json_view
def feed_unread_count(request):
user = request.user
feed_ids = request.REQUEST.getlist('feed_id')
force = request.REQUEST.get('force', False)
social_feed_ids = [feed_id for feed_id in feed_ids if 'social:' in feed_id]
feed_ids = list(set(feed_ids) - set(social_feed_ids))
feeds = {}
if feed_ids:
feeds = UserSubscription.feeds_with_updated_counts(user, feed_ids=feed_ids, force=force)
social_feeds = {}
if social_feed_ids:
social_feeds = MSocialSubscription.feeds_with_updated_counts(user, social_feed_ids=social_feed_ids)
if len(feed_ids) == 1:
if settings.DEBUG:
feed_title = Feed.get_by_id(feed_ids[0]).feed_title
else:
feed_title = feed_ids[0]
elif len(social_feed_ids) == 1:
feed_title = MSocialProfile.objects.get(user_id=social_feed_ids[0].replace('social:', '')).username
else:
feed_title = "%s feeds" % (len(feeds) + len(social_feeds))
logging.user(request, "~FBUpdating unread count on: %s" % feed_title)
return {'feeds': feeds, 'social_feeds': social_feeds}
def refresh_feed(request, feed_id):
user = get_user(request)
feed = get_object_or_404(Feed, pk=feed_id)
feed = feed.update(force=True, compute_scores=False)
usersub = UserSubscription.objects.get(user=user, feed=feed)
usersub.calculate_feed_scores(silent=False)
logging.user(request, "~FBRefreshing feed: %s" % feed)
return load_single_feed(request, feed_id)
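# Loads a single feed's stories for the current user, annotating each story
# with read/starred/shared state and intelligence classifier scores.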
@never_cache
@json.json_view
def load_single_feed(request, feed_id):
start = time.time()
user = get_user(request)
# offset = int(request.REQUEST.get('offset', 0))
# limit = int(request.REQUEST.get('limit', 6))
limit = 6
page = int(request.REQUEST.get('page', 1))
offset = limit * (page-1)
order = request.REQUEST.get('order', 'newest')
read_filter = request.REQUEST.get('read_filter', 'all')
query = request.REQUEST.get('query')
include_story_content = is_true(request.REQUEST.get('include_story_content', True))
include_hidden = is_true(request.REQUEST.get('include_hidden', False))
message = None
user_search = None
dupe_feed_id = None
user_profiles = []
now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone)
if not feed_id: raise Http404
feed_address = request.REQUEST.get('feed_address')
feed = Feed.get_by_id(feed_id, feed_address=feed_address)
if not feed:
raise Http404
try:
usersub = UserSubscription.objects.get(user=user, feed=feed)
except UserSubscription.DoesNotExist:
usersub = None
if query:
if user.profile.is_premium:
user_search = MUserSearch.get_user(user.pk)
user_search.touch_search_date()
stories = feed.find_stories(query, order=order, offset=offset, limit=limit)
else:
stories = []
message = "You must be a premium subscriber to search."
elif read_filter == 'starred':
mstories = MStarredStory.objects(
user_id=user.pk,
story_feed_id=feed_id
).order_by('%sstarred_date' % ('-' if order == 'newest' else ''))[offset:offset+limit]
stories = Feed.format_stories(mstories)
elif usersub and (read_filter == 'unread' or order == 'oldest'):
stories = usersub.get_stories(order=order, read_filter=read_filter, offset=offset, limit=limit,
default_cutoff_date=user.profile.unread_cutoff)
else:
stories = feed.get_stories(offset, limit)
checkpoint1 = time.time()
try:
stories, user_profiles = MSharedStory.stories_with_comments_and_profiles(stories, user.pk)
except redis.ConnectionError:
logging.user(request, "~BR~FK~SBRedis is unavailable for shared stories.")
checkpoint2 = time.time()
# Get intelligence classifier for user
if usersub and usersub.is_trained:
classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, feed_id=feed_id, social_user_id=0))
classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk, feed_id=feed_id))
classifier_titles = list(MClassifierTitle.objects(user_id=user.pk, feed_id=feed_id))
classifier_tags = list(MClassifierTag.objects(user_id=user.pk, feed_id=feed_id))
else:
classifier_feeds = []
classifier_authors = []
classifier_titles = []
classifier_tags = []
classifiers = get_classifiers_for_user(user, feed_id=feed_id,
classifier_feeds=classifier_feeds,
classifier_authors=classifier_authors,
classifier_titles=classifier_titles,
classifier_tags=classifier_tags)
checkpoint3 = time.time()
unread_story_hashes = []
if stories:
if (read_filter == 'all' or query) and usersub:
unread_story_hashes = UserSubscription.story_hashes(user.pk, read_filter='unread',
feed_ids=[usersub.feed_id],
usersubs=[usersub],
group_by_feed=False,
cutoff_date=user.profile.unread_cutoff)
story_hashes = [story['story_hash'] for story in stories if story['story_hash']]
starred_stories = MStarredStory.objects(user_id=user.pk,
story_feed_id=feed.pk,
story_hash__in=story_hashes)\
.only('story_hash', 'starred_date', 'user_tags')
shared_story_hashes = MSharedStory.check_shared_story_hashes(user.pk, story_hashes)
shared_stories = []
if shared_story_hashes:
shared_stories = MSharedStory.objects(user_id=user.pk,
story_hash__in=shared_story_hashes)\
.only('story_hash', 'shared_date', 'comments')
starred_stories = dict([(story.story_hash, dict(starred_date=story.starred_date,
user_tags=story.user_tags))
for story in starred_stories])
shared_stories = dict([(story.story_hash, dict(shared_date=story.shared_date,
comments=story.comments))
for story in shared_stories])
checkpoint4 = time.time()
for story in stories:
if not include_story_content:
del story['story_content']
story_date = localtime_for_timezone(story['story_date'], user.profile.timezone)
nowtz = localtime_for_timezone(now, user.profile.timezone)
story['short_parsed_date'] = format_story_link_date__short(story_date, nowtz)
story['long_parsed_date'] = format_story_link_date__long(story_date, nowtz)
if usersub:
story['read_status'] = 1
if (read_filter == 'all' or query) and usersub:
story['read_status'] = 1 if story['story_hash'] not in unread_story_hashes else 0
elif read_filter == 'unread' and usersub:
story['read_status'] = 0
if story['story_hash'] in starred_stories:
story['starred'] = True
starred_date = localtime_for_timezone(starred_stories[story['story_hash']]['starred_date'],
user.profile.timezone)
story['starred_date'] = format_story_link_date__long(starred_date, now)
story['starred_timestamp'] = starred_date.strftime('%s')
story['user_tags'] = starred_stories[story['story_hash']]['user_tags']
if story['story_hash'] in shared_stories:
story['shared'] = True
shared_date = localtime_for_timezone(shared_stories[story['story_hash']]['shared_date'],
user.profile.timezone)
story['shared_date'] = format_story_link_date__long(shared_date, now)
story['shared_comments'] = strip_tags(shared_stories[story['story_hash']]['comments'])
else:
story['read_status'] = 1
story['intelligence'] = {
'feed': apply_classifier_feeds(classifier_feeds, feed),
'author': apply_classifier_authors(classifier_authors, story),
'tags': apply_classifier_tags(classifier_tags, story),
'title': apply_classifier_titles(classifier_titles, story),
}
story['score'] = UserSubscription.score_story(story['intelligence'])
# Intelligence
feed_tags = json.decode(feed.data.popular_tags) if feed.data.popular_tags else []
feed_authors = json.decode(feed.data.popular_authors) if feed.data.popular_authors else []
if usersub:
usersub.feed_opens += 1
usersub.needs_unread_recalc = True
usersub.save(update_fields=['feed_opens', 'needs_unread_recalc'])
diff1 = checkpoint1-start
diff2 = checkpoint2-start
diff3 = checkpoint3-start
diff4 = checkpoint4-start
timediff = time.time()-start
last_update = relative_timesince(feed.last_update)
time_breakdown = ""
if timediff > 1 or settings.DEBUG:
time_breakdown = "~SN~FR(~SB%.4s/%.4s/%.4s/%.4s~SN)" % (
diff1, diff2, diff3, diff4)
search_log = "~SN~FG(~SB%s~SN) " % query if query else ""
logging.user(request, "~FYLoading feed: ~SB%s%s (%s/%s) %s%s" % (
feed.feed_title[:22], ('~SN/p%s' % page) if page > 1 else '', order, read_filter, search_log, time_breakdown))
if not include_hidden:
hidden_stories_removed = 0
new_stories = []
for story in stories:
if story['score'] >= 0:
new_stories.append(story)
else:
hidden_stories_removed += 1
stories = new_stories
data = dict(stories=stories,
user_profiles=user_profiles,
feed_tags=feed_tags,
feed_authors=feed_authors,
classifiers=classifiers,
updated=last_update,
user_search=user_search,
feed_id=feed.pk,
elapsed_time=round(float(timediff), 2),
message=message)
if not include_hidden: data['hidden_stories_removed'] = hidden_stories_removed
if dupe_feed_id: data['dupe_feed_id'] = dupe_feed_id
if not usersub:
data.update(feed.canonical())
# if not usersub and feed.num_subscribers <= 1:
# data = dict(code=-1, message="You must be subscribed to this feed.")
# if page <= 3:
# import random
# time.sleep(random.randint(2, 4))
# if page == 2:
# assert False
return data
def load_feed_page(request, feed_id):
if not feed_id:
raise Http404
feed = Feed.get_by_id(feed_id)
if feed and feed.has_page and not feed.has_page_exception:
if settings.BACKED_BY_AWS.get('pages_on_node'):
url = "http://%s/original_page/%s" % (
settings.ORIGINAL_PAGE_SERVER,
feed.pk,
)
page_response = requests.get(url)
if page_response.status_code == 200:
response = HttpResponse(page_response.content, mimetype="text/html; charset=utf-8")
response['Content-Encoding'] = 'gzip'
response['Last-Modified'] = page_response.headers.get('Last-modified')
response['Etag'] = page_response.headers.get('Etag')
response['Content-Length'] = str(len(page_response.content))
logging.user(request, "~FYLoading original page, proxied from node: ~SB%s bytes" %
(len(page_response.content)))
return response
if settings.BACKED_BY_AWS['pages_on_s3'] and feed.s3_page:
if settings.PROXY_S3_PAGES:
key = settings.S3_PAGES_BUCKET.get_key(feed.s3_pages_key)
if key:
compressed_data = key.get_contents_as_string()
response = HttpResponse(compressed_data, mimetype="text/html; charset=utf-8")
response['Content-Encoding'] = 'gzip'
logging.user(request, "~FYLoading original page, proxied: ~SB%s bytes" %
(len(compressed_data)))
return response
else:
logging.user(request, "~FYLoading original page, non-proxied")
return HttpResponseRedirect('//%s/%s' % (settings.S3_PAGES_BUCKET_NAME,
feed.s3_pages_key))
data = MFeedPage.get_data(feed_id=feed_id)
if not data or not feed or not feed.has_page or feed.has_page_exception:
logging.user(request, "~FYLoading original page, ~FRmissing")
return render(request, 'static/404_original_page.xhtml', {},
content_type='text/html',
status=404)
logging.user(request, "~FYLoading original page, from the db")
return HttpResponse(data, mimetype="text/html; charset=utf-8")
@json.json_view
def load_starred_stories(request):
user = get_user(request)
offset = int(request.REQUEST.get('offset', 0))
limit = int(request.REQUEST.get('limit', 10))
page = int(request.REQUEST.get('page', 0))
query = request.REQUEST.get('query')
order = request.REQUEST.get('order', 'newest')
tag = request.REQUEST.get('tag')
story_hashes = request.REQUEST.getlist('h')[:100]
version = int(request.REQUEST.get('v', 1))
now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone)
message = None
order_by = '-' if order == "newest" else ""
if page: offset = limit * (page - 1)
if query:
# results = SearchStarredStory.query(user.pk, query)
# story_ids = [result.db_id for result in results]
if user.profile.is_premium:
stories = MStarredStory.find_stories(query, user.pk, tag=tag, offset=offset, limit=limit,
order=order)
else:
stories = []
message = "You must be a premium subscriber to search."
elif tag:
if user.profile.is_premium:
mstories = MStarredStory.objects(
user_id=user.pk,
user_tags__contains=tag
).order_by('%sstarred_date' % order_by)[offset:offset+limit]
stories = Feed.format_stories(mstories)
else:
stories = []
message = "You must be a premium subscriber to read saved stories by tag."
elif story_hashes:
mstories = MStarredStory.objects(
user_id=user.pk,
story_hash__in=story_hashes
).order_by('%sstarred_date' % order_by)[offset:offset+limit]
stories = Feed.format_stories(mstories)
else:
mstories = MStarredStory.objects(
user_id=user.pk
).order_by('%sstarred_date' % order_by)[offset:offset+limit]
stories = Feed.format_stories(mstories)
stories, user_profiles = MSharedStory.stories_with_comments_and_profiles(stories, user.pk, check_all=True)
story_hashes = [story['story_hash'] for story in stories]
story_feed_ids = list(set(s['story_feed_id'] for s in stories))
usersub_ids = UserSubscription.objects.filter(user__pk=user.pk, feed__pk__in=story_feed_ids).values('feed__pk')
usersub_ids = [us['feed__pk'] for us in usersub_ids]
unsub_feed_ids = list(set(story_feed_ids).difference(set(usersub_ids)))
unsub_feeds = Feed.objects.filter(pk__in=unsub_feed_ids)
unsub_feeds = dict((feed.pk, feed.canonical(include_favicon=False)) for feed in unsub_feeds)
shared_story_hashes = MSharedStory.check_shared_story_hashes(user.pk, story_hashes)
shared_stories = []
if shared_story_hashes:
shared_stories = MSharedStory.objects(user_id=user.pk,
story_hash__in=shared_story_hashes)\
.only('story_hash', 'shared_date', 'comments')
shared_stories = dict([(story.story_hash, dict(shared_date=story.shared_date,
comments=story.comments))
for story in shared_stories])
nowtz = localtime_for_timezone(now, user.profile.timezone)
for story in stories:
story_date = localtime_for_timezone(story['story_date'], user.profile.timezone)
story['short_parsed_date'] = format_story_link_date__short(story_date, nowtz)
story['long_parsed_date'] = format_story_link_date__long(story_date, nowtz)
starred_date = localtime_for_timezone(story['starred_date'], user.profile.timezone)
story['starred_date'] = format_story_link_date__long(starred_date, nowtz)
story['starred_timestamp'] = starred_date.strftime('%s')
story['read_status'] = 1
story['starred'] = True
story['intelligence'] = {
'feed': 1,
'author': 0,
'tags': 0,
'title': 0,
}
if story['story_hash'] in shared_stories:
story['shared'] = True
story['shared_comments'] = strip_tags(shared_stories[story['story_hash']]['comments'])
search_log = "~SN~FG(~SB%s~SN)" % query if query else ""
logging.user(request, "~FCLoading starred stories: ~SB%s stories %s" % (len(stories), search_log))
return {
"stories": stories,
"user_profiles": user_profiles,
'feeds': unsub_feeds.values() if version == 2 else unsub_feeds,
"message": message,
}
@json.json_view
def starred_story_hashes(request):
user = get_user(request)
include_timestamps = is_true(request.REQUEST.get('include_timestamps', False))
mstories = MStarredStory.objects(
user_id=user.pk
).only('story_hash', 'starred_date').order_by('-starred_date')
if include_timestamps:
story_hashes = [(s.story_hash, s.starred_date.strftime("%s")) for s in mstories]
else:
story_hashes = [s.story_hash for s in mstories]
logging.user(request, "~FYLoading ~FCstarred story hashes~FY: %s story hashes" %
(len(story_hashes)))
return dict(starred_story_hashes=story_hashes)
def starred_stories_rss_feed(request, user_id, secret_token, tag_slug):
try:
user = User.objects.get(pk=user_id)
except User.DoesNotExist:
raise Http404
try:
tag_counts = MStarredStoryCounts.objects.get(user_id=user_id, slug=tag_slug)
except MStarredStoryCounts.MultipleObjectsReturned:
tag_counts = MStarredStoryCounts.objects(user_id=user_id, slug=tag_slug).first()
except MStarredStoryCounts.DoesNotExist:
raise Http404
data = {}
data['title'] = "Saved Stories - %s" % tag_counts.tag
data['link'] = "%s%s" % (
settings.NEWSBLUR_URL,
reverse('saved-stories-tag', kwargs=dict(tag_name=tag_slug)))
data['description'] = "Stories saved by %s on NewsBlur with the tag \"%s\"." % (user.username,
tag_counts.tag)
data['lastBuildDate'] = datetime.datetime.utcnow()
data['generator'] = 'NewsBlur - %s' % settings.NEWSBLUR_URL
data['docs'] = None
data['author_name'] = user.username
data['feed_url'] = "%s%s" % (
settings.NEWSBLUR_URL,
reverse('starred-stories-rss-feed',
kwargs=dict(user_id=user_id, secret_token=secret_token, tag_slug=tag_slug)),
)
rss = feedgenerator.Atom1Feed(**data)
if not tag_counts.tag:
starred_stories = MStarredStory.objects(
user_id=user.pk
).order_by('-starred_date').limit(25)
else:
starred_stories = MStarredStory.objects(
user_id=user.pk,
user_tags__contains=tag_counts.tag
).order_by('-starred_date').limit(25)
for starred_story in starred_stories:
story_data = {
'title': starred_story.story_title,
'link': starred_story.story_permalink,
'description': (starred_story.story_content_z and
zlib.decompress(starred_story.story_content_z)),
'author_name': starred_story.story_author_name,
'categories': starred_story.story_tags,
'unique_id': starred_story.story_guid,
'pubdate': starred_story.starred_date,
}
rss.add_item(**story_data)
logging.user(request, "~FBGenerating ~SB%s~SN's saved story RSS feed (%s, %s stories): ~FM%s" % (
user.username,
tag_counts.tag,
tag_counts.count,
request.META.get('HTTP_USER_AGENT', "")[:24]
))
return HttpResponse(rss.writeString('utf-8'), content_type='application/rss+xml')
@json.json_view
def load_read_stories(request):
user = get_user(request)
offset = int(request.REQUEST.get('offset', 0))
limit = int(request.REQUEST.get('limit', 10))
page = int(request.REQUEST.get('page', 0))
order = request.REQUEST.get('order', 'newest')
query = request.REQUEST.get('query')
now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone)
message = None
if page: offset = limit * (page - 1)
if query:
stories = []
message = "Not implemented yet."
# if user.profile.is_premium:
# stories = MStarredStory.find_stories(query, user.pk, offset=offset, limit=limit)
# else:
# stories = []
# message = "You must be a premium subscriber to search."
else:
story_hashes = RUserStory.get_read_stories(user.pk, offset=offset, limit=limit, order=order)
mstories = MStory.objects(story_hash__in=story_hashes)
stories = Feed.format_stories(mstories)
stories = sorted(stories, key=lambda story: story_hashes.index(story['story_hash']),
reverse=bool(order=="oldest"))
stories, user_profiles = MSharedStory.stories_with_comments_and_profiles(stories, user.pk, check_all=True)
story_hashes = [story['story_hash'] for story in stories]
story_feed_ids = list(set(s['story_feed_id'] for s in stories))
usersub_ids = UserSubscription.objects.filter(user__pk=user.pk, feed__pk__in=story_feed_ids).values('feed__pk')
usersub_ids = [us['feed__pk'] for us in usersub_ids]
unsub_feed_ids = list(set(story_feed_ids).difference(set(usersub_ids)))
unsub_feeds = Feed.objects.filter(pk__in=unsub_feed_ids)
unsub_feeds = [feed.canonical(include_favicon=False) for feed in unsub_feeds]
shared_stories = MSharedStory.objects(user_id=user.pk,
story_hash__in=story_hashes)\
.only('story_hash', 'shared_date', 'comments')
shared_stories = dict([(story.story_hash, dict(shared_date=story.shared_date,
comments=story.comments))
for story in shared_stories])
starred_stories = MStarredStory.objects(user_id=user.pk,
story_hash__in=story_hashes)\
.only('story_hash', 'starred_date')
starred_stories = dict([(story.story_hash, story.starred_date)
for story in starred_stories])
nowtz = localtime_for_timezone(now, user.profile.timezone)
for story in stories:
story_date = localtime_for_timezone(story['story_date'], user.profile.timezone)
story['short_parsed_date'] = format_story_link_date__short(story_date, nowtz)
story['long_parsed_date'] = format_story_link_date__long(story_date, nowtz)
story['read_status'] = 1
story['intelligence'] = {
'feed': 1,
'author': 0,
'tags': 0,
'title': 0,
}
if story['story_hash'] in starred_stories:
story['starred'] = True
starred_date = localtime_for_timezone(starred_stories[story['story_hash']],
user.profile.timezone)
story['starred_date'] = format_story_link_date__long(starred_date, now)
story['starred_timestamp'] = starred_date.strftime('%s')
if story['story_hash'] in shared_stories:
story['shared'] = True
story['shared_comments'] = strip_tags(shared_stories[story['story_hash']]['comments'])
search_log = "~SN~FG(~SB%s~SN)" % query if query else ""
logging.user(request, "~FCLoading read stories: ~SB%s stories %s" % (len(stories), search_log))
return {
"stories": stories,
"user_profiles": user_profiles,
"feeds": unsub_feeds,
"message": message,
}
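# "River of News": loads stories across a set of feeds at once, applying the
# read filter, starred-story metadata, and the user's intelligence classifiers
# to score each story.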
@json.json_view
def load_river_stories__redis(request):
limit = 12
start = time.time()
user = get_user(request)
message = None
feed_ids = [int(feed_id) for feed_id in request.REQUEST.getlist('feeds') if feed_id]
if not feed_ids:
feed_ids = [int(feed_id) for feed_id in request.REQUEST.getlist('f') if feed_id]
story_hashes = request.REQUEST.getlist('h')[:100]
original_feed_ids = list(feed_ids)
page = int(request.REQUEST.get('page', 1))
order = request.REQUEST.get('order', 'newest')
read_filter = request.REQUEST.get('read_filter', 'unread')
query = request.REQUEST.get('query')
include_hidden = is_true(request.REQUEST.get('include_hidden', False))
now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone)
usersubs = []
code = 1
user_search = None
offset = (page-1) * limit
limit = page * limit
story_date_order = "%sstory_date" % ('' if order == 'oldest' else '-')
if story_hashes:
unread_feed_story_hashes = None
read_filter = 'unread'
mstories = MStory.objects(story_hash__in=story_hashes).order_by(story_date_order)
stories = Feed.format_stories(mstories)
elif query:
if user.profile.is_premium:
user_search = MUserSearch.get_user(user.pk)
user_search.touch_search_date()
usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids,
read_filter='all')
feed_ids = [sub.feed_id for sub in usersubs]
stories = Feed.find_feed_stories(feed_ids, query, order=order, offset=offset, limit=limit)
mstories = stories
unread_feed_story_hashes = UserSubscription.story_hashes(user.pk, feed_ids=feed_ids,
read_filter="unread", order=order,
group_by_feed=False,
cutoff_date=user.profile.unread_cutoff)
else:
stories = []
mstories = []
message = "You must be a premium subscriber to search."
elif read_filter == 'starred':
mstories = MStarredStory.objects(
user_id=user.pk,
story_feed_id__in=feed_ids
).order_by('%sstarred_date' % ('-' if order == 'newest' else ''))[offset:offset+limit]
stories = Feed.format_stories(mstories)
else:
usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids,
read_filter=read_filter)
all_feed_ids = [f for f in feed_ids]
feed_ids = [sub.feed_id for sub in usersubs]
if feed_ids:
params = {
"user_id": user.pk,
"feed_ids": feed_ids,
"all_feed_ids": all_feed_ids,
"offset": offset,
"limit": limit,
"order": order,
"read_filter": read_filter,
"usersubs": usersubs,
"cutoff_date": user.profile.unread_cutoff,
}
story_hashes, unread_feed_story_hashes = UserSubscription.feed_stories(**params)
else:
story_hashes = []
unread_feed_story_hashes = []
mstories = MStory.objects(story_hash__in=story_hashes).order_by(story_date_order)
stories = Feed.format_stories(mstories)
found_feed_ids = list(set([story['story_feed_id'] for story in stories]))
stories, user_profiles = MSharedStory.stories_with_comments_and_profiles(stories, user.pk)
if not usersubs:
usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=found_feed_ids,
read_filter=read_filter)
trained_feed_ids = [sub.feed_id for sub in usersubs if sub.is_trained]
found_trained_feed_ids = list(set(trained_feed_ids) & set(found_feed_ids))
# Find starred stories
if found_feed_ids:
if read_filter == 'starred':
starred_stories = mstories
else:
starred_stories = MStarredStory.objects(
user_id=user.pk,
story_feed_id__in=found_feed_ids
).only('story_hash', 'starred_date')
starred_stories = dict([(story.story_hash, dict(starred_date=story.starred_date,
user_tags=story.user_tags))
for story in starred_stories])
else:
starred_stories = {}
# Intelligence classifiers for all feeds involved
if found_trained_feed_ids:
classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk,
feed_id__in=found_trained_feed_ids,
social_user_id=0))
classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk,
feed_id__in=found_trained_feed_ids))
classifier_titles = list(MClassifierTitle.objects(user_id=user.pk,
feed_id__in=found_trained_feed_ids))
classifier_tags = list(MClassifierTag.objects(user_id=user.pk,
feed_id__in=found_trained_feed_ids))
else:
classifier_feeds = []
classifier_authors = []
classifier_titles = []
classifier_tags = []
classifiers = sort_classifiers_by_feed(user=user, feed_ids=found_feed_ids,
classifier_feeds=classifier_feeds,
classifier_authors=classifier_authors,
classifier_titles=classifier_titles,
classifier_tags=classifier_tags)
# Just need to format stories
nowtz = localtime_for_timezone(now, user.profile.timezone)
for story in stories:
if read_filter == 'starred':
story['read_status'] = 1
else:
story['read_status'] = 0
if read_filter == 'all' or query:
if (unread_feed_story_hashes is not None and
story['story_hash'] not in unread_feed_story_hashes):
story['read_status'] = 1
story_date = localtime_for_timezone(story['story_date'], user.profile.timezone)
story['short_parsed_date'] = format_story_link_date__short(story_date, nowtz)
story['long_parsed_date'] = format_story_link_date__long(story_date, nowtz)
if story['story_hash'] in starred_stories:
story['starred'] = True
starred_date = localtime_for_timezone(starred_stories[story['story_hash']]['starred_date'],
user.profile.timezone)
story['starred_date'] = format_story_link_date__long(starred_date, now)
story['starred_timestamp'] = starred_date.strftime('%s')
story['user_tags'] = starred_stories[story['story_hash']]['user_tags']
story['intelligence'] = {
'feed': apply_classifier_feeds(classifier_feeds, story['story_feed_id']),
'author': apply_classifier_authors(classifier_authors, story),
'tags': apply_classifier_tags(classifier_tags, story),
'title': apply_classifier_titles(classifier_titles, story),
}
story['score'] = UserSubscription.score_story(story['intelligence'])
if not user.profile.is_premium:
message = "The full River of News is a premium feature."
code = 0
# if page > 1:
# stories = []
# else:
# stories = stories[:5]
diff = time.time() - start
timediff = round(float(diff), 2)
logging.user(request, "~FYLoading ~FCriver stories~FY: ~SBp%s~SN (%s/%s "
"stories, ~SN%s/%s/%s feeds, %s/%s)" %
(page, len(stories), len(mstories), len(found_feed_ids),
len(feed_ids), len(original_feed_ids), order, read_filter))
if not include_hidden:
hidden_stories_removed = 0
new_stories = []
for story in stories:
if story['score'] >= 0:
new_stories.append(story)
else:
hidden_stories_removed += 1
stories = new_stories
# if page <= 1:
# import random
# time.sleep(random.randint(0, 6))
data = dict(code=code,
message=message,
stories=stories,
classifiers=classifiers,
elapsed_time=timediff,
user_search=user_search,
user_profiles=user_profiles)
if not include_hidden: data['hidden_stories_removed'] = hidden_stories_removed
return data
@json.json_view
def unread_story_hashes__old(request):
user = get_user(request)
feed_ids = [int(feed_id) for feed_id in request.REQUEST.getlist('feed_id') if feed_id]
include_timestamps = is_true(request.REQUEST.get('include_timestamps', False))
usersubs = {}
if not feed_ids:
usersubs = UserSubscription.objects.filter(Q(unread_count_neutral__gt=0) |
Q(unread_count_positive__gt=0),
user=user, active=True)
feed_ids = [sub.feed_id for sub in usersubs]
else:
usersubs = UserSubscription.objects.filter(Q(unread_count_neutral__gt=0) |
Q(unread_count_positive__gt=0),
user=user, active=True, feed__in=feed_ids)
unread_feed_story_hashes = {}
story_hash_count = 0
usersubs = dict((sub.feed_id, sub) for sub in usersubs)
for feed_id in feed_ids:
if feed_id in usersubs:
us = usersubs[feed_id]
else:
continue
if not us.unread_count_neutral and not us.unread_count_positive:
continue
unread_feed_story_hashes[feed_id] = us.get_stories(read_filter='unread', limit=500,
withscores=include_timestamps,
hashes_only=True,
default_cutoff_date=user.profile.unread_cutoff)
story_hash_count += len(unread_feed_story_hashes[feed_id])
logging.user(request, "~FYLoading ~FCunread story hashes~FY: ~SB%s feeds~SN (%s story hashes)" %
(len(feed_ids), len(story_hash_count)))
return dict(unread_feed_story_hashes=unread_feed_story_hashes)
@json.json_view
def unread_story_hashes(request):
user = get_user(request)
feed_ids = [int(feed_id) for feed_id in request.REQUEST.getlist('feed_id') if feed_id]
include_timestamps = is_true(request.REQUEST.get('include_timestamps', False))
order = request.REQUEST.get('order', 'newest')
read_filter = request.REQUEST.get('read_filter', 'unread')
story_hashes = UserSubscription.story_hashes(user.pk, feed_ids=feed_ids,
order=order, read_filter=read_filter,
include_timestamps=include_timestamps,
cutoff_date=user.profile.unread_cutoff)
logging.user(request, "~FYLoading ~FCunread story hashes~FY: ~SB%s feeds~SN (%s story hashes)" %
(len(feed_ids), len(story_hashes)))
return dict(unread_feed_story_hashes=story_hashes)
@ajax_login_required
@json.json_view
def mark_all_as_read(request):
code = 1
try:
days = int(request.REQUEST.get('days', 0))
except ValueError:
return dict(code=-1, message="Days parameter must be an integer, not: %s" %
request.REQUEST.get('days'))
read_date = datetime.datetime.utcnow() - datetime.timedelta(days=days)
feeds = UserSubscription.objects.filter(user=request.user)
socialsubs = MSocialSubscription.objects.filter(user_id=request.user.pk)
for subtype in [feeds, socialsubs]:
for sub in subtype:
if days == 0:
sub.mark_feed_read()
else:
if sub.mark_read_date < read_date:
sub.needs_unread_recalc = True
sub.mark_read_date = read_date
sub.save()
logging.user(request, "~FMMarking all as read: ~SB%s days" % (days,))
return dict(code=code)
@ajax_login_required
@json.json_view
def mark_story_as_read(request):
story_ids = request.REQUEST.getlist('story_id')
try:
feed_id = int(get_argument_or_404(request, 'feed_id'))
except ValueError:
return dict(code=-1, errors=["You must pass a valid feed_id: %s" %
request.REQUEST.get('feed_id')])
try:
usersub = UserSubscription.objects.select_related('feed').get(user=request.user, feed=feed_id)
except Feed.DoesNotExist:
duplicate_feed = DuplicateFeed.objects.filter(duplicate_feed_id=feed_id)
if duplicate_feed:
feed_id = duplicate_feed[0].feed_id
try:
usersub = UserSubscription.objects.get(user=request.user,
feed=duplicate_feed[0].feed)
except (Feed.DoesNotExist):
return dict(code=-1, errors=["No feed exists for feed_id %d." % feed_id])
else:
return dict(code=-1, errors=["No feed exists for feed_id %d." % feed_id])
except UserSubscription.DoesNotExist:
usersub = None
if usersub:
data = usersub.mark_story_ids_as_read(story_ids, request=request)
else:
data = dict(code=-1, errors=["User is not subscribed to this feed."])
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
r.publish(request.user.username, 'feed:%s' % feed_id)
return data
@ajax_login_required
@json.json_view
def mark_story_hashes_as_read(request):
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
story_hashes = request.REQUEST.getlist('story_hash')
feed_ids, friend_ids = RUserStory.mark_story_hashes_read(request.user.pk, story_hashes)
if friend_ids:
socialsubs = MSocialSubscription.objects.filter(
user_id=request.user.pk,
subscription_user_id__in=friend_ids)
for socialsub in socialsubs:
if not socialsub.needs_unread_recalc:
socialsub.needs_unread_recalc = True
socialsub.save()
r.publish(request.user.username, 'social:%s' % socialsub.subscription_user_id)
# Also count on original subscription
for feed_id in feed_ids:
usersubs = UserSubscription.objects.filter(user=request.user.pk, feed=feed_id)
if usersubs:
usersub = usersubs[0]
if not usersub.needs_unread_recalc:
usersub.needs_unread_recalc = True
usersub.save(update_fields=['needs_unread_recalc'])
r.publish(request.user.username, 'feed:%s' % feed_id)
hash_count = len(story_hashes)
logging.user(request, "~FYRead %s %s in feed/socialsubs: %s/%s" % (
hash_count, 'story' if hash_count == 1 else 'stories', feed_ids, friend_ids))
return dict(code=1, story_hashes=story_hashes,
feed_ids=feed_ids, friend_user_ids=friend_ids)
@ajax_login_required
@json.json_view
def mark_feed_stories_as_read(request):
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
feeds_stories = request.REQUEST.get('feeds_stories', "{}")
feeds_stories = json.decode(feeds_stories)
data = {
'code': -1,
'message': 'Nothing was marked as read'
}
for feed_id, story_ids in feeds_stories.items():
try:
feed_id = int(feed_id)
except ValueError:
continue
try:
usersub = UserSubscription.objects.select_related('feed').get(user=request.user, feed=feed_id)
data = usersub.mark_story_ids_as_read(story_ids, request=request)
except UserSubscription.DoesNotExist:
return dict(code=-1, error="You are not subscribed to this feed_id: %d" % feed_id)
except Feed.DoesNotExist:
duplicate_feed = DuplicateFeed.objects.filter(duplicate_feed_id=feed_id)
try:
if not duplicate_feed: raise Feed.DoesNotExist
usersub = UserSubscription.objects.get(user=request.user,
feed=duplicate_feed[0].feed)
data = usersub.mark_story_ids_as_read(story_ids, request=request)
except (UserSubscription.DoesNotExist, Feed.DoesNotExist):
return dict(code=-1, error="No feed exists for feed_id: %d" % feed_id)
r.publish(request.user.username, 'feed:%s' % feed_id)
return data
@ajax_login_required
@json.json_view
def mark_social_stories_as_read(request):
code = 1
errors = []
data = {}
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
users_feeds_stories = request.REQUEST.get('users_feeds_stories', "{}")
users_feeds_stories = json.decode(users_feeds_stories)
for social_user_id, feeds in users_feeds_stories.items():
for feed_id, story_ids in feeds.items():
feed_id = int(feed_id)
try:
socialsub = MSocialSubscription.objects.get(user_id=request.user.pk,
subscription_user_id=social_user_id)
data = socialsub.mark_story_ids_as_read(story_ids, feed_id, request=request)
except OperationError, e:
code = -1
errors.append("Already read story: %s" % e)
except MSocialSubscription.DoesNotExist:
MSocialSubscription.mark_unsub_story_ids_as_read(request.user.pk, social_user_id,
story_ids, feed_id,
request=request)
except Feed.DoesNotExist:
duplicate_feed = DuplicateFeed.objects.filter(duplicate_feed_id=feed_id)
if duplicate_feed:
try:
socialsub = MSocialSubscription.objects.get(user_id=request.user.pk,
subscription_user_id=social_user_id)
data = socialsub.mark_story_ids_as_read(story_ids, duplicate_feed[0].feed.pk, request=request)
except (UserSubscription.DoesNotExist, Feed.DoesNotExist):
code = -1
errors.append("No feed exists for feed_id %d." % feed_id)
else:
continue
r.publish(request.user.username, 'feed:%s' % feed_id)
r.publish(request.user.username, 'social:%s' % social_user_id)
data.update(code=code, errors=errors)
return data
@required_params('story_id', feed_id=int)
@ajax_login_required
@json.json_view
def mark_story_as_unread(request):
story_id = request.REQUEST.get('story_id', None)
feed_id = int(request.REQUEST.get('feed_id', 0))
try:
usersub = UserSubscription.objects.select_related('feed').get(user=request.user, feed=feed_id)
feed = usersub.feed
except UserSubscription.DoesNotExist:
usersub = None
feed = Feed.get_by_id(feed_id)
if usersub and not usersub.needs_unread_recalc:
usersub.needs_unread_recalc = True
usersub.save(update_fields=['needs_unread_recalc'])
data = dict(code=0, payload=dict(story_id=story_id))
story, found_original = MStory.find_story(feed_id, story_id)
if not story:
logging.user(request, "~FY~SBUnread~SN story in feed: %s (NOT FOUND)" % (feed))
return dict(code=-1, message="Story not found.")
if usersub:
data = usersub.invert_read_stories_after_unread_story(story, request)
message = RUserStory.story_can_be_marked_read_by_user(story, request.user)
if message:
data['code'] = -1
data['message'] = message
return data
social_subs = MSocialSubscription.mark_dirty_sharing_story(user_id=request.user.pk,
story_feed_id=feed_id,
story_guid_hash=story.guid_hash)
dirty_count = social_subs and social_subs.count()
dirty_count = ("(%s social_subs)" % dirty_count) if dirty_count else ""
RUserStory.mark_story_hash_unread(user_id=request.user.pk, story_hash=story.story_hash)
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
r.publish(request.user.username, 'feed:%s' % feed_id)
logging.user(request, "~FY~SBUnread~SN story in feed: %s %s" % (feed, dirty_count))
return data
@ajax_login_required
@json.json_view
@required_params('story_hash')
def mark_story_hash_as_unread(request):
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
story_hash = request.REQUEST.get('story_hash')
feed_id, _ = MStory.split_story_hash(story_hash)
story, _ = MStory.find_story(feed_id, story_hash)
if not story:
data = dict(code=-1, message="That story has been removed from the feed, no need to mark it unread.")
return data
message = RUserStory.story_can_be_marked_read_by_user(story, request.user)
if message:
data = dict(code=-1, message=message)
return data
# Also count on original subscription
usersubs = UserSubscription.objects.filter(user=request.user.pk, feed=feed_id)
if usersubs:
usersub = usersubs[0]
if not usersub.needs_unread_recalc:
usersub.needs_unread_recalc = True
usersub.save(update_fields=['needs_unread_recalc'])
data = usersub.invert_read_stories_after_unread_story(story, request)
r.publish(request.user.username, 'feed:%s' % feed_id)
feed_id, friend_ids = RUserStory.mark_story_hash_unread(request.user.pk, story_hash)
if friend_ids:
socialsubs = MSocialSubscription.objects.filter(
user_id=request.user.pk,
subscription_user_id__in=friend_ids)
for socialsub in socialsubs:
if not socialsub.needs_unread_recalc:
socialsub.needs_unread_recalc = True
socialsub.save()
r.publish(request.user.username, 'social:%s' % socialsub.subscription_user_id)
logging.user(request, "~FYUnread story in feed/socialsubs: %s/%s" % (feed_id, friend_ids))
return dict(code=1, story_hash=story_hash, feed_id=feed_id, friend_user_ids=friend_ids)
@ajax_login_required
@json.json_view
def mark_feed_as_read(request):
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
feed_ids = request.REQUEST.getlist('feed_id')
cutoff_timestamp = int(request.REQUEST.get('cutoff_timestamp', 0))
direction = request.REQUEST.get('direction', 'older')
multiple = len(feed_ids) > 1
code = 1
errors = []
cutoff_date = datetime.datetime.fromtimestamp(cutoff_timestamp) if cutoff_timestamp else None
for feed_id in feed_ids:
if 'social:' in feed_id:
user_id = int(feed_id.replace('social:', ''))
try:
sub = MSocialSubscription.objects.get(user_id=request.user.pk,
subscription_user_id=user_id)
except MSocialSubscription.DoesNotExist:
logging.user(request, "~FRCouldn't find socialsub: %s" % user_id)
continue
if not multiple:
sub_user = User.objects.get(pk=sub.subscription_user_id)
logging.user(request, "~FMMarking social feed as read: ~SB%s" % (sub_user.username,))
else:
try:
feed = Feed.objects.get(id=feed_id)
sub = UserSubscription.objects.get(feed=feed, user=request.user)
if not multiple:
logging.user(request, "~FMMarking feed as read: ~SB%s" % (feed,))
except (Feed.DoesNotExist, UserSubscription.DoesNotExist), e:
errors.append("User not subscribed: %s" % e)
continue
except (ValueError), e:
errors.append("Invalid feed_id: %s" % e)
continue
if not sub:
errors.append("User not subscribed: %s" % feed_id)
continue
try:
if direction == "older":
marked_read = sub.mark_feed_read(cutoff_date=cutoff_date)
else:
marked_read = sub.mark_newer_stories_read(cutoff_date=cutoff_date)
if marked_read and not multiple:
r.publish(request.user.username, 'feed:%s' % feed_id)
except IntegrityError, e:
errors.append("Could not mark feed as read: %s" % e)
code = -1
if multiple:
logging.user(request, "~FMMarking ~SB%s~SN feeds as read" % len(feed_ids))
r.publish(request.user.username, 'refresh:%s' % ','.join(feed_ids))
if errors:
logging.user(request, "~FMMarking read had errors: ~FR%s" % errors)
return dict(code=code, errors=errors, cutoff_date=cutoff_date, direction=direction)
def _parse_user_info(user):
return {
'user_info': {
'is_anonymous': json.encode(user.is_anonymous()),
'is_authenticated': json.encode(user.is_authenticated()),
'username': json.encode(user.username if user.is_authenticated() else 'Anonymous')
}
}
@ajax_login_required
@json.json_view
def add_url(request):
code = 0
url = request.POST['url']
folder = request.POST.get('folder', '')
new_folder = request.POST.get('new_folder')
auto_active = is_true(request.POST.get('auto_active', 1))
skip_fetch = is_true(request.POST.get('skip_fetch', False))
feed = None
if not url:
code = -1
message = 'Enter in the website address or the feed URL.'
elif any([(banned_url in url) for banned_url in BANNED_URLS]):
code = -1
message = "The publisher of this website has banned NewsBlur."
else:
if new_folder:
usf, _ = UserSubscriptionFolders.objects.get_or_create(user=request.user)
usf.add_folder(folder, new_folder)
folder = new_folder
code, message, us = UserSubscription.add_subscription(user=request.user, feed_address=url,
folder=folder, auto_active=auto_active,
skip_fetch=skip_fetch)
feed = us and us.feed
if feed:
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
r.publish(request.user.username, 'reload:%s' % feed.pk)
MUserSearch.schedule_index_feeds_for_search(feed.pk, request.user.pk)
return dict(code=code, message=message, feed=feed)
@ajax_login_required
@json.json_view
def add_folder(request):
folder = request.POST['folder']
parent_folder = request.POST.get('parent_folder', '')
folders = None
logging.user(request, "~FRAdding Folder: ~SB%s (in %s)" % (folder, parent_folder))
if folder:
code = 1
message = ""
user_sub_folders_object, _ = UserSubscriptionFolders.objects.get_or_create(user=request.user)
user_sub_folders_object.add_folder(parent_folder, folder)
folders = json.decode(user_sub_folders_object.folders)
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
r.publish(request.user.username, 'reload:feeds')
else:
code = -1
message = "Gotta write in a folder name."
return dict(code=code, message=message, folders=folders)
@ajax_login_required
@json.json_view
def delete_feed(request):
feed_id = int(request.POST['feed_id'])
in_folder = request.POST.get('in_folder', None)
if not in_folder or in_folder == ' ':
in_folder = ""
user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
user_sub_folders.delete_feed(feed_id, in_folder)
feed = Feed.objects.filter(pk=feed_id)
if feed:
feed[0].count_subscribers()
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
r.publish(request.user.username, 'reload:feeds')
return dict(code=1, message="Removed %s from '%s'." % (feed, in_folder))
@ajax_login_required
@json.json_view
def delete_feed_by_url(request):
message = ""
code = 0
url = request.POST['url']
in_folder = request.POST.get('in_folder', '')
if in_folder == ' ':
in_folder = ""
feed = Feed.get_feed_from_url(url, create=False)
if feed:
user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
user_sub_folders.delete_feed(feed.pk, in_folder)
code = 1
feed = Feed.objects.filter(pk=feed.pk)
if feed:
feed[0].count_subscribers()
else:
code = -1
message = "URL not found."
return dict(code=code, message=message)
@ajax_login_required
@json.json_view
def delete_folder(request):
folder_to_delete = request.POST.get('folder_name') or request.POST.get('folder_to_delete')
in_folder = request.POST.get('in_folder', None)
feed_ids_in_folder = [int(f) for f in request.REQUEST.getlist('feed_id') if f]
request.user.profile.send_opml_export_email(reason="You have deleted an entire folder of feeds, so here's a backup just in case.")
# Works piss poor with duplicate folder titles, if they are both in the same folder.
# Deletes all, but only in the same folder parent. But nobody should be doing that, right?
user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
user_sub_folders.delete_folder(folder_to_delete, in_folder, feed_ids_in_folder)
folders = json.decode(user_sub_folders.folders)
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
r.publish(request.user.username, 'reload:feeds')
return dict(code=1, folders=folders)
@required_params('feeds_by_folder')
@ajax_login_required
@json.json_view
def delete_feeds_by_folder(request):
feeds_by_folder = json.decode(request.POST['feeds_by_folder'])
request.user.profile.send_opml_export_email(reason="You have deleted a number of feeds at once, so here's a backup just in case.")
# Works piss poor with duplicate folder titles, if they are both in the same folder.
# Deletes all, but only in the same folder parent. But nobody should be doing that, right?
user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
user_sub_folders.delete_feeds_by_folder(feeds_by_folder)
folders = json.decode(user_sub_folders.folders)
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
r.publish(request.user.username, 'reload:feeds')
return dict(code=1, folders=folders)
@ajax_login_required
@json.json_view
def rename_feed(request):
feed = get_object_or_404(Feed, pk=int(request.POST['feed_id']))
user_sub = UserSubscription.objects.get(user=request.user, feed=feed)
feed_title = request.POST['feed_title']
logging.user(request, "~FRRenaming feed '~SB%s~SN' to: ~SB%s" % (
feed.feed_title, feed_title))
user_sub.user_title = feed_title
user_sub.save()
return dict(code=1)
@ajax_login_required
@json.json_view
def rename_folder(request):
folder_to_rename = request.POST.get('folder_name') or request.POST.get('folder_to_rename')
new_folder_name = request.POST['new_folder_name']
in_folder = request.POST.get('in_folder', '')
code = 0
# Works piss poor with duplicate folder titles, if they are both in the same folder.
# renames all, but only in the same folder parent. But nobody should be doing that, right?
if folder_to_rename and new_folder_name:
user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
user_sub_folders.rename_folder(folder_to_rename, new_folder_name, in_folder)
code = 1
else:
code = -1
return dict(code=code)
@ajax_login_required
@json.json_view
def move_feed_to_folders(request):
feed_id = int(request.POST['feed_id'])
in_folders = request.POST.getlist('in_folders', '')
to_folders = request.POST.getlist('to_folders', '')
user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
user_sub_folders = user_sub_folders.move_feed_to_folders(feed_id, in_folders=in_folders,
to_folders=to_folders)
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
r.publish(request.user.username, 'reload:feeds')
return dict(code=1, folders=json.decode(user_sub_folders.folders))
@ajax_login_required
@json.json_view
def move_feed_to_folder(request):
feed_id = int(request.POST['feed_id'])
in_folder = request.POST.get('in_folder', '')
to_folder = request.POST.get('to_folder', '')
user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
user_sub_folders = user_sub_folders.move_feed_to_folder(feed_id, in_folder=in_folder,
to_folder=to_folder)
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
r.publish(request.user.username, 'reload:feeds')
return dict(code=1, folders=json.decode(user_sub_folders.folders))
@ajax_login_required
@json.json_view
def move_folder_to_folder(request):
folder_name = request.POST['folder_name']
in_folder = request.POST.get('in_folder', '')
to_folder = request.POST.get('to_folder', '')
user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
user_sub_folders = user_sub_folders.move_folder_to_folder(folder_name, in_folder=in_folder, to_folder=to_folder)
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
r.publish(request.user.username, 'reload:feeds')
return dict(code=1, folders=json.decode(user_sub_folders.folders))
@required_params('feeds_by_folder', 'to_folder')
@ajax_login_required
@json.json_view
def move_feeds_by_folder_to_folder(request):
feeds_by_folder = json.decode(request.POST['feeds_by_folder'])
to_folder = request.POST['to_folder']
new_folder = request.POST.get('new_folder', None)
request.user.profile.send_opml_export_email(reason="You have moved a number of feeds at once, so here's a backup just in case.")
user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
if new_folder:
user_sub_folders.add_folder(to_folder, new_folder)
to_folder = new_folder
user_sub_folders = user_sub_folders.move_feeds_by_folder_to_folder(feeds_by_folder, to_folder)
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
r.publish(request.user.username, 'reload:feeds')
return dict(code=1, folders=json.decode(user_sub_folders.folders))
@login_required
def add_feature(request):
if not request.user.is_staff:
return HttpResponseForbidden()
code = -1
form = FeatureForm(request.POST)
if form.is_valid():
form.save()
code = 1
return HttpResponseRedirect(reverse('index'))
return dict(code=code)
@json.json_view
def load_features(request):
user = get_user(request)
page = max(int(request.REQUEST.get('page', 0)), 0)
logging.user(request, "~FBBrowse features: ~SBPage #%s" % (page+1))
features = Feature.objects.all()[page*3:(page+1)*3+1].values()
features = [{
'description': f['description'],
'date': localtime_for_timezone(f['date'], user.profile.timezone).strftime("%b %d, %Y")
} for f in features]
return features
@ajax_login_required
@json.json_view
def save_feed_order(request):
folders = request.POST.get('folders')
if folders:
# Test that folders can be JSON decoded
folders_list = json.decode(folders)
assert folders_list is not None
logging.user(request, "~FBFeed re-ordering: ~SB%s folders/feeds" % (len(folders_list)))
user_sub_folders = UserSubscriptionFolders.objects.get(user=request.user)
user_sub_folders.folders = folders
user_sub_folders.save()
return {}
@json.json_view
def feeds_trainer(request):
classifiers = []
feed_id = request.REQUEST.get('feed_id')
user = get_user(request)
usersubs = UserSubscription.objects.filter(user=user, active=True)
if feed_id:
feed = get_object_or_404(Feed, pk=feed_id)
usersubs = usersubs.filter(feed=feed)
usersubs = usersubs.select_related('feed').order_by('-feed__stories_last_month')
for us in usersubs:
if (not us.is_trained and us.feed.stories_last_month > 0) or feed_id:
classifier = dict()
classifier['classifiers'] = get_classifiers_for_user(user, feed_id=us.feed.pk)
classifier['feed_id'] = us.feed_id
classifier['stories_last_month'] = us.feed.stories_last_month
classifier['num_subscribers'] = us.feed.num_subscribers
classifier['feed_tags'] = json.decode(us.feed.data.popular_tags) if us.feed.data.popular_tags else []
classifier['feed_authors'] = json.decode(us.feed.data.popular_authors) if us.feed.data.popular_authors else []
classifiers.append(classifier)
user.profile.has_trained_intelligence = True
user.profile.save()
logging.user(user, "~FGLoading Trainer: ~SB%s feeds" % (len(classifiers)))
return classifiers
@ajax_login_required
@json.json_view
def save_feed_chooser(request):
is_premium = request.user.profile.is_premium
approved_feeds = [int(feed_id) for feed_id in request.POST.getlist('approved_feeds') if feed_id]
if not is_premium:
approved_feeds = approved_feeds[:64]
activated = 0
usersubs = UserSubscription.objects.filter(user=request.user)
for sub in usersubs:
try:
if sub.feed_id in approved_feeds:
activated += 1
if not sub.active:
sub.active = True
sub.save()
if sub.feed.active_subscribers <= 0:
sub.feed.count_subscribers()
elif sub.active:
sub.active = False
sub.save()
except Feed.DoesNotExist:
pass
request.user.profile.queue_new_feeds()
request.user.profile.refresh_stale_feeds(exclude_new=True)
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
r.publish(request.user.username, 'reload:feeds')
logging.user(request, "~BB~FW~SBFeed chooser: ~FC%s~SN/~SB%s" % (
activated,
usersubs.count()
))
return {'activated': activated}
@ajax_login_required
def retrain_all_sites(request):
for sub in UserSubscription.objects.filter(user=request.user):
sub.is_trained = False
sub.save()
return feeds_trainer(request)
@login_required
def activate_premium_account(request):
try:
usersubs = UserSubscription.objects.select_related('feed').filter(user=request.user)
for sub in usersubs:
sub.active = True
sub.save()
if sub.feed.premium_subscribers <= 0:
sub.feed.count_subscribers()
sub.feed.schedule_feed_fetch_immediately()
except Exception, e:
subject = "Premium activation failed"
message = "%s -- %s\n\n%s" % (request.user, usersubs, e)
mail_admins(subject, message, fail_silently=True)
request.user.profile.is_premium = True
request.user.profile.save()
return HttpResponseRedirect(reverse('index'))
@login_required
def login_as(request):
if not request.user.is_staff:
logging.user(request, "~SKNON-STAFF LOGGING IN AS ANOTHER USER!")
assert False
return HttpResponseForbidden()
username = request.GET['user']
user = get_object_or_404(User, username__iexact=username)
user.backend = settings.AUTHENTICATION_BACKENDS[0]
login_user(request, user)
return HttpResponseRedirect(reverse('index'))
def iframe_buster(request):
logging.user(request, "~FB~SBiFrame bust!")
return HttpResponse(status=204)
@required_params('story_id', feed_id=int)
@ajax_login_required
@json.json_view
def mark_story_as_starred(request):
return _mark_story_as_starred(request)
@required_params('story_hash')
@ajax_login_required
@json.json_view
def mark_story_hash_as_starred(request):
return _mark_story_as_starred(request)
def _mark_story_as_starred(request):
code = 1
feed_id = int(request.REQUEST.get('feed_id', 0))
story_id = request.REQUEST.get('story_id', None)
story_hash = request.REQUEST.get('story_hash', None)
user_tags = request.REQUEST.getlist('user_tags')
message = ""
if story_hash:
story, _ = MStory.find_story(story_hash=story_hash)
feed_id = story and story.story_feed_id
else:
story, _ = MStory.find_story(story_feed_id=feed_id, story_id=story_id)
if not story:
return {'code': -1, 'message': "Could not find story to save."}
story_db = dict([(k, v) for k, v in story._data.items()
if k is not None and v is not None])
story_db.pop('user_id', None)
story_db.pop('starred_date', None)
story_db.pop('id', None)
story_db.pop('user_tags', None)
now = datetime.datetime.now()
story_values = dict(starred_date=now, user_tags=user_tags, **story_db)
params = dict(story_guid=story.story_guid, user_id=request.user.pk)
starred_story = MStarredStory.objects(**params).limit(1)
created = False
removed_user_tags = []
if not starred_story:
params.update(story_values)
starred_story = MStarredStory.objects.create(**params)
created = True
MActivity.new_starred_story(user_id=request.user.pk,
story_title=story.story_title,
story_feed_id=feed_id,
story_id=starred_story.story_guid)
new_user_tags = user_tags
MStarredStoryCounts.adjust_count(request.user.pk, feed_id=feed_id, amount=1)
else:
starred_story = starred_story[0]
new_user_tags = list(set(user_tags) - set(starred_story.user_tags or []))
removed_user_tags = list(set(starred_story.user_tags or []) - set(user_tags))
starred_story.user_tags = user_tags
starred_story.save()
for tag in new_user_tags:
MStarredStoryCounts.adjust_count(request.user.pk, tag=tag, amount=1)
for tag in removed_user_tags:
MStarredStoryCounts.adjust_count(request.user.pk, tag=tag, amount=-1)
if random.random() < 0.01:
MStarredStoryCounts.schedule_count_tags_for_user(request.user.pk)
MStarredStoryCounts.count_for_user(request.user.pk, total_only=True)
starred_counts, starred_count = MStarredStoryCounts.user_counts(request.user.pk, include_total=True)
if not starred_count and len(starred_counts):
starred_count = MStarredStory.objects(user_id=request.user.pk).count()
if created:
logging.user(request, "~FCStarring: ~SB%s (~FM~SB%s~FC~SN)" % (story.story_title[:32], starred_story.user_tags))
else:
logging.user(request, "~FCUpdating starred:~SN~FC ~SB%s~SN (~FM~SB%s~FC~SN)" % (story.story_title[:32], starred_story.user_tags))
return {'code': code, 'message': message, 'starred_count': starred_count, 'starred_counts': starred_counts}
@required_params('story_id')
@ajax_login_required
@json.json_view
def mark_story_as_unstarred(request):
return _mark_story_as_unstarred(request)
@required_params('story_hash')
@ajax_login_required
@json.json_view
def mark_story_hash_as_unstarred(request):
return _mark_story_as_unstarred(request)
def _mark_story_as_unstarred(request):
code = 1
story_id = request.POST.get('story_id', None)
story_hash = request.REQUEST.get('story_hash', None)
starred_counts = None
starred_story = None
if story_id:
starred_story = MStarredStory.objects(user_id=request.user.pk, story_guid=story_id)
if not story_id or not starred_story:
starred_story = MStarredStory.objects(user_id=request.user.pk, story_hash=story_hash or story_id)
if starred_story:
starred_story = starred_story[0]
logging.user(request, "~FCUnstarring: ~SB%s" % (starred_story.story_title[:50]))
user_tags = starred_story.user_tags
feed_id = starred_story.story_feed_id
MActivity.remove_starred_story(user_id=request.user.pk,
story_feed_id=starred_story.story_feed_id,
story_id=starred_story.story_guid)
starred_story.user_id = 0
try:
starred_story.save()
except NotUniqueError:
starred_story.delete()
MStarredStoryCounts.adjust_count(request.user.pk, feed_id=feed_id, amount=-1)
for tag in user_tags:
try:
MStarredStoryCounts.adjust_count(request.user.pk, tag=tag, amount=-1)
except MStarredStoryCounts.DoesNotExist:
pass
# MStarredStoryCounts.schedule_count_tags_for_user(request.user.pk)
MStarredStoryCounts.count_for_user(request.user.pk, total_only=True)
starred_counts = MStarredStoryCounts.user_counts(request.user.pk)
else:
code = -1
return {'code': code, 'starred_counts': starred_counts}
@ajax_login_required
@json.json_view
def send_story_email(request):
code = 1
message = 'OK'
story_id = request.POST['story_id']
feed_id = request.POST['feed_id']
    to_addresses = request.POST.get('to', '').replace(',', ' ').replace('  ', ' ').strip().split(' ')
from_name = request.POST['from_name']
from_email = request.POST['from_email']
email_cc = is_true(request.POST.get('email_cc', 'true'))
comments = request.POST['comments']
comments = comments[:2048] # Separated due to PyLint
from_address = 'share@newsblur.com'
share_user_profile = MSocialProfile.get_user(request.user.pk)
if not to_addresses:
code = -1
message = 'Please provide at least one email address.'
elif not all(email_re.match(to_address) for to_address in to_addresses if to_addresses):
code = -1
message = 'You need to send the email to a valid email address.'
elif not email_re.match(from_email):
code = -1
message = 'You need to provide your email address.'
elif not from_name:
code = -1
message = 'You need to provide your name.'
else:
story, _ = MStory.find_story(feed_id, story_id)
story = Feed.format_story(story, feed_id, text=True)
feed = Feed.get_by_id(story['story_feed_id'])
params = {
"to_addresses": to_addresses,
"from_name": from_name,
"from_email": from_email,
"email_cc": email_cc,
"comments": comments,
"from_address": from_address,
"story": story,
"feed": feed,
"share_user_profile": share_user_profile,
}
text = render_to_string('mail/email_story.txt', params)
html = render_to_string('mail/email_story.xhtml', params)
subject = '%s' % (story['story_title'])
cc = None
if email_cc:
cc = ['%s <%s>' % (from_name, from_email)]
subject = subject.replace('\n', ' ')
msg = EmailMultiAlternatives(subject, text,
from_email='NewsBlur <%s>' % from_address,
to=to_addresses,
cc=cc,
headers={'Reply-To': '%s <%s>' % (from_name, from_email)})
msg.attach_alternative(html, "text/html")
try:
msg.send()
except boto.ses.connection.ResponseError, e:
code = -1
message = "Email error: %s" % str(e)
logging.user(request, '~BMSharing story by email to %s recipient%s: ~FY~SB%s~SN~BM~FY/~SB%s' %
(len(to_addresses), '' if len(to_addresses) == 1 else 's',
story['story_title'][:50], feed and feed.feed_title[:50]))
return {'code': code, 'message': message}
@json.json_view
def load_tutorial(request):
if request.REQUEST.get('finished'):
logging.user(request, '~BY~FW~SBFinishing Tutorial')
return {}
else:
newsblur_feed = Feed.objects.filter(feed_address__icontains='blog.newsblur.com').order_by('-pk')[0]
logging.user(request, '~BY~FW~SBLoading Tutorial')
return {
'newsblur_feed': newsblur_feed.canonical()
}
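# Usage sketch (hypothetical URL wiring, commented out; NewsBlur's real
# urls.py may differ). Each view decorated with @json.json_view above returns
# a dict that the decorator serializes to JSON, so the views plug straight
# into Django's URL dispatcher.
#
# from django.conf.urls import url
# from apps.reader import views
#
# urlpatterns = [
#     url(r'^mark_story_as_read/?$', views.mark_story_as_read),
#     url(r'^mark_story_as_starred/?$', views.mark_story_as_starred),
#     url(r'^add_url/?$', views.add_url),
# ]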
|
slava-sh/NewsBlur
|
apps/reader/views.py
|
Python
|
mit
| 95,205 |
// Node.js stream that reads a file line by line
var Stream = require('stream').Stream,
util = require('util');
var LineStream = function() {
this.writable = true;
this.readable = true;
this.buffer = '';
};
util.inherits(LineStream, Stream);
LineStream.prototype.write = function(data, encoding) {
  if (Buffer.isBuffer(data)) {
    data = data.toString(encoding || 'utf8');
  }
  // Prepend whatever partial line was buffered from the previous chunk,
  // emit every complete line, and keep the trailing remainder buffered
  // so that end() can flush it.
  var parts = (this.buffer + data).split(/\n/g);
  this.buffer = parts.pop();
  var len = parts.length;
  for (var i = 0; i < len; i++) {
    this.emit('data', parts[i] + '\n');
  }
};
LineStream.prototype.end = function() {
if(this.buffer.length > 0){
this.emit('data',this.buffer);
this.buffer = '';
}
this.emit('end');
};
module.exports = LineStream;
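// Usage sketch (commented out so the export above stays unchanged; the file
// names below are placeholders). LineStream is both writable and readable,
// so an fs read stream can be piped into it and each 'data' event it emits
// carries one line.
//
// var fs = require('fs');
// var LineStream = require('./lineStream');
//
// var lines = new LineStream();
// fs.createReadStream('input.txt').pipe(lines);
// lines.on('data', function(line) {
//   console.log('line:', line);
// });
// lines.on('end', function() {
//   console.log('done');
// });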
|
SBFE/js-combine-pack
|
lib/tool/lineStream.js
|
JavaScript
|
mit
| 680 |
using LibrarySystem.Models;
using Microsoft.AspNet.Identity;
using Microsoft.AspNet.Identity.EntityFramework;
using System;
using System.Collections.Generic;
using System.Linq;
namespace LibrarySystem.Account
{
public partial class Manage : System.Web.UI.Page
{
protected string SuccessMessage
{
get;
private set;
}
protected bool CanRemoveExternalLogins
{
get;
private set;
}
protected void Page_Load()
{
if (!IsPostBack)
{
// Determine the sections to render
ILoginManager manager = new IdentityManager(new IdentityStore(new ApplicationDbContext())).Logins;
if (manager.HasLocalLogin(User.Identity.GetUserId()))
{
changePasswordHolder.Visible = true;
}
else
{
setPassword.Visible = true;
changePasswordHolder.Visible = false;
}
CanRemoveExternalLogins = manager.GetLogins(User.Identity.GetUserId()).Count() > 1;
// Render success message
var message = Request.QueryString["m"];
if (message != null)
{
// Strip the query string from action
Form.Action = ResolveUrl("~/Account/Manage");
SuccessMessage =
message == "ChangePwdSuccess" ? "Your password has been changed."
: message == "SetPwdSuccess" ? "Your password has been set."
: message == "RemoveLoginSuccess" ? "The account was removed."
: String.Empty;
successMessage.Visible = !String.IsNullOrEmpty(SuccessMessage);
}
}
}
protected void ChangePassword_Click(object sender, EventArgs e)
{
if (IsValid)
{
IPasswordManager manager = new IdentityManager(new IdentityStore(new ApplicationDbContext())).Passwords;
IdentityResult result = manager.ChangePassword(User.Identity.GetUserName(), CurrentPassword.Text, NewPassword.Text);
if (result.Success)
{
Response.Redirect("~/Account/Manage?m=ChangePwdSuccess");
}
else
{
AddErrors(result);
}
}
}
protected void SetPassword_Click(object sender, EventArgs e)
{
if (IsValid)
{
// Create the local login info and link the local account to the user
ILoginManager manager = new IdentityManager(new IdentityStore(new ApplicationDbContext())).Logins;
IdentityResult result = manager.AddLocalLogin(User.Identity.GetUserId(), User.Identity.GetUserName(), password.Text);
if (result.Success)
{
Response.Redirect("~/Account/Manage?m=SetPwdSuccess");
}
else
{
AddErrors(result);
}
}
}
public IEnumerable<IUserLogin> GetLogins()
{
ILoginManager manager = new IdentityManager(new IdentityStore(new ApplicationDbContext())).Logins;
var accounts = manager.GetLogins(User.Identity.GetUserId());
CanRemoveExternalLogins = accounts.Count() > 1;
return accounts;
}
public void RemoveLogin(string loginProvider, string providerKey)
{
ILoginManager manager = new IdentityManager(new IdentityStore(new ApplicationDbContext())).Logins;
var result = manager.RemoveLogin(User.Identity.GetUserId(), loginProvider, providerKey);
var msg = result.Success
? "?m=RemoveLoginSuccess"
: String.Empty;
Response.Redirect("~/Account/Manage" + msg);
}
private void AddErrors(IdentityResult result) {
foreach (var error in result.Errors) {
ModelState.AddModelError("", error);
}
}
}
}
|
krasimirkrustev/ta-library-system
|
LibrarySystem/LibrarySystem/Account/Manage.aspx.cs
|
C#
|
mit
| 4,298 |
__author__ = "Guillaume"
__license__ = "MIT"
__copyright__ = "2015, ESRF"
import numpy
from freesas.model import SASModel
class Grid:
"""
This class is used to create a grid which include all the input models
"""
def __init__(self, inputfiles):
"""
:param inputfiles: list of pdb files needed for averaging
"""
self.inputs = inputfiles
self.size = []
self.nbknots = None
self.radius = None
self.coordknots = []
def __repr__(self):
return "Grid with %i knots"%self.nbknots
def spatial_extent(self):
"""
Calculate the maximal extent of input models
:return self.size: 6-list with x,y,z max and then x,y,z min
"""
atoms = []
models_fineness = []
for files in self.inputs:
m = SASModel(files)
if len(atoms)==0:
atoms = m.atoms
else:
atoms = numpy.append(atoms, m.atoms, axis=0)
models_fineness.append(m.fineness)
mean_fineness = sum(models_fineness) / len(models_fineness)
coordmin = atoms.min(axis=0) - mean_fineness
coordmax = atoms.max(axis=0) + mean_fineness
self.size = [coordmax[0],coordmax[1],coordmax[2],coordmin[0],coordmin[1],coordmin[2]]
return self.size
def calc_radius(self, nbknots=None):
"""
Calculate the radius of each point of a hexagonal close-packed grid,
knowing the total volume and the number of knots in this grid.
:param nbknots: number of knots wanted for the grid
:return radius: the radius of each knot of the grid
"""
if len(self.size)==0:
self.spatial_extent()
nbknots = nbknots if nbknots is not None else 5000
size = self.size
dx = size[0] - size[3]
dy = size[1] - size[4]
dz = size[2] - size[5]
volume = dx * dy * dz
density = numpy.pi / (3*2**0.5)
radius = ((3 /( 4 * numpy.pi)) * density * volume / nbknots)**(1.0/3)
self.radius = radius
return radius
def make_grid(self):
"""
Create a grid using the maximal size and the radius previously computed.
The geometry used is a face-centered cubic lattice (fcc).
:return knots: 2d-array, coordinates of each dot of the grid. Saved as self.coordknots.
"""
if len(self.size)==0:
self.spatial_extent()
if self.radius is None:
self.calc_radius()
radius = self.radius
a = numpy.sqrt(2.0)*radius
xmax = self.size[0]
xmin = self.size[3]
ymax = self.size[1]
ymin = self.size[4]
zmax = self.size[2]
zmin = self.size[5]
x = 0.0
y = 0.0
z = 0.0
xlist = []
ylist = []
zlist = []
knots = numpy.empty((1,4), dtype="float")
while (zmin + z) <= zmax:
zlist.append(z)
z += a
while (ymin + y) <= ymax:
ylist.append(y)
y += a
while (xmin + x) <= xmax:
xlist.append(x)
x += a
for i in range(len(zlist)):
z = zlist[i]
if i % 2 ==0:
for j in range(len(xlist)):
x = xlist[j]
if j % 2 == 0:
for y in ylist[0:-1:2]:
knots = numpy.append(knots, [[xmin+x, ymin+y, zmin+z, 0.0]], axis=0)
else:
for y in ylist[1:-1:2]:
knots = numpy.append(knots, [[xmin+x, ymin+y, zmin+z, 0.0]], axis=0)
else:
for j in range(len(xlist)):
x = xlist[j]
if j % 2 == 0:
for y in ylist[1:-1:2]:
knots = numpy.append(knots, [[xmin+x, ymin+y, zmin+z, 0.0]], axis=0)
else:
for y in ylist[0:-1:2]:
knots = numpy.append(knots, [[xmin+x, ymin+y, zmin+z, 0.0]], axis=0)
knots = numpy.delete(knots, 0, axis=0)
self.nbknots = knots.shape[0]
self.coordknots = knots
return knots
class AverModels():
"""
Provides tools to create an averaged models using several aligned dummy atom models
"""
def __init__(self, inputfiles, grid):
"""
:param inputfiles: list of pdb files of aligned models
:param grid: 2d-array coordinates of each point of a grid, fourth column full of zeros
"""
self.inputfiles = inputfiles
self.models = []
self.header = []
self.radius = None
self.atoms = []
self.grid = grid
def __repr__(self):
return "Average SAS model with %i atoms"%len(self.atoms)
def read_files(self, reference=None):
"""
Read all the pdb file in the inputfiles list, creating SASModels.
The SASModels created are save in a list, the reference model is the first model in the list.
:param reference: position of the reference model file in the inputfiles list
"""
ref = reference if reference is not None else 0
inputfiles = self.inputfiles
models = []
models.append(SASModel(inputfiles[ref]))
for i in range(len(inputfiles)):
if i==ref:
continue
else:
models.append(SASModel(inputfiles[i]))
self.models = models
return models
def calc_occupancy(self, griddot):
"""
Assign an occupancy and a contribution factor to the point of the grid.
:param griddot: 1d-array, coordinates of a point of the grid
:return tuple: 2-tuple containing (occupancy, contribution)
"""
occ = 0.0
contrib = 0
for model in self.models:
f = model.fineness
for i in range(model.atoms.shape[0]):
dx = model.atoms[i, 0] - griddot[0]
dy = model.atoms[i, 1] - griddot[1]
dz = model.atoms[i, 2] - griddot[2]
dist = dx * dx + dy * dy + dz * dz
add = max(1 - (dist / f), 0)
if add != 0:
contrib += 1
occ += add
return occ, contrib
def assign_occupancy(self):
"""
For each point of the grid, total occupancy and contribution factor are computed and saved.
The grid is then ordered with decreasing value of occupancy.
The fourth column of the array correspond to the occupancy of the point and the fifth to
the contribution for this point.
:return sortedgrid: 2d-array, coordinates of each point of the grid
"""
grid = self.grid
nbknots = grid.shape[0]
grid = numpy.append(grid, numpy.zeros((nbknots, 1), dtype="float"), axis=1)
for i in range(nbknots):
occ, contrib = self.calc_occupancy(grid[i, 0:3])
grid[i, 3] = occ
grid[i, 4] = contrib
order = numpy.argsort(grid, axis=0)[:, -2]
sortedgrid = numpy.empty_like(grid)
for i in range(nbknots):
sortedgrid[nbknots - i - 1, :] = grid[order[i], :]
return sortedgrid
def make_header(self):
"""
Create the layout of the pdb file for the averaged model.
"""
header = []
header.append("Number of files averaged : %s\n"%len(self.inputfiles))
for i in self.inputfiles:
header.append(i + "\n")
header.append("Total number of dots in the grid : %s\n"%self.grid.shape[0])
decade = 1
for i in range(self.grid.shape[0]):
line = "ATOM CA ASP 1 20.00 2 201\n"
line = line[:7] + "%4.i"%(i + 1) + line[11:]
if not (i + 1) % 10:
decade += 1
line = line[:21] + "%4.i"%decade + line[25:]
header.append(line)
self.header = header
return header
def save_aver(self, filename):
"""
Save the position of each occupied dot of the grid, its occupancy and its contribution
in a pdb file.
:param filename: name of the pdb file to write
"""
if len(self.header) == 0:
self.make_header()
assert self.grid.shape[-1] == 5
nr = 0
with open(filename, "w") as pdbout:
for line in self.header:
if line.startswith("ATOM"):
if nr < self.grid.shape[0] and self.grid[nr, 4] != 0:
coord = "%8.3f%8.3f%8.3f" % tuple(self.grid[nr, 0:3])
occ = "%6.2f" % self.grid[nr, 3]
contrib = "%2.f" % self.grid[nr, 4]
line = line[:30] + coord + occ + line[60:66] + contrib + line[68:]
else:
line = ""
nr += 1
pdbout.write(line)
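# Usage sketch (commented out; the pdb file names are placeholders and the
# input models are assumed to be already aligned). Note that
# assign_occupancy() returns a new 5-column grid, so it has to be stored back
# on the AverModels instance before calling save_aver().
#
# inputfiles = ["model1.pdb", "model2.pdb", "model3.pdb"]
# grid = Grid(inputfiles)
# knots = grid.make_grid()
#
# aver = AverModels(inputfiles, knots)
# aver.read_files(reference=0)
# aver.grid = aver.assign_occupancy()
# aver.save_aver("averaged.pdb")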
|
kif/freesas
|
freesas/average.py
|
Python
|
mit
| 9,116 |
// Based on "Design Patterns: Elements of Reusable Object-Oriented Software"
// book by Erich Gamma, John Vlissides, Ralph Johnson, and Richard Helm
//
// Created by Bartosz Rachwal. The National Institute of Advanced Industrial Science and Technology, Japan.
#include "btree.h"
namespace structural
{
namespace flyweight
{
BTree::BTree(long size) : size_(size)
{
leafs_ = new operational::iterator::List<Font*>();
nodes_ = new int[size_];
for (auto i = 0; i < size_; i++)
{
nodes_[i] = -1;
}
}
BTree::BTree() : BTree(1000) { }
void BTree::Set(Font* font, const int& index, const int& span) const
{
auto font_index = -1;
for (auto i = 0; i < leafs_->Count(); i++)
{
				if (leafs_->Get(i) == font) // compare the stored Font* values, not the addresses of the variables holding them
{
font_index = i;
}
}
if (font_index == -1)
{
leafs_->Append(font);
font_index = leafs_->Count() - 1;
}
for (auto j = index; j < index + span; j++)
{
nodes_[j] = font_index;
}
}
Font *BTree::Get(int index) const
{
if (index > size_)
{
return nullptr;
}
auto font_index = nodes_[index];
if (font_index == -1)
{
return nullptr;
}
return leafs_->Get(font_index);
}
}
}
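// Usage sketch (commented out; the Font constructor arguments are an
// assumption, since Font is defined elsewhere in this example). The tree maps
// character positions onto shared Font instances, which is the flyweight's
// extrinsic-state lookup.
//
// auto tree = new structural::flyweight::BTree(100);
// Font* times = new Font(/* ... */);
// tree->Set(times, 0, 10);      // positions 0..9 share one Font instance
// Font* at_five = tree->Get(5); // returns the shared pointer set above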
|
rachwal/DesignPatterns
|
structural/src/flyweight/btree.cc
|
C++
|
mit
| 1,119 |
<!doctype html>
<html class="no-js" lang="">
<head>
<title>Zabuun - Learn Egyptian Arabic for English speakers</title>
<meta name="description" content="">
<?php include $_SERVER['DOCUMENT_ROOT'].'/layout/head.php';?>
</head>
<body>
<?php include $_SERVER['DOCUMENT_ROOT'].'/layout/ie8.php';?>
<?php include $_SERVER['DOCUMENT_ROOT'].'/layout/header.php';?>
<div class="content">
<?php include $_SERVER['DOCUMENT_ROOT'].'/layout/side.php';?>
<div class="main">
<div class="location">
<p class="breadcrumbs">Essays > The First Song</p>
<p class="expandcollapse">
<a href="">Expand All</a> | <a href="">Collapse All</a>
</p>
</div>
<!-- begin essay -->
<h1>The First Song</h1>
<p> She sits in the car. Her dad turns on the radio. A song plays. She taps her feet. She sways her head. Her dad laughs at her. He likes the song too. The song is over. The radio plays a different song. She does not like the new song. She sits quietly. </p>
<!-- end essay -->
</div>
</div>
<?php include $_SERVER['DOCUMENT_ROOT'].'/layout/footer.php';?>
<?php include $_SERVER['DOCUMENT_ROOT'].'/layout/scripts.php';?>
</body>
</html>
|
javanigus/zabuun
|
essay/0006-the-first-song.php
|
PHP
|
mit
| 1,193 |
package main
import (
"os"
"github.com/codegangsta/cli"
)
func main() {
app := cli.NewApp()
app.Name = "nano-client"
app.Usage = "Send a request to service"
app.Version = Version
app.Flags = []cli.Flag{
cli.StringFlag{
Name: "service, s",
Usage: "Service endpoint to send request to (Required)",
},
cli.StringFlag{
Name: "method, m",
Usage: "RPC method to call (Required)",
},
cli.StringFlag{
Name: "params, p",
Usage: "Parameters as JSON (Required)",
},
}
//app.Action = SendRequest
app.Run(os.Args)
}
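// Example invocation once an Action is wired in (the endpoint, method and
// JSON payload below are placeholders):
//
//   nano-client --service tcp://localhost:9000 --method echo --params '{"msg": "hi"}'
//
// As written above, app.Action is still commented out, so the flags are
// parsed but no request is actually sent.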
|
mouadino/go-nano
|
cli/nano-client/main.go
|
GO
|
mit
| 551 |
import logger from './logger';
import app from './app';
const port = app.get('port');
const server = app.listen(port);
process.on('unhandledRejection', (reason, p) =>
logger.error('Unhandled Rejection at: Promise ', p, reason)
);
server.on('listening', () =>
logger.info('Feathers application started on http://%s:%d', app.get('host'), port)
);
|
feathersjs/generator-feathers
|
generators/app/templates/ts/src/index.ts
|
TypeScript
|
mit
| 352 |
// Template Source: BaseEntityCollectionResponse.java.tt
// ------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information.
// ------------------------------------------------------------------------------
package com.microsoft.graph.requests;
import com.microsoft.graph.models.ContactFolder;
import com.microsoft.graph.http.BaseCollectionResponse;
// **NOTE** This file was generated by a tool and any changes will be overwritten.
/**
* The class for the Contact Folder Collection Response.
*/
public class ContactFolderCollectionResponse extends BaseCollectionResponse<ContactFolder> {
}
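// Usage sketch (hypothetical; client construction and authentication are
// omitted). In this SDK's fluent request builders, a call such as
// graphClient.me().contactFolders().buildRequest().get() pages its results
// through collection responses like the one generated above.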
|
microsoftgraph/msgraph-sdk-java
|
src/main/java/com/microsoft/graph/requests/ContactFolderCollectionResponse.java
|
Java
|
mit
| 765 |
/*
* Copyright 2013 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License")
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*package com.google.zxing.common.reedsolomon;*/
import * as assert from 'assert';
import { ZXingStringBuilder } from '@zxing/library';
import Random from '../../../core/util/Random';
import { ZXingSystem } from '@zxing/library';
import { GenericGF } from '@zxing/library';
import { ReedSolomonEncoder } from '@zxing/library';
import { ReedSolomonDecoder } from '@zxing/library';
/*import java.util.Random;*/
import { corrupt } from './ReedSolomonCorrupt';
/**
* @author Rustam Abdullaev
*/
describe('ReedSolomonSpec', () => {
it('testDataMatrix 1 - real life test case', () => {
testEncodeDecode(
GenericGF.DATA_MATRIX_FIELD_256,
Int32Array.from([142, 164, 186]),
Int32Array.from([114, 25, 5, 88, 102])
);
});
it('testDataMatrix 2 - real life test case', () => {
testEncodeDecode(
GenericGF.DATA_MATRIX_FIELD_256,
Int32Array.from([
0x69, 0x75, 0x75, 0x71, 0x3B, 0x30, 0x30, 0x64,
0x70, 0x65, 0x66, 0x2F, 0x68, 0x70, 0x70, 0x68,
0x6D, 0x66, 0x2F, 0x64, 0x70, 0x6E, 0x30, 0x71,
0x30, 0x7B, 0x79, 0x6A, 0x6F, 0x68, 0x30, 0x81,
0xF0, 0x88, 0x1F, 0xB5
]),
Int32Array.from([
0x1C, 0x64, 0xEE, 0xEB, 0xD0, 0x1D, 0x00, 0x03,
0xF0, 0x1C, 0xF1, 0xD0, 0x6D, 0x00, 0x98, 0xDA,
0x80, 0x88, 0xBE, 0xFF, 0xB7, 0xFA, 0xA9, 0x95
])
);
});
it('testDataMatrix 3.1 - synthetic test cases', () => {
testEncodeDecodeRandom(GenericGF.DATA_MATRIX_FIELD_256, 10, 240);
});
it('testDataMatrix 3.2 - synthetic test cases', () => {
testEncodeDecodeRandom(GenericGF.DATA_MATRIX_FIELD_256, 128, 127);
});
it('testDataMatrix 3.3 - synthetic test cases', () => {
testEncodeDecodeRandom(GenericGF.DATA_MATRIX_FIELD_256, 220, 35);
});
it('testQRCode 1 - from example given in ISO 18004, Annex I', () => {
// Test case from example given in ISO 18004, Annex I
testEncodeDecode(
GenericGF.QR_CODE_FIELD_256,
Int32Array.from([
0x10, 0x20, 0x0C, 0x56, 0x61, 0x80, 0xEC, 0x11,
0xEC, 0x11, 0xEC, 0x11, 0xEC, 0x11, 0xEC, 0x11
]),
Int32Array.from([
0xA5, 0x24, 0xD4, 0xC1, 0xED, 0x36, 0xC7, 0x87,
0x2C, 0x55
])
);
});
it('testQRCode 2 - real life test case', () => {
testEncodeDecode(
GenericGF.QR_CODE_FIELD_256,
Int32Array.from([
0x72, 0x67, 0x2F, 0x77, 0x69, 0x6B, 0x69, 0x2F,
0x4D, 0x61, 0x69, 0x6E, 0x5F, 0x50, 0x61, 0x67,
0x65, 0x3B, 0x3B, 0x00, 0xEC, 0x11, 0xEC, 0x11,
0xEC, 0x11, 0xEC, 0x11, 0xEC, 0x11, 0xEC, 0x11
]),
Int32Array.from([
0xD8, 0xB8, 0xEF, 0x14, 0xEC, 0xD0, 0xCC, 0x85,
0x73, 0x40, 0x0B, 0xB5, 0x5A, 0xB8, 0x8B, 0x2E,
0x08, 0x62
])
);
});
it('testQRCode 3.1 - synthetic test cases', () => {
testEncodeDecodeRandom(GenericGF.QR_CODE_FIELD_256, 10, 240);
});
it('testQRCode 3.2 - synthetic test cases', () => {
testEncodeDecodeRandom(GenericGF.QR_CODE_FIELD_256, 128, 127);
});
it('testQRCode 3.3 - synthetic test cases', () => {
testEncodeDecodeRandom(GenericGF.QR_CODE_FIELD_256, 220, 35);
});
it('testAztec 1 - real life test case', () => {
testEncodeDecode(
GenericGF.AZTEC_PARAM,
Int32Array.from([0x5, 0x6]),
Int32Array.from([0x3, 0x2, 0xB, 0xB, 0x7])
);
});
it('testAztec 2 - real life test case', () => {
testEncodeDecode(
GenericGF.AZTEC_PARAM,
Int32Array.from([0x0, 0x0, 0x0, 0x9]),
Int32Array.from([0xA, 0xD, 0x8, 0x6, 0x5, 0x6])
);
});
it('testAztec 3 - real life test case', () => {
testEncodeDecode(
GenericGF.AZTEC_PARAM,
Int32Array.from([0x2, 0x8, 0x8, 0x7]),
Int32Array.from([0xE, 0xC, 0xA, 0x9, 0x6, 0x8])
);
});
it('testAztec 4 - real life test case', () => {
testEncodeDecode(
GenericGF.AZTEC_DATA_6,
Int32Array.from([0x9, 0x32, 0x1, 0x29, 0x2F, 0x2, 0x27, 0x25, 0x1, 0x1B]),
Int32Array.from([0x2C, 0x2, 0xD, 0xD, 0xA, 0x16, 0x28, 0x9, 0x22, 0xA, 0x14])
);
});
it('testAztec 5 - real life test case', () => {
testEncodeDecode(
GenericGF.AZTEC_DATA_8,
Int32Array.from([
0xE0, 0x86, 0x42, 0x98, 0xE8, 0x4A, 0x96, 0xC6,
0xB9, 0xF0, 0x8C, 0xA7, 0x4A, 0xDA, 0xF8, 0xCE,
0xB7, 0xDE, 0x88, 0x64, 0x29, 0x8E, 0x84, 0xA9,
0x6C, 0x6B, 0x9F, 0x08, 0xCA, 0x74, 0xAD, 0xAF,
0x8C, 0xEB, 0x7C, 0x10, 0xC8, 0x53, 0x1D, 0x09,
0x52, 0xD8, 0xD7, 0x3E, 0x11, 0x94, 0xE9, 0x5B,
0x5F, 0x19, 0xD6, 0xFB, 0xD1, 0x0C, 0x85, 0x31,
0xD0, 0x95, 0x2D, 0x8D, 0x73, 0xE1, 0x19, 0x4E,
0x95, 0xB5, 0xF1, 0x9D, 0x6F]),
Int32Array.from([
0x31, 0xD7, 0x04, 0x46, 0xB2, 0xC1, 0x06, 0x94,
0x17, 0xE5, 0x0C, 0x2B, 0xA3, 0x99, 0x15, 0x7F,
0x16, 0x3C, 0x66, 0xBA, 0x33, 0xD9, 0xE8, 0x87,
0x86, 0xBB, 0x4B, 0x15, 0x4E, 0x4A, 0xDE, 0xD4,
0xED, 0xA1, 0xF8, 0x47, 0x2A, 0x50, 0xA6, 0xBC,
0x53, 0x7D, 0x29, 0xFE, 0x06, 0x49, 0xF3, 0x73,
0x9F, 0xC1, 0x75])
);
});
it('testAztec 6 - real life test case', () => {
testEncodeDecode(
GenericGF.AZTEC_DATA_10,
Int32Array.from([
0x15C, 0x1E1, 0x2D5, 0x02E, 0x048, 0x1E2, 0x037, 0x0CD,
0x02E, 0x056, 0x26A, 0x281, 0x1C2, 0x1A6, 0x296, 0x045,
0x041, 0x0AA, 0x095, 0x2CE, 0x003, 0x38F, 0x2CD, 0x1A2,
0x036, 0x1AD, 0x04E, 0x090, 0x271, 0x0D3, 0x02E, 0x0D5,
0x2D4, 0x032, 0x2CA, 0x281, 0x0AA, 0x04E, 0x024, 0x2D3,
0x296, 0x281, 0x0E2, 0x08A, 0x1AA, 0x28A, 0x280, 0x07C,
0x286, 0x0A1, 0x1D0, 0x1AD, 0x154, 0x032, 0x2C2, 0x1C1,
0x145, 0x02B, 0x2D4, 0x2B0, 0x033, 0x2D5, 0x276, 0x1C1,
0x282, 0x10A, 0x2B5, 0x154, 0x003, 0x385, 0x20F, 0x0C4,
0x02D, 0x050, 0x266, 0x0D5, 0x033, 0x2D5, 0x276, 0x1C1,
0x0D4, 0x2A0, 0x08F, 0x0C4, 0x024, 0x20F, 0x2E2, 0x1AD,
0x154, 0x02E, 0x056, 0x26A, 0x281, 0x090, 0x1E5, 0x14E,
0x0CF, 0x2B6, 0x1C1, 0x28A, 0x2A1, 0x04E, 0x0D5, 0x003,
0x391, 0x122, 0x286, 0x1AD, 0x2D4, 0x028, 0x262, 0x2EA,
0x0A2, 0x004, 0x176, 0x295, 0x201, 0x0D5, 0x024, 0x20F,
0x116, 0x0C1, 0x056, 0x095, 0x213, 0x004, 0x1EA, 0x28A,
0x02A, 0x234, 0x2CE, 0x037, 0x157, 0x0D3, 0x262, 0x026,
0x262, 0x2A0, 0x086, 0x106, 0x2A1, 0x126, 0x1E5, 0x266,
0x26A, 0x2A1, 0x0E6, 0x1AA, 0x281, 0x2B6, 0x271, 0x154,
0x02F, 0x0C4, 0x02D, 0x213, 0x0CE, 0x003, 0x38F, 0x2CD,
0x1A2, 0x036, 0x1B5, 0x26A, 0x086, 0x280, 0x086, 0x1AA,
0x2A1, 0x226, 0x1AD, 0x0CF, 0x2A6, 0x292, 0x2C6, 0x022,
0x1AA, 0x256, 0x0D5, 0x02D, 0x050, 0x266, 0x0D5, 0x004,
0x176, 0x295, 0x201, 0x0D3, 0x055, 0x031, 0x2CD, 0x2EA,
0x1E2, 0x261, 0x1EA, 0x28A, 0x004, 0x145, 0x026, 0x1A6,
0x1C6, 0x1F5, 0x2CE, 0x034, 0x051, 0x146, 0x1E1, 0x0B0,
0x1B0, 0x261, 0x0D5, 0x025, 0x142, 0x1C0, 0x07C, 0x0B0,
0x1E6, 0x081, 0x044, 0x02F, 0x2CF, 0x081, 0x290, 0x0A2,
0x1A6, 0x281, 0x0CD, 0x155, 0x031, 0x1A2, 0x086, 0x262,
0x2A1, 0x0CD, 0x0CA, 0x0E6, 0x1E5, 0x003, 0x394, 0x0C5,
0x030, 0x26F, 0x053, 0x0C1, 0x1B6, 0x095, 0x2D4, 0x030,
0x26F, 0x053, 0x0C0, 0x07C, 0x2E6, 0x295, 0x143, 0x2CD,
0x2CE, 0x037, 0x0C9, 0x144, 0x2CD, 0x040, 0x08E, 0x054,
0x282, 0x022, 0x2A1, 0x229, 0x053, 0x0D5, 0x262, 0x027,
0x26A, 0x1E8, 0x14D, 0x1A2, 0x004, 0x26A, 0x296, 0x281,
0x176, 0x295, 0x201, 0x0E2, 0x2C4, 0x143, 0x2D4, 0x026,
0x262, 0x2A0, 0x08F, 0x0C4, 0x031, 0x213, 0x2B5, 0x155,
0x213, 0x02F, 0x143, 0x121, 0x2A6, 0x1AD, 0x2D4, 0x034,
0x0C5, 0x026, 0x295, 0x003, 0x396, 0x2A1, 0x176, 0x295,
0x201, 0x0AA, 0x04E, 0x004, 0x1B0, 0x070, 0x275, 0x154,
0x026, 0x2C1, 0x2B3, 0x154, 0x2AA, 0x256, 0x0C1, 0x044,
0x004, 0x23F
]),
Int32Array.from([
0x379, 0x099, 0x348, 0x010, 0x090, 0x196, 0x09C, 0x1FF,
0x1B0, 0x32D, 0x244, 0x0DE, 0x201, 0x386, 0x163, 0x11F,
0x39B, 0x344, 0x3FE, 0x02F, 0x188, 0x113, 0x3D9, 0x102,
0x04A, 0x2E1, 0x1D1, 0x18E, 0x077, 0x262, 0x241, 0x20D,
0x1B8, 0x11D, 0x0D0, 0x0A5, 0x29C, 0x24D, 0x3E7, 0x006,
0x2D0, 0x1B7, 0x337, 0x178, 0x0F1, 0x1E0, 0x00B, 0x01E,
0x0DA, 0x1C6, 0x2D9, 0x00D, 0x28B, 0x34A, 0x252, 0x27A,
0x057, 0x0CA, 0x2C2, 0x2E4, 0x3A6, 0x0E3, 0x22B, 0x307,
0x174, 0x292, 0x10C, 0x1ED, 0x2FD, 0x2D4, 0x0A7, 0x051,
0x34F, 0x07A, 0x1D5, 0x01D, 0x22E, 0x2C2, 0x1DF, 0x08F,
0x105, 0x3FE, 0x286, 0x2A2, 0x3B1, 0x131, 0x285, 0x362,
0x315, 0x13C, 0x0F9, 0x1A2, 0x28D, 0x246, 0x1B3, 0x12C,
0x2AD, 0x0F8, 0x222, 0x0EC, 0x39F, 0x358, 0x014, 0x229,
0x0C8, 0x360, 0x1C2, 0x031, 0x098, 0x041, 0x3E4, 0x046,
0x332, 0x318, 0x2E3, 0x24E, 0x3E2, 0x1E1, 0x0BE, 0x239,
0x306, 0x3A5, 0x352, 0x351, 0x275, 0x0ED, 0x045, 0x229,
0x0BF, 0x05D, 0x253, 0x1BE, 0x02E, 0x35A, 0x0E4, 0x2E9,
0x17A, 0x166, 0x03C, 0x007
])
);
});
it('testAztec 7 - real life test case', () => {
testEncodeDecode(
GenericGF.AZTEC_DATA_12,
Int32Array.from([
0x571, 0xE1B, 0x542, 0xE12, 0x1E2, 0x0DC, 0xCD0, 0xB85,
0x69A, 0xA81, 0x709, 0xA6A, 0x584, 0x510, 0x4AA, 0x256,
0xCE0, 0x0F8, 0xFB3, 0x5A2, 0x0D9, 0xAD1, 0x389, 0x09C,
0x4D3, 0x0B8, 0xD5B, 0x503, 0x2B2, 0xA81, 0x2A8, 0x4E0,
0x92D, 0x3A5, 0xA81, 0x388, 0x8A6, 0xAA8, 0xAA0, 0x07C,
0xA18, 0xA17, 0x41A, 0xD55, 0x032, 0xB09, 0xC15, 0x142,
0xBB5, 0x2B0, 0x0CE, 0xD59, 0xD9C, 0x1A0, 0x90A, 0xAD5,
0x540, 0x0F8, 0x583, 0xCC4, 0x0B4, 0x509, 0x98D, 0x50C,
0xED5, 0x9D9, 0xC13, 0x52A, 0x023, 0xCC4, 0x092, 0x0FB,
0x89A, 0xD55, 0x02E, 0x15A, 0x6AA, 0x049, 0x079, 0x54E,
0x33E, 0xB67, 0x068, 0xAA8, 0x44E, 0x354, 0x03E, 0x452,
0x2A1, 0x9AD, 0xB50, 0x289, 0x8AE, 0xA28, 0x804, 0x5DA,
0x958, 0x04D, 0x509, 0x20F, 0x458, 0xC11, 0x589, 0x584,
0xC04, 0x7AA, 0x8A0, 0xAA3, 0x4B3, 0x837, 0x55C, 0xD39,
0x882, 0x698, 0xAA0, 0x219, 0x06A, 0x852, 0x679, 0x666,
0x9AA, 0xA13, 0x99A, 0xAA0, 0x6B6, 0x9C5, 0x540, 0xBCC,
0x40B, 0x613, 0x338, 0x03E, 0x3EC, 0xD68, 0x836, 0x6D6,
0x6A2, 0x1A8, 0x021, 0x9AA, 0xA86, 0x266, 0xB4C, 0xFA9,
0xA92, 0xB18, 0x226, 0xAA5, 0x635, 0x42D, 0x142, 0x663,
0x540, 0x45D, 0xA95, 0x804, 0xD31, 0x543, 0x1B3, 0x6EA,
0x78A, 0x617, 0xAA8, 0xA01, 0x145, 0x099, 0xA67, 0x19F,
0x5B3, 0x834, 0x145, 0x467, 0x84B, 0x06C, 0x261, 0x354,
0x255, 0x09C, 0x01F, 0x0B0, 0x798, 0x811, 0x102, 0xFB3,
0xC81, 0xA40, 0xA26, 0x9A8, 0x133, 0x555, 0x0C5, 0xA22,
0x1A6, 0x2A8, 0x4CD, 0x328, 0xE67, 0x940, 0x3E5, 0x0C5,
0x0C2, 0x6F1, 0x4CC, 0x16D, 0x895, 0xB50, 0x309, 0xBC5,
0x330, 0x07C, 0xB9A, 0x955, 0x0EC, 0xDB3, 0x837, 0x325,
0x44B, 0x344, 0x023, 0x854, 0xA08, 0x22A, 0x862, 0x914,
0xCD5, 0x988, 0x279, 0xA9E, 0x853, 0x5A2, 0x012, 0x6AA,
0x5A8, 0x15D, 0xA95, 0x804, 0xE2B, 0x114, 0x3B5, 0x026,
0x98A, 0xA02, 0x3CC, 0x40C, 0x613, 0xAD5, 0x558, 0x4C2,
0xF50, 0xD21, 0xA99, 0xADB, 0x503, 0x431, 0x426, 0xA54,
0x03E, 0x5AA, 0x15D, 0xA95, 0x804, 0xAA1, 0x380, 0x46C,
0x070, 0x9D5, 0x540, 0x9AC, 0x1AC, 0xD54, 0xAAA, 0x563,
0x044, 0x401, 0x220, 0x9F1, 0x4F0, 0xDAA, 0x170, 0x90F,
0x106, 0xE66, 0x85C, 0x2B4, 0xD54, 0x0B8, 0x4D3, 0x52C,
0x228, 0x825, 0x512, 0xB67, 0x007, 0xC7D, 0x9AD, 0x106,
0xCD6, 0x89C, 0x484, 0xE26, 0x985, 0xC6A, 0xDA8, 0x195,
0x954, 0x095, 0x427, 0x049, 0x69D, 0x2D4, 0x09C, 0x445,
0x355, 0x455, 0x003, 0xE50, 0xC50, 0xBA0, 0xD6A, 0xA81,
0x958, 0x4E0, 0xA8A, 0x15D, 0xA95, 0x806, 0x76A, 0xCEC,
0xE0D, 0x048, 0x556, 0xAAA, 0x007, 0xC2C, 0x1E6, 0x205,
0xA28, 0x4CC, 0x6A8, 0x676, 0xACE, 0xCE0, 0x9A9, 0x501,
0x1E6, 0x204, 0x907, 0xDC4, 0xD6A, 0xA81, 0x70A, 0xD35,
0x502, 0x483, 0xCAA, 0x719, 0xF5B, 0x383, 0x455, 0x422,
0x71A, 0xA01, 0xF22, 0x915, 0x0CD, 0x6DA, 0x814, 0x4C5,
0x751, 0x440, 0x22E, 0xD4A, 0xC02, 0x6A8, 0x490, 0x7A2,
0xC60, 0x8AC, 0x4AC, 0x260, 0x23D, 0x545, 0x055, 0x1A5,
0x9C1, 0xBAA, 0xE69, 0xCC4, 0x134, 0xC55, 0x010, 0xC83,
0x542, 0x933, 0xCB3, 0x34D, 0x550, 0x9CC, 0xD55, 0x035,
0xB4E, 0x2AA, 0x05E, 0x620, 0x5B0, 0x999, 0xC01, 0xF1F,
0x66B, 0x441, 0xB36, 0xB35, 0x10D, 0x401, 0x0CD, 0x554,
0x313, 0x35A, 0x67D, 0x4D4, 0x958, 0xC11, 0x355, 0x2B1,
0xAA1, 0x68A, 0x133, 0x1AA, 0x022, 0xED4, 0xAC0, 0x269,
0x8AA, 0x18D, 0x9B7, 0x53C, 0x530, 0xBD5, 0x450, 0x08A,
0x284, 0xCD3, 0x38C, 0xFAD, 0x9C1, 0xA0A, 0x2A3, 0x3C2,
0x583, 0x613, 0x09A, 0xA12, 0xA84, 0xE00, 0xF85, 0x83C,
0xC40, 0x888, 0x17D, 0x9E4, 0x0D2, 0x051, 0x34D, 0x409,
0x9AA, 0xA86, 0x2D1, 0x10D, 0x315, 0x426, 0x699, 0x473,
0x3CA, 0x01F, 0x286, 0x286, 0x137, 0x8A6, 0x60B, 0x6C4,
0xADA, 0x818, 0x4DE, 0x299, 0x803, 0xE5C, 0xD4A, 0xA87,
0x66D, 0x9C1, 0xB99, 0x2A2, 0x59A, 0x201, 0x1C2, 0xA50,
0x411, 0x543, 0x148, 0xA66, 0xACC, 0x413, 0xCD4, 0xF42,
0x9AD, 0x100, 0x935, 0x52D, 0x40A, 0xED4, 0xAC0, 0x271,
0x588, 0xA1D, 0xA81, 0x34C, 0x550, 0x11E, 0x620, 0x630,
0x9D6, 0xAAA, 0xC26, 0x17A, 0x869, 0x0D4, 0xCD6, 0xDA8,
0x1A1, 0x8A1, 0x352, 0xA01, 0xF2D, 0x50A, 0xED4, 0xAC0,
0x255, 0x09C, 0x023, 0x603, 0x84E, 0xAAA, 0x04D, 0x60D,
0x66A, 0xA55, 0x52B, 0x182, 0x220, 0x091, 0x00F, 0x8A7,
0x86D, 0x50B, 0x848, 0x788, 0x373, 0x342, 0xE15, 0xA6A,
0xA05, 0xC26, 0x9A9, 0x611, 0x441, 0x2A8, 0x95B, 0x380,
0x3E3, 0xECD, 0x688, 0x366, 0xB44, 0xE24, 0x271, 0x34C,
0x2E3, 0x56D, 0x40C, 0xACA, 0xA04, 0xAA1, 0x382, 0x4B4,
0xE96, 0xA04, 0xE22, 0x29A, 0xAA2, 0xA80, 0x1F2, 0x862,
0x85D, 0x06B, 0x554, 0x0CA, 0xC27, 0x054, 0x50A, 0xED4,
0xAC0, 0x33B, 0x567, 0x670, 0x682, 0x42A, 0xB55, 0x500,
0x3E1, 0x60F, 0x310, 0x2D1, 0x426, 0x635, 0x433, 0xB56,
0x767, 0x04D, 0x4A8, 0x08F, 0x310, 0x248, 0x3EE, 0x26B,
0x554, 0x0B8, 0x569, 0xAA8, 0x124, 0x1E5, 0x538, 0xCFA,
0xD9C, 0x1A2, 0xAA1, 0x138, 0xD50, 0x0F9, 0x148, 0xA86,
0x6B6, 0xD40, 0xA26, 0x2BA, 0x8A2, 0x011, 0x76A, 0x560,
0x135, 0x424, 0x83D, 0x163, 0x045, 0x625, 0x613, 0x011,
0xEAA, 0x282, 0xA8D, 0x2CE, 0x0DD, 0x573, 0x4E6, 0x209,
0xA62, 0xA80, 0x864, 0x1AA, 0x149, 0x9E5, 0x99A, 0x6AA,
0x84E, 0x66A, 0xA81, 0xADA, 0x715, 0x502, 0xF31, 0x02D,
0x84C, 0xCE0, 0x0F8, 0xFB3, 0x5A2, 0x0D9, 0xB59, 0xA88,
0x6A0, 0x086, 0x6AA, 0xA18, 0x99A, 0xD33, 0xEA6, 0xA4A,
0xC60, 0x89A, 0xA95, 0x8D5, 0x0B4, 0x509, 0x98D, 0x501,
0x176, 0xA56, 0x013, 0x4C5, 0x50C, 0x6CD, 0xBA9, 0xE29,
0x85E, 0xAA2, 0x804, 0x514, 0x266, 0x99C, 0x67D, 0x6CE,
0x0D0, 0x515, 0x19E, 0x12C, 0x1B0, 0x984, 0xD50, 0x954,
0x270, 0x07C, 0x2C1, 0xE62, 0x044, 0x40B, 0xECF, 0x206,
0x902, 0x89A, 0x6A0, 0x4CD, 0x554, 0x316, 0x888, 0x698,
0xAA1, 0x334, 0xCA3, 0x99E, 0x500, 0xF94, 0x314, 0x309,
0xBC5, 0x330, 0x5B6, 0x256, 0xD40, 0xC26, 0xF14, 0xCC0,
0x1F2, 0xE6A, 0x554, 0x3B3, 0x6CE, 0x0DC, 0xC95, 0x12C,
0xD10, 0x08E, 0x152, 0x820, 0x8AA, 0x18A, 0x453, 0x356,
0x620, 0x9E6, 0xA7A, 0x14D, 0x688, 0x049, 0xAA9, 0x6A0,
0x576, 0xA56, 0x013, 0x8AC, 0x450, 0xED4, 0x09A, 0x62A,
0x808, 0xF31, 0x031, 0x84E, 0xB55, 0x561, 0x30B, 0xD43,
0x486, 0xA66, 0xB6D, 0x40D, 0x0C5, 0x09A, 0x950, 0x0F9,
0x6A8, 0x576, 0xA56, 0x012, 0xA84, 0xE01, 0x1B0, 0x1C2,
0x755, 0x502, 0x6B0, 0x6B3, 0x552, 0xAA9, 0x58C, 0x111,
0x004, 0x882, 0x7C5, 0x3C3, 0x6A8, 0x5C2, 0x43C, 0x41B,
0x99A, 0x170, 0xAD3, 0x550, 0x2E1, 0x34D, 0x4B0, 0x8A2,
0x095, 0x44A, 0xD9C, 0x01F, 0x1F6, 0x6B4, 0x41B, 0x35A,
0x271, 0x213, 0x89A, 0x617, 0x1AB, 0x6A0, 0x656, 0x550,
0x255, 0x09C, 0x125, 0xA74, 0xB50, 0x271, 0x114, 0xD55,
0x154, 0x00F, 0x943, 0x142, 0xE83, 0x5AA, 0xA06, 0x561,
0x382, 0xA28, 0x576, 0xA56, 0x019, 0xDAB, 0x3B3, 0x834,
0x121, 0x55A, 0xAA8, 0x01F, 0x0B0, 0x798, 0x816, 0x8A1,
0x331, 0xAA1, 0x9DA, 0xB3B, 0x382, 0x6A5, 0x404, 0x798,
0x812, 0x41F, 0x713, 0x5AA, 0xA05, 0xC2B, 0x4D5, 0x409,
0x20F, 0x2A9, 0xC67, 0xD6C, 0xE0D, 0x155, 0x089, 0xC6A,
0x807, 0xC8A, 0x454, 0x335, 0xB6A, 0x051, 0x315, 0xD45,
0x100, 0x8BB, 0x52B, 0x009, 0xAA1, 0x241, 0xE8B, 0x182,
0x2B1, 0x2B0, 0x980, 0x8F5, 0x514, 0x154, 0x696, 0x706,
0xEAB, 0x9A7, 0x310, 0x4D3, 0x154, 0x043, 0x20D, 0x50A,
0x4CF, 0x2CC, 0xD35, 0x542, 0x733, 0x554, 0x0D6, 0xD38,
0xAA8, 0x179, 0x881, 0x6C2, 0x667, 0x007, 0xC7D, 0x9AD,
0x106, 0xCDA, 0xCD4, 0x435, 0x004, 0x335, 0x550, 0xC4C,
0xD69, 0x9F5, 0x352, 0x563, 0x044, 0xD54, 0xAC6, 0xA85,
0xA28, 0x4CC, 0x6A8, 0x08B, 0xB52, 0xB00, 0x9A6, 0x2A8,
0x636, 0x6DD, 0x4F1, 0x4C2, 0xF55, 0x140, 0x228, 0xA13,
0x34C, 0xE33, 0xEB6, 0x706, 0x828, 0xA8C, 0xF09, 0x60D,
0x84C, 0x26A, 0x84A, 0xA13, 0x803, 0xE16, 0x0F3, 0x102,
0x220, 0x5F6, 0x790, 0x348, 0x144, 0xD35, 0x026, 0x6AA,
0xA18, 0xB44, 0x434, 0xC55, 0x099, 0xA65, 0x1CC, 0xF28,
0x07C, 0xA18, 0xA18, 0x4DE, 0x299, 0x82D, 0xB12, 0xB6A,
0x061, 0x378, 0xA66, 0x00F, 0x973, 0x52A, 0xA1D, 0x9B6,
0x706, 0xE64, 0xA89, 0x668, 0x804, 0x70A, 0x941, 0x045,
0x50C, 0x522, 0x99A, 0xB31, 0x04F, 0x353, 0xD0A, 0x6B4,
0x402, 0x4D5, 0x4B5, 0x02B, 0xB52, 0xB00, 0x9C5, 0x622,
0x876, 0xA04, 0xD31, 0x540, 0x479, 0x881, 0x8C2, 0x75A,
0xAAB, 0x098, 0x5EA, 0x1A4, 0x353, 0x35B, 0x6A0, 0x686,
0x284, 0xD4A, 0x807, 0xCB5, 0x42B, 0xB52, 0xB00, 0x954,
0x270, 0x08D, 0x80E, 0x13A, 0xAA8, 0x135, 0x835, 0x9AA,
0x801, 0xF14, 0xF0D, 0xAA1, 0x709, 0x0F1, 0x06E, 0x668,
0x5C2, 0xB4D, 0x540, 0xB84, 0xD35, 0x2C2, 0x288, 0x255,
0x12B, 0x670, 0x07C, 0x7D9, 0xAD1, 0x06C, 0xD68, 0x9C4,
0x84E, 0x269, 0x85C, 0x6AD, 0xA81, 0x959, 0x540, 0x954,
0x270, 0x496, 0x9D2, 0xD40, 0x9C4, 0x453, 0x554, 0x550,
0x03E, 0x50C, 0x50B, 0xA0D, 0x6AA, 0x819, 0x584, 0xE0A,
0x8A1, 0x5DA, 0x958, 0x067, 0x6AC, 0xECE, 0x0D0, 0x485,
0x56A, 0xAA0, 0x07C, 0x2C1, 0xE62, 0x05A, 0x284, 0xCC6,
0xA86, 0x76A, 0xCEC, 0xE09, 0xA95, 0x011, 0xE62, 0x049,
0x07D, 0xC4D, 0x6AA, 0x817, 0x0AD, 0x355, 0x024, 0x83C,
0xAA7, 0x19F, 0x5B3, 0x834, 0x554, 0x227, 0x1AA, 0x01F,
0x229, 0x150, 0xCD6, 0xDA8, 0x144, 0xC57, 0x514, 0x402,
0x2ED, 0x4AC, 0x026, 0xA84, 0x907, 0xA2C, 0x608, 0xAC4,
0xAC2, 0x602, 0x3D5, 0x450, 0x551, 0xA59, 0xC1B, 0xAAE,
0x69C, 0xC41, 0x34C, 0x550, 0x10C, 0x835, 0x429, 0x33C,
0xB33, 0x4D5, 0x509, 0xCCD, 0x550, 0x35B, 0x4E2, 0xAA0,
0x5E6, 0x205, 0xB09, 0x99C, 0x09F
]),
Int32Array.from([
0xD54, 0x221, 0x154, 0x7CD, 0xBF3, 0x112, 0x89B, 0xC5E,
0x9CD, 0x07E, 0xFB6, 0x78F, 0x7FA, 0x16F, 0x377, 0x4B4,
0x62D, 0x475, 0xBC2, 0x861, 0xB72, 0x9D0, 0x76A, 0x5A1,
0x22A, 0xF74, 0xDBA, 0x8B1, 0x139, 0xDCD, 0x012, 0x293,
0x705, 0xA34, 0xDD5, 0x3D2, 0x7F8, 0x0A6, 0x89A, 0x346,
0xCE0, 0x690, 0x40E, 0xFF3, 0xC4D, 0x97F, 0x9C9, 0x016,
0x73A, 0x923, 0xBCE, 0xFA9, 0xE6A, 0xB92, 0x02A, 0x07C,
0x04B, 0x8D5, 0x753, 0x42E, 0x67E, 0x87C, 0xEE6, 0xD7D,
0x2BF, 0xFB2, 0xFF8, 0x42F, 0x4CB, 0x214, 0x779, 0x02D,
0x606, 0xA02, 0x08A, 0xD4F, 0xB87, 0xDDF, 0xC49, 0xB51,
0x0E9, 0xF89, 0xAEF, 0xC92, 0x383, 0x98D, 0x367, 0xBD3,
0xA55, 0x148, 0x9DB, 0x913, 0xC79, 0x6FF, 0x387, 0x6EA,
0x7FA, 0xC1B, 0x12D, 0x303, 0xBCA, 0x503, 0x0FB, 0xB14,
0x0D4, 0xAD1, 0xAFC, 0x9DD, 0x404, 0x145, 0x6E5, 0x8ED,
0xF94, 0xD72, 0x645, 0xA21, 0x1A8, 0xABF, 0xC03, 0x91E,
0xD53, 0x48C, 0x471, 0x4E4, 0x408, 0x33C, 0x5DF, 0x73D,
0xA2A, 0x454, 0xD77, 0xC48, 0x2F5, 0x96A, 0x9CF, 0x047,
0x611, 0xE92, 0xC2F, 0xA98, 0x56D, 0x919, 0x615, 0x535,
0x67A, 0x8C1, 0x2E2, 0xBC4, 0xBE8, 0x328, 0x04F, 0x257,
0x3F9, 0xFA5, 0x477, 0x12E, 0x94B, 0x116, 0xEF7, 0x65F,
0x6B3, 0x915, 0xC64, 0x9AF, 0xB6C, 0x6A2, 0x50D, 0xEA3,
0x26E, 0xC23, 0x817, 0xA42, 0x71A, 0x9DD, 0xDA8, 0x84D,
0x3F3, 0x85B, 0xB00, 0x1FC, 0xB0A, 0xC2F, 0x00C, 0x095,
0xC58, 0x0E3, 0x807, 0x962, 0xC4B, 0x29A, 0x6FC, 0x958,
0xD29, 0x59E, 0xB14, 0x95A, 0xEDE, 0xF3D, 0xFB8, 0x0E5,
0x348, 0x2E7, 0x38E, 0x56A, 0x410, 0x3B1, 0x4B0, 0x793,
0xAB7, 0x0BC, 0x648, 0x719, 0xE3E, 0xFB4, 0x3B4, 0xE5C,
0x950, 0xD2A, 0x50B, 0x76F, 0x8D2, 0x3C7, 0xECC, 0x87C,
0x53A, 0xBA7, 0x4C3, 0x148, 0x437, 0x820, 0xECD, 0x660,
0x095, 0x2F4, 0x661, 0x6A4, 0xB74, 0x5F3, 0x1D2, 0x7EC,
0x8E2, 0xA40, 0xA6F, 0xFC3, 0x3BE, 0x1E9, 0x52C, 0x233,
0x173, 0x4EF, 0xA7C, 0x40B, 0x14C, 0x88D, 0xF30, 0x8D9,
0xBDB, 0x0A6, 0x940, 0xD46, 0xB2B, 0x03E, 0x46A, 0x641,
0xF08, 0xAFF, 0x496, 0x68A, 0x7A4, 0x0BA, 0xD43, 0x515,
0xB26, 0xD8F, 0x05C, 0xD6E, 0xA2C, 0xF25, 0x628, 0x4E5,
0x81D, 0xA2A, 0x1FF, 0x302, 0xFBD, 0x6D9, 0x711, 0xD8B,
0xE5C, 0x5CF, 0x42E, 0x008, 0x863, 0xB6F, 0x1E1, 0x3DA,
0xACE, 0x82B, 0x2DB, 0x7EB, 0xC15, 0x79F, 0xA79, 0xDAF,
0x00D, 0x2F6, 0x0CE, 0x370, 0x7E8, 0x9E6, 0x89F, 0xAE9,
0x175, 0xA95, 0x06B, 0x9DF, 0xAFF, 0x45B, 0x823, 0xAA4,
0xC79, 0x773, 0x886, 0x854, 0x0A5, 0x6D1, 0xE55, 0xEBB,
0x518, 0xE50, 0xF8F, 0x8CC, 0x834, 0x388, 0xCD2, 0xFC1,
0xA55, 0x1F8, 0xD1F, 0xE08, 0xF93, 0x362, 0xA22, 0x9FA,
0xCE5, 0x3C3, 0xDD4, 0xC53, 0xB94, 0xAD0, 0x6EB, 0x68D,
0x660, 0x8FC, 0xBCD, 0x914, 0x16F, 0x4C0, 0x134, 0xE1A,
0x76F, 0x9CB, 0x660, 0xEA0, 0x320, 0x15A, 0xCE3, 0x7E8,
0x03E, 0xB9A, 0xC90, 0xA14, 0x256, 0x1A8, 0x639, 0x7C6,
0xA59, 0xA65, 0x956, 0x9E4, 0x592, 0x6A9, 0xCFF, 0x4DC,
0xAA3, 0xD2A, 0xFDE, 0xA87, 0xBF5, 0x9F0, 0xC32, 0x94F,
0x675, 0x9A6, 0x369, 0x648, 0x289, 0x823, 0x498, 0x574,
0x8D1, 0xA13, 0xD1A, 0xBB5, 0xA19, 0x7F7, 0x775, 0x138,
0x949, 0xA4C, 0xE36, 0x126, 0xC85, 0xE05, 0xFEE, 0x962,
0x36D, 0x08D, 0xC76, 0x1E1, 0x1EC, 0x8D7, 0x231, 0xB68,
0x03C, 0x1DE, 0x7DF, 0x2B1, 0x09D, 0xC81, 0xDA4, 0x8F7,
0x6B9, 0x947, 0x9B0
])
);
});
it('testAztec 8.1 - synthetic test cases (compact mode message)', () => {
testEncodeDecodeRandom(GenericGF.AZTEC_PARAM, 2, 5);
});
it('testAztec 8.2 - synthetic test cases (full mode message)', () => {
testEncodeDecodeRandom(GenericGF.AZTEC_PARAM, 4, 6);
});
it('testAztec 8.3 - synthetic test cases', () => {
testEncodeDecodeRandom(GenericGF.AZTEC_DATA_6, 10, 7);
});
it('testAztec 8.4 - synthetic test cases', () => {
testEncodeDecodeRandom(GenericGF.AZTEC_DATA_6, 20, 12);
});
it('testAztec 8.5 - synthetic test cases', () => {
testEncodeDecodeRandom(GenericGF.AZTEC_DATA_8, 20, 11);
});
it('testAztec 8.6 - synthetic test cases', () => {
testEncodeDecodeRandom(GenericGF.AZTEC_DATA_8, 128, 127);
});
it('testAztec 8.7 - synthetic test cases', () => {
testEncodeDecodeRandom(GenericGF.AZTEC_DATA_10, 128, 128);
});
it('testAztec 8.8 - synthetic test cases', () => {
testEncodeDecodeRandom(GenericGF.AZTEC_DATA_10, 768, 255);
});
it('testAztec 8.9 - synthetic test cases', () => {
testEncodeDecodeRandom(GenericGF.AZTEC_DATA_12, 3072, 1023);
});
});
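// iteration counts for the randomized encode/decode stress tests below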
const DECODER_RANDOM_TEST_ITERATIONS: number /*int*/ = 3;
const DECODER_TEST_ITERATIONS: number /*int*/ = 10;
function testEncodeDecodeRandom(field: GenericGF, dataSize: number /*int*/, ecSize: number /*int*/): void {
assert.strictEqual(dataSize > 0 && dataSize <= field.getSize() - 3, true, 'Invalid data size for ' + field);
assert.strictEqual(ecSize > 0 && ecSize + dataSize <= field.getSize(), true, 'Invalid ECC size for ' + field);
const encoder = new ReedSolomonEncoder(field);
const message = new Int32Array(dataSize + ecSize);
const dataWords = new Int32Array(dataSize); /*Int32Array(dataSize)*/
const ecWords = new Int32Array(ecSize); /*Int32Array(ecSize)*/
const random: Random = getPseudoRandom();
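    // fields larger than 256 are slow to exercise, so limit them to a single random iteration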
const iterations: number /*int*/ = field.getSize() > 256 ? 1 : DECODER_RANDOM_TEST_ITERATIONS;
for (let i: number /*int*/ = 0; i < iterations; i++) {
// generate random data
for (let k: number /*int*/ = 0; k < dataSize; k++) {
dataWords[k] = random.next(field.getSize());
}
// generate ECC words
ZXingSystem.arraycopy(dataWords, 0, message, 0, dataWords.length);
encoder.encode(message, ecWords.length);
ZXingSystem.arraycopy(message, dataSize, ecWords, 0, ecSize);
// check to see if Decoder can fix up to ecWords/2 random errors
testDecoder(field, dataWords, ecWords);
}
}
function testEncodeDecode(field: GenericGF, dataWords: Int32Array, ecWords: Int32Array): void {
testEncoder(field, dataWords, ecWords);
testDecoder(field, dataWords, ecWords);
}
function testEncoder(field: GenericGF, dataWords: Int32Array, ecWords: Int32Array): void {
const encoder = new ReedSolomonEncoder(field);
const messageExpected = new Int32Array(dataWords.length + ecWords.length);
const message = new Int32Array(dataWords.length + ecWords.length);
ZXingSystem.arraycopy(dataWords, 0, messageExpected, 0, dataWords.length);
ZXingSystem.arraycopy(ecWords, 0, messageExpected, dataWords.length, ecWords.length);
ZXingSystem.arraycopy(dataWords, 0, message, 0, dataWords.length);
encoder.encode(message, ecWords.length);
assertDataEquals(message, messageExpected, 'Encode in ' + field + ' (' + dataWords.length + ',' + ecWords.length + ') failed');
}
function testDecoder(field: GenericGF, dataWords: Int32Array, ecWords: Int32Array): void {
const decoder = new ReedSolomonDecoder(field);
const message = new Int32Array(dataWords.length + ecWords.length);
const maxErrors: number /*int*/ = Math.floor(ecWords.length / 2);
const random: Random = getPseudoRandom();
const iterations: number /*int*/ = field.getSize() > 256 ? 1 : DECODER_TEST_ITERATIONS;
for (let j: number /*int*/ = 0; j < iterations; j++) {
for (let i: number /*int*/ = 0; i < ecWords.length; i++) {
if (i > 10 && i < Math.floor(ecWords.length / 2) - 10) {
// performance improvement - skip intermediate cases in long-running tests
i += Math.floor(ecWords.length / 10);
}
ZXingSystem.arraycopy(dataWords, 0, message, 0, dataWords.length);
ZXingSystem.arraycopy(ecWords, 0, message, dataWords.length, ecWords.length);
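            // corrupt i randomly chosen codeword positions before attempting to decode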
corrupt(message, i, random, field.getSize());
try {
decoder.decode(message, ecWords.length);
} catch (e/*ReedSolomonException e*/) {
// fail only if maxErrors exceeded
assert.strictEqual(i > maxErrors, true,
'Decode in ' + field + ' (' + dataWords.length + ',' + ecWords.length + ') failed at ' + i + ' errors: ' + e);
// else stop
break;
}
if (i < maxErrors) {
assertDataEquals(message,
dataWords,
'Decode in ' + field + ' (' + dataWords.length + ',' + ecWords.length + ') failed at ' + i + ' errors');
}
}
}
}
function assertDataEquals(received: Int32Array, expected: Int32Array, message: string): void {
for (let i: number /*int*/ = 0; i < expected.length; i++) {
if (expected[i] !== received[i]) {
const receivedToString = arrayToString(Int32Array.from(received.subarray(0, expected.length)));
assert.ok(false, `${message}. Mismatch at ${i}. Expected ${arrayToString(expected)}, got ${receivedToString}`);
}
}
}
function arrayToString(data: Int32Array): string {
const sb = new ZXingStringBuilder();
sb.append('{');
for (let i: number /*int*/ = 0; i < data.length; i++) {
if (i > 0) {
sb.append(',');
}
sb.append(data[i].toString(16));
}
return sb.append('}').toString();
}
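// a fixed seed keeps the pseudo-random corruption patterns reproducible between test runs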
function getPseudoRandom(): Random {
return new Random('0xDEADBEEF');
}
|
zxing-js/library
|
src/test/core/common/reedsolomon/ReedSolomon.spec.ts
|
TypeScript
|
mit
| 32,086 |
#!/usr/bin/env python
"""
Project-wide application configuration.
DO NOT STORE SECRETS, PASSWORDS, ETC. IN THIS FILE.
They will be exposed to users. Use environment variables instead.
See get_secrets() below for a fast way to access them.
"""
import os
"""
NAMES
"""
# Project name used for display
PROJECT_NAME = 'quotable'
# Project name in urls
# Use dashes, not underscores!
PROJECT_SLUG = 'quotable'
# The name of the repository containing the source
REPOSITORY_NAME = 'quotable'
REPOSITORY_URL = 'git@github.com:nprapps/%s.git' % REPOSITORY_NAME
REPOSITORY_ALT_URL = None # 'git@bitbucket.org:nprapps/%s.git' % REPOSITORY_NAME
# The name to be used in paths on the server
PROJECT_FILENAME = 'quotable'
"""
DEPLOYMENT
"""
FILE_SERVER = 'tools.apps.npr.org'
S3_BUCKET = 'tools.apps.npr.org'
ASSETS_S3_BUCKET = 'assets.apps.npr.org'
# These variables will be set at runtime. See configure_targets() below
DEBUG = True
"""
COPY EDITING
"""
COPY_GOOGLE_DOC_KEY = '0AlXMOHKxzQVRdHZuX1UycXplRlBfLVB0UVNldHJYZmc'
"""
SHARING
"""
PROJECT_DESCRIPTION = 'An opinionated project template for (mostly) server-less apps.'
SHARE_URL = 'http://%s/%s/' % (S3_BUCKET, PROJECT_SLUG)
TWITTER = {
'TEXT': PROJECT_NAME,
'URL': SHARE_URL,
# Will be resized to 120x120, can't be larger than 1MB
'IMAGE_URL': ''
}
FACEBOOK = {
'TITLE': PROJECT_NAME,
'URL': SHARE_URL,
'DESCRIPTION': PROJECT_DESCRIPTION,
# Should be square. No documented restrictions on size
'IMAGE_URL': TWITTER['IMAGE_URL'],
'APP_ID': '138837436154588'
}
GOOGLE = {
# Thumbnail image for Google News / Search.
# No documented restrictions on resolution or size
'IMAGE_URL': TWITTER['IMAGE_URL']
}
NPR_DFP = {
'STORY_ID': '203618536',
'TARGET': 'News_NPR_News_Investigations',
'ENVIRONMENT': 'NPRTEST',
'TESTSERVER': 'true'
}
"""
SERVICES
"""
GOOGLE_ANALYTICS_ID = 'UA-5828686-4'
|
18F/quotable
|
app_config.py
|
Python
|
mit
| 1,915 |
class AddIndexToSkillTotals < ActiveRecord::Migration
def change
add_index :skill_totals, :name
add_index :skill_totals, :date
end
end
|
thatguyandy27/SkillsCompiler
|
skills_app/db/migrate/20140316173555_add_index_to_skill_totals.rb
|
Ruby
|
mit
| 147 |
require_relative '../../../spec_helper'
require 'matrix'
describe "Vector#normalize" do
it "returns a normalized copy of the vector" do
x = 0.2672612419124244
Vector[1, 2, 3].normalize.should == Vector[x, x * 2, x * 3]
end
it "raises an error for zero vectors" do
lambda {
Vector[].normalize
}.should raise_error(Vector::ZeroVectorError)
lambda {
Vector[0, 0, 0].normalize
}.should raise_error(Vector::ZeroVectorError)
end
end
|
ruby/rubyspec
|
library/matrix/vector/normalize_spec.rb
|
Ruby
|
mit
| 473 |
'use strict';
/**
* Module dependencies.
*/
var users = require('../../app/controllers/users'),
goaliedash = require('../../app/controllers/goaliedash');
module.exports = function(app) {
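  // the goalie dashboard is gated behind the login and authorization middleware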
app.route('/goaliedash')
.get(users.requiresLogin, users.hasAuthorization);
};
|
thcmc/412hockey
|
app/routes/goaliedash.server.routes.js
|
JavaScript
|
mit
| 275 |
// @flow
import React from 'react'
import withPropsStream from '@vega/utils/withPropsStream'
import {map} from 'rxjs/operators'
import styles from './styles/Communicator.css'
import ThreadList from './ThreadList'
import CreateComment from './CreateComment'
function getPropsStream(props$) {
// todo: implement open/close behavior
return props$.pipe(map(props => ({...props, isOpen: true})))
}
type Props = {
isOpen: boolean,
subjectIds: string[],
focusedCommentId: string
}
export default withPropsStream(
getPropsStream,
class Communicator extends React.Component<Props> {
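    // createCommentIsSticky keeps the comment composer pinned open once the user clicks into it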
state = {
createCommentIsSticky: false
}
handleCloseCreateComment = event => {
this.setState({
createCommentIsSticky: false
})
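      // stop the click here so it does not bubble up to onClick and immediately re-pin the composer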
event.stopPropagation()
}
handleStickCreateComment = () => {
this.setState({
createCommentIsSticky: true
})
}
render() {
const {isOpen, subjectIds, focusedCommentId} = this.props
const {createCommentIsSticky} = this.state
return isOpen ? (
<div className={styles.root}>
<div
className={
createCommentIsSticky
? styles.feedWithWithStickyCreateComment
: styles.feed
}
>
<ThreadList
subjectId={subjectIds}
focusedCommentId={focusedCommentId}
/>
</div>
{subjectIds.length === 1 && (
<CreateComment
subjectId={subjectIds[0]}
showCloseButton={createCommentIsSticky}
className={
createCommentIsSticky
? styles.createCommentSticky
: styles.createComment
}
onClose={this.handleCloseCreateComment}
onSubmit={this.handleCloseCreateComment}
onClick={this.handleStickCreateComment}
/>
)}
</div>
) : null
}
}
)
|
VegaPublish/vega-studio
|
packages/@vega/communicator-system/src/components/providers/Communicator.js
|
JavaScript
|
mit
| 1,991 |
/*
* Copyright (c) 2014-2022 The Voxie Authors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#pragma once
#include <VoxieBackend/VoxieBackend.hpp>
#include <VoxieBackend/Data/Data.hpp>
#include <VoxieClient/ObjectExport/Client.hpp>
#include <VoxieClient/ObjectExport/ExportedObject.hpp>
#include <QtCore/QPointer>
#include <QtDBus/QDBusAbstractAdaptor>
#include <QtDBus/QDBusObjectPath>
namespace vx {
class NodePrototype;
namespace io {
class Operation;
class RunFilterOperation;
} // namespace io
class Exception;
class ExternalOperationAdaptorImpl;
class VOXIEBACKEND_EXPORT ExternalOperation : public vx::RefCountedObject {
Q_OBJECT
REFCOUNTEDOBJ_DECL(ExternalOperation)
friend class ExternalOperationAdaptorImpl;
QPointer<vx::Client> client = nullptr;
QSharedPointer<vx::io::Operation> operation_;
protected:
// TODO: Get rid of this / replace it by operation()->isFinished()?
bool isFinished = false;
void checkClient();
virtual void cleanup();
public:
explicit ExternalOperation(
const QSharedPointer<vx::io::Operation>& operation);
~ExternalOperation() override;
QWeakPointer<QSharedPointer<ExternalOperation>> initialReference;
virtual QString action() = 0;
virtual QString name() = 0;
const QSharedPointer<vx::io::Operation>& operation() const {
return operation_;
}
bool isClaimed();
Q_SIGNALS:
void error(const vx::Exception& error);
// Emitted when the operation is claimed
void claimed();
};
// TODO: Should probably be moved to ExtensionImporter / ExtensionExporter
class ExternalOperationImportAdaptorImpl;
class VOXIEBACKEND_EXPORT ExternalOperationImport : public ExternalOperation {
Q_OBJECT
REFCOUNTEDOBJ_DECL(ExternalOperationImport)
friend class ExternalOperationImportAdaptorImpl;
QString filename_;
QMap<QString, QDBusVariant> properties_;
QString name_;
public:
explicit ExternalOperationImport(
const QSharedPointer<vx::io::Operation>& operation,
const QString& filename, const QMap<QString, QDBusVariant>& properties,
const QString& name);
~ExternalOperationImport() override;
QString action() override;
QString name() override;
const QString& filename() { return filename_; }
const QMap<QString, QDBusVariant>& properties() { return properties_; }
Q_SIGNALS:
void finished(const QSharedPointer<vx::Data>& data);
};
class ExternalOperationExportAdaptorImpl;
class VOXIEBACKEND_EXPORT ExternalOperationExport : public ExternalOperation {
Q_OBJECT
REFCOUNTEDOBJ_DECL(ExternalOperationExport)
friend class ExternalOperationExportAdaptorImpl;
QString filename_;
QString name_;
QSharedPointer<vx::Data> data_;
public:
explicit ExternalOperationExport(
const QSharedPointer<vx::io::Operation>& operation,
const QString& filename, const QString& name,
const QSharedPointer<vx::Data>& data);
~ExternalOperationExport() override;
QString action() override;
QString name() override;
// TODO: data
const QString& filename() { return filename_; }
const QSharedPointer<vx::Data>& data() { return data_; }
Q_SIGNALS:
void finished();
};
} // namespace vx
|
voxie-viewer/voxie
|
src/VoxieBackend/Component/ExternalOperation.hpp
|
C++
|
mit
| 4,192 |
var models = require('../models');
var express = require('express');
var router = express.Router();
/* GET home page. */
router.get('/', function(req, res, next) {
console.log(req.session);
res.render('layout');
});
module.exports = router;
|
NUSPartTime/NUSPartTime
|
routes/index.js
|
JavaScript
|
mit
| 248 |
import os
import logging
from django.core.management.base import BaseCommand
from django.core.mail import send_mail
from django.template.loader import get_template
from workshops.models import Badge, Person, Role
logger = logging.getLogger()
class Command(BaseCommand):
    help = "Report instructors' activity."
def add_arguments(self, parser):
parser.add_argument(
'--send-out-for-real', action='store_true', default=False,
help='Send information to the instructors.',
)
parser.add_argument(
'--no-may-contact-only', action='store_true', default=False,
help='Include instructors not willing to be contacted.',
)
parser.add_argument(
'--django-mailing', action='store_true', default=False,
help='Use Django mailing system. This requires some environmental '
'variables to be set, see `settings.py`.',
)
parser.add_argument(
'-s', '--sender', action='store',
default='workshops@carpentries.org',
help='E-mail used in "from:" field.',
)
def foreign_tasks(self, tasks, person, roles):
"""List of other instructors' tasks, per event."""
return [
task.event.task_set.filter(role__in=roles)
.exclude(person=person)
.select_related('person')
for task in tasks
]
def fetch_activity(self, may_contact_only=True):
roles = Role.objects.filter(name__in=['instructor', 'helper'])
instructor_badges = Badge.objects.instructor_badges()
instructors = Person.objects.filter(badges__in=instructor_badges)
instructors = instructors.exclude(email__isnull=True)
if may_contact_only:
instructors = instructors.exclude(may_contact=False)
# let's get some things faster
instructors = instructors.select_related('airport') \
.prefetch_related('task_set', 'lessons',
'award_set', 'badges')
# don't repeat the records
instructors = instructors.distinct()
result = []
for person in instructors:
tasks = person.task_set.filter(role__in=roles) \
.select_related('event', 'role')
record = {
'person': person,
'lessons': person.lessons.all(),
'instructor_awards': person.award_set.filter(
badge__in=person.badges.instructor_badges()
),
'tasks': zip(tasks,
self.foreign_tasks(tasks, person, roles)),
}
result.append(record)
return result
def make_message(self, record):
tmplt = get_template('mailing/instructor_activity.txt')
return tmplt.render(context=record)
def subject(self, record):
# in future we can vary the subject depending on the record details
return 'Updating your Software Carpentry information'
def recipient(self, record):
return record['person'].email
def send_message(self, subject, message, sender, recipient, for_real=False,
django_mailing=False):
if for_real:
if django_mailing:
send_mail(subject, message, sender, [recipient])
else:
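                # no Django mailing requested: pipe the message to the system `mail` command instead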
command = 'mail -s "{subject}" -r {sender} {recipient}'.format(
subject=subject,
sender=sender,
recipient=recipient,
)
writer = os.popen(command, 'w')
writer.write(message)
writer.close()
if self.verbosity >= 2:
# write only a header
self.stdout.write('-' * 40 + '\n')
self.stdout.write('To: {}\n'.format(recipient))
self.stdout.write('Subject: {}\n'.format(subject))
self.stdout.write('From: {}\n'.format(sender))
if self.verbosity >= 3:
# write whole message out
self.stdout.write(message + '\n')
def handle(self, *args, **options):
# default is dummy run - only actually send mail if told to
send_for_real = options['send_out_for_real']
# by default include only instructors who have `may_contact==True`
no_may_contact_only = options['no_may_contact_only']
# use mailing options from settings.py or the `mail` system command?
django_mailing = options['django_mailing']
# verbosity option is added by Django
self.verbosity = int(options['verbosity'])
sender = options['sender']
results = self.fetch_activity(not no_may_contact_only)
for result in results:
message = self.make_message(result)
subject = self.subject(result)
recipient = self.recipient(result)
self.send_message(subject, message, sender, recipient,
for_real=send_for_real,
django_mailing=django_mailing)
if self.verbosity >= 1:
self.stdout.write('Sent {} emails.\n'.format(len(results)))
|
swcarpentry/amy
|
amy/workshops/management/commands/instructors_activity.py
|
Python
|
mit
| 5,305 |
<?php
/*
* This file is part of the Phuri package.
*
* Copyright © 2014 Erin Millard
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Eloquent\Phuri\Generic;
use Eloquent\Pathogen\Factory\PathFactoryInterface;
use Eloquent\Phuri\Generic\Parser\GenericUriComponentsInterface;
use Eloquent\Phuri\Normalization\UriNormalizerInterface;
use Eloquent\Phuri\Parameters\Factory\UriParametersFactoryInterface;
use Eloquent\Phuri\Path\Factory\UriPathFactory;
use Eloquent\Phuri\UriInterface;
use Eloquent\Phuri\Validation\Exception\InvalidUriComponentExceptionInterface;
/**
* An abstract base class for implementing generic URIs.
*/
abstract class AbstractGenericUri implements GenericUriInterface
{
/**
* Construct a new generic URI.
*
* @param GenericUriComponentsInterface $components The URI components.
*
* @throws InvalidUriComponentExceptionInterface If any of the components are invalid.
*/
public function __construct(GenericUriComponentsInterface $components)
{
$this->username = $components->username();
$this->password = $components->password();
$this->host = $components->host();
$this->port = $components->port();
$this->path = $components->path();
$this->fragment = $components->fragment();
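        // a null parameter list means the URI had no query delimiter at all;
        // an empty list still represents a bare "?" and keeps the delimiter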
if (null === $components->queryParameters()) {
$this->hasQueryDelimiter = false;
$this->queryParameters = static::queryParametersFactory()
->createEmpty();
} else {
$this->hasQueryDelimiter = true;
$this->queryParameters = static::queryParametersFactory()
->createFromEncodedPairs($components->queryParameters());
}
}
// Implementation of GenericUriInterface ===================================
/**
* Returns true if this URI has a username.
*
* This method will return false for URIs with empty string usernames.
*
* @link http://tools.ietf.org/html/rfc3986#section-3.2.1
*
* @return boolean True if this URI has a username.
*/
public function hasUsername()
{
return null !== $this->encodedUsername() &&
'' !== $this->encodedUsername();
}
/**
* Get the username.
*
* @link http://tools.ietf.org/html/rfc3986#section-3.2.1
*
* @return string|null The username, or null if there is no username.
*/
public function username()
{
if (null === $this->encodedUsername()) {
return null;
}
return static::encoder()->decode($this->encodedUsername());
}
/**
* Get the encoded username.
*
* @link http://tools.ietf.org/html/rfc3986#section-3.2.1
*
* @return string|null The encoded username, or null if there is no username.
*/
public function encodedUsername()
{
return $this->username;
}
/**
* Returns true if this URI has a password.
*
* This method will return false for URIs with empty string passwords.
*
* @link http://tools.ietf.org/html/rfc3986#section-3.2.1
*
* @return boolean True if this URI has a password.
*/
public function hasPassword()
{
return null !== $this->encodedPassword() &&
'' !== $this->encodedPassword();
}
/**
* Get the password.
*
* @link http://tools.ietf.org/html/rfc3986#section-3.2.1
*
* @return string|null The password, or null if there is no password.
*/
public function password()
{
if (null === $this->encodedPassword()) {
return null;
}
return static::encoder()->decode($this->encodedPassword());
}
/**
* Get the encoded password.
*
* @link http://tools.ietf.org/html/rfc3986#section-3.2.1
*
* @return string|null The encoded password, or null if there is no password.
*/
public function encodedPassword()
{
return $this->password;
}
/**
* Get the encoded host.
*
* @link http://tools.ietf.org/html/rfc3986#section-3.2.2
*
* @return string|null The encoded host, or null if there is no host.
*/
public function encodedHost()
{
return $this->host;
}
/**
* Returns true if this URI has a port.
*
* This method will return false for URIs with empty string ports.
*
* @link http://tools.ietf.org/html/rfc3986#section-3.2.3
*
* @return boolean True if this URI has a port.
*/
public function hasPort()
{
return null !== $this->encodedPort() && '' !== $this->encodedPort();
}
/**
* Get the port.
*
* @link http://tools.ietf.org/html/rfc3986#section-3.2.3
*
* @return integer|null The port, or null if there is no port, or the port is an empty string.
*/
public function port()
{
if ($this->hasPort()) {
return intval($this->encodedPort());
}
return null;
}
/**
* Get the encoded port.
*
* @link http://tools.ietf.org/html/rfc3986#section-3.2.3
*
* @return string|null The encoded port, or null if there is no port.
*/
public function encodedPort()
{
return $this->port;
}
/**
* Returns true if this URI has a path.
*
* This method will return false for URIs with empty string paths.
*
* @link http://tools.ietf.org/html/rfc3986#section-3.3
*
* @return boolean True if this URI has a path.
*/
public function hasPath()
{
return '' !== $this->path();
}
/**
* Get the path.
*
* @link http://tools.ietf.org/html/rfc3986#section-3.3
*
* @return string The path.
*/
public function path()
{
return static::encoder()->decode($this->encodedPath());
}
/**
* Get the encoded path.
*
* @link http://tools.ietf.org/html/rfc3986#section-3.3
*
* @return string The encoded path.
*/
public function encodedPath()
{
return $this->path;
}
/**
* Get the path as a path object.
*
* @link http://tools.ietf.org/html/rfc3986#section-3.3
*
* @return UriPathInterface The path.
*/
public function pathObject()
{
return static::pathFactory()->create($this->path());
}
/**
* Returns true if this URI has a query.
*
* This method will return false for URIs with empty string queries.
*
* @link http://tools.ietf.org/html/rfc3986#section-3.4
*
* @return boolean True if this URI has a query.
*/
public function hasQuery()
{
return !$this->queryParameters()->isEmpty();
}
/**
* Get the query.
*
* @link http://tools.ietf.org/html/rfc3986#section-3.4
*
* @return string|null The query, or null if there is no query.
*/
public function query()
{
if ($this->hasQueryDelimiter()) {
return static::encoder()->decode($this->encodedQuery());
}
return null;
}
/**
* Get the encoded query.
*
* @link http://tools.ietf.org/html/rfc3986#section-3.4
*
* @return string|null The encoded query, or null if there is no query.
*/
public function encodedQuery()
{
if ($this->hasQueryDelimiter()) {
return $this->queryParameters()->string();
}
return null;
}
/**
* Get the query parameters.
*
* @link http://tools.ietf.org/html/rfc3986#section-3.4
*
* @return UriParametersInterface The query parameters.
*/
public function queryParameters()
{
return $this->queryParameters;
}
/**
* Returns true if this URI has a fragment.
*
* This method will return false for URIs with empty string fragments.
*
* @link http://tools.ietf.org/html/rfc3986#section-3.5
*
* @return boolean True if this URI has a fragment.
*/
public function hasFragment()
{
return null !== $this->encodedFragment() &&
'' !== $this->encodedFragment();
}
/**
* Get the fragment.
*
* @link http://tools.ietf.org/html/rfc3986#section-3.5
*
* @return string|null The fragment, or null if there is no fragment.
*/
public function fragment()
{
if (null === $this->encodedFragment()) {
return null;
}
return static::encoder()->decode($this->encodedFragment());
}
/**
* Get the encoded fragment.
*
* @link http://tools.ietf.org/html/rfc3986#section-3.5
*
* @return string|null The encoded fragment, or null if there is no fragment.
*/
public function encodedFragment()
{
return $this->fragment;
}
/**
* Get the fragment parameters.
*
* @link http://tools.ietf.org/html/rfc3986#section-3.5
*
* @return UriParametersInterface The fragment parameters.
*/
public function fragmentParameters()
{
if (null === $this->encodedFragment()) {
return static::queryParametersFactory()->createEmpty();
}
return static::queryParametersFactory()
->createFromString($this->encodedFragment());
}
// Implementation of UriInterface ==========================================
/**
* Return a normalized version of this URI.
*
* @return UriInterface A normalized version of this URI.
*/
public function normalize()
{
return static::normalizer()->normalize($this);
}
/**
* Get a string representation of this URI.
*
* @return string A string representation of this URI.
*/
public function __toString()
{
return $this->string();
}
// Implementation details ==================================================
/**
* Returns true if this URI has a query delimiter.
*
* @return boolean True if this URI has a query delimiter.
*/
public function hasQueryDelimiter()
{
return $this->hasQueryDelimiter;
}
/**
* Get the most appropriate factory for this type of URI.
*
* @return Factory\GenericUriFactoryInterface The factory.
*/
protected static function factory()
{
return Factory\GenericUriFactory::instance();
}
/**
* Get the most appropriate path factory for this type of URI.
*
* @return PathFactoryInterface The factory.
*/
protected static function pathFactory()
{
return UriPathFactory::instance();
}
/**
* Get the most appropriate query parameters factory for this type of URI.
*
* @return UriParametersFactoryInterface The factory.
*/
protected static function queryParametersFactory()
{
return Factory\GenericUriQueryParametersFactory::instance();
}
/**
* Get the most appropriate validator for this type of URI.
*
* @return Validation\GenericUriValidatorInterface The validator.
*/
protected static function validator()
{
return Validation\GenericUriValidator::instance();
}
/**
* Get the most appropriate encoder for this type of URI.
*
* @return Encoding\GenericUriEncoderInterface The encoder.
*/
protected static function encoder()
{
return Encoding\GenericUriEncoder::instance();
}
/**
* Get the most appropriate normalizer for this type of URI.
*
* @return UriNormalizerInterface The normalizer.
*/
protected static function normalizer()
{
return Normalization\GenericUriNormalizer::instance();
}
private $username;
private $password;
private $host;
private $port;
private $path;
private $hasQueryDelimiter;
private $queryParameters;
private $fragment;
}
|
ezzatron/phuri
|
src/Generic/AbstractGenericUri.php
|
PHP
|
mit
| 12,051 |
/*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.common.data.manipulator.block;
import static org.spongepowered.api.data.DataQuery.of;
import org.spongepowered.api.data.DataContainer;
import org.spongepowered.api.data.DataQuery;
import org.spongepowered.api.data.MemoryDataContainer;
import org.spongepowered.api.data.manipulator.block.SignaledOutputData;
import org.spongepowered.common.data.manipulator.AbstractIntData;
public class SpongeSignaledOutputData extends AbstractIntData<SignaledOutputData> implements SignaledOutputData {
public static final DataQuery OUTPUT_SIGNAL_STRENGTH = of("OutputSignalStrength");
public SpongeSignaledOutputData() {
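        // default output signal of 0, bounded to the redstone strength range 0-15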
super(SignaledOutputData.class, 0, 0, 15);
}
@Override
public int getOutputSignal() {
return this.getValue();
}
@Override
public SignaledOutputData setOutputSignal(int signal) {
return this.setValue(signal);
}
@Override
public SignaledOutputData copy() {
return new SpongeSignaledOutputData().setValue(this.getValue());
}
@Override
public DataContainer toContainer() {
return new MemoryDataContainer().set(OUTPUT_SIGNAL_STRENGTH, this.getValue());
}
}
|
gabizou/SpongeCommon
|
src/main/java/org/spongepowered/common/data/manipulator/block/SpongeSignaledOutputData.java
|
Java
|
mit
| 2,441 |
// Copyright (c) 2013 Raphael Estrada
// License: The MIT License - see "LICENSE" file for details
// Author URL: http://www.galaktor.net
// Author E-Mail: galaktor@gmx.de
using System.Reflection;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("AutofacExtensions")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("Raphael Estrada")]
[assembly: AssemblyProduct("AutofacExtensions")]
[assembly: AssemblyCopyright("Copyright (c) Raphael Estrada 2013")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("31dee9f1-b44b-4a04-89cf-d17ea82953ef")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("0.0.0.0")]
[assembly: AssemblyVersion("0.0.0.0")]
[assembly: AssemblyFileVersion("0.0.0.0")]
|
galaktor/autofac-extensions
|
Properties/AssemblyInfo.cs
|
C#
|
mit
| 1,633 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
JSONType = Any
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_delete_request(
scope: str,
policy_assignment_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2016-12-01"
accept = "application/json, text/json"
# Construct URL
url = kwargs.pop("template_url", '/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}')
path_format_arguments = {
"scope": _SERIALIZER.url("scope", scope, 'str', skip_quote=True),
"policyAssignmentName": _SERIALIZER.url("policy_assignment_name", policy_assignment_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="DELETE",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_create_request(
scope: str,
policy_assignment_name: str,
*,
json: JSONType = None,
content: Any = None,
**kwargs: Any
) -> HttpRequest:
content_type = kwargs.pop('content_type', None) # type: Optional[str]
api_version = "2016-12-01"
accept = "application/json, text/json"
# Construct URL
url = kwargs.pop("template_url", '/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}')
path_format_arguments = {
"scope": _SERIALIZER.url("scope", scope, 'str', skip_quote=True),
"policyAssignmentName": _SERIALIZER.url("policy_assignment_name", policy_assignment_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="PUT",
url=url,
params=query_parameters,
headers=header_parameters,
json=json,
content=content,
**kwargs
)
def build_get_request(
scope: str,
policy_assignment_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2016-12-01"
accept = "application/json, text/json"
# Construct URL
url = kwargs.pop("template_url", '/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}')
path_format_arguments = {
"scope": _SERIALIZER.url("scope", scope, 'str', skip_quote=True),
"policyAssignmentName": _SERIALIZER.url("policy_assignment_name", policy_assignment_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_list_for_resource_group_request(
resource_group_name: str,
subscription_id: str,
*,
filter: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
api_version = "2016-12-01"
accept = "application/json, text/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Authorization/policyAssignments')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if filter is not None:
query_parameters['$filter'] = _SERIALIZER.query("filter", filter, 'str', skip_quote=True)
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_list_for_resource_request(
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
subscription_id: str,
*,
filter: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
api_version = "2016-12-01"
accept = "application/json, text/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}/providers/Microsoft.Authorization/policyAssignments')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
"resourceProviderNamespace": _SERIALIZER.url("resource_provider_namespace", resource_provider_namespace, 'str'),
"parentResourcePath": _SERIALIZER.url("parent_resource_path", parent_resource_path, 'str', skip_quote=True),
"resourceType": _SERIALIZER.url("resource_type", resource_type, 'str', skip_quote=True),
"resourceName": _SERIALIZER.url("resource_name", resource_name, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if filter is not None:
query_parameters['$filter'] = _SERIALIZER.query("filter", filter, 'str')
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_list_request(
subscription_id: str,
*,
filter: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
api_version = "2016-12-01"
accept = "application/json, text/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/policyAssignments')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if filter is not None:
query_parameters['$filter'] = _SERIALIZER.query("filter", filter, 'str')
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_delete_by_id_request(
policy_assignment_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2016-12-01"
accept = "application/json, text/json"
# Construct URL
url = kwargs.pop("template_url", '/{policyAssignmentId}')
path_format_arguments = {
"policyAssignmentId": _SERIALIZER.url("policy_assignment_id", policy_assignment_id, 'str', skip_quote=True),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="DELETE",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_create_by_id_request(
policy_assignment_id: str,
*,
json: JSONType = None,
content: Any = None,
**kwargs: Any
) -> HttpRequest:
content_type = kwargs.pop('content_type', None) # type: Optional[str]
api_version = "2016-12-01"
accept = "application/json, text/json"
# Construct URL
url = kwargs.pop("template_url", '/{policyAssignmentId}')
path_format_arguments = {
"policyAssignmentId": _SERIALIZER.url("policy_assignment_id", policy_assignment_id, 'str', skip_quote=True),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="PUT",
url=url,
params=query_parameters,
headers=header_parameters,
json=json,
content=content,
**kwargs
)
def build_get_by_id_request(
policy_assignment_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2016-12-01"
accept = "application/json, text/json"
# Construct URL
url = kwargs.pop("template_url", '/{policyAssignmentId}')
path_format_arguments = {
"policyAssignmentId": _SERIALIZER.url("policy_assignment_id", policy_assignment_id, 'str', skip_quote=True),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
class PolicyAssignmentsOperations(object):
"""PolicyAssignmentsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.resource.policy.v2016_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def delete(
self,
scope: str,
policy_assignment_name: str,
**kwargs: Any
) -> Optional["_models.PolicyAssignment"]:
"""Deletes a policy assignment.
:param scope: The scope of the policy assignment.
:type scope: str
:param policy_assignment_name: The name of the policy assignment to delete.
:type policy_assignment_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment, or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment or None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.PolicyAssignment"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_request(
scope=scope,
policy_assignment_name=policy_assignment_name,
template_url=self.delete.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('PolicyAssignment', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
delete.metadata = {'url': '/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}'} # type: ignore
@distributed_trace
def create(
self,
scope: str,
policy_assignment_name: str,
parameters: "_models.PolicyAssignment",
**kwargs: Any
) -> "_models.PolicyAssignment":
"""Creates a policy assignment.
Policy assignments are inherited by child resources. For example, when you apply a policy to a
resource group that policy is assigned to all resources in the group.
:param scope: The scope of the policy assignment.
:type scope: str
:param policy_assignment_name: The name of the policy assignment.
:type policy_assignment_name: str
:param parameters: Parameters for the policy assignment.
:type parameters: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment, or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyAssignment"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(parameters, 'PolicyAssignment')
request = build_create_request(
scope=scope,
policy_assignment_name=policy_assignment_name,
content_type=content_type,
json=_json,
template_url=self.create.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('PolicyAssignment', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create.metadata = {'url': '/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}'} # type: ignore
@distributed_trace
def get(
self,
scope: str,
policy_assignment_name: str,
**kwargs: Any
) -> "_models.PolicyAssignment":
"""Gets a policy assignment.
:param scope: The scope of the policy assignment.
:type scope: str
:param policy_assignment_name: The name of the policy assignment to get.
:type policy_assignment_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment, or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyAssignment"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
scope=scope,
policy_assignment_name=policy_assignment_name,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('PolicyAssignment', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}'} # type: ignore
@distributed_trace
def list_for_resource_group(
self,
resource_group_name: str,
filter: Optional[str] = None,
**kwargs: Any
) -> Iterable["_models.PolicyAssignmentListResult"]:
"""Gets policy assignments for the resource group.
:param resource_group_name: The name of the resource group that contains policy assignments.
:type resource_group_name: str
:param filter: The filter to apply on the operation.
:type filter: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PolicyAssignmentListResult or the result of
cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignmentListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyAssignmentListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
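            # the first request uses the templated URL; later pages follow the service-provided next_link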
if not next_link:
request = build_list_for_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
filter=filter,
template_url=self.list_for_resource_group.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_for_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
filter=filter,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("PolicyAssignmentListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_for_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Authorization/policyAssignments'} # type: ignore
@distributed_trace
def list_for_resource(
self,
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
filter: Optional[str] = None,
**kwargs: Any
) -> Iterable["_models.PolicyAssignmentListResult"]:
"""Gets policy assignments for a resource.
:param resource_group_name: The name of the resource group containing the resource. The name is
case insensitive.
:type resource_group_name: str
:param resource_provider_namespace: The namespace of the resource provider.
:type resource_provider_namespace: str
:param parent_resource_path: The parent resource path.
:type parent_resource_path: str
:param resource_type: The resource type.
:type resource_type: str
:param resource_name: The name of the resource with policy assignments.
:type resource_name: str
:param filter: The filter to apply on the operation.
:type filter: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PolicyAssignmentListResult or the result of
cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignmentListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyAssignmentListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_for_resource_request(
resource_group_name=resource_group_name,
resource_provider_namespace=resource_provider_namespace,
parent_resource_path=parent_resource_path,
resource_type=resource_type,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
filter=filter,
template_url=self.list_for_resource.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_for_resource_request(
resource_group_name=resource_group_name,
resource_provider_namespace=resource_provider_namespace,
parent_resource_path=parent_resource_path,
resource_type=resource_type,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
filter=filter,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("PolicyAssignmentListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_for_resource.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}/providers/Microsoft.Authorization/policyAssignments'} # type: ignore
@distributed_trace
def list(
self,
filter: Optional[str] = None,
**kwargs: Any
) -> Iterable["_models.PolicyAssignmentListResult"]:
"""Gets all the policy assignments for a subscription.
:param filter: The filter to apply on the operation.
:type filter: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PolicyAssignmentListResult or the result of
cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignmentListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyAssignmentListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
subscription_id=self._config.subscription_id,
filter=filter,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_request(
subscription_id=self._config.subscription_id,
filter=filter,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("PolicyAssignmentListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/policyAssignments'} # type: ignore
@distributed_trace
def delete_by_id(
self,
policy_assignment_id: str,
**kwargs: Any
) -> "_models.PolicyAssignment":
"""Deletes a policy assignment by ID.
When providing a scope for the assignment, use '/subscriptions/{subscription-id}/' for
subscriptions, '/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}' for
resource groups, and
'/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}/providers/{resource-provider-namespace}/{resource-type}/{resource-name}'
for resources.
:param policy_assignment_id: The ID of the policy assignment to delete. Use the format
'/{scope}/providers/Microsoft.Authorization/policyAssignments/{policy-assignment-name}'.
:type policy_assignment_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment, or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyAssignment"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_by_id_request(
policy_assignment_id=policy_assignment_id,
template_url=self.delete_by_id.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('PolicyAssignment', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
delete_by_id.metadata = {'url': '/{policyAssignmentId}'} # type: ignore
@distributed_trace
def create_by_id(
self,
policy_assignment_id: str,
parameters: "_models.PolicyAssignment",
**kwargs: Any
) -> "_models.PolicyAssignment":
"""Creates a policy assignment by ID.
Policy assignments are inherited by child resources. For example, when you apply a policy to a
resource group that policy is assigned to all resources in the group. When providing a scope
for the assignment, use '/subscriptions/{subscription-id}/' for subscriptions,
'/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}' for resource groups,
and
'/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}/providers/{resource-provider-namespace}/{resource-type}/{resource-name}'
for resources.
:param policy_assignment_id: The ID of the policy assignment to create. Use the format
'/{scope}/providers/Microsoft.Authorization/policyAssignments/{policy-assignment-name}'.
:type policy_assignment_id: str
:param parameters: Parameters for policy assignment.
:type parameters: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment, or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyAssignment"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(parameters, 'PolicyAssignment')
request = build_create_by_id_request(
policy_assignment_id=policy_assignment_id,
content_type=content_type,
json=_json,
template_url=self.create_by_id.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('PolicyAssignment', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_by_id.metadata = {'url': '/{policyAssignmentId}'} # type: ignore
@distributed_trace
def get_by_id(
self,
policy_assignment_id: str,
**kwargs: Any
) -> "_models.PolicyAssignment":
"""Gets a policy assignment by ID.
When providing a scope for the assignment, use '/subscriptions/{subscription-id}/' for
subscriptions, '/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}' for
resource groups, and
'/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}/providers/{resource-provider-namespace}/{resource-type}/{resource-name}'
for resources.
:param policy_assignment_id: The ID of the policy assignment to get. Use the format
'/{scope}/providers/Microsoft.Authorization/policyAssignments/{policy-assignment-name}'.
:type policy_assignment_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment, or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyAssignment"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_by_id_request(
policy_assignment_id=policy_assignment_id,
template_url=self.get_by_id.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('PolicyAssignment', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_by_id.metadata = {'url': '/{policyAssignmentId}'} # type: ignore
|
Azure/azure-sdk-for-python
|
sdk/resources/azure-mgmt-resource/azure/mgmt/resource/policy/v2016_12_01/operations/_policy_assignments_operations.py
|
Python
|
mit
| 38,695 |
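A minimal usage sketch for the PolicyAssignmentsOperations entry above, assuming the usual client entry points (DefaultAzureCredential from azure-identity and PolicyClient from azure-mgmt-resource); the subscription id, assignment name and the policy_assignments attribute are illustrative assumptions rather than values taken from the file.
from azure.identity import DefaultAzureCredential
from azure.mgmt.resource.policy import PolicyClient
subscription_id = "00000000-0000-0000-0000-000000000000"  # placeholder
client = PolicyClient(DefaultAzureCredential(), subscription_id, api_version="2016-12-01")
# list() returns an ItemPaged iterator, so paging is handled transparently.
for assignment in client.policy_assignments.list():
    print(assignment.name)
# Fetch one assignment at subscription scope, then delete it by its full resource id.
scope = f"/subscriptions/{subscription_id}"
assignment = client.policy_assignments.get(scope, "example-assignment")
client.policy_assignments.delete_by_id(assignment.id)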
class CreateDocuments < ActiveRecord::Migration[5.0]
def change
create_table :documents do |t|
t.integer :product_id, null: false
t.string :type, null: false
t.string :url, null: false
t.timestamps
end
end
end
|
unasuke/proconist.net
|
db/migrate/20160610084904_create_documents.rb
|
Ruby
|
mit
| 265 |
# -*- coding: utf-8 -*-
"""
Created on Fri Jun 25 16:20:12 2015
@author: Balázs Hidasi
@lastmodified: Loreto Parisi (loretoparisi at gmail dot com)
"""
import sys
import os
import numpy as np
import pandas as pd
import datetime as dt
# To redirect output to file
class Logger(object):
def __init__(self, filename="Default.log"):
self.terminal = sys.stdout
self.log = open(filename, "a")
def write(self, message):
self.terminal.write(message)
self.log.write(message)
def flush(self):
pass
sys.stdout = Logger(os.environ['HOME'] + '/theano.log')
PATH_TO_ORIGINAL_DATA = os.environ['HOME'] + '/'
PATH_TO_PROCESSED_DATA = os.environ['HOME'] + '/'
data = pd.read_csv(PATH_TO_ORIGINAL_DATA + 'yoochoose-clicks.dat', sep=',', header=None, usecols=[0,1,2], dtype={0:np.int32, 1:str, 2:np.int64})
data.columns = ['SessionId', 'TimeStr', 'ItemId']
data['Time'] = data.TimeStr.apply(lambda x: dt.datetime.strptime(x, '%Y-%m-%dT%H:%M:%S.%fZ').timestamp()) #This is not UTC. It does not really matter.
del(data['TimeStr'])
session_lengths = data.groupby('SessionId').size()
data = data[np.in1d(data.SessionId, session_lengths[session_lengths>1].index)]
item_supports = data.groupby('ItemId').size()
data = data[np.in1d(data.ItemId, item_supports[item_supports>=5].index)]
session_lengths = data.groupby('SessionId').size()
data = data[np.in1d(data.SessionId, session_lengths[session_lengths>=2].index)]
tmax = data.Time.max()
session_max_times = data.groupby('SessionId').Time.max()
session_train = session_max_times[session_max_times < tmax-86400].index
session_test = session_max_times[session_max_times >= tmax-86400].index
train = data[np.in1d(data.SessionId, session_train)]
test = data[np.in1d(data.SessionId, session_test)]
test = test[np.in1d(test.ItemId, train.ItemId)]
tslength = test.groupby('SessionId').size()
test = test[np.in1d(test.SessionId, tslength[tslength>=2].index)]
print('Full train set\n\tEvents: {}\n\tSessions: {}\n\tItems: {}'.format(len(train), train.SessionId.nunique(), train.ItemId.nunique()))
train.to_csv(PATH_TO_PROCESSED_DATA + 'rsc15_train_full.txt', sep='\t', index=False)
print('Test set\n\tEvents: {}\n\tSessions: {}\n\tItems: {}'.format(len(test), test.SessionId.nunique(), test.ItemId.nunique()))
test.to_csv(PATH_TO_PROCESSED_DATA + 'rsc15_test.txt', sep='\t', index=False)
tmax = train.Time.max()
session_max_times = train.groupby('SessionId').Time.max()
session_train = session_max_times[session_max_times < tmax-86400].index
session_valid = session_max_times[session_max_times >= tmax-86400].index
train_tr = train[np.in1d(train.SessionId, session_train)]
valid = train[np.in1d(train.SessionId, session_valid)]
valid = valid[np.in1d(valid.ItemId, train_tr.ItemId)]
tslength = valid.groupby('SessionId').size()
valid = valid[np.in1d(valid.SessionId, tslength[tslength>=2].index)]
print('Train set\n\tEvents: {}\n\tSessions: {}\n\tItems: {}'.format(len(train_tr), train_tr.SessionId.nunique(), train_tr.ItemId.nunique()))
train_tr.to_csv(PATH_TO_PROCESSED_DATA + 'rsc15_train_tr.txt', sep='\t', index=False)
print('Validation set\n\tEvents: {}\n\tSessions: {}\n\tItems: {}'.format(len(valid), valid.SessionId.nunique(), valid.ItemId.nunique()))
valid.to_csv(PATH_TO_PROCESSED_DATA + 'rsc15_train_valid.txt', sep='\t', index=False)
|
loretoparisi/docker
|
theano/rsc15/preprocess.py
|
Python
|
mit
| 3,325 |
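A short sketch of loading the files the script writes back into pandas; the tab separator and the SessionId/ItemId/Time columns come from the to_csv calls above, while the path assumes the script's output directory.
import pandas as pd
train = pd.read_csv('rsc15_train_full.txt', sep='\t')
print(train.columns.tolist())                        # ['SessionId', 'ItemId', 'Time']
print(train.groupby('SessionId').size().describe())  # session length statistics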
<?php
namespace PSR2R\Sniffs\Commenting;
use PHP_CodeSniffer\Files\File;
use PHP_CodeSniffer\Util\Tokens;
use PSR2R\Tools\AbstractSniff;
use PSR2R\Tools\Traits\CommentingTrait;
use PSR2R\Tools\Traits\SignatureTrait;
/**
* Methods always need doc blocks.
* Constructor and destructor may not have one if they do not have arguments.
*/
class DocBlockSniff extends AbstractSniff {
use CommentingTrait;
use SignatureTrait;
/**
* @inheritDoc
*/
public function register(): array {
return [T_FUNCTION];
}
/**
* @inheritDoc
*/
public function process(File $phpcsFile, $stackPtr): void {
$tokens = $phpcsFile->getTokens();
$nextIndex = $phpcsFile->findNext(Tokens::$emptyTokens, $stackPtr + 1, null, true);
if ($nextIndex === false) {
return;
}
if ($tokens[$nextIndex]['content'] === '__construct' || $tokens[$nextIndex]['content'] === '__destruct') {
$this->checkConstructorAndDestructor($phpcsFile, $stackPtr);
return;
}
// Don't mess with closures
$prevIndex = $phpcsFile->findPrevious(Tokens::$emptyTokens, $stackPtr - 1, null, true);
if (!$this->isGivenKind(Tokens::$methodPrefixes, $tokens[$prevIndex])) {
return;
}
$docBlockEndIndex = $this->findRelatedDocBlock($phpcsFile, $stackPtr);
if ($docBlockEndIndex) {
return;
}
// We only look for void methods right now
$returnType = $this->detectReturnTypeVoid($phpcsFile, $stackPtr);
if ($returnType === null) {
$phpcsFile->addError('Method does not have a doc block: ' . $tokens[$nextIndex]['content'] . '()', $nextIndex, 'DocBlockMissing');
return;
}
$fix = $phpcsFile->addFixableError('Method does not have a docblock with return void statement: ' . $tokens[$nextIndex]['content'], $nextIndex, 'ReturnVoidMissing');
if (!$fix) {
return;
}
$this->addDocBlock($phpcsFile, $stackPtr, $returnType);
}
/**
* @param \PHP_CodeSniffer\Files\File $phpcsFile
* @param int $index
* @param string $returnType
*
* @return void
*/
protected function addDocBlock(File $phpcsFile, int $index, string $returnType): void {
$tokens = $phpcsFile->getTokens();
$firstTokenOfLine = $this->getFirstTokenOfLine($tokens, $index);
$prevContentIndex = $phpcsFile->findPrevious(T_WHITESPACE, $firstTokenOfLine - 1, null, true);
if ($prevContentIndex === false) {
return;
}
if ($tokens[$prevContentIndex]['type'] === 'T_ATTRIBUTE_END') {
$firstTokenOfLine = $this->getFirstTokenOfLine($tokens, $prevContentIndex);
}
$indentation = $this->getIndentationWhitespace($phpcsFile, $index);
$phpcsFile->fixer->beginChangeset();
$phpcsFile->fixer->addNewlineBefore($firstTokenOfLine);
$phpcsFile->fixer->addContentBefore($firstTokenOfLine, $indentation . ' */');
$phpcsFile->fixer->addNewlineBefore($firstTokenOfLine);
$phpcsFile->fixer->addContentBefore($firstTokenOfLine, $indentation . ' * @return ' . $returnType);
$phpcsFile->fixer->addNewlineBefore($firstTokenOfLine);
$phpcsFile->fixer->addContentBefore($firstTokenOfLine, $indentation . '/**');
$phpcsFile->fixer->endChangeset();
}
/**
* @param \PHP_CodeSniffer\Files\File $phpcsFile
* @param int $stackPtr
*
* @return void
*/
protected function checkConstructorAndDestructor(File $phpcsFile, int $stackPtr): void {
$docBlockEndIndex = $this->findRelatedDocBlock($phpcsFile, $stackPtr);
if ($docBlockEndIndex) {
return;
}
$methodSignature = $this->getMethodSignature($phpcsFile, $stackPtr);
$arguments = count($methodSignature);
if (!$arguments) {
return;
}
$phpcsFile->addError('Missing doc block for method', $stackPtr, 'ConstructDesctructMissingDocBlock');
}
/**
* @param \PHP_CodeSniffer\Files\File $phpcsFile
* @param int $docBlockStartIndex
* @param int $docBlockEndIndex
*
* @return int|null
*/
protected function findDocBlockReturn(File $phpcsFile, int $docBlockStartIndex, int $docBlockEndIndex): ?int {
$tokens = $phpcsFile->getTokens();
for ($i = $docBlockStartIndex + 1; $i < $docBlockEndIndex; $i++) {
if (!$this->isGivenKind(T_DOC_COMMENT_TAG, $tokens[$i])) {
continue;
}
if ($tokens[$i]['content'] !== '@return') {
continue;
}
return $i;
}
return null;
}
/**
* For right now we only try to detect void.
*
* @param \PHP_CodeSniffer\Files\File $phpcsFile
* @param int $index
*
* @return string|null
*/
protected function detectReturnTypeVoid(File $phpcsFile, int $index): ?string {
$tokens = $phpcsFile->getTokens();
$type = 'void';
if (empty($tokens[$index]['scope_opener'])) {
return null;
}
$methodStartIndex = $tokens[$index]['scope_opener'];
$methodEndIndex = $tokens[$index]['scope_closer'];
for ($i = $methodStartIndex + 1; $i < $methodEndIndex; ++$i) {
if ($this->isGivenKind([T_FUNCTION, T_CLOSURE], $tokens[$i])) {
$endIndex = $tokens[$i]['scope_closer'];
if (!empty($tokens[$i]['nested_parenthesis'])) {
$endIndex = array_pop($tokens[$i]['nested_parenthesis']);
}
$i = $endIndex;
continue;
}
if (!$this->isGivenKind([T_RETURN], $tokens[$i])) {
continue;
}
$nextIndex = $phpcsFile->findNext(Tokens::$emptyTokens, $i + 1, null, true);
if (!$this->isGivenKind(T_SEMICOLON, $tokens[$nextIndex])) {
return null;
}
}
return $type;
}
}
|
php-fig-rectified/psr2r-sniffer
|
PSR2R/Sniffs/Commenting/DocBlockSniff.php
|
PHP
|
mit
| 5,275 |
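To illustrate what the fixer in addDocBlock() produces, here is a hypothetical before/after for a method that never returns a value; the class and method names are invented.
<?php
// Before: reported as ReturnVoidMissing (public method, no doc block, no return value).
class ExampleBefore {
    public function reset() {
        $this->items = [];
    }
}
// After the fixer runs, a doc block with a @return void tag has been inserted.
class ExampleAfter {
    /**
     * @return void
     */
    public function reset() {
        $this->items = [];
    }
}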
(function() {
'use strict';
process.env.debug_sql = true;
var Class = require('ee-class')
, log = require('ee-log')
, assert = require('assert')
, fs = require('fs')
, QueryContext = require('related-query-context')
, ORM = require('related');
var TimeStamps = require('../')
, sqlStatments
, extension
, orm
, db;
// sql for test db
sqlStatments = fs.readFileSync(__dirname+'/db.postgres.sql').toString().split(';').map(function(input){
return input.trim().replace(/\n/gi, ' ').replace(/\s{2,}/g, ' ')
}).filter(function(item){
return item.length;
});
describe('Travis', function(){
it('should have set up the test db', function(done){
var config;
try {
config = require('../config.js').db
} catch(e) {
config = [{
type: 'postgres'
, schema: 'related_timestamps_test'
, database : 'test'
, hosts: [{}]
}];
}
this.timeout(5000);
orm = new ORM(config);
done();
});
it('should be able to drop & create the testing schema ('+sqlStatments.length+' raw SQL queries)', function(done){
orm.getDatabase('related_timestamps_test').getConnection('write').then((connection) => {
return new Promise((resolve, reject) => {
let exec = (index) => {
if (sqlStatments[index]) {
connection.query(new QueryContext({sql:sqlStatments[index]})).then(() => {
exec(index + 1);
}).catch(reject);
}
else resolve();
}
exec(0);
});
}).then(() => {
done();
}).catch(done);
});
});
var expect = function(val, cb){
return function(err, result){
try {
assert.equal(JSON.stringify(result), val);
} catch (err) {
return cb(err);
}
cb();
}
};
describe('The TimeStamps Extension', function() {
var oldDate;
    it('should not crash when instantiated', function() {
db = orm.related_timestamps_test;
extension = new TimeStamps();
});
it('should not crash when injected into the orm', function(done) {
orm.use(extension);
orm.load(done);
});
it('should set correct timestamps when inserting a new record', function(done) {
db = orm.related_timestamps_test;
new db.event().save(function(err, evt) {
if (err) done(err);
else {
assert.notEqual(evt.created, null);
assert.notEqual(evt.updated, null);
assert.equal(evt.deleted, null);
oldDate = evt.updated;
done();
}
});
});
it('should set correct timestamps when updating a record', function(done) {
        // wait, we need a new timestamp
setTimeout(function() {
db.event({id:1}, ['*']).findOne(function(err, evt) {
if (err) done(err);
else {
evt.name = 'func with timestamps? no, that ain\'t fun!';
evt.save(function(err){
assert.notEqual(evt.created, null);
assert.notEqual(evt.updated, null);
assert.notEqual(evt.updated.toUTCString(), oldDate.toUTCString());
assert.equal(evt.deleted, null);
done();
});
}
});
}, 1500);
});
it('should set correct timestamps when deleting a record', function(done) {
db.event({id:1}, ['*']).findOne(function(err, evt) {
if (err) done(err);
else {
evt.delete(function(err) {
assert.notEqual(evt.created, null);
assert.notEqual(evt.updated, null);
assert.notEqual(evt.deleted, null);
done();
});
}
});
});
it('should not return soft deleted records when not requested', function(done) {
db.event({id:1}, ['*']).findOne(function(err, evt) {
if (err) done(err);
else {
assert.equal(evt, undefined);
done();
}
});
});
it('should return soft deleted records when requested', function(done) {
db.event({id:1}, ['*']).includeSoftDeleted().findOne(function(err, evt) {
if (err) done(err);
else {
assert.equal(evt.id, 1);
done();
}
});
});
it('should hard delete records when requested', function(done) {
db.event({id:1}, ['*']).includeSoftDeleted().findOne(function(err, evt) {
if (err) done(err);
else {
evt.hardDelete(function(err) {
if (err) done(err);
else {
db.event({id:1}, ['*']).findOne(function(err, evt) {
if (err) done(err);
else {
assert.equal(evt, undefined);
done();
}
});
}
});
}
});
});
it('should not load softdeleted references', function(done) {
new db.event({
name: 'so what'
, eventInstance: [new db.eventInstance({startdate: new Date(), deleted: new Date()})]
}).save(function(err, evt) {
if (err) done(err);
else {
db.event(['*'], {id:evt.id}).fetchEventInstance(['*']).findOne(function(err, event) {
if (err) done(err);
else {
assert.equal(event.eventInstance.length, 0);
done();
}
});
}
});
})
it ('should work when using bulk deletes', function(done) {
new db.event({name: 'bulk delete 1'}).save().then(function() {
return new db.event({name: 'bulk delete 2'}).save()
}).then(function() {
return new db.event({name: 'bulk delete 3'}).save()
}).then(function() {
return db.event('id').find();
}).then(function(records) {
if (JSON.stringify(records) !== '[{"id":2},{"id":3},{"id":4},{"id":5}]') return Promise.reject(new Error('Expected «[{"id":2},{"id":3},{"id":4},{"id":5}]», got «'+JSON.stringify(records)+'»!'))
else return db.event({
id: ORM.gt(3)
}).delete();
}).then(function() {
return db.event('id').find();
}).then(function(emptyList) {
if (JSON.stringify(emptyList) !== '[{"id":2},{"id":3}]') return Promise.reject(new Error('Expected «[{"id":2},{"id":3}]», got «'+JSON.stringify(emptyList)+'»!'))
else return db.event('id').includeSoftDeleted().find();
}).then(function(list) {
if (JSON.stringify(list) !== '[{"id":2},{"id":3},{"id":4},{"id":5}]') return Promise.reject(new Error('Expected «[{"id":2},{"id":3},{"id":4},{"id":5}]», got «'+JSON.stringify(list)+'»!'))
done();
}).catch(done);
})
});
})();
|
eventEmitter/related-timestamps
|
test/extension.js
|
JavaScript
|
mit
| 8,381 |
#!/usr/bin/env ruby
require 'tasklist'
during '2010 September' do
on '2010-09-03' do
task 'Take out garbage'
task 'Wash car'
end
on '2010-09-02' do
task 'Create tasklist DSL', '09:15:56', '', 'admin', 'done'
task 'Push tasklist to github', '09:34:00', '09:38:04', 'github'
end
end
|
kevincolyar/tasklist
|
example.rb
|
Ruby
|
mit
| 310 |
var passport = require('passport');
var WebIDStrategy = require('passport-webid').Strategy;
var tokens = require('../../util/tokens');
var ids = require('../../util/id');
var console = require('../../log');
var createError = require('http-errors');
var dateUtils = require('../../util/date');
var url = require('url');
function loadStrategy(conf, entityStorageConf) {
var auth_type = "webid";
var db = require('../../db')(conf, entityStorageConf);
var enabled = conf.enabledStrategies.filter(function (v) {
return (v === auth_type);
});
if (enabled.length === 0) {
console.log('ignoring ' + auth_type + ' strategy for user authentication. Not enabled in the configuration');
return false;
} else {
try {
passport.use(auth_type, new WebIDStrategy(
function (webid, certificate, req, done) {
console.log("Aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
console.log("Aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
console.log("Aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
console.log("Aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
console.log("Aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
console.log("Aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
console.log("Aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
console.log("Aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
console.log("Aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
console.log("Aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
console.log("Aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
console.log("Aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
console.log("Aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
console.log("Aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
var id = {
user_name: webid,
auth_type: auth_type
};
var oauth2ReturnToParsed = url.parse(req.session.returnTo, true).query;
console.log(" sesion in strategy " + auth_type + JSON.stringify(oauth2ReturnToParsed));
console.log(" client id from session in " + auth_type + JSON.stringify(oauth2ReturnToParsed.client_id));
console.log(" response_type for oauth2 in " + auth_type + JSON.stringify(oauth2ReturnToParsed.response_type));
var accessToken = tokens.uid(30);
var d = Date.parse(certificate.valid_to);
var default_exp = dateUtils.dateToEpochMilis(d);
db.users.findByUsernameAndAuthType(webid, auth_type, function (err, user) {
if (err) {
return done(err);
}
if (!user) {
return done(null, false);
}
db.accessTokens.saveOauth2Token(accessToken, user.id, oauth2ReturnToParsed.client_id, "bearer", [conf.gateway_id], default_exp, null, oauth2ReturnToParsed.response_type, function (err) {
if (err) {
return done(err);
}
return done(null, user);
});
});
}
));
console.log('finished registering passport ' + auth_type + ' strategy');
return true;
} catch (e) {
console.log('FAIL TO register a strategy');
console.log('ERROR: error loading ' + auth_type + ' passport strategy: ' + e);
return false;
}
}
}
module.exports = loadStrategy;
|
Agile-IoT/agile-idm-web-ui
|
lib/auth/providers/webid.js
|
JavaScript
|
mit
| 3,465 |
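A rough wiring sketch for the loader above; only the configuration fields the module reads directly (enabledStrategies, gateway_id) are shown, and the require path plus any database settings expected by the db module are assumptions.
var loadWebIdStrategy = require('./lib/auth/providers/webid');
var conf = { enabledStrategies: ['webid'], gateway_id: 'my-gateway' };  // plus whatever the db module needs
var entityStorageConf = {};
if (loadWebIdStrategy(conf, entityStorageConf)) {
  // The strategy is registered under the name "webid",
  // e.g. passport.authenticate('webid') can now be used in a route.
}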
module AwsHelpers
module ElasticLoadBalancing
class CreateTag
def initialize(elastic_load_balancing_client, load_balancer_name, tag_key, tag_value)
@elastic_load_balancing_client = elastic_load_balancing_client
@load_balancer_name = load_balancer_name
@tag_key = tag_key
@tag_value = tag_value
end
def execute
        @elastic_load_balancing_client.add_tags(
          load_balancer_names: [@load_balancer_name],
          tags: [{ key: @tag_key, value: @tag_value }]
        )
end
end
end
end
|
MYOB-Technology/aws_helpers
|
lib/aws_helpers/elastic_load_balancing/create_tag.rb
|
Ruby
|
mit
| 652 |
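A short usage sketch, assuming the aws-sdk v2 client class Aws::ElasticLoadBalancing::Client and placeholder load balancer and tag names.
require 'aws-sdk'
client = Aws::ElasticLoadBalancing::Client.new(region: 'us-east-1')
AwsHelpers::ElasticLoadBalancing::CreateTag.new(client, 'my-load-balancer', 'Environment', 'staging').execute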
export declare class Console {
private static quiet;
private static debug;
private static verbose;
static Log(text: any): void;
private static readonly Timestamp;
static Debug(text: any): void;
static Verbose(text: any): void;
static Error(text: any): void;
static Exit(reason: any): void;
}
|
APEEYEDOTCOM/hapi-bells
|
node_modules/autorest/console.d.ts
|
TypeScript
|
mit
| 328 |
<?php
/*
* This file is part of the Sylius package.
*
* (c) Paweł Jędrzejewski
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
declare(strict_types=1);
namespace Sylius\Bundle\CoreBundle\Command;
use Sylius\Component\Core\Model\AdminUserInterface;
use Sylius\Component\User\Repository\UserRepositoryInterface;
use Symfony\Component\Console\Helper\QuestionHelper;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Output\OutputInterface;
use Symfony\Component\Console\Question\Question;
use Symfony\Component\Console\Style\SymfonyStyle;
use Symfony\Component\Validator\Constraints\Email;
use Symfony\Component\Validator\Constraints\NotBlank;
use Symfony\Component\Validator\ConstraintViolationListInterface;
use Webmozart\Assert\Assert;
final class SetupCommand extends AbstractInstallCommand
{
/**
* {@inheritdoc}
*/
protected function configure(): void
{
$this
->setName('sylius:install:setup')
->setDescription('Sylius configuration setup.')
->setHelp(<<<EOT
The <info>%command.name%</info> command allows user to configure basic Sylius data.
EOT
)
;
}
/**
* {@inheritdoc}
*/
protected function execute(InputInterface $input, OutputInterface $output): void
{
$currency = $this->get('sylius.setup.currency')->setup($input, $output, $this->getHelper('question'));
$locale = $this->get('sylius.setup.locale')->setup($input, $output);
$this->get('sylius.setup.channel')->setup($locale, $currency);
$this->setupAdministratorUser($input, $output, $locale->getCode());
}
/**
* @param InputInterface $input
* @param OutputInterface $output
* @param string $localeCode
*/
protected function setupAdministratorUser(InputInterface $input, OutputInterface $output, string $localeCode): void
{
$outputStyle = new SymfonyStyle($input, $output);
$outputStyle->writeln('Create your administrator account.');
$userManager = $this->get('sylius.manager.admin_user');
$userFactory = $this->get('sylius.factory.admin_user');
try {
$user = $this->configureNewUser($userFactory->createNew(), $input, $output);
} catch (\InvalidArgumentException $exception) {
return;
}
$user->setEnabled(true);
$user->setLocaleCode($localeCode);
$userManager->persist($user);
$userManager->flush();
$outputStyle->writeln('<info>Administrator account successfully registered.</info>');
$outputStyle->newLine();
}
/**
* @param AdminUserInterface $user
* @param InputInterface $input
* @param OutputInterface $output
*
* @return AdminUserInterface
*/
private function configureNewUser(
AdminUserInterface $user,
InputInterface $input,
OutputInterface $output
): AdminUserInterface {
/** @var UserRepositoryInterface $userRepository */
$userRepository = $this->getAdminUserRepository();
if ($input->getOption('no-interaction')) {
Assert::null($userRepository->findOneByEmail('sylius@example.com'));
$user->setEmail('sylius@example.com');
$user->setUsername('sylius');
$user->setPlainPassword('sylius');
return $user;
}
$email = $this->getAdministratorEmail($input, $output);
$user->setEmail($email);
$user->setUsername($this->getAdministratorUsername($input, $output, $email));
$user->setPlainPassword($this->getAdministratorPassword($input, $output));
return $user;
}
/**
* @return Question
*/
private function createEmailQuestion(): Question
{
return (new Question('E-mail: '))
->setValidator(function ($value) {
/** @var ConstraintViolationListInterface $errors */
$errors = $this->get('validator')->validate((string) $value, [new Email(), new NotBlank()]);
foreach ($errors as $error) {
throw new \DomainException($error->getMessage());
}
return $value;
})
->setMaxAttempts(3)
;
}
/**
* @param InputInterface $input
* @param OutputInterface $output
*
* @return string
*/
private function getAdministratorEmail(InputInterface $input, OutputInterface $output): string
{
/** @var QuestionHelper $questionHelper */
$questionHelper = $this->getHelper('question');
/** @var UserRepositoryInterface $userRepository */
$userRepository = $this->getAdminUserRepository();
do {
$question = $this->createEmailQuestion();
$email = $questionHelper->ask($input, $output, $question);
$exists = null !== $userRepository->findOneByEmail($email);
if ($exists) {
$output->writeln('<error>E-Mail is already in use!</error>');
}
} while ($exists);
return $email;
}
/**
* @param InputInterface $input
* @param OutputInterface $output
* @param string $email
*
* @return string
*/
private function getAdministratorUsername(InputInterface $input, OutputInterface $output, string $email): string
{
/** @var QuestionHelper $questionHelper */
$questionHelper = $this->getHelper('question');
/** @var UserRepositoryInterface $userRepository */
$userRepository = $this->getAdminUserRepository();
do {
$question = new Question('Username (press enter to use email): ', $email);
$username = $questionHelper->ask($input, $output, $question);
$exists = null !== $userRepository->findOneBy(['username' => $username]);
if ($exists) {
$output->writeln('<error>Username is already in use!</error>');
}
} while ($exists);
return $username;
}
/**
* @param InputInterface $input
* @param OutputInterface $output
*
* @return mixed
*/
private function getAdministratorPassword(InputInterface $input, OutputInterface $output)
{
/** @var QuestionHelper $questionHelper */
$questionHelper = $this->getHelper('question');
$validator = $this->getPasswordQuestionValidator();
do {
$passwordQuestion = $this->createPasswordQuestion('Choose password:', $validator);
$confirmPasswordQuestion = $this->createPasswordQuestion('Confirm password:', $validator);
$password = $questionHelper->ask($input, $output, $passwordQuestion);
$repeatedPassword = $questionHelper->ask($input, $output, $confirmPasswordQuestion);
if ($repeatedPassword !== $password) {
$output->writeln('<error>Passwords do not match!</error>');
}
} while ($repeatedPassword !== $password);
return $password;
}
/**
* @return \Closure
*/
private function getPasswordQuestionValidator(): \Closure
{
return function ($value) {
/** @var ConstraintViolationListInterface $errors */
$errors = $this->get('validator')->validate($value, [new NotBlank()]);
foreach ($errors as $error) {
throw new \DomainException($error->getMessage());
}
return $value;
};
}
/**
* @param string $message
* @param \Closure $validator
*
* @return Question
*/
private function createPasswordQuestion(string $message, \Closure $validator): Question
{
return (new Question($message))
->setValidator($validator)
->setMaxAttempts(3)
->setHidden(true)
->setHiddenFallback(false)
;
}
/**
* @return UserRepositoryInterface
*/
private function getAdminUserRepository(): UserRepositoryInterface
{
return $this->get('sylius.repository.admin_user');
}
}
|
vihuvac/Sylius
|
src/Sylius/Bundle/CoreBundle/Command/SetupCommand.php
|
PHP
|
mit
| 8,226 |
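The class registers itself as sylius:install:setup, so it is normally invoked through the Symfony console; with the standard --no-interaction option it falls back to the hard-coded sylius@example.com / sylius credentials seen in configureNewUser(). The bin/console path is the usual Symfony location and is an assumption here.
php bin/console sylius:install:setup
php bin/console sylius:install:setup --no-interaction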
import { Nibble, UInt4 } from '../types'
/**
* Returns a Nibble (0-15) which equals the given bits.
*
* @example
* byte.write([1,0,1,0]) => 10
*
* @param {Array} nibble 4-bit unsigned integer
* @return {Number}
*/
export default (nibble: Nibble): UInt4 => {
if (!Array.isArray(nibble) || nibble.length !== 4)
throw new RangeError('invalid array length')
let result: UInt4 = 0
for (let i: number = 0; i < 4; i++) if (nibble[3 - i]) result |= 1 << i
return <UInt4>result
}
|
dodekeract/bitwise
|
source/nibble/write.ts
|
TypeScript
|
mit
| 489 |
import { Injectable } from '@angular/core';
import { DataService } from '../../../_service/dataconnect';
import { Router } from '@angular/router';
@Injectable()
export class WarehouseViewService {
constructor(private _dataserver: DataService, private _router: Router) { }
getwarehouseTransfer(req: any) {
return this._dataserver.post("getwarehouseTransfer", req);
}
}
|
masagatech/erpv1
|
src/app/_service/warehousestock/view/view-service.ts
|
TypeScript
|
mit
| 390 |
using System;
using Newtonsoft.Json;
namespace MultiSafepay.Model
{
public class Transaction
{
[JsonProperty("transaction_id")]
public string TransactionId { get; set; }
[JsonProperty("payment_type")]
public string PaymentType { get; set; }
[JsonProperty("order_id")]
public string OrderId { get; set; }
[JsonProperty("status")]
public string TransactionStatus { get; set; }
[JsonProperty("description")]
public string Description { get; set; }
[JsonProperty("created")]
public DateTime? CreatedDate { get; set; }
[JsonProperty("order_status")]
public string OrderStatus { get; set; }
[JsonProperty("amount")]
public int Amount { get; set; }
[JsonProperty("currency")]
public string CurrencyCode { get; set; }
[JsonProperty("customer")]
public Customer Customer { get; set; }
[JsonProperty("payment_details")]
public PaymentDetails PaymentDetails { get; set; }
}
}
|
MultiSafepay/.Net
|
Src/MultiSafepay/Model/Transaction.cs
|
C#
|
mit
| 1,056 |
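Because the model is annotated with Newtonsoft.Json attributes, deserialization is a single JsonConvert call; the sample payload below is made up.
using System;
using MultiSafepay.Model;
using Newtonsoft.Json;
class TransactionExample
{
    static void Main()
    {
        // Hypothetical response body; field names match the JsonProperty attributes above.
        var json = "{\"transaction_id\":\"12345\",\"order_id\":\"O-1\",\"status\":\"completed\",\"amount\":1000,\"currency\":\"EUR\"}";
        var transaction = JsonConvert.DeserializeObject<Transaction>(json);
        Console.WriteLine($"{transaction.OrderId}: {transaction.Amount} {transaction.CurrencyCode} ({transaction.TransactionStatus})");
    }
}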
require 'spec_helper'
describe MWS::Report do
describe ".method_missing" do
describe ".get_report_list" do
let(:valid_args){
{
key: "ThisIsSigningKey",
endpoint: "mws.amazonservices.com",
params: {
"AWSAccessKeyId" => "AccessKeyIdString",
"SellerId" => "SellerIdString",
"ReportTypeList" => ["_GET_FLAT_FILE_ORDERS_DATA_"],
"Acknowledged" => false,
"MaxCount" => 100
}
}
}
before do
response = double("request")
expect(response).to receive(:body).and_return("BodyString")
request = double("request")
expect(request).to receive(:execute).and_return(response)
expect(MWS::Request).to receive(:new).and_return(request)
end
subject { described_class.get_report_list(valid_args) }
it { is_expected.to be_a String }
end
end
end
|
s-osa/marketplace_web_service
|
spec/mws/report_spec.rb
|
Ruby
|
mit
| 929 |
require 'resolv'
module Geocoder
class IpAddress < String
def loopback?
valid? and !!(self == "0.0.0.0" or self.match(/\A127\./) or self == "::1")
end
def valid?
!!((self =~ Resolv::IPv4::Regex) || (self =~ Resolv::IPv6::Regex))
end
end
end
|
tiramizoo/geocoder
|
lib/geocoder/ip_address.rb
|
Ruby
|
mit
| 275 |
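A tiny usage sketch of the two predicates; the require path mirrors the file's location in the gem.
require 'geocoder/ip_address'
Geocoder::IpAddress.new('127.0.0.1').loopback?  # => true
Geocoder::IpAddress.new('8.8.8.8').loopback?    # => false
Geocoder::IpAddress.new('not-an-ip').valid?     # => false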
const chai = require('chai');
const expect = chai.expect;
const ComplexArray = require('../complex-array/complex-array');
function assertArrayEquals(first, second) {
const message = `${first} != ${second}`;
first.forEach((item, i) => {
expect(item).to.equal(second[i], message);
});
}
describe('Complex Array', () => {
  describe('Constructor', () => {
it('should construct from a number', () => {
const a = new ComplexArray(10);
expect(a).to.exist;
expect(a.real.length).to.equal(10);
expect(a.imag.length).to.equal(10);
expect(a.real[0]).to.equal(0);
expect(a.imag[0]).to.equal(0);
});
it('should construct from a number with a type', () => {
const a = new ComplexArray(10, Int32Array);
expect(a.ArrayType).to.equal(Int32Array);
expect(a.real.length).to.equal(10);
expect(a.imag.length).to.equal(10);
expect(a.real[0]).to.equal(0);
expect(a.imag[0]).to.equal(0);
});
    it('should construct from a real array', () => {
const a = new ComplexArray([1, 2]);
assertArrayEquals([1, 2], a.real);
assertArrayEquals([0, 0], a.imag);
});
    it('should construct from a real array with a type', () => {
const a = new ComplexArray([1, 2], Int32Array);
      expect(a.ArrayType).to.equal(Int32Array);
assertArrayEquals([1, 2], a.real);
assertArrayEquals([0, 0], a.imag);
});
    it('should construct from another complex array', () => {
const a = new ComplexArray(new ComplexArray([1, 2]));
assertArrayEquals([1, 2], a.real);
assertArrayEquals([0, 0], a.imag);
});
});
describe('`map` method', () => {
it('should alter all values', () => {
const a = new ComplexArray([1, 2]).map((value, i) => {
value.real *= 10;
value.imag = i;
});
assertArrayEquals([10, 20], a.real);
assertArrayEquals([0, 1], a.imag);
});
});
describe('`forEach` method', () => {
it('should touch every value', () => {
const a = new ComplexArray([1, 2]);
a.imag[0] = 4;
a.imag[1] = 8;
let sum = 0;
a.forEach((value, i) => {
sum += value.real;
sum += value.imag;
});
expect(sum).to.equal(15);
});
});
describe('`conjugate` method', () => {
it('should multiply a number', () => {
const a = new ComplexArray([1, 2]);
a.imag[0] = 1;
a.imag[1] = -2;
const b = a.conjugate();
assertArrayEquals([1, 2], b.real);
assertArrayEquals([-1, 2], b.imag);
});
});
describe('`magnitude` method', () => {
it('should give the an array of magnitudes', () => {
const a = new ComplexArray([1, 3]);
a.imag[0] = 0;
a.imag[1] = 4;
assertArrayEquals([1, 5], a.magnitude());
});
it('should return an iterable ArrayType object', () => {
const a = new ComplexArray([1, 2]);
let sum = 0;
a.magnitude().forEach((value, i) => {
sum += value;
});
expect(sum).to.equal(3);
});
});
});
|
JoeKarlsson/data-structures
|
test/complex-array.spec.js
|
JavaScript
|
mit
| 3,055 |
/*
* Copyright (C) 2011 by Jakub Lekstan <kuebzky@gmail.com>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#include <v8.h>
#include <node.h>
#include <sys/time.h>
#include <sys/resource.h>
int globalWho = RUSAGE_SELF;
static v8::Handle<v8::Value> get_r_usage(const v8::Arguments& args){
v8::HandleScope scope;
int localWho = globalWho;
if(args.Length() != 0){
bool isError = false;
if(args[0]->IsNumber()){
v8::Local<v8::Integer> iWho = v8::Local<v8::Integer>::Cast(args[0]);
localWho = (int)(iWho->Int32Value());
if(localWho != RUSAGE_SELF && localWho != RUSAGE_CHILDREN){
isError = true;
}
}else{
isError = true;
}
if(isError){
return v8::ThrowException(v8::Exception::TypeError(v8::String::New("First argument must be either a RUSAGE_SELF or RUSAGE_CHILDREN")));
}
}
rusage rusagedata;
int status = getrusage(localWho, &rusagedata);
	if(status != 0){
		return scope.Close(v8::Null());
	}
v8::Local<v8::Object> data = v8::Object::New();
data->Set(v8::String::New("ru_utime.tv_sec"), v8::Number::New(rusagedata.ru_utime.tv_sec));
data->Set(v8::String::New("ru_utime.tv_usec"), v8::Number::New(rusagedata.ru_utime.tv_usec));
data->Set(v8::String::New("ru_stime.tv_sec"), v8::Number::New(rusagedata.ru_stime.tv_sec));
data->Set(v8::String::New("ru_stime.tv_usec"), v8::Number::New(rusagedata.ru_stime.tv_usec));
data->Set(v8::String::New("ru_maxrss"), v8::Number::New(rusagedata.ru_maxrss));
data->Set(v8::String::New("ru_ixrss"), v8::Number::New(rusagedata.ru_ixrss));
data->Set(v8::String::New("ru_idrss"), v8::Number::New(rusagedata.ru_idrss));
data->Set(v8::String::New("ru_isrss"), v8::Number::New(rusagedata.ru_isrss));
data->Set(v8::String::New("ru_minflt"), v8::Number::New(rusagedata.ru_minflt));
data->Set(v8::String::New("ru_majflt"), v8::Number::New(rusagedata.ru_majflt));
data->Set(v8::String::New("ru_nswap"), v8::Number::New(rusagedata.ru_nswap));
data->Set(v8::String::New("ru_inblock"), v8::Number::New(rusagedata.ru_inblock));
data->Set(v8::String::New("ru_oublock"), v8::Number::New(rusagedata.ru_oublock));
data->Set(v8::String::New("ru_msgsnd"), v8::Number::New(rusagedata.ru_msgsnd));
data->Set(v8::String::New("ru_msgrcv"), v8::Number::New(rusagedata.ru_msgrcv));
data->Set(v8::String::New("ru_nsignals"), v8::Number::New(rusagedata.ru_nsignals));
data->Set(v8::String::New("ru_nvcsw"), v8::Number::New(rusagedata.ru_nvcsw));
data->Set(v8::String::New("ru_nivcsw"), v8::Number::New(rusagedata.ru_nivcsw));
return scope.Close(data);
}
static v8::Handle<v8::Value> usage_cycles(const v8::Arguments& args){
v8::HandleScope scope;
rusage rusagedata;
int status = getrusage(globalWho, &rusagedata);
if(status != 0){
return scope.Close(v8::Null());
}
return scope.Close(v8::Number::New(rusagedata.ru_utime.tv_sec * 1e6 + rusagedata.ru_utime.tv_usec));
}
static v8::Handle<v8::Value> who(const v8::Arguments& args){
v8::HandleScope scope;
if(args.Length() != 0 && args[0]->IsNumber()){
v8::Local<v8::Integer> iWho = v8::Local<v8::Integer>::Cast(args[0]);
int localWho = (int)(iWho->Int32Value());
if(localWho != RUSAGE_SELF && localWho != RUSAGE_CHILDREN){
return v8::ThrowException(v8::Exception::TypeError(v8::String::New("First argument must be either a RUSAGE_SELF or RUSAGE_CHILDREN")));
}
globalWho = localWho;
return scope.Close(v8::True());
}else{
return scope.Close(v8::False());
}
}
extern "C" void init (v8::Handle<v8::Object> target){
v8::HandleScope scope;
NODE_SET_METHOD(target, "get", get_r_usage);
NODE_SET_METHOD(target, "cycles", usage_cycles);
NODE_SET_METHOD(target, "who", who);
target->Set(v8::String::New("RUSAGE_SELF"), v8::Number::New(RUSAGE_SELF));
target->Set(v8::String::New("RUSAGE_CHILDREN"), v8::Number::New(RUSAGE_CHILDREN));
}
|
kuebk/node-rusage
|
src/node-rusage.cc
|
C++
|
mit
| 4,830 |
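From JavaScript the compiled addon exposes get(), cycles() and who() together with the RUSAGE_SELF and RUSAGE_CHILDREN constants; the require path below depends on how the addon is built and is only a guess.
var rusage = require('./build/Release/rusage');
rusage.who(rusage.RUSAGE_SELF);           // choose the default target (self vs. children)
var stats = rusage.get();                 // full getrusage snapshot as a plain object
console.log('max RSS:', stats['ru_maxrss']);
console.log('CPU time (us):', rusage.cycles());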
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package config;
import interfaces.*;
import java.sql.*;
import java.util.logging.*;
import javax.swing.*;
/**
*
* @author Luis G
*/
public class Connector {
public Connector() {
}
protected boolean getData(String query, Callback callback) {
ResultSet rs = null;
Connection conn = null;
Statement stmt = null;
boolean isNull = false;
try {
connect();
conn = DriverManager.getConnection("jdbc:mysql://localhost:3306/escuela", "root", "");
stmt = conn.createStatement();
rs = stmt.executeQuery(query);
for (int i = 0; rs.next(); i++) {
callback.callback(rs, i);
}
stmt.close();
conn.close();
} catch (SQLException ex) {
Logger.getLogger(Connector.class.getName()).log(Level.SEVERE, null, ex);
}
return isNull;
}
protected ResultSet getData(String query) {
ResultSet rs = null;
Connection conn = null;
Statement stmt = null;
try {
connect();
conn = DriverManager.getConnection("jdbc:mysql://localhost:3306/escuela", "root", "");
stmt = conn.createStatement();
rs = stmt.executeQuery(query);
} catch (Exception e) {
System.out.println(e);
}
return rs;
}
protected int executeQuery(String query) {
int id = -1;
try {
connect();
Connection conn = DriverManager.getConnection("jdbc:mysql://localhost:3306/escuela", "root", "");
Statement stmt = conn.createStatement();
id = stmt.executeUpdate(query, Statement.RETURN_GENERATED_KEYS);
ResultSet rs = stmt.getGeneratedKeys();
if (rs.next()) {
id = rs.getInt(1);
}
stmt.close();
conn.close();
} catch (SQLException e) {
Logger.getLogger(Connector.class.getName()).log(Level.SEVERE, null, e);
switch (e.getErrorCode()) {
case 1062:
JOptionPane.showMessageDialog(null, "Ese correo ya esta registrado", "error", 0);
break;
case 1054:
JOptionPane.showMessageDialog(null, "El registro no existe", "error", 0);
break;
default:
JOptionPane.showMessageDialog(null, "A ocurrido un error " + e, "error", 0);
System.out.println(e);
break;
}
}
return id;
}
private void connect() {
try {
Class.forName("com.mysql.jdbc.Driver");
} catch (Exception e) {
}
}
}
|
Luis-Gdx/escuela
|
Topicos Avanzados de Programacion/Tabla/Tabla con base de datos y login/src/config/Connector.java
|
Java
|
mit
| 2,950 |
<?php
/*
* This file is part of NodalFlow.
* (c) Fabrice de Stefanis / https://github.com/fab2s/NodalFlow
* This source file is licensed under the MIT license which you will
* find in the LICENSE file or at https://opensource.org/licenses/MIT
*/
namespace fab2s\NodalFlow\Nodes;
use fab2s\NodalFlow\Flows\FlowInterface;
use fab2s\NodalFlow\NodalFlowException;
/**
* Class BranchNode
*/
class BranchNode extends PayloadNodeAbstract implements BranchNodeInterface
{
/**
* This Node is a Branch
*
* @var bool
*/
protected $isAFlow = true;
/**
* @var FlowInterface
*/
protected $payload;
/**
* Instantiate the BranchNode
*
* @param FlowInterface $payload
* @param bool $isAReturningVal
*
* @throws NodalFlowException
*/
public function __construct(FlowInterface $payload, bool $isAReturningVal)
{
// branch Node does not (yet) support traversing
parent::__construct($payload, $isAReturningVal, false);
}
/**
* Execute the BranchNode
*
* @param mixed|null $param
*
* @return mixed
*/
public function exec($param = null)
{
// in the branch case, we actually exec a Flow
return $this->payload->exec($param);
}
}
|
fab2s/NodalFlow
|
src/Nodes/BranchNode.php
|
PHP
|
mit
| 1,307 |
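A minimal sketch of wrapping a sub-flow; $subFlow stands for any configured FlowInterface implementation (for example a NodalFlow instance), and the second constructor argument controls whether the branch's return value is passed back to the parent flow.
<?php
use fab2s\NodalFlow\Nodes\BranchNode;
$branch = new BranchNode($subFlow, true);  // true: the wrapped flow's result is returned
$result = $branch->exec($input);           // runs the whole sub-flow with $input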
<?PHP
/**
* password view.
*
* includes form for username and email to send password to user.
*
*/
?>
<div id="content_area">
<div class="row" id="login"> <!--login box-->
<div class="col-xs-24" >
<?php
//begins the login form.
echo form_open(base_url().'Account/password');
//display error messages
if (isset($message_display))
{
echo $message_display;
}
if (isset($error_message))
{
echo "<div class='alert alert-danger text-center' role='alert'>";
echo $error_message;
echo validation_errors();
echo "</div>"; //display error_msg
}
//login form itself
echo '<h5 class="text-center">Provide your username and email address.</h5>';
?>
<label>Username :</label>
<p>
<input type="text" name="username" id="name" placeholder="username"/>
</p>
<label>Email :</label>
<p>
<input type="email" name="email" id="email" placeholder="email@email.com"/>
</p>
<!-- End of the form, begin submit button -->
<div class='submit_button_container text-center'>
<button type="submit" class="btn btn-primary btn-center" name="submit"/>Get Password</button>
<!--Link to retrieve username -->
<div style="padding-top:10px">
<?php echo anchor('Account/username','Forgot Username?') ?>
</div>
</div><!-- end submit button container -->
<?php echo form_close(); ?>
</div> <!-- end login div -->
</div>
</div><!-- end content_area div -->
</div>
<?PHP
/*End of file password.php*/
/*Location: ./application/views/Account/password.php*/
|
rshanecole/TheFFFL
|
views/account/password.php
|
PHP
|
mit
| 2,088 |
using System;
using System.Threading;
using MonoTouch.Foundation;
using MonoTouch.UIKit;
namespace vplan
{
public class PrefManager
{
NSUserDefaults locstore = new NSUserDefaults();
bool notified = false;
public PrefManager ()
{
refresh ();
}
protected void refresh () {
locstore.Synchronize ();
}
public int getInt (string key) {
int val;
val = locstore.IntForKey (key);
return val;
}
public string getString (string key) {
string val;
val = locstore.StringForKey (key);
return val;
}
public void setInt (string key, int val) {
locstore.SetInt (val, key);
refresh ();
}
public void setString (string key, string val) {
locstore.SetString (val, key);
refresh ();
}
}
}
|
reknih/informant-ios
|
vplan/vplan.Kit/PrefManager.cs
|
C#
|
mit
| 743 |
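A small usage sketch; values go through NSUserDefaults, so they persist across app restarts.
var prefs = new vplan.PrefManager();
prefs.setString("username", "alice");
prefs.setInt("refreshInterval", 30);
string name = prefs.getString("username");        // "alice"
int interval = prefs.getInt("refreshInterval");   // 30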
/*
DISKSPD
Copyright(c) Microsoft Corporation
All rights reserved.
MIT License
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
#include "StdAfx.h"
#include "XmlResultParser.UnitTests.h"
#include "Common.h"
#include "xmlresultparser.h"
#include <stdlib.h>
#include <vector>
using namespace WEX::TestExecution;
using namespace WEX::Logging;
using namespace std;
namespace UnitTests
{
void XmlResultParserUnitTests::Test_ParseResults()
{
Profile profile;
TimeSpan timeSpan;
Target target;
XmlResultParser parser;
Results results;
results.fUseETW = false;
double fTime = 120.0;
results.ullTimeCount = PerfTimer::SecondsToPerfTime(fTime);
// First group has 1 core
SYSTEM_PROCESSOR_PERFORMANCE_INFORMATION systemProcessorInfo = {};
systemProcessorInfo.UserTime.QuadPart = static_cast<LONGLONG>(fTime * 30 * 100000);
systemProcessorInfo.IdleTime.QuadPart = static_cast<LONGLONG>(fTime * 45 * 100000);
systemProcessorInfo.KernelTime.QuadPart = static_cast<LONGLONG>(fTime * 70 * 100000);
results.vSystemProcessorPerfInfo.push_back(systemProcessorInfo);
// Second group has a maximum of 4 cores with 2 active
SYSTEM_PROCESSOR_PERFORMANCE_INFORMATION zeroSystemProcessorInfo = { 0 };
zeroSystemProcessorInfo.UserTime.QuadPart = static_cast<LONGLONG>(fTime * 0 * 100000);
zeroSystemProcessorInfo.IdleTime.QuadPart = static_cast<LONGLONG>(fTime * 100 * 100000);
zeroSystemProcessorInfo.KernelTime.QuadPart = static_cast<LONGLONG>(fTime * 100 * 100000);
results.vSystemProcessorPerfInfo.push_back(zeroSystemProcessorInfo);
results.vSystemProcessorPerfInfo.push_back(zeroSystemProcessorInfo);
results.vSystemProcessorPerfInfo.push_back(zeroSystemProcessorInfo);
results.vSystemProcessorPerfInfo.push_back(zeroSystemProcessorInfo);
// TODO: multiple target cases, full profile/result variations
target.SetPath("testfile1.dat");
target.SetCacheMode(TargetCacheMode::DisableOSCache);
target.SetWriteThroughMode(WriteThroughMode::On);
target.SetThroughputIOPS(1000);
timeSpan.AddTarget(target);
timeSpan.SetCalculateIopsStdDev(true);
TargetResults targetResults;
targetResults.sPath = "testfile1.dat";
targetResults.ullFileSize = 10 * 1024 * 1024;
targetResults.ullReadBytesCount = 4 * 1024 * 1024;
targetResults.ullReadIOCount = 6;
targetResults.ullWriteBytesCount = 2 * 1024 * 1024;
targetResults.ullWriteIOCount = 10;
targetResults.ullBytesCount = targetResults.ullReadBytesCount + targetResults.ullWriteBytesCount;
targetResults.ullIOCount = targetResults.ullReadIOCount + targetResults.ullWriteIOCount;
// TODO: Histogram<float> readLatencyHistogram;
// TODO: Histogram<float> writeLatencyHistogram;
// TODO: IoBucketizer writeBucketizer;
targetResults.readBucketizer.Initialize(1000, timeSpan.GetDuration());
for (size_t i = 0; i < timeSpan.GetDuration(); i++)
{
// add an io halfway through the bucket's time interval
targetResults.readBucketizer.Add(i*1000 + 500, 0);
}
ThreadResults threadResults;
threadResults.vTargetResults.push_back(targetResults);
results.vThreadResults.push_back(threadResults);
vector<Results> vResults;
vResults.push_back(results);
// just throw away the computername and reset the timestamp - for the ut, it's
// as useful (and simpler) to verify statics as anything else. Reconstruct
// processor topo to a fixed example as well.
SystemInformation system;
system.ResetTime();
system.sComputerName.clear();
system.processorTopology._ulProcCount = 5;
system.processorTopology._ulActiveProcCount = 3;
system.processorTopology._vProcessorGroupInformation.clear();
system.processorTopology._vProcessorGroupInformation.emplace_back((BYTE)1, (BYTE)1, (WORD)0, (KAFFINITY)0x1);
system.processorTopology._vProcessorGroupInformation.emplace_back((BYTE)4, (BYTE)2, (WORD)1, (KAFFINITY)0x6);
system.processorTopology._vProcessorNumaInformation.clear();
system.processorTopology._vProcessorNumaInformation.emplace_back((DWORD)0, (WORD)0, (KAFFINITY)0x1);
system.processorTopology._vProcessorNumaInformation.emplace_back((DWORD)1, (WORD)1, (KAFFINITY)0x6);
ProcessorSocketInformation socket;
socket._vProcessorMasks.emplace_back((WORD)0, (KAFFINITY)0x1);
socket._vProcessorMasks.emplace_back((WORD)1, (KAFFINITY)0x6);
system.processorTopology._vProcessorSocketInformation.clear();
system.processorTopology._vProcessorSocketInformation.push_back(socket);
system.processorTopology._vProcessorHyperThreadInformation.clear();
system.processorTopology._vProcessorHyperThreadInformation.emplace_back((WORD)0, (KAFFINITY)0x1);
system.processorTopology._vProcessorHyperThreadInformation.emplace_back((WORD)1, (KAFFINITY)0x6);
// finally, add the timespan to the profile and dump.
profile.AddTimeSpan(timeSpan);
string sResults = parser.ParseResults(profile, system, vResults);
// stringify random text, quoting "'s and adding newline/preserving tabs
// gc some.txt |% { write-host $("`"{0}\n`"" -f $($_ -replace "`"","\`"" -replace "`t","\t")) }
const char *pcszExpectedOutput = \
"<Results>\n"
" <System>\n"
" <ComputerName></ComputerName>\n"
" <Tool>\n"
" <Version>" DISKSPD_NUMERIC_VERSION_STRING "</Version>\n"
" <VersionDate>" DISKSPD_DATE_VERSION_STRING "</VersionDate>\n"
" </Tool>\n"
" <RunTime></RunTime>\n"
" <ProcessorTopology>\n"
" <Group Group=\"0\" MaximumProcessors=\"1\" ActiveProcessors=\"1\" ActiveProcessorMask=\"0x1\"/>\n"
" <Group Group=\"1\" MaximumProcessors=\"4\" ActiveProcessors=\"2\" ActiveProcessorMask=\"0x6\"/>\n"
" <Node Node=\"0\" Group=\"0\" Processors=\"0x1\"/>\n"
" <Node Node=\"1\" Group=\"1\" Processors=\"0x6\"/>\n"
" <Socket>\n"
" <Group Group=\"0\" Processors=\"0x1\"/>\n"
" <Group Group=\"1\" Processors=\"0x6\"/>\n"
" </Socket>\n"
" <HyperThread Group=\"0\" Processors=\"0x1\"/>\n"
" <HyperThread Group=\"1\" Processors=\"0x6\"/>\n"
" </ProcessorTopology>\n"
" </System>\n"
" <Profile>\n"
" <Progress>0</Progress>\n"
" <ResultFormat>text</ResultFormat>\n"
" <Verbose>false</Verbose>\n"
" <TimeSpans>\n"
" <TimeSpan>\n"
" <CompletionRoutines>false</CompletionRoutines>\n"
" <MeasureLatency>false</MeasureLatency>\n"
" <CalculateIopsStdDev>true</CalculateIopsStdDev>\n"
" <DisableAffinity>false</DisableAffinity>\n"
" <Duration>10</Duration>\n"
" <Warmup>5</Warmup>\n"
" <Cooldown>0</Cooldown>\n"
" <ThreadCount>0</ThreadCount>\n"
" <RequestCount>0</RequestCount>\n"
" <IoBucketDuration>1000</IoBucketDuration>\n"
" <RandSeed>0</RandSeed>\n"
" <Targets>\n"
" <Target>\n"
" <Path>testfile1.dat</Path>\n"
" <BlockSize>65536</BlockSize>\n"
" <BaseFileOffset>0</BaseFileOffset>\n"
" <SequentialScan>false</SequentialScan>\n"
" <RandomAccess>false</RandomAccess>\n"
" <TemporaryFile>false</TemporaryFile>\n"
" <UseLargePages>false</UseLargePages>\n"
" <DisableOSCache>true</DisableOSCache>\n"
" <WriteThrough>true</WriteThrough>\n"
" <WriteBufferContent>\n"
" <Pattern>sequential</Pattern>\n"
" </WriteBufferContent>\n"
" <ParallelAsyncIO>false</ParallelAsyncIO>\n"
" <StrideSize>65536</StrideSize>\n"
" <InterlockedSequential>false</InterlockedSequential>\n"
" <ThreadStride>0</ThreadStride>\n"
" <MaxFileSize>0</MaxFileSize>\n"
" <RequestCount>2</RequestCount>\n"
" <WriteRatio>0</WriteRatio>\n"
" <Throughput unit=\"IOPS\">1000</Throughput>\n"
" <ThreadsPerFile>1</ThreadsPerFile>\n"
" <IOPriority>3</IOPriority>\n"
" <Weight>1</Weight>\n"
" </Target>\n"
" </Targets>\n"
" </TimeSpan>\n"
" </TimeSpans>\n"
" </Profile>\n"
" <TimeSpan>\n"
" <TestTimeSeconds>120.00</TestTimeSeconds>\n"
" <ThreadCount>1</ThreadCount>\n"
" <RequestCount>0</RequestCount>\n"
" <ProcCount>3</ProcCount>\n"
" <CpuUtilization>\n"
" <CPU>\n"
" <Group>0</Group>\n"
" <Id>0</Id>\n"
" <UsagePercent>55.00</UsagePercent>\n"
" <UserPercent>30.00</UserPercent>\n"
" <KernelPercent>25.00</KernelPercent>\n"
" <IdlePercent>45.00</IdlePercent>\n"
" </CPU>\n"
" <CPU>\n"
" <Group>1</Group>\n"
" <Id>1</Id>\n"
" <UsagePercent>0.00</UsagePercent>\n"
" <UserPercent>0.00</UserPercent>\n"
" <KernelPercent>0.00</KernelPercent>\n"
" <IdlePercent>100.00</IdlePercent>\n"
" </CPU>\n"
" <CPU>\n"
" <Group>1</Group>\n"
" <Id>2</Id>\n"
" <UsagePercent>0.00</UsagePercent>\n"
" <UserPercent>0.00</UserPercent>\n"
" <KernelPercent>0.00</KernelPercent>\n"
" <IdlePercent>100.00</IdlePercent>\n"
" </CPU>\n"
" <Average>\n"
" <UsagePercent>18.33</UsagePercent>\n"
" <UserPercent>10.00</UserPercent>\n"
" <KernelPercent>8.33</KernelPercent>\n"
" <IdlePercent>81.67</IdlePercent>\n"
" </Average>\n"
" </CpuUtilization>\n"
" <Iops>\n"
" <ReadIopsStdDev>0.000</ReadIopsStdDev>\n"
" <IopsStdDev>0.000</IopsStdDev>\n"
" <Bucket SampleMillisecond=\"1000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"2000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"3000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"4000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"5000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"6000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"7000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"8000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"9000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"10000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" </Iops>\n"
" <Thread>\n"
" <Id>0</Id>\n"
" <Target>\n"
" <Path>testfile1.dat</Path>\n"
" <BytesCount>6291456</BytesCount>\n"
" <FileSize>10485760</FileSize>\n"
" <IOCount>16</IOCount>\n"
" <ReadBytes>4194304</ReadBytes>\n"
" <ReadCount>6</ReadCount>\n"
" <WriteBytes>2097152</WriteBytes>\n"
" <WriteCount>10</WriteCount>\n"
" <Iops>\n"
" <ReadIopsStdDev>0.000</ReadIopsStdDev>\n"
" <IopsStdDev>0.000</IopsStdDev>\n"
" <Bucket SampleMillisecond=\"1000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"2000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"3000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"4000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"5000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"6000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"7000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"8000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"9000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"10000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" </Iops>\n"
" </Target>\n"
" </Thread>\n"
" </TimeSpan>\n"
"</Results>";
#if 0
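        // Debug aid (this block is compiled out): dumps the received and expected
        // XML to files and echoes both to the console so differences can be
        // inspected and diffed manually.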
HANDLE h;
DWORD written;
h = CreateFileW(L"g:\\xmlresult-received.txt", GENERIC_WRITE, FILE_SHARE_READ, NULL, CREATE_ALWAYS, FILE_ATTRIBUTE_NORMAL, NULL);
WriteFile(h, sResults.c_str(), (DWORD)sResults.length(), &written, NULL);
VERIFY_ARE_EQUAL(sResults.length(), written);
CloseHandle(h);
h = CreateFileW(L"g:\\xmlresult-expected.txt", GENERIC_WRITE, FILE_SHARE_READ, NULL, CREATE_ALWAYS, FILE_ATTRIBUTE_NORMAL, NULL);
WriteFile(h, pcszExpectedOutput, (DWORD)strlen(pcszExpectedOutput), &written, NULL);
VERIFY_ARE_EQUAL((DWORD)strlen(pcszExpectedOutput), written);
CloseHandle(h);
printf("--\n%s\n", sResults.c_str());
printf("-------------------------------------------------\n");
printf("--\n%s\n", pcszExpectedOutput);
#endif
VERIFY_ARE_EQUAL(0, strcmp(sResults.c_str(), pcszExpectedOutput));
}
void XmlResultParserUnitTests::Test_ParseProfile()
{
Profile profile;
XmlResultParser parser;
TimeSpan timeSpan;
Target target;
timeSpan.AddTarget(target);
profile.AddTimeSpan(timeSpan);
string s = parser.ParseProfile(profile);
const char *pcszExpectedOutput = "<Profile>\n"
" <Progress>0</Progress>\n"
" <ResultFormat>text</ResultFormat>\n"
" <Verbose>false</Verbose>\n"
" <TimeSpans>\n"
" <TimeSpan>\n"
" <CompletionRoutines>false</CompletionRoutines>\n"
" <MeasureLatency>false</MeasureLatency>\n"
" <CalculateIopsStdDev>false</CalculateIopsStdDev>\n"
" <DisableAffinity>false</DisableAffinity>\n"
" <Duration>10</Duration>\n"
" <Warmup>5</Warmup>\n"
" <Cooldown>0</Cooldown>\n"
" <ThreadCount>0</ThreadCount>\n"
" <RequestCount>0</RequestCount>\n"
" <IoBucketDuration>1000</IoBucketDuration>\n"
" <RandSeed>0</RandSeed>\n"
" <Targets>\n"
" <Target>\n"
" <Path></Path>\n"
" <BlockSize>65536</BlockSize>\n"
" <BaseFileOffset>0</BaseFileOffset>\n"
" <SequentialScan>false</SequentialScan>\n"
" <RandomAccess>false</RandomAccess>\n"
" <TemporaryFile>false</TemporaryFile>\n"
" <UseLargePages>false</UseLargePages>\n"
" <WriteBufferContent>\n"
" <Pattern>sequential</Pattern>\n"
" </WriteBufferContent>\n"
" <ParallelAsyncIO>false</ParallelAsyncIO>\n"
" <StrideSize>65536</StrideSize>\n"
" <InterlockedSequential>false</InterlockedSequential>\n"
" <ThreadStride>0</ThreadStride>\n"
" <MaxFileSize>0</MaxFileSize>\n"
" <RequestCount>2</RequestCount>\n"
" <WriteRatio>0</WriteRatio>\n"
" <Throughput>0</Throughput>\n"
" <ThreadsPerFile>1</ThreadsPerFile>\n"
" <IOPriority>3</IOPriority>\n"
" <Weight>1</Weight>\n"
" </Target>\n"
" </Targets>\n"
" </TimeSpan>\n"
" </TimeSpans>\n"
"</Profile>\n";
//VERIFY_ARE_EQUAL(pcszExpectedOutput, s.c_str());
VERIFY_ARE_EQUAL(strlen(pcszExpectedOutput), s.length());
VERIFY_IS_TRUE(!strcmp(pcszExpectedOutput, s.c_str()));
}
void XmlResultParserUnitTests::Test_ParseTargetProfile()
{
Target target;
string sResults;
char pszExpectedOutput[4096];
int nWritten;
const char *pcszOutputTemplate = \
"<Target>\n"
" <Path>testfile1.dat</Path>\n"
" <BlockSize>65536</BlockSize>\n"
" <BaseFileOffset>0</BaseFileOffset>\n"
" <SequentialScan>false</SequentialScan>\n"
" <RandomAccess>false</RandomAccess>\n"
" <TemporaryFile>false</TemporaryFile>\n"
" <UseLargePages>false</UseLargePages>\n"
" <DisableOSCache>true</DisableOSCache>\n"
" <WriteThrough>true</WriteThrough>\n"
" <WriteBufferContent>\n"
" <Pattern>sequential</Pattern>\n"
" </WriteBufferContent>\n"
" <ParallelAsyncIO>false</ParallelAsyncIO>\n"
" <StrideSize>65536</StrideSize>\n"
" <InterlockedSequential>false</InterlockedSequential>\n"
" <ThreadStride>0</ThreadStride>\n"
" <MaxFileSize>0</MaxFileSize>\n"
" <RequestCount>2</RequestCount>\n"
" <WriteRatio>0</WriteRatio>\n"
" <Throughput%s>%s</Throughput>\n" // 2 param
" <ThreadsPerFile>1</ThreadsPerFile>\n"
" <IOPriority>3</IOPriority>\n"
" <Weight>1</Weight>\n"
"</Target>\n";
target.SetPath("testfile1.dat");
target.SetCacheMode(TargetCacheMode::DisableOSCache);
target.SetWriteThroughMode(WriteThroughMode::On);
// Base case - no limit
nWritten = sprintf_s(pszExpectedOutput, sizeof(pszExpectedOutput),
pcszOutputTemplate, "", "0");
VERIFY_IS_GREATER_THAN(nWritten, 0);
sResults = target.GetXml(0);
VERIFY_ARE_EQUAL(sResults, pszExpectedOutput);
// IOPS - with units
target.SetThroughputIOPS(1000);
nWritten = sprintf_s(pszExpectedOutput, sizeof(pszExpectedOutput),
pcszOutputTemplate, " unit=\"IOPS\"", "1000");
VERIFY_IS_GREATER_THAN(nWritten, 0);
sResults = target.GetXml(0);
VERIFY_ARE_EQUAL(sResults, pszExpectedOutput);
// BPMS - not specified with units in output
target.SetThroughput(1000);
nWritten = sprintf_s(pszExpectedOutput, sizeof(pszExpectedOutput),
pcszOutputTemplate, "", "1000");
VERIFY_IS_GREATER_THAN(nWritten, 0);
sResults = target.GetXml(0);
VERIFY_ARE_EQUAL(sResults, pszExpectedOutput);
}
}
|
microsoft/diskspd
|
UnitTests/XmlResultParser/XmlResultParser.UnitTests.cpp
|
C++
|
mit
| 26,838 |
/** @jsx h */
import h from '../../helpers/h'
export const schema = {
blocks: {
paragraph: {
marks: [{ type: 'bold' }, { type: 'underline' }],
},
},
}
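// The schema above only allows bold and underline marks on paragraphs, so the
// italic mark in the input below is expected to be stripped by normalization,
// leaving plain text in the output.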
export const input = (
<value>
<document>
<paragraph>
one <i>two</i> three
</paragraph>
</document>
</value>
)
export const output = (
<value>
<document>
<paragraph>one two three</paragraph>
</document>
</value>
)
|
ashutoshrishi/slate
|
packages/slate/test/schema/custom/node-mark-invalid-default.js
|
JavaScript
|
mit
| 437 |
/* ========================================================================
* DOM-based Routing
* Based on http://goo.gl/EUTi53 by Paul Irish
*
* Only fires on body classes that match. If a body class contains a dash,
* replace the dash with an underscore when adding it to the object below.
*
* .noConflict()
* The routing is enclosed within an anonymous function so that you can
* always reference jQuery with $, even when in .noConflict() mode.
*
* Google CDN, Latest jQuery
* To use the default WordPress version of jQuery, go to lib/config.php and
* remove or comment out: add_theme_support('jquery-cdn');
* ======================================================================== */
(function($) {
// Use this variable to set up the common and page specific functions. If you
// rename this variable, you will also need to rename the namespace below.
var Sage = {
// All pages
'common': {
init: function() {
// JavaScript to be fired on all pages
},
finalize: function() {
// JavaScript to be fired on all pages, after page specific JS is fired
}
},
// Home page
'home': {
init: function() {
// JavaScript to be fired on the home page
},
finalize: function() {
// JavaScript to be fired on the home page, after the init JS
}
},
// About us page, note the change from about-us to about_us.
'about_us': {
init: function() {
// JavaScript to be fired on the about us page
}
}
};
// The routing fires all common scripts, followed by the page specific scripts.
// Add additional events for more control over timing e.g. a finalize event
var UTIL = {
fire: function(func, funcname, args) {
var fire;
var namespace = Sage;
funcname = (funcname === undefined) ? 'init' : funcname;
fire = func !== '';
fire = fire && namespace[func];
fire = fire && typeof namespace[func][funcname] === 'function';
if (fire) {
namespace[func][funcname](args);
}
},
loadEvents: function() {
// Fire common init JS
UTIL.fire('common');
// Fire page-specific init JS, and then finalize JS
$.each(document.body.className.replace(/-/g, '_').split(/\s+/), function(i, classnm) {
UTIL.fire(classnm);
UTIL.fire(classnm, 'finalize');
});
// Fire common finalize JS
UTIL.fire('common', 'finalize');
}
};
// Load Events
$(document).ready(UTIL.loadEvents);
})(jQuery); // Fully reference jQuery after this point.
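// Add Bootstrap grid classes to the home sidebar widgets so they flow into columns.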
$(document).ready(function(){
$("#sidebar-home ul li").addClass( "col-md-3 col-sm-6" );
$("#sidebar-home div").addClass( "clearfix" );
});
|
erikkowalski/hoe-sage-8.1.0
|
assets/scripts/main.js
|
JavaScript
|
mit
| 2,737 |
import {bootstrap} from '@angular/platform-browser-dynamic';
import {ROUTER_PROVIDERS} from '@angular/router-deprecated';
import {HTTP_PROVIDERS} from '@angular/http';
import {AppComponent} from './app.component';
import {LoggerService} from './blocks/logger.service';
bootstrap(AppComponent, [
LoggerService, ROUTER_PROVIDERS, HTTP_PROVIDERS
]);
|
IMAMBAKS/data_viz_pa
|
app/main.ts
|
TypeScript
|
mit
| 352 |
package org.apache.shiro.grails.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.apache.shiro.authz.Permission;
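/**
 * Marks a field or method as requiring a Shiro permission before it may be
 * accessed. Illustrative usage (the permission class and values below are
 * examples, not defined in this file):
 *
 *   @PermissionRequired(type = WildcardPermission.class, target = "printer:lp7200", actions = "print")
 *   public void print() { ... }
 */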
@Target({ElementType.FIELD, ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
public @interface PermissionRequired {
Class<? extends Permission> type();
/**
* The name of the role required to be granted this authorization.
*/
String target() default "*";
String actions() default "";
}
|
putin266/Vote
|
target/work/plugins/shiro-1.2.1/src/java/org/apache/shiro/grails/annotations/PermissionRequired.java
|
Java
|
mit
| 572 |
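// Doubleshot content: hook and step values may be functions, arrays of steps,
// or string aliases that resolve to other keys in this object (e.g. 'alias1' -> 'alias2').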
module.exports = {
before: [function () {
console.log('global beforeAll1');
}, 'alias1'],
'alias1': 'alias2',
'alias2': function () {
console.log('global beforeAll2');
},
'One': function () {
this.sum = 1;
},
'plus one': function () {
this.sum += 1;
},
'equals two': function () {
if (this.sum !== 2) {
throw new Error(this.sum + ' !== 2');
}
}
};
|
twolfson/doubleshot
|
test/test_files/complex_global_hooks/content.js
|
JavaScript
|
mit
| 399 |
from attributes import *
from constants import *
# ------------------------------------------------------------------------------
#
class UnitManager (Attributes) :
"""
UnitManager class -- manages a pool
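
    Illustrative usage sketch (names are examples; every method in this API
    draft simply raises until implemented)::

        um   = UnitManager (scheduler='default')
        um.add_pilot (pilot_id)
        unit = um.submit_unit (unit_description)
        um.wait_unit (unit.id)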
"""
# --------------------------------------------------------------------------
#
def __init__ (self, url=None, scheduler='default', session=None) :
Attributes.__init__ (self)
# --------------------------------------------------------------------------
#
def add_pilot (self, pid) :
"""
add (Compute or Data)-Pilot(s) to the pool
"""
raise Exception ("%s.add_pilot() is not implemented" % self.__class__.__name__)
# --------------------------------------------------------------------------
#
def list_pilots (self, ptype=ANY) :
"""
List IDs of data and/or compute pilots
"""
raise Exception ("%s.list_pilots() is not implemented" % self.__class__.__name__)
# --------------------------------------------------------------------------
#
def remove_pilot (self, pid, drain=False) :
"""
Remove pilot(s) (does not cancel the pilot(s), but removes all units
        from the pilot(s)).
`drain` determines what happens to the units which are managed by the
removed pilot(s). If `True`, the pilot removal is delayed until all
units reach a final state. If `False` (the default), then `RUNNING`
        units will be canceled, and `PENDING` units will be re-assigned to the
unit managers for re-scheduling to other pilots.
"""
raise Exception ("%s.remove_pilot() is not implemented" % self.__class__.__name__)
# --------------------------------------------------------------------------
#
def submit_unit (self, description) :
"""
Instantiate and return (Compute or Data)-Unit object(s)
"""
raise Exception ("%s.submit_unit() is not implemented" % self.__class__.__name__)
# --------------------------------------------------------------------------
#
def list_units (self, utype=ANY) :
"""
List IDs of data and/or compute units
"""
raise Exception ("%s.list_units() is not implemented" % self.__class__.__name__)
# --------------------------------------------------------------------------
#
def get_unit (self, uids) :
"""
Reconnect to and return (Compute or Data)-Unit object(s)
"""
raise Exception ("%s.get_unit() is not implemented" % self.__class__.__name__)
# --------------------------------------------------------------------------
#
def wait_unit (self, uids, state=[DONE, FAILED, CANCELED], timeout=-1.0) :
"""
Wait for given unit(s) to enter given state
"""
raise Exception ("%s.wait_unit() is not implemented" % self.__class__.__name__)
# --------------------------------------------------------------------------
#
def cancel_units (self, uids) :
"""
Cancel given unit(s)
"""
raise Exception ("%s.cancel_unit() is not implemented" % self.__class__.__name__)
# ------------------------------------------------------------------------------
#
|
JensTimmerman/radical.pilot
|
docs/architecture/api_draft/unit_manager.py
|
Python
|
mit
| 3,311 |
'use strict';
angular.module('terminaaliApp')
.factory('Auth', function Auth($location, $rootScope, $http, User, $cookieStore, $q) {
var currentUser = {};
if($cookieStore.get('token')) {
currentUser = User.get();
}
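    // Illustrative usage from a controller (not part of this service):
    //   Auth.login({ email: 'user@example.com', password: 'secret' })
    //     .then(function () { $location.path('/'); });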
return {
/**
* Authenticate user and save token
*
* @param {Object} user - login info
* @param {Function} callback - optional
* @return {Promise}
*/
login: function(user, callback) {
var cb = callback || angular.noop;
var deferred = $q.defer();
$http.post('/auth/local', {
email: user.email,
password: user.password
}).
success(function(data) {
$cookieStore.put('token', data.token);
currentUser = User.get();
deferred.resolve(data);
return cb();
}).
error(function(err) {
this.logout();
deferred.reject(err);
return cb(err);
}.bind(this));
return deferred.promise;
},
/**
* Delete access token and user info
*
* @param {Function}
*/
logout: function() {
$cookieStore.remove('token');
currentUser = {};
},
/**
* Create a new user
*
* @param {Object} user - user info
* @param {Function} callback - optional
* @return {Promise}
*/
createUser: function(user, callback) {
var cb = callback || angular.noop;
return User.save(user,
function(data) {
$cookieStore.put('token', data.token);
currentUser = User.get();
return cb(user);
},
function(err) {
this.logout();
return cb(err);
}.bind(this)).$promise;
},
/**
* Change password
*
* @param {String} oldPassword
* @param {String} newPassword
* @param {Function} callback - optional
* @return {Promise}
*/
changePassword: function(oldPassword, newPassword, callback) {
var cb = callback || angular.noop;
return User.changePassword({ id: currentUser._id }, {
oldPassword: oldPassword,
newPassword: newPassword
}, function(user) {
return cb(user);
}, function(err) {
return cb(err);
}).$promise;
},
/**
* Gets all available info on authenticated user
*
* @return {Object} user
*/
getCurrentUser: function() {
return currentUser;
},
/**
* Check if a user is logged in
*
* @return {Boolean}
*/
isLoggedIn: function() {
return currentUser.hasOwnProperty('role');
},
/**
* Waits for currentUser to resolve before checking if user is logged in
*/
isLoggedInAsync: function(cb) {
if(currentUser.hasOwnProperty('$promise')) {
currentUser.$promise.then(function() {
cb(true);
}).catch(function() {
cb(false);
});
} else if(currentUser.hasOwnProperty('role')) {
cb(true);
} else {
cb(false);
}
},
/**
* Check if a user is an admin
*
* @return {Boolean}
*/
isAdmin: function() {
return currentUser.role === 'admin';
},
/**
* Get auth token
*/
getToken: function() {
return $cookieStore.get('token');
}
};
});
|
henrikre/terminaali
|
client/components/auth/auth.service.js
|
JavaScript
|
mit
| 3,575 |
$(document).ready(function(){
var toggleMuffEditor = function(stat=false){
$("#muff-opt").remove();
// bind event
if(stat){
$(".muff").mouseover(function() {
$("#muff-opt").remove();
muffShowOptions($(this));
$(window).scroll(function(){
$("#muff-opt").remove();
})
});
}else{// unbind event
$(".muff").unbind("mouseover");
}
};
function muffShowOptions( e ){
var t = "";
var id = e.attr("data-muff-id");
var title = e.attr("data-muff-title");
var p = e.offset();
var opttop = p.top + 15;
var optleft = p.left + 5;
if(e.hasClass("muff-div")){ t="div";
}else if(e.hasClass("muff-text")){ t="text";
}else if(e.hasClass("muff-a")){ t="link";
}else if(e.hasClass("muff-img")){ t="image";
}
if(!title){ title = t;}
// check position is beyond document
if((p.left + 25 + 75) > $(window).width()){
optleft -= 75;
}
var opt = "<div id='muff-opt' style='position:absolute;top:"+opttop+"px;left:"+optleft+"px;z-index:99998;display:none;'>";
opt += "<a href='admin/"+t+"/"+id+"/edit' class='mbtn edit'></a>";
opt += "<a href='admin/"+t+"/delete/' class='mbtn delete' data-mod='"+t+"' data-id='"+id+"'></a>";
opt += "<span>"+title+"</span>";
opt += "</div>";
$("body").prepend(opt);
$("#muff-opt").slideDown(300);
$("body").find("#muff-opt > a.delete").click(function(e){
var path = $(this).attr('href');
var mod = $(this).attr('data-mod');
// e.preventDefault();
swal({
title: "Are you sure?",
text: "You are about to delete this "+mod,
type: "warning",
showCancelButton: true,
confirmButtonColor: "#DD6B55",
confirmButtonText: "Yes, delete it!",
cancelButtonText: "Cancel",
closeOnConfirm: true,
closeOnCancel: true
},
function(isConfirm){
if (isConfirm) {
// window.location.href = path;
proceedDelete(path, id);
}
});
return false;
});
}
toggleMuffEditor(false);
// set checkbox editor event
$("input[name=cb-muff-editor]").click(function(){
if($(this).is(':checked')){ toggleMuffEditor(true); }
else{ toggleMuffEditor(false) }
});
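    // Builds and submits a hidden POST form carrying the CSRF token from the
    // page's meta tag, so deletes go through a proper POST request.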
function proceedDelete(path, id){
var newForm = jQuery('<form>', {
'action': path,
'method': 'POST',
'target': '_top'
}).append(jQuery('<input>', {
'name': '_token',
'value': $("meta[name=csrf-token]").attr("content"),
'type': 'hidden'
})).append(jQuery('<input>', {
'name': 'id',
'value': id,
'type': 'hidden'
}));
newForm.hide().appendTo("body").submit();
}
// $(".opt-div a.delete, .w-conf a.delete, .w-conf-hvr a.delete").click(function(e){
// var path = $(this).attr('href');
// var mod = $(this).attr('data-mod');
// // e.preventDefault();
// swal({
// title: "Are you sure?",
// text: "You are about to delete this "+mod,
// type: "warning",
// showCancelButton: true,
// confirmButtonColor: "#DD6B55",
// confirmButtonText: "Yes, delete it!",
// cancelButtonText: "Cancel",
// closeOnConfirm: true,
// closeOnCancel: true
// },
// function(isConfirm){
// if (isConfirm) {
// window.location.href = path;
// }
// });
// return false;
// });
// top nav click
$(".top-nav>li").click(function(){
var i = $(this).find('.dropdown-menu');
toggleClassExcept('.top-nav .dropdown-menu', 'rmv', 'active', i);
i.toggleClass("active");
});
/** toggle a certain class except the given object
* works with li and lists
* @param id identifier
* @param a action
* @param c class
* @param ex object
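     * e.g. (as called by the nav handler above): toggleClassExcept('.top-nav .dropdown-menu', 'rmv', 'active', i);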
*/
function toggleClassExcept(id, a, c, ex){
$(id).each(function(){
switch(a){
case 'remove':
case 'rmv':
if(!$(this).is(ex)) $(this).removeClass(c);
break;
case 'add':
if(!$(this).is(ex)) $(this).addClass(c);
break;
default:
break;
}
});
}
$(".w-add .muff-add").click(function(event){
event.preventDefault();
var b = $(this);
var newForm = jQuery('<form>', {
'action': b.data('href'),
'method': 'GET',
'target': '_top'
}).append(jQuery('<input>', {
'name': '_token',
'value': $("meta[name=csrf-token]").attr("content"),
'type': 'hidden'
})).append(jQuery('<input>', {
'name': 'url',
'value': $("meta[name=muffin-url]").attr("content"),
'type': 'hidden'
})).append(jQuery('<input>', {
'name': 'location',
'value': b.data("loc"),
'type': 'hidden'
}));
// console.log(newForm);
newForm.hide().appendTo("body").submit();
})
// TAGs
//var tagArea = '.tag-area';
if($('.tagarea')[0]){
var backSpace;
var close = '<a class="close"></a>';
var PreTags = $('.tagarea').val().trim().split(" ");
$('.tagarea').after('<ul class="tag-box"></ul>');
    for (var i = 0; i < PreTags.length; i++ ){
var pretag = PreTags[i].split("_").join(" ");
if($('.tagarea').val().trim() != "" )
$('.tag-box').append('<li class="tags"><input type="hidden" name="tags[]" value="'+pretag+'">'+pretag+close+'</li>');
}
$('.tag-box').append('<li class="new-tag"><input class="input-tag" type="text"></li>');
// unbind submit form when pressing enter
$('.input-tag').on('keyup keypress', function(e) {
var keyCode = e.keyCode || e.which;
if (keyCode === 13) {
e.preventDefault();
return false;
}
});
// Taging
$('.input-tag').bind("keydown", function (kp) {
var tag = $('.input-tag').val().trim();
if(tag.length > 0){
$(".tags").removeClass("danger");
if(kp.keyCode == 13 || kp.keyCode == 9){
$(".new-tag").before('<li class="tags"><input type="hidden" name="tags[]" value="'+tag+'">'+tag+close+'</li>');
$(this).val('');
}}
else {if(kp.keyCode == 8 ){
if($(".new-tag").prev().hasClass("danger")){
$(".new-tag").prev().remove();
}else{
$(".new-tag").prev().addClass("danger");
}
}
}
});
//Delete tag
$(".tag-box").on("click", ".close", function() {
$(this).parent().remove();
});
$(".tag-box").click(function(){
$('.input-tag').focus();
});
// Edit
$('.tag-box').on("dblclick" , ".tags", function(cl){
var tags = $(this);
var tag = tags.text().trim();
$('.tags').removeClass('edit');
tags.addClass('edit');
tags.html('<input class="input-tag" value="'+tag+'" type="text">')
$(".new-tag").hide();
tags.find('.input-tag').focus();
tag = $(this).find('.input-tag').val() ;
$('.tags').dblclick(function(){
tags.html(tag + close);
$('.tags').removeClass('edit');
$(".new-tag").show();
});
tags.find('.input-tag').bind("keydown", function (edit) {
tag = $(this).val() ;
if(edit.keyCode == 13){
$(".new-tag").show();
$('.input-tag').focus();
$('.tags').removeClass('edit');
if(tag.length > 0){
tags.html('<input type="hidden" name="tags[]" value="'+tag+'">'+tag + close);
}
else{
tags.remove();
}
}
});
});
}
// sorting
// $(function() {
// $( ".tag-box" ).sortable({
// items: "li:not(.new-tag)",
// containment: "parent",
// scrollSpeed: 100
// });
// $( ".tag-box" ).disableSelection();
// });
});
|
johnguild/muffincms
|
src/Public/main/js/muffincms.js
|
JavaScript
|
mit
| 7,581 |
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports["default"] = undefined;
var _classCallCheck2 = require('babel-runtime/helpers/classCallCheck');
var _classCallCheck3 = _interopRequireDefault(_classCallCheck2);
var _possibleConstructorReturn2 = require('babel-runtime/helpers/possibleConstructorReturn');
var _possibleConstructorReturn3 = _interopRequireDefault(_possibleConstructorReturn2);
var _inherits2 = require('babel-runtime/helpers/inherits');
var _inherits3 = _interopRequireDefault(_inherits2);
var _react = require('react');
var _react2 = _interopRequireDefault(_react);
var _objectAssign = require('object-assign');
var _objectAssign2 = _interopRequireDefault(_objectAssign);
var _rcTable = require('rc-table');
var _rcTable2 = _interopRequireDefault(_rcTable);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
var __assign = undefined && undefined.__assign || Object.assign || function (t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) {
if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];
}
}
return t;
};
var Table = function (_React$Component) {
(0, _inherits3["default"])(Table, _React$Component);
function Table() {
(0, _classCallCheck3["default"])(this, Table);
return (0, _possibleConstructorReturn3["default"])(this, _React$Component.apply(this, arguments));
}
Table.prototype.render = function render() {
var _props = this.props,
columns = _props.columns,
dataSource = _props.dataSource,
direction = _props.direction,
scrollX = _props.scrollX,
titleFixed = _props.titleFixed;
var _props2 = this.props,
style = _props2.style,
className = _props2.className;
var restProps = (0, _objectAssign2["default"])({}, this.props);
['style', 'className'].forEach(function (prop) {
if (restProps.hasOwnProperty(prop)) {
delete restProps[prop];
}
});
var table = void 0;
    // Default orientation is vertical
if (!direction || direction === 'vertical') {
if (titleFixed) {
table = _react2["default"].createElement(_rcTable2["default"], __assign({}, restProps, { columns: columns, data: dataSource, className: "am-table", scroll: { x: true }, showHeader: false }));
} else {
table = _react2["default"].createElement(_rcTable2["default"], __assign({}, restProps, { columns: columns, data: dataSource, className: "am-table", scroll: { x: scrollX } }));
}
} else if (direction === 'horizon') {
columns[0].className = 'am-table-horizonTitle';
table = _react2["default"].createElement(_rcTable2["default"], __assign({}, restProps, { columns: columns, data: dataSource, className: "am-table", showHeader: false, scroll: { x: scrollX } }));
} else if (direction === 'mix') {
columns[0].className = 'am-table-horizonTitle';
table = _react2["default"].createElement(_rcTable2["default"], __assign({}, restProps, { columns: columns, data: dataSource, className: "am-table", scroll: { x: scrollX } }));
}
return _react2["default"].createElement("div", { className: className, style: style }, table);
};
return Table;
}(_react2["default"].Component);
exports["default"] = Table;
Table.defaultProps = {
dataSource: [],
prefixCls: 'am-table'
};
module.exports = exports['default'];
|
forwk1990/wechart-checkin
|
antd-mobile-custom/antd-mobile/lib/table/index.web.js
|
JavaScript
|
mit
| 3,649 |
using System.ComponentModel;
namespace NSysmon.Collector.HAProxy
{
/// <summary>
/// Current server statuses
/// </summary>
public enum ProxyServerStatus
{
[Description("Status Unknown!")]
None = 0, //Won't be populated for backends
[Description("Server is up, status normal.")]
ActiveUp = 2,
[Description("Server has not responded to checks in a timely manner, going down.")]
ActiveUpGoingDown = 8,
[Description("Server is responsive and recovering.")]
ActiveDownGoingUp = 6,
[Description("Backup server is up, status normal.")]
BackupUp = 3,
[Description("Backup server has not responded to checks in a timely manner, going down.")]
BackupUpGoingDown = 9,
[Description("Backup server is responsive and recovering.")]
BackupDownGoingUp = 7,
[Description("Server is not checked.")]
NotChecked = 4,
[Description("Server is down and receiving no requests.")]
Down = 10,
[Description("Server is in maintenance and receiving no requests.")]
Maintenance = 5,
[Description("Front end is open to receiving requests.")]
Open = 1
}
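    /// <summary>
    /// Extension helpers for <see cref="ProxyServerStatus"/>: ShortDescription()
    /// returns display text and IsBad() flags states that need attention.
    /// </summary>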
public static class ProxyServerStatusExtensions
{
public static string ShortDescription(this ProxyServerStatus status)
{
switch (status)
{
case ProxyServerStatus.ActiveUp:
return "Active";
case ProxyServerStatus.ActiveUpGoingDown:
return "Active (Up -> Down)";
case ProxyServerStatus.ActiveDownGoingUp:
return "Active (Down -> Up)";
case ProxyServerStatus.BackupUp:
return "Backup";
case ProxyServerStatus.BackupUpGoingDown:
return "Backup (Up -> Down)";
case ProxyServerStatus.BackupDownGoingUp:
return "Backup (Down -> Up)";
case ProxyServerStatus.NotChecked:
return "Not Checked";
case ProxyServerStatus.Down:
return "Down";
case ProxyServerStatus.Maintenance:
return "Maintenance";
case ProxyServerStatus.Open:
return "Open";
//case ProxyServerStatus.None:
default:
return "Unknown";
}
}
public static bool IsBad(this ProxyServerStatus status)
{
switch (status)
{
case ProxyServerStatus.ActiveUpGoingDown:
case ProxyServerStatus.BackupUpGoingDown:
case ProxyServerStatus.Down:
return true;
default:
return false;
}
}
}
}
|
clearwavebuild/nsysmon
|
NSysmon.Collector/HAProxy/ProxyServerStatus.cs
|
C#
|
mit
| 2,906 |
import datetime
from django.contrib.contenttypes.models import ContentType
from django.utils import timezone
from .models import Action
def create_action(user, verb, target=None):
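    """
    Record an Action for ``user``. To avoid duplicates, an identical action
    (same user and verb and, when given, the same target) created within the
    last 60 seconds suppresses the new one. Returns True if saved, else False.
    """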
now = timezone.now()
last_minute = now - datetime.timedelta(seconds=60)
similar_actions = Action.objects.filter(user_id=user.id, verb=verb, created__gte=last_minute)
if target:
target_ct = ContentType.objects.get_for_model(target)
        similar_actions = similar_actions.filter(target_ct=target_ct, target_id=target.id)
if not similar_actions:
action = Action(user=user, verb=verb, target=target)
action.save()
return True
return False
|
EssaAlshammri/django-by-example
|
bookmarks/bookmarks/actions/utils.py
|
Python
|
mit
| 679 |
package engine;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.Point;
public class CircleShape extends Shape {
double radius; //radius of shape
public CircleShape(double rad, Vector2D v, double r, double d, Color c) {
super(v, r, d, c);
radius = rad;
}
@Override
public void calculateInertia() {
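        // mass = circle area (pi * r^2) scaled by density
        // inertia is taken as mass * r^2 here (a uniform solid disk about its
        // centre would be 0.5 * mass * r^2)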
mass = radius * radius * Math.PI * density;
inertia = radius * radius * mass;
}
@Override
public void paint(Graphics2D g) {
super.paint(g);
vector.readyPoint();
g.fillOval((int) (x - radius), (int) (y - radius), (int) radius * 2, (int) radius * 2);
g.drawOval((int) (x - radius), (int) (y - radius), (int) radius * 2, (int) radius * 2);
g.setColor(Color.BLACK);
g.drawLine((int) (x), (int) (y), (int) (x + Math.cos(rotation) * radius), (int) (y + Math.sin(rotation) * radius));
}
@Override
public boolean contains(Point.Double p) {
return p.distanceSq(x, y) < radius * radius;
}
}
|
bjornenalfa/GA
|
src/engine/CircleShape.java
|
Java
|
mit
| 1,041 |
<?xml version="1.0" ?><!DOCTYPE TS><TS language="ms_MY" version="2.1">
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About DarkSwift</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+39"/>
<source><b>DarkSwift</b> version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Copyright © 2009-2014 The Bitcoin developers
Copyright © 2012-2014 The NovaCoin developers
Copyright © 2014 The DarkSwift developers</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young (eay@cryptsoft.com) and UPnP software written by Thomas Bernard.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Double-click to edit address or label</source>
<translation>Klik dua kali untuk mengubah alamat atau label</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>Cipta alamat baru</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Salin alamat terpilih ke dalam sistem papan klip</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-46"/>
<source>These are your DarkSwift addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<source>&Copy Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a DarkSwift address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Verify a message to ensure it was signed with a specified DarkSwift address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>&Padam</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+65"/>
<source>Copy &Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Edit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+250"/>
<source>Export Address Book Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Fail yang dipisahkan dengan koma</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Alamat</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+33"/>
<source>Serves to disable the trivial sendmoney when OS account compromised. Provides no real security.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>For staking only</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+35"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR COINS</b>!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+103"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-133"/>
<location line="+60"/>
<source>Wallet encrypted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-58"/>
<source>DarkSwift will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your coins from being stolen by malware infecting your computer.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+44"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-56"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<location line="+50"/>
<source>The supplied passphrases do not match.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-38"/>
<source>Wallet unlock failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+12"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+282"/>
<source>Sign &message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+251"/>
<source>Synchronizing with network...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-319"/>
<source>&Overview</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>&Transactions</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>&Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit the list of stored addresses and labels</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-13"/>
<source>&Receive coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show the list of addresses for receiving payments</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-7"/>
<source>&Send coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>E&xit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Show information about DarkSwift</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>Pilihan</translation>
</message>
<message>
<location line="+4"/>
<source>&Encrypt Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+259"/>
<source>~%n block(s) remaining</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Downloaded %1 of %2 blocks of transaction history (%3% done).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-256"/>
<source>&Export...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-64"/>
<source>Send coins to a DarkSwift address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+47"/>
<source>Modify configuration options for DarkSwift</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Encrypt or decrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup wallet to another location</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>&Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>&Verify message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-202"/>
<source>DarkSwift</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+180"/>
<source>&About DarkSwift</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Unlock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>&Lock Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Lock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>&File</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>&Settings</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>&Help</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Tabs toolbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Actions toolbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+9"/>
<source>[testnet]</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+60"/>
<source>DarkSwift client</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+75"/>
<source>%n active connection(s) to DarkSwift network</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+40"/>
<source>Downloaded %1 blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+413"/>
<source>Staking.<br>Your weight is %1<br>Network weight is %2<br>Expected time to earn reward is %3</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Not staking because wallet is locked</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is syncing</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because you don't have mature coins</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-403"/>
<source>%n second(s) ago</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="-312"/>
<source>About DarkSwift card</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show information about DarkSwift card</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>&Unlock Wallet...</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+297"/>
<source>%n minute(s) ago</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s) ago</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s) ago</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Up to date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Catching up...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Last received block was generated %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Sent transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Incoming transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<location line="+15"/>
<source>URI handling</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<location line="+15"/>
<source>URI can not be parsed! This can be caused by an invalid DarkSwift address or malformed URI parameters.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Backup Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+76"/>
<source>%n second(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n minute(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+18"/>
<source>Not staking</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoin.cpp" line="+109"/>
<source>A fatal error occurred. DarkSwift can no longer continue safely and will quit.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+90"/>
<source>Network Alert</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<location filename="../forms/coincontroldialog.ui" line="+14"/>
<source>Coin Control</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="+551"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="+51"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+69"/>
<source>(un)select all</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Tree mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>List mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+45"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Address</source>
<translation>Alamat</translation>
</message>
<message>
<location line="+5"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Priority</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="-515"/>
<source>Copy address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+26"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+317"/>
<source>highest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium-high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>low-medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>low</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>lowest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+155"/>
<source>DUST</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>yes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>This label turns red, if the transaction size is bigger than 10000 bytes.
This means a fee of at least %1 per kb is required.
Can vary +/- 1 Byte per input.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transactions with higher priority get more likely into a block.
This label turns red, if the priority is smaller than "medium".
This means a fee of at least %1 per kb is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if any recipient receives an amount smaller than %1.
This means a fee of at least %2 is required.
Amounts below 0.546 times the minimum relay fee are shown as DUST.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if the change is smaller than %1.
This means a fee of at least %2 is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+66"/>
<source>(no label)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<source>change from %1 (%2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>(change)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
        <translation>Edit Alamat</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
        <translation>&Alamat</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+20"/>
<source>New receiving address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid DarkSwift address.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+420"/>
<location line="+12"/>
<source>DarkSwift-Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Reserved amount does not participate in staking and is therefore spendable at any time.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Reserve</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Automatically start DarkSwift after logging in to the system.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Start DarkSwift on system login</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Detach block and address databases at shutdown. This means they can be moved to another data directory, but it slows down shutdown. The wallet is always detached.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Detach databases at shutdown</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Automatically open the DarkSwift client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Connect to the DarkSwift network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting DarkSwift.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Whether to show DarkSwift addresses in the transaction list or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Whether to show coin control features or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Display coin &control features (experts only!)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+55"/>
<source>default</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+149"/>
<location line="+9"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting DarkSwift.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+33"/>
<location line="+231"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the DarkSwift network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-160"/>
<source>Stake:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-107"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Spendable:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current spendable balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>Immature:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Total:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current total balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation type="unfinished"/>
</message>
<message>
<location line="-108"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Total of coins that was staked, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="+113"/>
<location line="+1"/>
<source>out of sync</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+348"/>
<source>N/A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Show the DarkSwift-Qt help message to get a list with possible DarkSwift command-line options.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-104"/>
<source>DarkSwift - Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>DarkSwift Core</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Open the DarkSwift debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-33"/>
<source>Welcome to the DarkSwift RPC console.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+182"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>Coin Control Features</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Inputs...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>automatically selected</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Insufficient funds!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<location line="+35"/>
<source>0</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-19"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<location line="+86"/>
<location line="+86"/>
<location line="+32"/>
<source>0.00 BOST</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-191"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+50"/>
<source>custom change address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+106"/>
<source>Send to multiple recipients at once</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Balance:</source>
        <translation>Baki:</translation>
</message>
<message>
<location line="+16"/>
<source>123.456 BOST</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-173"/>
<source>Enter a DarkSwift address (e.g. Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+86"/>
<source><b>%1</b> to %2 (%3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The recipient address is not valid, please recheck.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+251"/>
<source>WARNING: Invalid DarkSwift address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>(no label)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>WARNING: unknown change address</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<location filename="../sendcoinsentry.cpp" line="+25"/>
<source>Enter a label for this address to add it to your address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>The address to send the payment to (e.g. Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Choose address from address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a DarkSwift address (e.g. Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+124"/>
<source>&Sign Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-118"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+203"/>
<source>Choose an address from the address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-193"/>
<location line="+203"/>
<source>Alt+A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-193"/>
<source>Paste address from clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Copy the current signature to the system clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this DarkSwift address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all sign message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<location line="+70"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-64"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified DarkSwift address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all verify message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a DarkSwift address (e.g. Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Enter DarkSwift signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+19"/>
<source>Open until %1</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-2"/>
<source>Open for %n block(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+8"/>
<source>conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 510 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Inputs</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-211"/>
<source>, has not been successfully broadcast yet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>unknown</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+226"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Alamat</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<source>Open until %1</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Confirmed (%1 confirmations)</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-15"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirming (%1 of %2 recommended confirmations)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Immature (%1 confirmations, will be available after %2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<source>Received with</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+190"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+55"/>
<location line="+16"/>
<source>All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+144"/>
<source>Export Transaction Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
        <translation>Fail yang dipisahkan dengan koma (*.csv)</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>Alamat</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+206"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+33"/>
<source>DarkSwift version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send command to -server or DarkSwiftd</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>List commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Get help for a command</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify configuration file (default: DarkSwift.conf)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify pid file (default: DarkSwiftd.pid)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify wallet file (within data directory)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set database disk log size in megabytes (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Listen for connections on <port> (default: 15714 or testnet: 25714)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify your own public address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Bind to given address. Use [host]:port notation for IPv6</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Stake your coins to support network and gain reward (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Detach block and address databases. Increases shutdown time (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+109"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<source>Listen for JSON-RPC connections on <port> (default: 15715 or testnet: 25715)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-11"/>
<source>Accept command line and JSON-RPC commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+101"/>
<source>Error: Transaction creation failed </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: Wallet locked, unable to create transaction </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-8"/>
<source>Importing blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Importing bootstrap blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-88"/>
<source>Run in the background as a daemon and accept commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use the test network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-24"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-38"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+117"/>
<source>Error initializing database environment %s! To recover, BACKUP THAT DIRECTORY, then remove everything from it except for wallet.dat.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-20"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+61"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong DarkSwift will not work properly.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-31"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-18"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-30"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Block creation options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-62"/>
<source>Connect only to the specified node(s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+94"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-90"/>
<source>Find peers using DNS lookup (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync checkpoints policy (default: strict)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+83"/>
<source>Invalid -tor address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Invalid amount for -reservebalance=<amount></source>
<translation type="unfinished"/>
</message>
<message>
<location line="-82"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-16"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Prepend debug output with timestamp</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-74"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-42"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+109"/>
<source>Unable to sign checkpoint, wrong checkpointkey?
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-80"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<source>Username for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+47"/>
<source>Verifying database integrity...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+57"/>
<source>WARNING: syncronized checkpoint violation detected, but skipped!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: Disk space is low!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-48"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-54"/>
<source>Password for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-84"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=DarkSwiftrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "DarkSwift Alert" admin@foo.com
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Find peers using internet relay chat (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync time with other nodes. Disable if time on your system is precise e.g. syncing with NTP (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>When creating transactions, ignore inputs with value less than this (default: 0.01)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Require a confirmations for change (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Enforce transaction scripts to use canonical PUSH operators (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Upgrade wallet to latest format</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 2500, 0 = all)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-6, default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Imports blocks from external blk000?.dat file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server certificate file (default: server.cert)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+53"/>
<source>Error: Wallet unlocked for staking only, unable to create transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>WARNING: Invalid checkpoint found! Displayed transactions may not be correct! You may need to upgrade, or notify developers.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-158"/>
<source>This help message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+95"/>
<source>Wallet %s resides outside data directory %s.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot obtain a lock on data directory %s. DarkSwift is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-98"/>
<source>DarkSwift</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+140"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-130"/>
<source>Connect through socks proxy</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+122"/>
<source>Loading addresses...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<source>Error loading blkindex.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error loading wallet.dat: Wallet requires newer version of DarkSwift</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Wallet needed to be rewritten: restart DarkSwift to complete</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-16"/>
<source>Invalid -proxy address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-24"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<source>Error: could not start node</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Invalid amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Insufficient funds</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-34"/>
<source>Loading block index...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-103"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+122"/>
<source>Unable to bind to %s on this computer. DarkSwift is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-97"/>
<source>Fee per KB to add to transactions you send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+55"/>
<source>Invalid amount for -mininput=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Loading wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Cannot downgrade wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot initialize keypool</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot write default address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Rescanning...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Done loading</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-167"/>
<source>To use the %s option</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Error</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation type="unfinished"/>
</message>
</context>
</TS>
|
DarkSwift/DarkSwift
|
src/qt/locale/bitcoin_ms_MY.ts
|
TypeScript
|
mit
| 107,393 |
# -*- coding: utf-8 -*-
#
# RedPipe documentation build configuration file, created by
# sphinx-quickstart on Wed Apr 19 13:22:45 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
import os
import sys
from os import path
ROOTDIR = path.abspath(os.path.dirname(os.path.dirname(__file__)))
sys.path.insert(0, ROOTDIR)
import redpipe # noqa
extensions = [
'alabaster',
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
'sphinx.ext.napoleon',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffixes as a list of strings:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'RedPipe'
copyright = u'2017, John Loehrer'
author = u'John Loehrer'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = redpipe.__version__
# The full version, including alpha/beta/rc tags.
release = redpipe.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
'logo': 'redpipe-logo.gif',
'github_banner': True,
'github_user': '72squared',
'github_repo': 'redpipe',
'travis_button': True,
'analytics_id': 'UA-98626018-1',
}
html_sidebars = {
'**': [
'about.html',
'navigation.html',
'relations.html',
'searchbox.html',
]
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'RedPipedoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'RedPipe.tex', u'%s Documentation' % project,
u'John Loehrer', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, project, u'%s Documentation' % project,
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, project, u'%s Documentation' % project,
author, project, 'making redis pipelines easy in python',
'Miscellaneous'),
]
suppress_warnings = ['image.nonlocal_uri']
|
72squared/redpipe
|
docs/conf.py
|
Python
|
mit
| 5,400 |
import {
assign,
forEach,
isArray
} from 'min-dash';
var abs = Math.abs,
round = Math.round;
var TOLERANCE = 10;
export default function BendpointSnapping(eventBus) {
function snapTo(values, value) {
if (isArray(values)) {
var i = values.length;
while (i--) if (abs(values[i] - value) <= TOLERANCE) {
return values[i];
}
} else {
values = +values;
var rem = value % values;
if (rem < TOLERANCE) {
return value - rem;
}
if (rem > values - TOLERANCE) {
return value - rem + values;
}
}
return value;
}
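  // For example: snapTo([ 100, 200 ], 105) === 100 (within TOLERANCE of a candidate),
  // snapTo([ 100, 200 ], 150) === 150 (no candidate close enough, value unchanged);
  // with a single number used as a grid size, snapTo(100, 95) === 100.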
function mid(element) {
if (element.width) {
return {
x: round(element.width / 2 + element.x),
y: round(element.height / 2 + element.y)
};
}
}
// connection segment snapping //////////////////////
function getConnectionSegmentSnaps(context) {
var snapPoints = context.snapPoints,
connection = context.connection,
waypoints = connection.waypoints,
segmentStart = context.segmentStart,
segmentStartIndex = context.segmentStartIndex,
segmentEnd = context.segmentEnd,
segmentEndIndex = context.segmentEndIndex,
axis = context.axis;
if (snapPoints) {
return snapPoints;
}
var referenceWaypoints = [
waypoints[segmentStartIndex - 1],
segmentStart,
segmentEnd,
waypoints[segmentEndIndex + 1]
];
if (segmentStartIndex < 2) {
referenceWaypoints.unshift(mid(connection.source));
}
if (segmentEndIndex > waypoints.length - 3) {
referenceWaypoints.unshift(mid(connection.target));
}
context.snapPoints = snapPoints = { horizontal: [] , vertical: [] };
forEach(referenceWaypoints, function(p) {
// we snap on existing bendpoints only,
// not placeholders that are inserted during add
if (p) {
p = p.original || p;
if (axis === 'y') {
snapPoints.horizontal.push(p.y);
}
if (axis === 'x') {
snapPoints.vertical.push(p.x);
}
}
});
return snapPoints;
}
eventBus.on('connectionSegment.move.move', 1500, function(event) {
var context = event.context,
snapPoints = getConnectionSegmentSnaps(context),
x = event.x,
y = event.y,
sx, sy;
if (!snapPoints) {
return;
}
// snap
sx = snapTo(snapPoints.vertical, x);
sy = snapTo(snapPoints.horizontal, y);
// correction x/y
var cx = (x - sx),
cy = (y - sy);
// update delta
assign(event, {
dx: event.dx - cx,
dy: event.dy - cy,
x: sx,
y: sy
});
});
// bendpoint snapping //////////////////////
function getBendpointSnaps(context) {
var snapPoints = context.snapPoints,
waypoints = context.connection.waypoints,
bendpointIndex = context.bendpointIndex;
if (snapPoints) {
return snapPoints;
}
var referenceWaypoints = [ waypoints[bendpointIndex - 1], waypoints[bendpointIndex + 1] ];
context.snapPoints = snapPoints = { horizontal: [] , vertical: [] };
forEach(referenceWaypoints, function(p) {
// we snap on existing bendpoints only,
// not placeholders that are inserted during add
if (p) {
p = p.original || p;
snapPoints.horizontal.push(p.y);
snapPoints.vertical.push(p.x);
}
});
return snapPoints;
}
eventBus.on('bendpoint.move.move', 1500, function(event) {
var context = event.context,
snapPoints = getBendpointSnaps(context),
target = context.target,
targetMid = target && mid(target),
x = event.x,
y = event.y,
sx, sy;
if (!snapPoints) {
return;
}
// snap
sx = snapTo(targetMid ? snapPoints.vertical.concat([ targetMid.x ]) : snapPoints.vertical, x);
sy = snapTo(targetMid ? snapPoints.horizontal.concat([ targetMid.y ]) : snapPoints.horizontal, y);
// correction x/y
var cx = (x - sx),
cy = (y - sy);
// update delta
assign(event, {
dx: event.dx - cx,
dy: event.dy - cy,
x: event.x - cx,
y: event.y - cy
});
});
}
BendpointSnapping.$inject = [ 'eventBus' ];
|
pedesen/diagram-js
|
lib/features/bendpoints/BendpointSnapping.js
|
JavaScript
|
mit
| 4,283 |
var changeSpan;
var nIntervId;
var i = 0;
var hobbies = [
'Music',
'HTML5',
'Learning',
'Exploring',
'Art',
'Teaching',
'Virtual Reality',
'The Cosmos',
'Unity3D',
'Tilemaps',
'Reading',
'Butterscotch',
'Drawing',
'Taking Photos',
'Smiles',
'The Poetics of Space',
'Making Sounds',
'Board games',
'Travelling',
'Sweetened condensed milk'
];
function changeWord() {
changeSpan.textContent = hobbies[i];
i++;
if (i >= hobbies.length) i = 0;
}
function init() {
console.log('initialising scrolling text');
changeSpan = document.getElementById("scrollingText");
nIntervId = setInterval(changeWord, 950);
changeWord();
}
init();
|
oddgoo/oddgoo.com
|
static/js/hobbies.js
|
JavaScript
|
mit
| 725 |
<?php
namespace App\Http\ViewComposers;
use App\Models\Character;
use App\Models\Message;
use Illuminate\Database\Eloquent\Builder;
use Illuminate\Support\Arr;
use Illuminate\Support\Facades\Auth;
use Illuminate\View\View;
class CharacterMessagesComposer
{
/**
* Bind data to the view.
*
* @param View $view
* @return void
*/
public function compose(View $view)
{
$data = $view->getData();
/** @var Character $currentCharacter */
/** @var Character $otherCharacter */
$currentCharacter = Auth::user()->character;
$otherCharacter = Arr::get($data, 'character');
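        // Fetch the conversation in both directions:
        // (to = current AND from = other) OR (to = other AND from = current),
        // newest first, paginated 5 per page.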
$messages = Message::query()->where(function (Builder $query) use ($currentCharacter, $otherCharacter) {
$query->where([
'to_id' => $currentCharacter->id,
'from_id' => $otherCharacter->id,
]);
})->orWhere(function (Builder $query) use ($currentCharacter, $otherCharacter) {
$query->where([
'to_id' => $otherCharacter->id,
'from_id' => $currentCharacter->id,
]);
})->orderByDesc('created_at')->paginate(5);
$otherCharacter->sentMessages()->whereIn('id', $messages->pluck('id'))->markAsRead();
$contentLimit = Message::CONTENT_LIMIT;
$view->with(compact('messages', 'currentCharacter', 'otherCharacter', 'contentLimit'));
}
}
|
mchekin/rpg
|
app/Http/ViewComposers/CharacterMessagesComposer.php
|
PHP
|
mit
| 1,435 |
from src.tools.dictionaries import PostLoadedDict
# Utility class
################################################
class ServerImplementationDict(PostLoadedDict):
def __missing__(self, key):
try:
return super().__missing__(key)
except KeyError:
return NotImplemented
################################################
class Server():
def __init__(self, shortname, loader):
# Not preloaded
# loaders must produce dictionaries (or an appropriate iterable)
# with the required keys.
# The reason for this is that code for certain servers need not be loaded
# if it's not going to be used at all
# It also prevents import loop collisions.
global __ServerImplementationDict
self.__data = ServerImplementationDict(loader)
self.__shortname = shortname
@property
def shortname(self):
# This is the only property provided from above
return self.__shortname
def __str__(self):
return str(self.__shortname)
# All other properties must come from canonical sources
# provided by the server loader
# CONSTANTS (STRINGS, BOOLEANS, INTS, ETC.)
@property
def name(self):
return self.__data['str_name']
@property
def internal_shortname(self):
return self.__data['str_shortname']
@property
def beta(self):
return self.__data['bool_tester']
# CLASSES
# 1- Credentials:
@property
def Auth(self): # I really don't know how to call this.
return self.__data['cls_auth']
@property
def auth_fields(self):
return self.__data['list_authkeys']
# 2- Server Elements:
@property
def Player(self):
return self.__data['cls_player']
@property
def Tournament(self):
return self.__data['cls_tournament']
|
juanchodepisa/sbtk
|
SBTK_League_Helper/src/interfacing/servers.py
|
Python
|
mit
| 1,946 |
import {
GraphQLInputObjectType,
GraphQLID,
GraphQLList,
GraphQLBoolean,
} from 'graphql';
import RecipientTypeEnum from './RecipientTypeEnum';
import MessageTypeEnum from './MessageTypeEnum';
import NoteInputType from './NoteInputType';
import TranslationInputType from './TranslationInputType';
import CommunicationInputType from './CommunicationInputType';
const MessageInputType = new GraphQLInputObjectType({
name: 'MessageInput',
fields: {
parentId: {
type: GraphQLID,
},
note: {
type: NoteInputType,
},
communication: {
type: CommunicationInputType,
},
subject: {
type: TranslationInputType,
},
enforceEmail: {
type: GraphQLBoolean,
},
isDraft: {
type: GraphQLBoolean,
},
recipients: {
type: new GraphQLList(GraphQLID),
},
recipientType: { type: RecipientTypeEnum },
messageType: { type: MessageTypeEnum },
},
});
export default MessageInputType;
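// Illustrative only — a value for this input might look like the following
// (field values are hypothetical; enum values come from RecipientTypeEnum /
// MessageTypeEnum and the nested input types above):
//   { parentId: "42", isDraft: false, recipients: ["1", "2"], subject: { /* TranslationInput */ } }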
|
nambawan/g-old
|
src/data/types/MessageInputType.js
|
JavaScript
|
mit
| 979 |
require "rubygems"
require 'active_support'
require "ruby-debug"
gem 'test-unit'
require "test/unit"
require 'active_support'
require 'active_support/test_case'
require 'shoulda'
require 'rr'
require File.dirname(__FILE__) + '/../lib/ubiquitously'
Ubiquitously.configure("test/config/secrets.yml")
Passport.configure("test/config/tokens.yml")
ActiveSupport::TestCase.class_eval do
def create_user(options)
@user = Ubiquitously::User.new(options.merge(:username => "viatropos"))
end
end
|
lancejpollard/ubiquitously
|
test/test_helper.rb
|
Ruby
|
mit
| 497 |
#include <boost/lexical_cast.hpp>
#include <disccord/models/user.hpp>
namespace disccord
{
namespace models
{
user::user()
: username(""), avatar(), email(), discriminator(0),
bot(false), mfa_enabled(), verified()
{ }
user::~user()
{ }
void user::decode(web::json::value json)
{
entity::decode(json);
username = json.at("username").as_string();
// HACK: use boost::lexical_cast here since it safely
// validates values
auto str_js = json.at("discriminator");
discriminator = boost::lexical_cast<uint16_t>(str_js.as_string());
#define get_field(var, conv) \
if (json.has_field(#var)) { \
auto field = json.at(#var); \
if (!field.is_null()) { \
var = decltype(var)(field.conv()); \
} else { \
var = decltype(var)::no_value(); \
} \
} else { \
var = decltype(var)(); \
}
get_field(avatar, as_string);
bot = json.at("bot").as_bool();
//get_field(bot, as_bool);
get_field(mfa_enabled, as_bool);
get_field(verified, as_bool);
get_field(email, as_string);
#undef get_field
}
void user::encode_to(std::unordered_map<std::string,
web::json::value> &info)
{
entity::encode_to(info);
info["username"] = web::json::value(get_username());
info["discriminator"] =
web::json::value(std::to_string(get_discriminator()));
if (get_avatar().is_specified())
info["avatar"] = get_avatar();
info["bot"] = web::json::value(get_bot());
if (get_mfa_enabled().is_specified())
info["mfa_enabled"] = get_mfa_enabled();
if (get_verified().is_specified())
info["verified"] = get_verified();
if (get_email().is_specified())
info["email"] = get_email();
}
#define define_get_method(field_name) \
decltype(user::field_name) user::get_##field_name() { \
return field_name; \
}
define_get_method(username)
define_get_method(discriminator)
define_get_method(avatar)
define_get_method(bot)
define_get_method(mfa_enabled)
define_get_method(verified)
define_get_method(email)
util::optional<std::string> user::get_avatar_url()
{
if (get_avatar().is_specified())
{
std::string url = "https://cdn.discordapp.com/avatars/" +
std::to_string(get_id()) + "/" +
get_avatar().get_value()+".png?size=1024";
return util::optional<std::string>(url);
}
else
return util::optional<std::string>::no_value();
}
#undef define_get_method
}
}
|
FiniteReality/disccord
|
lib/models/user.cpp
|
C++
|
mit
| 3,213 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using ASPPatterns.Chap7.Library.Services.Views;
namespace ASPPatterns.Chap7.Library.Services.Messages
{
public class FindMembersResponse : ResponseBase
{
public IEnumerable<MemberView> MembersFound { get; set; }
}
}
|
liqipeng/helloGithub
|
Book-Code/ASP.NET Design Pattern/ASPPatternsc07/ASPPatterns.Chap7.Library/ASPPatterns.Chap7.Library.Services/Messages/FindMembersResponse.cs
|
C#
|
mit
| 327 |
let _ = require('underscore'),
React = require('react');
class Icon extends React.Component {
render() {
let className = "icon " + this.props.icon;
        // pass all props except `icon` through to the underlying span
        let other = _.omit(this.props, "icon");
return (
<span className={className} role="img" {...other}></span>
);
}
}
Icon.propTypes = {
icon: React.PropTypes.string.isRequired
};
module.exports = Icon;
|
legendary-code/chaos-studio-web
|
app/src/js/components/Icon.js
|
JavaScript
|
mit
| 433 |
using System.IO;
namespace Mandro.Utils.Setup
{
public class DirectoryHelper
{
public DirectoryHelper()
{
}
public static void CopyDirectory(string sourceDirName, string destDirName, bool copySubDirs)
{
// Get the subdirectories for the specified directory.
DirectoryInfo dir = new DirectoryInfo(sourceDirName);
DirectoryInfo[] dirs = dir.GetDirectories();
if (!dir.Exists)
{
throw new DirectoryNotFoundException(
"Source directory does not exist or could not be found: "
+ sourceDirName);
}
// If the destination directory doesn't exist, create it.
if (!Directory.Exists(destDirName))
{
Directory.CreateDirectory(destDirName);
}
// Get the files in the directory and copy them to the new location.
FileInfo[] files = dir.GetFiles();
foreach (FileInfo file in files)
{
string temppath = Path.Combine(destDirName, file.Name);
file.CopyTo(temppath, true);
}
// If copying subdirectories, copy them and their contents to new location.
if (copySubDirs)
{
foreach (DirectoryInfo subdir in dirs)
{
string temppath = Path.Combine(destDirName, subdir.Name);
CopyDirectory(subdir.FullName, temppath, copySubDirs);
}
}
}
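        // Example usage (illustrative paths only):
        //   DirectoryHelper.CopyDirectory(@"C:\source", @"C:\backup\source", copySubDirs: true);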
}
}
|
mandrek44/Mandro.Utils
|
Mandro.Utils/Setup/DirectoryHelper.cs
|
C#
|
mit
| 1,605 |
import { NotificationType } from 'vscode-languageclient'
export enum Status {
ok = 1,
warn = 2,
error = 3
}
export interface StatusParams {
state: Status
}
export const type = new NotificationType<StatusParams>('standard/status')
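// Illustrative usage (not defined in this file): a server would publish the status with
//   connection.sendNotification(type, { state: Status.ok })
// and the client would subscribe via languageClient.onNotification(type, handler).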
|
chenxsan/vscode-standardjs
|
client/src/utils/StatusNotification.ts
|
TypeScript
|
mit
| 241 |
import React from 'react';
import IconBase from './../components/IconBase/IconBase';
export default class ChevronDown extends React.Component {
render() {
if(this.props.bare) {
return <g>
<path d="M256,298.3L256,298.3L256,298.3l174.2-167.2c4.3-4.2,11.4-4.1,15.8,0.2l30.6,29.9c4.4,4.3,4.5,11.3,0.2,15.5L264.1,380.9
c-2.2,2.2-5.2,3.2-8.1,3c-3,0.1-5.9-0.9-8.1-3L35.2,176.7c-4.3-4.2-4.2-11.2,0.2-15.5L66,131.3c4.4-4.3,11.5-4.4,15.8-0.2L256,298.3
z"></path>
</g>;
        }
        return <IconBase>
<path d="M256,298.3L256,298.3L256,298.3l174.2-167.2c4.3-4.2,11.4-4.1,15.8,0.2l30.6,29.9c4.4,4.3,4.5,11.3,0.2,15.5L264.1,380.9
c-2.2,2.2-5.2,3.2-8.1,3c-3,0.1-5.9-0.9-8.1-3L35.2,176.7c-4.3-4.2-4.2-11.2,0.2-15.5L66,131.3c4.4-4.3,11.5-4.4,15.8-0.2L256,298.3
z"></path>
</IconBase>;
}
}

ChevronDown.defaultProps = { bare: false };
|
fbfeix/react-icons
|
src/icons/ChevronDown.js
|
JavaScript
|
mit
| 819 |
const CaseSensitivePathsPlugin = require('case-sensitive-paths-webpack-plugin');
const HtmlWebpackPlugin = require('html-webpack-plugin');
const WatchMissingNodeModulesPlugin = require('react-dev-utils/WatchMissingNodeModulesPlugin');
const webpack = require('webpack');
const paths = require('./tools/paths');
const env = {
'process.env.NODE_ENV': JSON.stringify('development')
};
module.exports = {
devtool: 'cheap-module-eval-source-map',
entry: [
require.resolve('./tools/polyfills'),
'webpack-dev-server/client?http://localhost:3000',
'webpack/hot/only-dev-server',
'react-hot-loader/patch',
'./src/index'
],
output: {
filename: 'static/js/bundle.js',
path: paths.appDist,
pathinfo: true,
publicPath: '/'
},
module: {
rules: [
// Default loader: load all assets that are not handled
// by other loaders with the url loader.
// Note: This list needs to be updated with every change of extensions
// the other loaders match.
// E.g., when adding a loader for a new supported file extension,
// we need to add the supported extension to this loader too.
// Add one new line in `exclude` for each loader.
//
// "file" loader makes sure those assets get served by WebpackDevServer.
// When you `import` an asset, you get its (virtual) filename.
// In production, they would get copied to the `dist` folder.
// "url" loader works like "file" loader except that it embeds assets
// smaller than specified limit in bytes as data URLs to avoid requests.
// A missing `test` is equivalent to a match.
{
exclude: [
/\.html$/,
/\.js$/,
/\.scss$/,
/\.json$/,
/\.svg$/,
/node_modules/
],
use: [{
loader: 'url-loader',
options: {
limit: 10000,
name: 'static/media/[name].[hash:8].[ext]'
}
}]
},
{
test: /\.js$/,
enforce: 'pre',
include: paths.appSrc,
use: [{
loader: 'xo-loader',
options: {
// This loader must ALWAYS return warnings during development. If
// errors are emitted, no changes will be pushed to the browser for
// testing until the errors have been resolved.
emitWarning: true
}
}]
},
{
test: /\.js$/,
include: paths.appSrc,
use: [{
loader: 'babel-loader',
options: {
// This is a feature of `babel-loader` for webpack (not Babel itself).
// It enables caching results in ./node_modules/.cache/babel-loader/
// directory for faster rebuilds.
cacheDirectory: true
}
}]
},
{
test: /\.scss$/,
use: [
'style-loader',
{
loader: 'css-loader',
options: {
importLoaders: 2
}
},
'postcss-loader',
'sass-loader'
]
},
{
test: /\.svg$/,
use: [{
loader: 'file-loader',
options: {
name: 'static/media/[name].[hash:8].[ext]'
}
}]
}
]
},
plugins: [
new HtmlWebpackPlugin({
inject: true,
template: paths.appHtml
}),
new webpack.DefinePlugin(env),
new webpack.HotModuleReplacementPlugin(),
new webpack.NoEmitOnErrorsPlugin(),
// Watcher doesn't work well if you mistype casing in a path so we use
// a plugin that prints an error when you attempt to do this.
// See https://github.com/facebookincubator/create-react-app/issues/240
new CaseSensitivePathsPlugin(),
// If you require a missing module and then `npm install` it, you still have
// to restart the development server for Webpack to discover it. This plugin
// makes the discovery automatic so you don't have to restart.
// See https://github.com/facebookincubator/create-react-app/issues/186
new WatchMissingNodeModulesPlugin(paths.appNodeModules)
],
// Some libraries import Node modules but don't use them in the browser.
// Tell Webpack to provide empty mocks for them so importing them works.
node: {
fs: 'empty',
net: 'empty',
tls: 'empty'
}
};
|
hn3etta/VS2015-React-Redux-Webpack-Front-end-example
|
webpack.config.js
|
JavaScript
|
mit
| 3,928 |
<?php
namespace BackOfficeBundle\Entity;
use Doctrine\ORM\EntityRepository;
/**
* PosteCollaborateurRepository
*
* This class was generated by the Doctrine ORM. Add your own custom
* repository methods below.
*/
class PosteCollaborateurRepository extends EntityRepository
{
}
|
elmabdgrub/azplatform
|
src/BackOfficeBundle/Entity/PosteCollaborateurRepository.php
|
PHP
|
mit
| 284 |
// ==========================================================================
// snd_app
// ==========================================================================
// Copyright (c) 2006-2012, Knut Reinert, FU Berlin
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of Knut Reinert or the FU Berlin nor the names of
// its contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL KNUT REINERT OR THE FU BERLIN BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
// OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
// DAMAGE.
//
// ==========================================================================
// Author: Your Name <your.email@example.net>
// ==========================================================================
#include <seqan/basic.h>
#include <seqan/sequence.h>
#include <seqan/arg_parse.h>
// ==========================================================================
// Classes
// ==========================================================================
// --------------------------------------------------------------------------
// Class AppOptions
// --------------------------------------------------------------------------
// This struct stores the options from the command line.
//
// You might want to rename this to reflect the name of your app.
struct AppOptions
{
// Verbosity level. 0 -- quiet, 1 -- normal, 2 -- verbose, 3 -- very verbose.
int verbosity;
// The first (and only) argument of the program is stored here.
seqan::CharString text;
AppOptions() :
verbosity(1)
{}
};
// ==========================================================================
// Functions
// ==========================================================================
// --------------------------------------------------------------------------
// Function parseCommandLine()
// --------------------------------------------------------------------------
seqan::ArgumentParser::ParseResult
parseCommandLine(AppOptions & options, int argc, char const ** argv)
{
// Setup ArgumentParser.
seqan::ArgumentParser parser("snd_app");
// Set short description, version, and date.
setShortDescription(parser, "Put a Short Description Here");
setVersion(parser, "0.1");
setDate(parser, "July 2012");
// Define usage line and long description.
addUsageLine(parser, "[\\fIOPTIONS\\fP] \"\\fITEXT\\fP\"");
addDescription(parser, "This is the application skelleton and you should modify this string.");
// We require one argument.
addArgument(parser, seqan::ArgParseArgument(seqan::ArgParseArgument::STRING, "TEXT"));
addOption(parser, seqan::ArgParseOption("q", "quiet", "Set verbosity to a minimum."));
addOption(parser, seqan::ArgParseOption("v", "verbose", "Enable verbose output."));
addOption(parser, seqan::ArgParseOption("vv", "very-verbose", "Enable very verbose output."));
// Add Examples Section.
addTextSection(parser, "Examples");
addListItem(parser, "\\fBsnd_app\\fP \\fB-v\\fP \\fItext\\fP",
"Call with \\fITEXT\\fP set to \"text\" with verbose output.");
// Parse command line.
seqan::ArgumentParser::ParseResult res = seqan::parse(parser, argc, argv);
// Only extract options if the program will continue after parseCommandLine()
if (res != seqan::ArgumentParser::PARSE_OK)
return res;
// Extract option values.
if (isSet(parser, "quiet"))
options.verbosity = 0;
if (isSet(parser, "verbose"))
options.verbosity = 2;
if (isSet(parser, "very-verbose"))
options.verbosity = 3;
seqan::getArgumentValue(options.text, parser, 0);
return seqan::ArgumentParser::PARSE_OK;
}
// --------------------------------------------------------------------------
// Function main()
// --------------------------------------------------------------------------
// Program entry point.
int main(int argc, char const ** argv)
{
// Parse the command line.
seqan::ArgumentParser parser;
AppOptions options;
seqan::ArgumentParser::ParseResult res = parseCommandLine(options, argc, argv);
// If there was an error parsing or built-in argument parser functionality
// was triggered then we exit the program. The return code is 1 if there
// were errors and 0 if there were none.
if (res != seqan::ArgumentParser::PARSE_OK)
return res == seqan::ArgumentParser::PARSE_ERROR;
std::cout << "EXAMPLE PROGRAM\n"
<< "===============\n\n";
// Print the command line arguments back to the user.
if (options.verbosity > 0)
{
std::cout << "__OPTIONS____________________________________________________________________\n"
<< '\n'
<< "VERBOSITY\t" << options.verbosity << '\n'
<< "TEXT \t" << options.text << "\n\n";
}
return 0;
}
|
bkahlert/seqan-research
|
raw/pmsb13/pmsb13-data-20130530/sources/130wu1dgzoqmxyu2/2013-04-09T10-23-18.897+0200/sandbox/my_sandbox/apps/snd_app/snd_app.cpp
|
C++
|
mit
| 6,165 |
<?php
/**
* @file
* Contains \Drupal\shortcut\ShortcutSetStorageControllerInterface.
*/
namespace Drupal\shortcut;
use Drupal\Core\Entity\EntityStorageControllerInterface;
use Drupal\shortcut\ShortcutSetInterface;
/**
* Defines a common interface for shortcut entity controller classes.
*/
interface ShortcutSetStorageControllerInterface extends EntityStorageControllerInterface {
/**
* Assigns a user to a particular shortcut set.
*
* @param \Drupal\shortcut\ShortcutSetInterface $shortcut_set
* An object representing the shortcut set.
* @param $account
* A user account that will be assigned to use the set.
*/
public function assignUser(ShortcutSetInterface $shortcut_set, $account);
/**
* Unassigns a user from any shortcut set they may have been assigned to.
*
* The user will go back to using whatever default set applies.
*
* @param $account
* A user account that will be removed from the shortcut set assignment.
*
* @return bool
* TRUE if the user was previously assigned to a shortcut set and has been
* successfully removed from it. FALSE if the user was already not assigned
* to any set.
*/
public function unassignUser($account);
/**
* Delete shortcut sets assigned to users.
*
* @param \Drupal\shortcut\ShortcutSetInterface $entity
* Delete the user assigned sets belonging to this shortcut.
*/
public function deleteAssignedShortcutSets(ShortcutSetInterface $entity);
/**
* Get the name of the set assigned to this user.
*
* @param \Drupal\user\Plugin\Core\Entity\User
* The user account.
*
* @return string
* The name of the shortcut set assigned to this user.
*/
public function getAssignedToUser($account);
/**
* Get the number of users who have this set assigned to them.
*
* @param \Drupal\shortcut\ShortcutSetInterface $shortcut_set
* The shortcut to count the users assigned to.
*
* @return int
* The number of users who have this set assigned to them.
*/
public function countAssignedUsers(ShortcutSetInterface $shortcut_set);
}
|
augustash/d8.dev
|
core/modules/shortcut/lib/Drupal/shortcut/ShortcutSetStorageControllerInterface.php
|
PHP
|
mit
| 2,130 |
using System;
using System.Collections.Generic;
using System.Reactive.Linq;
using System.Reactive.Threading.Tasks;
using MS.Core;
namespace System.Runtime.Remoting.Contexts
{
public static class __ContextAttribute
{
public static IObservable<System.Boolean> IsNewContextOK(
this IObservable<System.Runtime.Remoting.Contexts.ContextAttribute> ContextAttributeValue,
IObservable<System.Runtime.Remoting.Contexts.Context> newCtx)
{
return Observable.Zip(ContextAttributeValue, newCtx,
(ContextAttributeValueLambda, newCtxLambda) => ContextAttributeValueLambda.IsNewContextOK(newCtxLambda));
}
public static IObservable<System.Reactive.Unit> Freeze(
this IObservable<System.Runtime.Remoting.Contexts.ContextAttribute> ContextAttributeValue,
IObservable<System.Runtime.Remoting.Contexts.Context> newContext)
{
return ObservableExt.ZipExecute(ContextAttributeValue, newContext,
(ContextAttributeValueLambda, newContextLambda) => ContextAttributeValueLambda.Freeze(newContextLambda));
}
public static IObservable<System.Boolean> Equals(
this IObservable<System.Runtime.Remoting.Contexts.ContextAttribute> ContextAttributeValue,
IObservable<System.Object> o)
{
return Observable.Zip(ContextAttributeValue, o,
(ContextAttributeValueLambda, oLambda) => ContextAttributeValueLambda.Equals(oLambda));
}
public static IObservable<System.Int32> GetHashCode(
this IObservable<System.Runtime.Remoting.Contexts.ContextAttribute> ContextAttributeValue)
{
return Observable.Select(ContextAttributeValue,
(ContextAttributeValueLambda) => ContextAttributeValueLambda.GetHashCode());
}
public static IObservable<System.Boolean> IsContextOK(
this IObservable<System.Runtime.Remoting.Contexts.ContextAttribute> ContextAttributeValue,
IObservable<System.Runtime.Remoting.Contexts.Context> ctx,
IObservable<System.Runtime.Remoting.Activation.IConstructionCallMessage> ctorMsg)
{
return Observable.Zip(ContextAttributeValue, ctx, ctorMsg,
(ContextAttributeValueLambda, ctxLambda, ctorMsgLambda) =>
ContextAttributeValueLambda.IsContextOK(ctxLambda, ctorMsgLambda));
}
public static IObservable<System.Reactive.Unit> GetPropertiesForNewContext(
this IObservable<System.Runtime.Remoting.Contexts.ContextAttribute> ContextAttributeValue,
IObservable<System.Runtime.Remoting.Activation.IConstructionCallMessage> ctorMsg)
{
return ObservableExt.ZipExecute(ContextAttributeValue, ctorMsg,
(ContextAttributeValueLambda, ctorMsgLambda) =>
ContextAttributeValueLambda.GetPropertiesForNewContext(ctorMsgLambda));
}
public static IObservable<System.String> get_Name(
this IObservable<System.Runtime.Remoting.Contexts.ContextAttribute> ContextAttributeValue)
{
return Observable.Select(ContextAttributeValue,
(ContextAttributeValueLambda) => ContextAttributeValueLambda.Name);
}
}
}
|
RixianOpenTech/RxWrappers
|
Source/Wrappers/mscorlib/System.Runtime.Remoting.Contexts.ContextAttribute.cs
|
C#
|
mit
| 3,328 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008 John Paulett (john -at- paulett.org)
# Copyright (C) 2009, 2011, 2013 David Aguilar (davvid -at- gmail.com)
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
"""Python library for serializing any arbitrary object graph into JSON.
jsonpickle can take almost any Python object and turn the object into JSON.
Additionally, it can reconstitute the object back into Python.
The object must be accessible globally via a module and must
inherit from object (AKA new-style classes).
Create an object::
class Thing(object):
def __init__(self, name):
self.name = name
obj = Thing('Awesome')
Use jsonpickle to transform the object into a JSON string::
import jsonpickle
frozen = jsonpickle.encode(obj)
Use jsonpickle to recreate a Python object from a JSON string::
thawed = jsonpickle.decode(frozen)
.. warning::
Loading a JSON string from an untrusted source represents a potential
security vulnerability. jsonpickle makes no attempt to sanitize the input.
The new object has the same type and data, but essentially is now a copy of
the original.
.. code-block:: python
assert obj.name == thawed.name
If you will never need to load (regenerate the Python class from JSON), you can
pass in the keyword unpicklable=False to prevent extra information from being
added to JSON::
oneway = jsonpickle.encode(obj, unpicklable=False)
result = jsonpickle.decode(oneway)
assert obj.name == result['name'] == 'Awesome'
"""
import sys, os
from music21 import common
sys.path.append(common.getSourceFilePath() + os.path.sep + 'ext')
from jsonpickle import pickler
from jsonpickle import unpickler
from jsonpickle.backend import JSONBackend
from jsonpickle.version import VERSION
# ensure built-in handlers are loaded
__import__('jsonpickle.handlers')
__all__ = ('encode', 'decode')
__version__ = VERSION
json = JSONBackend()
# Export specific JSONPluginMgr methods into the jsonpickle namespace
set_preferred_backend = json.set_preferred_backend
set_encoder_options = json.set_encoder_options
load_backend = json.load_backend
remove_backend = json.remove_backend
enable_fallthrough = json.enable_fallthrough
def encode(value,
unpicklable=True,
make_refs=True,
keys=False,
max_depth=None,
backend=None,
warn=False,
max_iter=None):
"""Return a JSON formatted representation of value, a Python object.
:param unpicklable: If set to False then the output will not contain the
information necessary to turn the JSON data back into Python objects,
but a simpler JSON stream is produced.
:param max_depth: If set to a non-negative integer then jsonpickle will
not recurse deeper than 'max_depth' steps into the object. Anything
deeper than 'max_depth' is represented using a Python repr() of the
object.
:param make_refs: If set to False jsonpickle's referencing support is
disabled. Objects that are id()-identical won't be preserved across
encode()/decode(), but the resulting JSON stream will be conceptually
simpler. jsonpickle detects cyclical objects and will break the cycle
by calling repr() instead of recursing when make_refs is set False.
:param keys: If set to True then jsonpickle will encode non-string
dictionary keys instead of coercing them into strings via `repr()`.
:param warn: If set to True then jsonpickle will warn when it
returns None for an object which it cannot pickle
(e.g. file descriptors).
:param max_iter: If set to a non-negative integer then jsonpickle will
consume at most `max_iter` items when pickling iterators.
>>> encode('my string')
'"my string"'
>>> encode(36)
'36'
>>> encode({'foo': True})
'{"foo": true}'
>>> encode({'foo': True}, max_depth=0)
'"{\\'foo\\': True}"'
>>> encode({'foo': True}, max_depth=1)
'{"foo": "True"}'
"""
if backend is None:
backend = json
return pickler.encode(value,
backend=backend,
unpicklable=unpicklable,
make_refs=make_refs,
keys=keys,
max_depth=max_depth,
warn=warn)
def decode(string, backend=None, keys=False):
"""Convert a JSON string into a Python object.
The keyword argument 'keys' defaults to False.
If set to True then jsonpickle will decode non-string dictionary keys
into python objects via the jsonpickle protocol.
>>> str(decode('"my string"'))
'my string'
>>> decode('36')
36
"""
if backend is None:
backend = json
return unpickler.decode(string, backend=backend, keys=keys)
# json.load(),loads(), dump(), dumps() compatibility
dumps = encode
loads = decode
|
arnavd96/Cinemiezer
|
myvenv/lib/python3.4/site-packages/music21/ext/jsonpickle/__init__.py
|
Python
|
mit
| 5,049 |
require 'spec_helper'
describe "beings/show" do
before(:each) do
@being = FactoryGirl.create(:being)
@being.randomize!
end
end
|
slabgorb/populinator-0
|
spec/views/beings/show.html.haml_spec.rb
|
Ruby
|
mit
| 142 |
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
class ProteinTranslator {
private static final Integer CODON_LENGTH = 3;
private static final Map<String, String> CODON_TO_PROTEIN =
Map.ofEntries(
Map.entry("AUG", "Methionine"),
Map.entry("UUU", "Phenylalanine"),
Map.entry("UUC", "Phenylalanine"),
Map.entry("UUA", "Leucine"),
Map.entry("UUG", "Leucine"),
Map.entry("UCU", "Serine"),
Map.entry("UCC", "Serine"),
Map.entry("UCA", "Serine"),
Map.entry("UCG", "Serine"),
Map.entry("UAU", "Tyrosine"),
Map.entry("UAC", "Tyrosine"),
Map.entry("UGU", "Cysteine"),
Map.entry("UGC", "Cysteine"),
Map.entry("UGG", "Tryptophan"));
private static final Set<String> STOP_CODONS = Set.of("UAA", "UAG", "UGA");
public List<String> translate(final String rnaSequence) {
final List<String> codons = splitIntoCodons(rnaSequence);
List<String> proteins = new ArrayList<>();
for (String codon : codons) {
if (STOP_CODONS.contains(codon)) {
return proteins;
}
proteins.add(translateCodon(codon));
}
return proteins;
}
private static List<String> splitIntoCodons(final String rnaSequence) {
final List<String> codons = new ArrayList<>();
for (int i = 0; i < rnaSequence.length(); i += CODON_LENGTH) {
codons.add(rnaSequence.substring(i, Math.min(rnaSequence.length(), i + CODON_LENGTH)));
}
return codons;
}
private static String translateCodon(final String codon) {
return CODON_TO_PROTEIN.get(codon);
}
}
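// Illustrative usage:
//   new ProteinTranslator().translate("AUGUUUUCUUAA")
//   -> [Methionine, Phenylalanine, Serine]   (translation stops at the UAA stop codon)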
|
rootulp/exercism
|
java/protein-translation/src/main/java/ProteinTranslator.java
|
Java
|
mit
| 1,679 |
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 09 13:04:53 2015
Measures the resolution of time.clock(), time.time() and datetime.now()
by sampling each timer ~100 times, then writes the mean/median/sd of the
successive differences (in ms) to a text file named after this host's IP
and the current timestamp.
@author: marcus
"""
import time, datetime
from socket import gethostname, gethostbyname
import os
import numpy as np
def main():
my_path = os.path.join('C:',os.sep,'Share','sync_clocks')
os.chdir(my_path)
# Initial timestamps
t1 = time.clock()
t2 = time.time()
t3 = datetime.datetime.now()
td1 = []
td2 = []
td3 = []
for i in xrange(100):
td1.append(time.clock()-t1)
td2.append(time.time() -t2)
td3.append((datetime.datetime.now()-t3).total_seconds())
time.sleep(0.001)
# Create text file and write header
t = datetime.datetime.now()
ip = gethostbyname(gethostname()).split('.')[-1]
f_name = '_'.join([ip,'test_clock_res',str(t.year),str(t.month),str(t.day),
str(t.hour),str(t.minute),str(t.second)])
f = open(f_name+'.txt','w')
f.write('%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n' %
('mean_clock','median_clock','sd_clock',
'mean_time','median_time','sd_time',
'mean_datetime','median_datetime','sd_datetime',))
# Write results to text file
f.write('%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\n' %
(np.mean(np.diff(td1))*1000, np.median(np.diff(td1))*1000,np.std(np.diff(td1))*1000,
np.mean(np.diff(td2))*1000, np.median(np.diff(td2))*1000,np.std(np.diff(td2))*1000,
np.mean(np.diff(td3))*1000, np.median(np.diff(td3))*1000,np.std(np.diff(td3))*1000))
f.close()
if __name__ == "__main__":
main()
|
marcus-nystrom/share-gaze
|
sync_clocks/test_clock_resolution.py
|
Python
|
mit
| 1,930 |
<?php
use Illuminate\Database\Seeder;
use jeremykenedy\LaravelRoles\Models\Permission;
class PermissionsTableSeeder extends Seeder
{
/**
* Run the database seeds.
*
* @return void
*/
public function run()
{
/*
* Add Permissions
*
*/
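        // Typically executed via `php artisan db:seed --class=PermissionsTableSeeder`
        // (illustrative; assumes the standard Laravel seeding workflow).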
if (Permission::where('name', '=', 'Can View Users')->first() === null) {
Permission::create([
'name' => 'Can View Users',
'slug' => 'view.users',
'description' => 'Can view users',
'model' => 'Permission',
]);
}
if (Permission::where('name', '=', 'Can Create Users')->first() === null) {
Permission::create([
'name' => 'Can Create Users',
'slug' => 'create.users',
'description' => 'Can create new users',
'model' => 'Permission',
]);
}
if (Permission::where('name', '=', 'Can Edit Users')->first() === null) {
Permission::create([
'name' => 'Can Edit Users',
'slug' => 'edit.users',
'description' => 'Can edit users',
'model' => 'Permission',
]);
}
if (Permission::where('name', '=', 'Can Delete Users')->first() === null) {
Permission::create([
'name' => 'Can Delete Users',
'slug' => 'delete.users',
'description' => 'Can delete users',
'model' => 'Permission',
]);
}
if (Permission::where('name', '=', 'Super Admin Permissions')->first() === null) {
Permission::create([
'name' => 'Super Admin Permissions',
'slug' => 'perms.super-admin',
'description' => 'Has Super Admin Permissions',
'model' => 'Permission',
]);
}
if (Permission::where('name', '=', 'Admin Permissions')->first() === null) {
Permission::create([
'name' => 'Admin Permissions',
'slug' => 'perms.admin',
'description' => 'Has Admin Permissions',
'model' => 'Permission',
]);
}
if (Permission::where('name', '=', 'Moderator Permissions')->first() === null) {
Permission::create([
'name' => 'Moderator Permissions',
'slug' => 'perms.moderator',
'description' => 'Has Moderator Permissions',
'model' => 'Permission',
]);
}
if (Permission::where('name', '=', 'Writer Permissions')->first() === null) {
Permission::create([
'name' => 'Writer Permissions',
'slug' => 'perms.writer',
'description' => 'Has Writer Permissions',
'model' => 'Permission',
]);
}
if (Permission::where('name', '=', 'User Permissions')->first() === null) {
Permission::create([
'name' => 'User Permissions',
'slug' => 'perms.user',
'description' => 'Has User Permissions',
'model' => 'Permission',
]);
}
}
}
|
jeremykenedy/larablog
|
database/seeds/PermissionsTableSeeder.php
|
PHP
|
mit
| 3,481 |
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.Xna.Framework;
namespace TrueSync.Physics2D
{
// Original Code by Steven Lu - see http://www.box2d.org/forum/viewtopic.php?f=3&t=1688
// Ported to Farseer 3.0 by Nicolás Hormazábal
internal struct ShapeData
{
public Body Body;
public FP Max;
public FP Min; // absolute angles
}
/// <summary>
    /// This is a comparer used for
    /// detecting the angle difference between rays
/// </summary>
internal class RayDataComparer : IComparer<FP>
{
#region IComparer<FP> Members
int IComparer<FP>.Compare(FP a, FP b)
{
FP diff = (a - b);
if (diff > 0)
return 1;
if (diff < 0)
return -1;
return 0;
}
#endregion
}
/* Methodology:
* Force applied at a ray is inversely proportional to the square of distance from source
* AABB is used to query for shapes that may be affected
* For each RIGID BODY (not shape -- this is an optimization) that is matched, loop through its vertices to determine
* the extreme points -- if there is structure that contains outlining polygon, use that as an additional optimization
* Evenly cast a number of rays against the shape - number roughly proportional to the arc coverage
* - Something like every 3 degrees should do the trick although this can be altered depending on the distance (if really close don't need such a high density of rays)
* - There should be a minimum number of rays (3-5?) applied to each body so that small bodies far away are still accurately modeled
* - Be sure to have the forces of each ray be proportional to the average arc length covered by each.
* For each ray that actually intersects with the shape (non intersections indicate something blocking the path of explosion):
* - Apply the appropriate force dotted with the negative of the collision normal at the collision point
* - Optionally apply linear interpolation between aforementioned Normal force and the original explosion force in the direction of ray to simulate "surface friction" of sorts
*/
/// <summary>
/// Creates a realistic explosion based on raycasting. Objects in the open will be affected, but objects behind
/// static bodies will not. A body that is half in cover, half in the open will get half the force applied to the end in
/// the open.
/// </summary>
public sealed class RealExplosion : PhysicsLogic
{
/// <summary>
/// Two degrees: maximum angle from edges to first ray tested
/// </summary>
private static readonly FP MaxEdgeOffset = FP.Pi / 90;
/// <summary>
/// Ratio of arc length to angle from edges to first ray tested.
/// Defaults to 1/40.
/// </summary>
public FP EdgeRatio = 1.0f / 40.0f;
/// <summary>
/// Ignore Explosion if it happens inside a shape.
/// Default value is false.
/// </summary>
public bool IgnoreWhenInsideShape = false;
/// <summary>
/// Max angle between rays (used when segment is large).
/// Defaults to 15 degrees
/// </summary>
public FP MaxAngle = FP.Pi / 15;
/// <summary>
/// Maximum number of shapes involved in the explosion.
/// Defaults to 100
/// </summary>
public int MaxShapes = 100;
/// <summary>
/// How many rays per shape/body/segment.
/// Defaults to 5
/// </summary>
public int MinRays = 5;
private List<ShapeData> _data = new List<ShapeData>();
private RayDataComparer _rdc;
public RealExplosion(World world)
: base(world, PhysicsLogicType.Explosion)
{
_rdc = new RayDataComparer();
_data = new List<ShapeData>();
}
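        // Usage sketch (illustrative; assumes an existing simulation World and FP-typed values):
        //   var explosion = new RealExplosion(world);
        //   Dictionary<Fixture, TSVector2> affected = explosion.Activate(blastCenter, blastRadius, maxForce);
        //   // Each entry maps an affected fixture to the impulse that was applied to it.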
/// <summary>
/// Activate the explosion at the specified position.
/// </summary>
/// <param name="pos">The position where the explosion happens </param>
/// <param name="radius">The explosion radius </param>
/// <param name="maxForce">The explosion force at the explosion point (then is inversely proportional to the square of the distance)</param>
/// <returns>A list of bodies and the amount of force that was applied to them.</returns>
public Dictionary<Fixture, TSVector2> Activate(TSVector2 pos, FP radius, FP maxForce)
{
AABB aabb;
aabb.LowerBound = pos + new TSVector2(-radius, -radius);
aabb.UpperBound = pos + new TSVector2(radius, radius);
Fixture[] shapes = new Fixture[MaxShapes];
// More than 5 shapes in an explosion could be possible, but still strange.
Fixture[] containedShapes = new Fixture[5];
bool exit = false;
int shapeCount = 0;
int containedShapeCount = 0;
// Query the world for overlapping shapes.
World.QueryAABB(
fixture =>
{
if (fixture.TestPoint(ref pos))
{
if (IgnoreWhenInsideShape)
{
exit = true;
return false;
}
containedShapes[containedShapeCount++] = fixture;
}
else
{
shapes[shapeCount++] = fixture;
}
// Continue the query.
return true;
}, ref aabb);
if (exit)
return new Dictionary<Fixture, TSVector2>();
Dictionary<Fixture, TSVector2> exploded = new Dictionary<Fixture, TSVector2>(shapeCount + containedShapeCount);
// Per shape max/min angles for now.
FP[] vals = new FP[shapeCount * 2];
int valIndex = 0;
for (int i = 0; i < shapeCount; ++i)
{
PolygonShape ps;
CircleShape cs = shapes[i].Shape as CircleShape;
if (cs != null)
{
// We create a "diamond" approximation of the circle
Vertices v = new Vertices();
TSVector2 vec = TSVector2.zero + new TSVector2(cs.Radius, 0);
v.Add(vec);
vec = TSVector2.zero + new TSVector2(0, cs.Radius);
v.Add(vec);
vec = TSVector2.zero + new TSVector2(-cs.Radius, cs.Radius);
v.Add(vec);
vec = TSVector2.zero + new TSVector2(0, -cs.Radius);
v.Add(vec);
ps = new PolygonShape(v, 0);
}
else
ps = shapes[i].Shape as PolygonShape;
if ((shapes[i].Body.BodyType == BodyType.Dynamic) && ps != null)
{
TSVector2 toCentroid = shapes[i].Body.GetWorldPoint(ps.MassData.Centroid) - pos;
FP angleToCentroid = FP.Atan2(toCentroid.y, toCentroid.x);
FP min = FP.MaxValue;
FP max = FP.MinValue;
FP minAbsolute = 0.0f;
FP maxAbsolute = 0.0f;
for (int j = 0; j < ps.Vertices.Count; ++j)
{
TSVector2 toVertex = (shapes[i].Body.GetWorldPoint(ps.Vertices[j]) - pos);
FP newAngle = FP.Atan2(toVertex.y, toVertex.x);
FP diff = (newAngle - angleToCentroid);
diff = (diff - FP.Pi) % (2 * FP.Pi);
// the minus pi is important. It means cutoff for going other direction is at 180 deg where it needs to be
if (diff < 0.0f)
diff += 2 * FP.Pi; // correction for not handling negs
diff -= FP.Pi;
if (FP.Abs(diff) > FP.Pi)
continue; // Something's wrong, point not in shape but exists angle diff > 180
if (diff > max)
{
max = diff;
maxAbsolute = newAngle;
}
if (diff < min)
{
min = diff;
minAbsolute = newAngle;
}
}
vals[valIndex] = minAbsolute;
++valIndex;
vals[valIndex] = maxAbsolute;
++valIndex;
}
}
Array.Sort(vals, 0, valIndex, _rdc);
_data.Clear();
bool rayMissed = true;
for (int i = 0; i < valIndex; ++i)
{
Fixture fixture = null;
FP midpt;
int iplus = (i == valIndex - 1 ? 0 : i + 1);
if (vals[i] == vals[iplus])
continue;
if (i == valIndex - 1)
{
// the single edgecase
midpt = (vals[0] + FP.PiTimes2 + vals[i]);
}
else
{
midpt = (vals[i + 1] + vals[i]);
}
midpt = midpt / 2;
TSVector2 p1 = pos;
TSVector2 p2 = radius * new TSVector2(FP.Cos(midpt), FP.Sin(midpt)) + pos;
// RaycastOne
bool hitClosest = false;
World.RayCast((f, p, n, fr) =>
{
Body body = f.Body;
if (!IsActiveOn(body))
return 0;
hitClosest = true;
fixture = f;
return fr;
}, p1, p2);
//draws radius points
if ((hitClosest) && (fixture.Body.BodyType == BodyType.Dynamic))
{
if ((_data.Any()) && (_data.Last().Body == fixture.Body) && (!rayMissed))
{
int laPos = _data.Count - 1;
ShapeData la = _data[laPos];
la.Max = vals[iplus];
_data[laPos] = la;
}
else
{
// make new
ShapeData d;
d.Body = fixture.Body;
d.Min = vals[i];
d.Max = vals[iplus];
_data.Add(d);
}
if ((_data.Count > 1)
&& (i == valIndex - 1)
&& (_data.Last().Body == _data.First().Body)
&& (_data.Last().Max == _data.First().Min))
{
ShapeData fi = _data[0];
fi.Min = _data.Last().Min;
_data.RemoveAt(_data.Count - 1);
_data[0] = fi;
while (_data.First().Min >= _data.First().Max)
{
fi.Min -= FP.PiTimes2;
_data[0] = fi;
}
}
int lastPos = _data.Count - 1;
ShapeData last = _data[lastPos];
while ((_data.Count > 0)
&& (_data.Last().Min >= _data.Last().Max)) // just making sure min<max
{
last.Min = _data.Last().Min - FP.PiTimes2;
_data[lastPos] = last;
}
rayMissed = false;
}
else
{
rayMissed = true; // raycast did not find a shape
}
}
for (int i = 0; i < _data.Count; ++i)
{
if (!IsActiveOn(_data[i].Body))
continue;
FP arclen = _data[i].Max - _data[i].Min;
FP first = TSMath.Min(MaxEdgeOffset, EdgeRatio * arclen);
int insertedRays = FP.Ceiling((((arclen - 2.0f * first) - (MinRays - 1) * MaxAngle) / MaxAngle)).AsInt();
if (insertedRays < 0)
insertedRays = 0;
FP offset = (arclen - first * 2.0f) / ((FP)MinRays + insertedRays - 1);
                //Note: this loop could run forever because it operates on FPs,
                //so FPEquals with a large epsilon is used in the loop condition.
for (FP j = _data[i].Min + first;
j < _data[i].Max || MathUtils.FPEquals(j, _data[i].Max, 0.0001f);
j += offset)
{
TSVector2 p1 = pos;
TSVector2 p2 = pos + radius * new TSVector2(FP.Cos(j), FP.Sin(j));
TSVector2 hitpoint = TSVector2.zero;
FP minlambda = FP.MaxValue;
List<Fixture> fl = _data[i].Body.FixtureList;
for (int x = 0; x < fl.Count; x++)
{
Fixture f = fl[x];
RayCastInput ri;
ri.Point1 = p1;
ri.Point2 = p2;
ri.MaxFraction = 50f;
RayCastOutput ro;
if (f.RayCast(out ro, ref ri, 0))
{
if (minlambda > ro.Fraction)
{
minlambda = ro.Fraction;
hitpoint = ro.Fraction * p2 + (1 - ro.Fraction) * p1;
}
}
// the force that is to be applied for this particular ray.
// offset is angular coverage. lambda*length of segment is distance.
FP impulse = (arclen / (MinRays + insertedRays)) * maxForce * 180.0f / FP.Pi * (1.0f - TrueSync.TSMath.Min(FP.One, minlambda));
// We Apply the impulse!!!
TSVector2 vectImp = TSVector2.Dot(impulse * new TSVector2(FP.Cos(j), FP.Sin(j)), -ro.Normal) * new TSVector2(FP.Cos(j), FP.Sin(j));
_data[i].Body.ApplyLinearImpulse(ref vectImp, ref hitpoint);
// We gather the fixtures for returning them
if (exploded.ContainsKey(f))
exploded[f] += vectImp;
else
exploded.Add(f, vectImp);
if (minlambda > 1.0f)
hitpoint = p2;
}
}
}
// We check contained shapes
for (int i = 0; i < containedShapeCount; ++i)
{
Fixture fix = containedShapes[i];
if (!IsActiveOn(fix.Body))
continue;
FP impulse = MinRays * maxForce * 180.0f / FP.Pi;
TSVector2 hitPoint;
CircleShape circShape = fix.Shape as CircleShape;
if (circShape != null)
{
hitPoint = fix.Body.GetWorldPoint(circShape.Position);
}
else
{
PolygonShape shape = fix.Shape as PolygonShape;
hitPoint = fix.Body.GetWorldPoint(shape.MassData.Centroid);
}
TSVector2 vectImp = impulse * (hitPoint - pos);
fix.Body.ApplyLinearImpulse(ref vectImp, ref hitPoint);
if (!exploded.ContainsKey(fix))
exploded.Add(fix, vectImp);
}
return exploded;
}
}
}
|
Xaer033/YellowSign
|
YellowSignUnity/Assets/ThirdParty/Networking/TrueSync/Physics/Farseer/Common/PhysicsLogic/RealExplosion.cs
|
C#
|
mit
| 16,308 |
require "rails_helper"
describe Linter::Shellcheck do
it_behaves_like "a linter" do
let(:lintable_files) { %w(foo.sh foo.zsh foo.bash) }
let(:not_lintable_files) { %w(foo.js) }
end
describe "#file_review" do
it "returns a saved and incomplete file review" do
commit_file = build_commit_file(filename: "lib/a.sh")
linter = build_linter
result = linter.file_review(commit_file)
expect(result).to be_persisted
expect(result).not_to be_completed
end
it "schedules a review job" do
build = build(:build, commit_sha: "foo", pull_request_number: 123)
commit_file = build_commit_file(filename: "lib/a.sh")
allow(LintersJob).to receive(:perform_async)
linter = build_linter(build)
linter.file_review(commit_file)
expect(LintersJob).to have_received(:perform_async).with(
filename: commit_file.filename,
commit_sha: build.commit_sha,
linter_name: "shellcheck",
pull_request_number: build.pull_request_number,
patch: commit_file.patch,
content: commit_file.content,
config: "--- {}\n",
linter_version: nil,
)
end
end
end
|
thoughtbot/hound
|
spec/models/linter/shellcheck_spec.rb
|
Ruby
|
mit
| 1,185 |
/*
* Copyright 2015 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var THREE = require('./three-math.js');
var PredictionMode = {
NONE: 'none',
INTERPOLATE: 'interpolate',
PREDICT: 'predict'
}
// How much to interpolate between the current orientation estimate and the
// previous estimate position. This is helpful for devices with low
// deviceorientation firing frequency (eg. on iOS8 and below, it is 20 Hz). The
// larger this value (in [0, 1]), the smoother but more delayed the head
// tracking is.
var INTERPOLATION_SMOOTHING_FACTOR = 0.01;
// Angular threshold, if the angular speed (in deg/s) is less than this, do no
// prediction. Without it, the screen flickers quite a bit.
var PREDICTION_THRESHOLD_DEG_PER_S = 0.01;
//var PREDICTION_THRESHOLD_DEG_PER_S = 0;
// How far into the future to predict.
window.WEBVR_PREDICTION_TIME_MS = 80;
// Whether to predict or what.
window.WEBVR_PREDICTION_MODE = PredictionMode.PREDICT;
function PosePredictor() {
this.lastQ = new THREE.Quaternion();
this.lastTimestamp = null;
this.outQ = new THREE.Quaternion();
this.deltaQ = new THREE.Quaternion();
}
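// Usage sketch (illustrative): call once per sensor sample; `currentQ` is a THREE.Quaternion
// and `rotationRate` may be null, in which case a gyro delta is derived from the quaternions.
//   var predictor = new PosePredictor();
//   predictor.setScreenOrientation(window.orientation || 0);
//   var predictedQ = predictor.getPrediction(currentQ, rotationRate, timestampMs);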
PosePredictor.prototype.getPrediction = function(currentQ, rotationRate, timestamp) {
// If there's no previous quaternion, output the current one and save for
// later.
if (!this.lastTimestamp) {
this.lastQ.copy(currentQ);
this.lastTimestamp = timestamp;
return currentQ;
}
// DEBUG ONLY: Try with a fixed 60 Hz update speed.
//var elapsedMs = 1000/60;
var elapsedMs = timestamp - this.lastTimestamp;
switch (WEBVR_PREDICTION_MODE) {
case PredictionMode.INTERPOLATE:
this.outQ.copy(currentQ);
this.outQ.slerp(this.lastQ, INTERPOLATION_SMOOTHING_FACTOR);
// Save the current quaternion for later.
this.lastQ.copy(currentQ);
break;
case PredictionMode.PREDICT:
var axisAngle;
if (rotationRate) {
axisAngle = this.getAxisAngularSpeedFromRotationRate_(rotationRate);
} else {
axisAngle = this.getAxisAngularSpeedFromGyroDelta_(currentQ, elapsedMs);
}
// If there is no predicted axis/angle, don't do prediction.
if (!axisAngle) {
this.outQ.copy(currentQ);
this.lastQ.copy(currentQ);
break;
}
var angularSpeedDegS = axisAngle.speed;
var axis = axisAngle.axis;
var predictAngleDeg = (WEBVR_PREDICTION_TIME_MS / 1000) * angularSpeedDegS;
// If we're rotating slowly, don't do prediction.
if (angularSpeedDegS < PREDICTION_THRESHOLD_DEG_PER_S) {
this.outQ.copy(currentQ);
this.lastQ.copy(currentQ);
break;
}
// Calculate the prediction delta to apply to the original angle.
this.deltaQ.setFromAxisAngle(axis, THREE.Math.degToRad(predictAngleDeg));
// DEBUG ONLY: As a sanity check, use the same axis and angle as before,
// which should cause no prediction to happen.
//this.deltaQ.setFromAxisAngle(axis, angle);
this.outQ.copy(this.lastQ);
this.outQ.multiply(this.deltaQ);
// Use the predicted quaternion as the new last one.
//this.lastQ.copy(this.outQ);
this.lastQ.copy(currentQ);
break;
case PredictionMode.NONE:
default:
this.outQ.copy(currentQ);
}
this.lastTimestamp = timestamp;
return this.outQ;
};
PosePredictor.prototype.setScreenOrientation = function(screenOrientation) {
this.screenOrientation = screenOrientation;
};
PosePredictor.prototype.getAxis_ = function(quat) {
// x = qx / sqrt(1-qw*qw)
// y = qy / sqrt(1-qw*qw)
// z = qz / sqrt(1-qw*qw)
var d = Math.sqrt(1 - quat.w * quat.w);
return new THREE.Vector3(quat.x / d, quat.y / d, quat.z / d);
};
PosePredictor.prototype.getAngle_ = function(quat) {
// angle = 2 * acos(qw)
// If w is greater than 1 (THREE.js, how can this be?), arccos is not defined.
if (quat.w > 1) {
return 0;
}
var angle = 2 * Math.acos(quat.w);
// Normalize the angle to be in [-π, π].
if (angle > Math.PI) {
angle -= 2 * Math.PI;
}
return angle;
};
PosePredictor.prototype.getAxisAngularSpeedFromRotationRate_ = function(rotationRate) {
if (!rotationRate) {
return null;
}
var screenRotationRate;
if (/iPad|iPhone|iPod/.test(navigator.platform)) {
// iOS: angular speed in deg/s.
var screenRotationRate = this.getScreenAdjustedRotationRateIOS_(rotationRate);
} else {
// Android: angular speed in rad/s, so need to convert.
rotationRate.alpha = THREE.Math.radToDeg(rotationRate.alpha);
rotationRate.beta = THREE.Math.radToDeg(rotationRate.beta);
rotationRate.gamma = THREE.Math.radToDeg(rotationRate.gamma);
var screenRotationRate = this.getScreenAdjustedRotationRate_(rotationRate);
}
var vec = new THREE.Vector3(
screenRotationRate.beta, screenRotationRate.alpha, screenRotationRate.gamma);
/*
var vec;
if (/iPad|iPhone|iPod/.test(navigator.platform)) {
vec = new THREE.Vector3(rotationRate.gamma, rotationRate.alpha, rotationRate.beta);
} else {
vec = new THREE.Vector3(rotationRate.beta, rotationRate.alpha, rotationRate.gamma);
}
// Take into account the screen orientation too!
vec.applyQuaternion(this.screenTransform);
*/
// Angular speed in deg/s.
var angularSpeedDegS = vec.length();
var axis = vec.normalize();
return {
speed: angularSpeedDegS,
axis: axis
}
};
PosePredictor.prototype.getScreenAdjustedRotationRate_ = function(rotationRate) {
var screenRotationRate = {
alpha: -rotationRate.alpha,
beta: rotationRate.beta,
gamma: rotationRate.gamma
};
switch (this.screenOrientation) {
case 90:
screenRotationRate.beta = - rotationRate.gamma;
screenRotationRate.gamma = rotationRate.beta;
break;
case 180:
screenRotationRate.beta = - rotationRate.beta;
screenRotationRate.gamma = - rotationRate.gamma;
break;
case 270:
case -90:
screenRotationRate.beta = rotationRate.gamma;
screenRotationRate.gamma = - rotationRate.beta;
break;
default: // SCREEN_ROTATION_0
screenRotationRate.beta = rotationRate.beta;
screenRotationRate.gamma = rotationRate.gamma;
break;
}
return screenRotationRate;
};
PosePredictor.prototype.getScreenAdjustedRotationRateIOS_ = function(rotationRate) {
var screenRotationRate = {
alpha: rotationRate.alpha,
beta: rotationRate.beta,
gamma: rotationRate.gamma
};
// Values empirically derived.
switch (this.screenOrientation) {
case 90:
screenRotationRate.beta = -rotationRate.beta;
screenRotationRate.gamma = rotationRate.gamma;
break;
case 180:
// You can't even do this on iOS.
break;
case 270:
case -90:
screenRotationRate.alpha = -rotationRate.alpha;
screenRotationRate.beta = rotationRate.beta;
screenRotationRate.gamma = rotationRate.gamma;
break;
default: // SCREEN_ROTATION_0
screenRotationRate.alpha = rotationRate.beta;
screenRotationRate.beta = rotationRate.alpha;
screenRotationRate.gamma = rotationRate.gamma;
break;
}
return screenRotationRate;
};
PosePredictor.prototype.getAxisAngularSpeedFromGyroDelta_ = function(currentQ, elapsedMs) {
// Sometimes we use the same sensor timestamp, in which case prediction
// won't work.
if (elapsedMs == 0) {
return null;
}
// Q_delta = Q_last^-1 * Q_curr
this.deltaQ.copy(this.lastQ);
this.deltaQ.inverse();
this.deltaQ.multiply(currentQ);
// Convert from delta quaternion to axis-angle.
var axis = this.getAxis_(this.deltaQ);
var angleRad = this.getAngle_(this.deltaQ);
// It took `elapsed` ms to travel the angle amount over the axis. Now,
// we make a new quaternion based how far in the future we want to
// calculate.
var angularSpeedRadMs = angleRad / elapsedMs;
var angularSpeedDegS = THREE.Math.radToDeg(angularSpeedRadMs) * 1000;
// If no rotation rate is provided, do no prediction.
return {
speed: angularSpeedDegS,
axis: axis
};
};
module.exports = PosePredictor;
|
lacker/universe
|
vr_webpack/webvr-polyfill/src/pose-predictor.js
|
JavaScript
|
mit
| 8,628 |
<?php
namespace Kuxin\Helper;
/**
* Class Collect
*
* @package Kuxin\Helper
* @author Pakey <pakey@qq.com>
*/
class Collect
{
/**
     * Fetch remote content and normalize its charset to UTF-8.
     *
     * @param mixed $data   URL string, or a rule array (rule, charset, method, error, replace, ...)
     * @param array $header Extra HTTP headers
     * @param array $option Extra HTTP options
* @return bool|mixed|string
*/
public static function getContent($data, $header = [], $option = [])
{
if (is_string($data))
$data = ['rule' => $data, 'charset' => 'auto'];
if (strpos($data['rule'], '[timestamp]') || strpos($data['rule'], '[时间]')) {
$data['rule'] = str_replace(['[timestamp]', '[时间]'], [time() - 64566122, date('Y-m-d H:i:s')], $data['rule']);
} elseif (isset($data['usetimestamp']) && $data['usetimestamp'] == 1) {
$data['rule'] .= (strpos($data['rule'], '?') ? '&_ptcms=' : '?_ptcms=') . (time() - 13456867);
}
if (isset($data['method']) && strtolower($data['method']) == 'post') {
$content = Http::post($data['rule'], [], $header, $option);
} else {
$content = Http::get($data['rule'], [], $header, $option);
}
if ($content) {
            // Normalize the character encoding
if (empty($data['charset']) || !in_array($data['charset'], ['auto', 'utf-8', 'gbk'])) {
$data['charset'] = 'auto';
}
            // Auto-detect the charset
if ($data['charset'] == 'auto') {
if (preg_match('/[;\s\'"]charset[=\'\s]+?big/i', $content)) {
$data['charset'] = 'big5';
} elseif (preg_match('/[;\s\'"]charset[=\'"\s]+?gb/i', $content) || preg_match('/[;\s\'"]encoding[=\'"\s]+?gb/i', $content)) {
$data['charset'] = 'gbk';
} elseif (mb_detect_encoding($content) != 'UTF-8') {
$data['charset'] = 'gbk';
}
}
            // Convert to UTF-8
switch ($data['charset']) {
case 'gbk':
$content = mb_convert_encoding($content, 'UTF-8', 'GBK');
break;
case 'big5':
$content = mb_convert_encoding($content, 'UTF-8', 'big-5');
$content = big5::toutf8($content);
break;
case 'utf-16':
$content = mb_convert_encoding($content, 'UTF-8', 'UTF-16');
default:
}
            // Bail out if the configured error marker is present
if (!empty($data['error']) && strpos($content, $data['error']) !== false) {
return '';
}
if (!empty($data['replace'])) {
$content = self::replace($content, $data['replace']);
}
return $content;
}
return '';
}
/**
     * Extract all matches of a regex rule.
     *
     * @param mixed  $pregArr      Regex rule (string or rule array)
     * @param string $code         Source content
     * @param int    $needposition Whether match offsets are needed as well
* @return array|bool
*/
public static function getMatchAll($pregArr, $code, $needposition = 0)
{
if (is_numeric($pregArr)) {
return $pregArr;
} elseif (is_string($pregArr)) {
$pregArr = ['rule' => self::parseMatchRule($pregArr)];
} elseif (empty($pregArr['rule'])) {
return [];
}
if (!self::isreg($pregArr['rule']))
return [];
$pregstr = '{' . $pregArr['rule'] . '}';
$pregstr .= empty($pregArr['option']) ? '' : $pregArr['option'];
$matchvar = $match = [];
if (!empty($pregstr)) {
if ($needposition) {
preg_match_all($pregstr, $code, $match, PREG_SET_ORDER + PREG_OFFSET_CAPTURE);
} else {
preg_match_all($pregstr, $code, $match);
}
}
if (is_array($match)) {
if ($needposition) {
foreach ($match as $var) {
if (is_array($var)) {
$matchvar[] = $var[count($var) - 1];
} else {
$matchvar[] = $var;
}
}
} else {
if (isset($match['2'])) {
$count = count($match);
foreach ($match['1'] as $k => $v) {
if ($v == '') {
for ($i = 2; $i < $count; $i++) {
if (!empty($match[$i][$k])) {
$match['1'][$k] = $match[$i][$k];
break;
}
}
}
}
}
if (isset($match['1'])) {
$matchvar = $match['1'];
} else {
return false;
}
}
if (!empty($pregArr['replace'])) {
foreach ($matchvar as $k => $v) {
$matchvar[$k] = self::replace($v, $pregArr['replace']);
}
}
return $matchvar;
}
return [];
}
/**
     * Extract a single match of a regex rule.
     *
     * @param mixed  $pregArr Regex rule (string or rule array)
     * @param string $code    Source content
* @return bool|string
*/
public static function getMatch($pregArr, $code)
{
if (is_numeric($pregArr)) {
return $pregArr;
} elseif (empty($pregArr) || (isset($pregArr['rule']) && empty($pregArr['rule']))) {
return '';
} elseif (is_string($pregArr)) {
$pregArr = ['rule' => self::parseMatchRule($pregArr), 'replace' => []];
}
if (!self::isreg($pregArr['rule']))
return $pregArr['rule'];
$pregstr = '{' . $pregArr['rule'] . '}';
$pregstr .= empty($pregArr['option']) ? '' : $pregArr['option'];
preg_match($pregstr, $code, $match);
if (isset($match['1'])) {
if (empty($pregArr['replace'])) {
return $match['1'];
} else {
return self::replace($match[1], $pregArr['replace']);
}
}
return '';
}
/**
     * Replace content, supporting batch plain-string and regex replacements.
     *
     * @param string $con Content to run the replacements on
     * @param array  $arr Array of replacement rules; each element looks like
     *               array(
     *                   'rule'   => 'pattern♂replacement', // the part after ♂ is the replacement text
     *                   'option' => 'regex modifiers, or "i" for a case-insensitive plain replace',
     *                   'method' => 1, // 1 = regex, 0 = plain string
     *               ),
* @return mixed
*/
public static function replace($con, array $arr)
{
foreach ($arr as $v) {
if (!empty($v['rule'])) {
$tmp = explode('♂', $v['rule']);
$rule = $tmp['0'];
$replace = isset($tmp['1']) ? $tmp['1'] : '';
$v['option'] = isset($v['option']) ? $v['option'] : '';
                if ($v['method'] == 1) { // regex mode
$con = preg_replace("{" . $rule . "}{$v['option']}", $replace, $con);
} else {
if (strpos($v['option'], 'i') === false) {
$con = str_replace($rule, $replace, $con);
} else {
$con = str_ireplace($rule, $replace, $con);
}
}
}
}
return $con;
}
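    // Example (illustrative): strip <script> blocks using a regex rule with an empty replacement.
    //   Collect::replace($html, [['rule' => '<script[\s\S]*?</script>♂', 'option' => 'i', 'method' => 1]]);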
/**
     * Resolve a link into an absolute URL based on the address of the current page.
     *
     * @param string $url  Link found on the page
     * @param string $path Address of the current page
* @return string
*/
public static function parseUrl($url, $path)
{
if ($url) {
if (strpos($url, '://') === false) {
if (substr($url, 0, 1) == '/') {
$tmp = parse_url($path);
$url = $tmp['scheme'] . '://' . $tmp['host'] . $url;
} elseif (substr($url, 0, 3) == '../') {
$url = dirname($path) . substr($url, 2);
} elseif (substr($path, -1) == '/') {
$url = $path . $url;
} else {
$url = dirname($path) . '/' . $url;
}
}
return $url;
} else {
return '';
}
}
/**
     * Cut a substring out of the content between two delimiters.
     *
     * @param string $strings Content to cut from
     * @param string $argl    Left delimiter; treated as a regex when it contains ".+?"
     * @param string $argr    Right delimiter; treated as a regex when it contains ".+?"
     * @param bool   $lt      Whether to include the left delimiter in the result
     * @param bool   $gt      Whether to include the right delimiter in the result
* @return string
*/
public static function cut($strings, $argl, $argr, $lt = false, $gt = false)
{
if (!$strings)
return ("");
if (strpos($argl, ".+?")) {
$argl = strtr($argl, ["/" => "\/"]);
if (preg_match("/" . $argl . "/", $strings, $match))
$argl = $match[0];
}
if (strpos($argr, ".+?")) {
$argr = strtr($argr, ["/" => "\/"]);
if (preg_match("/" . $argr . "/", $strings, $match))
$argr = $match[0];
}
$args = explode($argl, $strings);
$args = explode($argr, $args[1]);
$args = $args[0];
if ($args) {
if ($lt)
$args = $argl . $args;
if ($gt)
$args .= $argr;
} else {
$args = "";
}
return ($args);
}
/**
     * Convert shorthand placeholders in a rule into real regex fragments.
*
* @param $rules
* @return array|string
*/
public static function parseMatchRule($rules)
{
$replace_pairs = [
'{' => '\{',
'}' => '\}',
'[内容]' => '(.*?)',
'[数字]' => '\d*',
'[空白]' => '\s*',
'[任意]' => '.*?',
'[参数]' => '[^\>\<]*?',
'[属性]' => '[^\>\<\'"]*?',
];
if (is_array($rules)) {
$rules['rule'] = strtr($rules['rule'], $replace_pairs);
return $rules;
}
return strtr($rules, $replace_pairs);
}
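    // Example (illustrative):
    //   Collect::parseMatchRule('<title>[内容]</title>') === '<title>(.*?)</title>'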
/**
     * Check whether a rule string looks like a regular expression (contains parentheses).
*
* @param $str
* @return bool
*/
public static function isreg($str)
{
return (strpos($str, ')') !== false || strpos($str, '(') !== false);
}
/**
* @param $data
* @return array
*/
public static function parseListData($data)
{
$list = [];
$num = 0;
foreach ($data as $v) {
if ($v) {
if ($num) {
if ($num != count($v))
return [];
} else {
$num = count($v);
}
}
}
foreach ($data as $k => $v) {
if ($v) {
foreach ($v as $kk => $vv) {
$list[$kk][$k] = $vv;
}
} else {
for ($i = 0; $i < $num; $i++) {
$list[$i][$k] = '';
}
}
}
return $list;
}
}
|
pakey/PTFrameWork
|
kuxin/helper/collect.php
|
PHP
|
mit
| 11,227 |
/**
* JS for the player character.
* * * * */
import * as Consts from './consts';
var leftLeg;
var rightLeg;
var leftArm;
var rightArm;
const BODY_HEIGHT = 5;
const LEG_HEIGHT = 5;
const HEAD_HEIGHT = Consts.BLOCK_WIDTH * (3/5);
const SKIN_COLORS = [0xFADCAB, 0x9E7245, 0x4F3F2F];
const BASE_MAT = new THREE.MeshLambertMaterial({color: 0xFF0000});
export var Player = function() {
THREE.Object3D.call(this);
this.position.y += BODY_HEIGHT / 2 + LEG_HEIGHT / 2 + HEAD_HEIGHT / 2 + HEAD_HEIGHT;
this.moveLeft = false;
this.moveRight = false;
  this.moveForward = false;
  this.moveBackward = false;
this.orientation = "backward";
var scope = this;
var legGeo = new THREE.BoxGeometry(Consts.BLOCK_WIDTH / 2, LEG_HEIGHT, Consts.BLOCK_WIDTH / 2);
var armGeo = new THREE.BoxGeometry(Consts.BLOCK_WIDTH / 2, BODY_HEIGHT, Consts.BLOCK_WIDTH / 2);
// Base mat(s)
var redMaterial = new THREE.MeshLambertMaterial({color: 0xFF2E00});
var blueMaterial = new THREE.MeshLambertMaterial({color: 0x23A8FC});
var yellowMaterial = new THREE.MeshLambertMaterial({color: 0xFFD000});
// Skin color mat, only used for head
var skinColor = SKIN_COLORS[Math.floor(Math.random() * SKIN_COLORS.length)]
var skinMat = new THREE.MeshLambertMaterial({color: skinColor});
// Body material
var bodyFrontMat = new THREE.MeshPhongMaterial({color: 0xFFFFFF});
var bodyFrontTexture = new THREE.TextureLoader().load("img/tetratowerbodyfront.png", function(texture) {
bodyFrontMat.map = texture;
bodyFrontMat.needsUpdate = true;
})
var bodyMat = new THREE.MultiMaterial([
redMaterial,
redMaterial,
redMaterial,
redMaterial,
bodyFrontMat,
bodyFrontMat
]);
var armSideMat = new THREE.MeshLambertMaterial({color: 0xFFFFFF})
var armTopMat = new THREE.MeshLambertMaterial({color: 0xFFFFFF});
var armMat = new THREE.MultiMaterial([
armSideMat,
armSideMat,
armTopMat,
armTopMat,
armSideMat,
armSideMat
]);
// Leg material
var legSideMat = new THREE.MeshLambertMaterial({color: 0xFFFFFF})
var legMat = new THREE.MultiMaterial([
legSideMat,
legSideMat,
blueMaterial,
blueMaterial,
legSideMat,
legSideMat
]);
var legTexture = new THREE.TextureLoader().load("/img/tetratowerleg.png", function (texture) {
legSideMat.map = texture;
legSideMat.needsUpdate = true;
});
var textureURL;
switch (skinColor) {
case SKIN_COLORS[0]:
textureURL = "/img/tetratowerarm_white.png";
break;
case SKIN_COLORS[1]:
textureURL = "/img/tetratowerarm_brown.png";
break;
case SKIN_COLORS[2]:
textureURL = "/img/tetratowerarm_black.png";
break;
default:
textureURL = "/img/tetratowerarm.png";
break;
}
var armTexture = new THREE.TextureLoader().load(textureURL, function(texture) {
armSideMat.map = texture;
armSideMat.needsUpdate = true;
});
var armTopTexture = new THREE.TextureLoader().load("img/tetratowerarmtop.png", function(texture) {
armTopMat.map = texture;
armTopMat.needsUpdate = true;
})
// Create a body
var bodyGeo = new THREE.BoxGeometry(Consts.BLOCK_WIDTH, BODY_HEIGHT, Consts.BLOCK_WIDTH / 2);
var body = new THREE.Mesh(bodyGeo, bodyMat);
this.add(body);
// Create some leggy legs
leftLeg = new THREE.Mesh(legGeo, legMat);
this.add(leftLeg)
leftLeg.translateX(-Consts.BLOCK_WIDTH / 4);
leftLeg.translateY(-(LEG_HEIGHT + BODY_HEIGHT) / 2);
rightLeg = new THREE.Mesh(legGeo, legMat);
this.add(rightLeg);
rightLeg.translateX(Consts.BLOCK_WIDTH / 4);
rightLeg.translateY(-(LEG_HEIGHT + BODY_HEIGHT) / 2);
// Create the arms
leftArm = new THREE.Mesh(armGeo, armMat);
this.add(leftArm);
leftArm.translateX(-(Consts.BLOCK_WIDTH / 4 + Consts.BLOCK_WIDTH / 2));
rightArm = new THREE.Mesh(armGeo, armMat);
this.add(rightArm);
rightArm.translateX((Consts.BLOCK_WIDTH / 4 + Consts.BLOCK_WIDTH / 2));
// Now add a head
var headGeo = new THREE.BoxGeometry(Consts.BLOCK_WIDTH * (3/5), Consts.BLOCK_WIDTH * (3/5), Consts.BLOCK_WIDTH * (3/5));
var head = new THREE.Mesh(headGeo, skinMat);
this.add(head);
head.translateY((BODY_HEIGHT + HEAD_HEIGHT) / 2);
// And a fashionable hat
var hatBodyGeo = new THREE.BoxGeometry(HEAD_HEIGHT * 1.05, HEAD_HEIGHT * (4/5), HEAD_HEIGHT * 1.05);
var hatBody = new THREE.Mesh(hatBodyGeo, yellowMaterial);
head.add(hatBody);
hatBody.translateY(HEAD_HEIGHT * (4/5));
var hatBrimGeo = new THREE.BoxGeometry(HEAD_HEIGHT * 1.05, HEAD_HEIGHT / 5, HEAD_HEIGHT * 0.525);
var hatBrim = new THREE.Mesh(hatBrimGeo, yellowMaterial);
head.add(hatBrim);
hatBrim.translateZ((HEAD_HEIGHT * 1.05) / 2 + (HEAD_HEIGHT * 0.525 / 2));
hatBrim.translateY(HEAD_HEIGHT / 2);
// Add some listeners
var onKeyDown = function(event) {
switch(event.keyCode) {
case 38: // up
case 87: // w
scope.moveForward = true;
break;
case 40: // down
case 83: // s
scope.moveBackward = true;
break;
case 37: // left
case 65: // a
scope.moveLeft = true;
break;
case 39: // right
case 68: // d
scope.moveRight = true;
break;
}
}
var onKeyUp = function(event) {
switch(event.keyCode) {
case 38: // up
case 87: // w
scope.moveForward = false;
break;
case 40: // down
case 83: // s
scope.moveBackward = false;
break;
case 37: // left
case 65: // a
scope.moveLeft = false;
break;
case 39: // right
case 68: // d
scope.moveRight = false;
break;
}
}
document.addEventListener('keydown', onKeyDown, false);
document.addEventListener('keyup', onKeyUp, false);
}
Player.prototype = new THREE.Object3D();
Player.prototype.constructor = Player;
THREE.Object3D.prototype.worldToLocal = function ( vector ) {
if ( !this.__inverseMatrixWorld ) this.__inverseMatrixWorld = new THREE.Matrix4();
return vector.applyMatrix4( this.__inverseMatrixWorld.getInverse( this.matrixWorld ));
};
THREE.Object3D.prototype.lookAtWorld = function( vector ) {
vector = vector.clone();
this.parent.worldToLocal( vector );
this.lookAt( vector );
};
|
Bjorkbat/tetratower
|
js/src/player.js
|
JavaScript
|
mit
| 6,258 |
blocklevel = ["blockquote", "div", "form", "p", "table", "video", "h1", "h2", "h3", "h4", "h5", "h6", "hr", "details", "article", "header", "main"]
def normalizeEnter(src):
	# Deletes line breaks that the user added for readability, since they cause issues in the HTML output
for elem in blocklevel:
while src.find("\r<" + elem) > -1:
src = src.replace("\r<" + elem, "<" + elem)
while src.find("</" + elem + ">\r") > -1:
src = src.replace("</" + elem + ">\r", "</" + elem + ">")
while src.find(">\r") > -1:
src = src.replace(">\r", ">") #It is really needed, it created some other bugs?!
while src.find("\r</") > -1:
src = src.replace("\r</", "</") ##It is really needed, it created some other bugs?!
return src
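# Example (illustrative): normalizeEnter("<p>\r hi \r</p>") returns "<p> hi </p>"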
def main(islinput, inputfile, pluginData, globalData):
currentIndex = 0
for item in islinput:
item = normalizeEnter(item) #Deletes not wanted line breaks in order to prevent the problem we have with Markdown.
islinput[currentIndex] = item
currentIndex += 1
return islinput, pluginData, globalData
|
ValorNaram/isl
|
inputchangers/002.py
|
Python
|
mit
| 1,044 |
<?php
namespace App\Models;
use Illuminate\Database\Eloquent\Model;
class DataQuestionnaire extends Model
{
protected $fillable = ['game_question', 'game_opponent_evaluation', 'study_evaluation'];
/**
* Relationship with parent DataParticipant.
*
* @return \Illuminate\Database\Eloquent\Relations\BelongsTo
*/
public function data_participant()
{
return $this->belongsTo('App\Models\DataParticipant');
}
}
|
mihaiconstantin/game-theory-tilburg
|
app/Models/DataQuestionnaire.php
|
PHP
|
mit
| 460 |
<?php
namespace Tests\Map\Gazzetta;
use PHPUnit\Framework\TestCase;
use FFQP\Map\Gazzetta\GazzettaMapSince2013;
class GazzettaMapSince2013Test extends TestCase
{
public function testExtractRows()
{
$map = new GazzettaMapSince2013();
$this->assertInternalType('int', 3);
$rows = $map->extractRows('tests/fixtures/2014_quotazioni_gazzetta_01.xls');
$this->assertSame(665, count($rows));
// First Footballer
$this->assertSame('101', $rows[0]->code);
$this->assertSame('ABBIATI', $rows[0]->player);
$this->assertSame('MILAN', $rows[0]->team);
$this->assertSame('P', $rows[0]->role);
$this->assertSame('P', $rows[0]->secondaryRole);
$this->assertSame('1', $rows[0]->status);
$this->assertSame('12', $rows[0]->quotation);
$this->assertSame('-', $rows[0]->magicPoints);
$this->assertSame('-', $rows[0]->vote);
$this->assertSame('', $rows[0]->goals);
$this->assertSame('', $rows[0]->yellowCards);
$this->assertSame('', $rows[0]->redCards);
$this->assertSame('', $rows[0]->penalties);
$this->assertSame('', $rows[0]->autoGoals);
$this->assertSame('', $rows[0]->assists);
// Footballer with a vote
$this->assertSame('106', $rows[5]->code);
$this->assertSame('BARDI', $rows[5]->player);
$this->assertSame('CHIEVO', $rows[5]->team);
$this->assertSame('P', $rows[5]->role);
$this->assertSame('P', $rows[5]->secondaryRole);
$this->assertSame('1', $rows[5]->status);
$this->assertSame('8', $rows[5]->quotation);
$this->assertSame('4', $rows[5]->magicPoints);
$this->assertSame('5', $rows[5]->vote);
$this->assertSame('-1', $rows[5]->goals);
$this->assertSame('0', $rows[5]->yellowCards);
$this->assertSame('0', $rows[5]->redCards);
$this->assertSame('0', $rows[5]->penalties);
$this->assertSame('0', $rows[5]->autoGoals);
$this->assertSame('0', $rows[5]->assists);
// Footballer without a vote
$this->assertSame('105', $rows[4]->code);
$this->assertSame('AVRAMOV', $rows[4]->player);
$this->assertSame('ATALANTA', $rows[4]->team);
$this->assertSame('P', $rows[4]->role);
$this->assertSame('P', $rows[4]->secondaryRole);
$this->assertSame('1', $rows[4]->status);
$this->assertSame('1', $rows[4]->quotation);
$this->assertSame('-', $rows[4]->magicPoints);
$this->assertSame('-', $rows[4]->vote);
$this->assertSame('', $rows[4]->goals);
$this->assertSame('', $rows[4]->yellowCards);
$this->assertSame('', $rows[4]->redCards);
$this->assertSame('', $rows[4]->penalties);
$this->assertSame('', $rows[4]->autoGoals);
$this->assertSame('', $rows[4]->assists);
}
}
|
astronati/fantasy-football-quotations-parser
|
tests/Map/Gazzetta/GazzettaMapSince2013Test.php
|
PHP
|
mit
| 2,877 |
package com.github.kolandroid.kol.model.elements.basic;
import com.github.kolandroid.kol.model.elements.interfaces.ModelGroup;
import java.util.ArrayList;
import java.util.Iterator;
public class BasicGroup<E> implements ModelGroup<E> {
/**
* Autogenerated by eclipse.
*/
private static final long serialVersionUID = 356357357356695L;
private final ArrayList<E> items;
private final String name;
public BasicGroup(String name) {
this(name, new ArrayList<E>());
}
public BasicGroup(String name, ArrayList<E> items) {
this.name = name;
this.items = items;
}
@Override
public int size() {
return items.size();
}
@Override
public E get(int index) {
return items.get(index);
}
@Override
public void set(int index, E value) {
items.set(index, value);
}
@Override
public void remove(int index) {
items.remove(index);
}
public void add(E item) {
items.add(item);
}
@Override
public String getName() {
return name;
}
@Override
public Iterator<E> iterator() {
return items.iterator();
}
}
|
Kasekopf/kolandroid
|
kol_base/src/main/java/com/github/kolandroid/kol/model/elements/basic/BasicGroup.java
|
Java
|
mit
| 1,193 |
#!/usr/bin/env node
(function () {
var DirectoryLayout = require('../lib/index.js'),
program = require('commander'),
options;
program
.version('1.0.2')
.usage('[options] <path, ...>')
.option('-g, --generate <path> <output-directory-layout-file-path>', 'Generate directory layout')
.option('-v, --verify <input-directory-layout-file-path> <path>', 'Verify directory layout')
.parse(process.argv);
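    // Example invocations (illustrative; the executable name depends on the package's "bin" entry):
    //   <cli> --generate ./src layout.md
    //   <cli> --verify layout.md ./src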
if(program.generate) {
options = {
output: program.args[0] || 'layout.md',
ignore: []
};
console.log('Generating layout for ' + program.generate + '... \n')
DirectoryLayout
.generate(program.generate, options)
.then(function() {
console.log('Layout generated at: ' + options.output);
});
}
else if(program.verify) {
options = {
root: program.args[0]
};
console.log('Verifying layout for ' + options.root + ' ...\n');
DirectoryLayout
.verify(program.verify, options)
.then(function() {
console.log('Successfully verified layout available in ' + program.verify + '.');
});
}
}());
|
ApoorvSaxena/directory-layout
|
bin/index.js
|
JavaScript
|
mit
| 1,267 |
var class_snowflake_1_1_game_1_1_game_database =
[
[ "GameDatabase", "class_snowflake_1_1_game_1_1_game_database.html#a2f09c1f7fe18beaf8be1447e541f4d68", null ],
[ "AddGame", "class_snowflake_1_1_game_1_1_game_database.html#a859513bbac24328df5d3fe2e47dbc183", null ],
[ "GetAllGames", "class_snowflake_1_1_game_1_1_game_database.html#a7c43f2ccabe44f0491ae25e9b88bb07a", null ],
[ "GetGameByUUID", "class_snowflake_1_1_game_1_1_game_database.html#ada7d853b053f0bbfbc6dea8eb89e85c4", null ],
[ "GetGamesByName", "class_snowflake_1_1_game_1_1_game_database.html#ac1bbd90e79957e360dd5542f6052616e", null ],
[ "GetGamesByPlatform", "class_snowflake_1_1_game_1_1_game_database.html#a2e93a35ea18a9caac9a6165cb33b5494", null ]
];
|
SnowflakePowered/snowflakepowered.github.io
|
doc/html/class_snowflake_1_1_game_1_1_game_database.js
|
JavaScript
|
mit
| 745 |
package org.winterblade.minecraft.harmony.api.questing;
import org.winterblade.minecraft.scripting.api.IScriptObjectDeserializer;
import org.winterblade.minecraft.scripting.api.ScriptObjectDeserializer;
/**
* Created by Matt on 5/29/2016.
*/
public enum QuestStatus {
INVALID, ACTIVE, LOCKED, COMPLETE, CLOSED;
@ScriptObjectDeserializer(deserializes = QuestStatus.class)
public static class Deserializer implements IScriptObjectDeserializer {
@Override
public Object Deserialize(Object input) {
if(!String.class.isAssignableFrom(input.getClass())) return null;
return QuestStatus.valueOf(input.toString().toUpperCase());
}
}
}
|
legendblade/CraftingHarmonics
|
api/src/main/java/org/winterblade/minecraft/harmony/api/questing/QuestStatus.java
|
Java
|
mit
| 697 |
package leetcode11_20;
/**Given a linked list, remove the nth node from the end of list and return its head.
For example, Given linked list: 1->2->3->4->5, and n = 2.
After removing the second node from the end, the linked list becomes 1->2->3->5.
Note: Given n will always be valid. Try to do this in one pass.
*/
public class RemoveNthFromEnd {
// Definition for singly-linked list.
public static class ListNode {
int val;
ListNode next;
ListNode(int x) { val = x; }
}
//one pass
public ListNode removeNthFromEnd(ListNode head, int n) {
ListNode dummy = new ListNode(0);
dummy.next = head;
ListNode slow = dummy, fast = dummy;
//Move fast in front so that the gap between slow and fast becomes n
        for(int i=1; i<=n+1; i++) { //TODO: mind the boundary (fast advances n+1 steps)
fast = fast.next;
}
while(fast != null) {//Move fast to the end, maintaining the gap
slow = slow.next;
fast = fast.next;
}
slow.next = slow.next.next;//Skip the desired node
return dummy.next;
}
//two pass
public ListNode removeNthFromEnd1(ListNode head, int n) {
int length = 0;
ListNode temp = head;
while (temp != null){
length++;
temp = temp.next;
}
if (n == length) return head.next;
temp = head;
        for (int i = 2; i <= length - n; i++){ //TODO: the loop bounds here are easy to get wrong
temp = temp.next;
}
temp.next = temp.next.next;
return head;
}
}
|
Ernestyj/JStudy
|
src/main/java/leetcode11_20/RemoveNthFromEnd.java
|
Java
|
mit
| 1,586 |
package org.kohsuke.github;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLEncoder;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import static java.lang.String.*;
/**
* Release in a github repository.
*
* @see GHRepository#getReleases() GHRepository#getReleases()
* @see GHRepository#listReleases() () GHRepository#listReleases()
* @see GHRepository#createRelease(String) GHRepository#createRelease(String)
*/
public class GHRelease extends GHObject {
GHRepository owner;
private String html_url;
private String assets_url;
private List<GHAsset> assets;
private String upload_url;
private String tag_name;
private String target_commitish;
private String name;
private String body;
private boolean draft;
private boolean prerelease;
private Date published_at;
private String tarball_url;
private String zipball_url;
private String discussion_url;
/**
* Gets discussion url. Only present if a discussion relating to the release exists
*
* @return the discussion url
*/
public String getDiscussionUrl() {
return discussion_url;
}
/**
* Gets assets url.
*
* @return the assets url
*/
public String getAssetsUrl() {
return assets_url;
}
/**
* Gets body.
*
* @return the body
*/
public String getBody() {
return body;
}
/**
* Is draft boolean.
*
* @return the boolean
*/
public boolean isDraft() {
return draft;
}
/**
* Sets draft.
*
* @param draft
* the draft
* @return the draft
* @throws IOException
* the io exception
* @deprecated Use {@link #update()}
*/
@Deprecated
public GHRelease setDraft(boolean draft) throws IOException {
return update().draft(draft).update();
}
public URL getHtmlUrl() {
return GitHubClient.parseURL(html_url);
}
/**
* Gets name.
*
* @return the name
*/
public String getName() {
return name;
}
/**
* Sets name.
*
* @param name
* the name
*/
public void setName(String name) {
this.name = name;
}
/**
* Gets owner.
*
* @return the owner
*/
@SuppressFBWarnings(value = { "EI_EXPOSE_REP" }, justification = "Expected behavior")
public GHRepository getOwner() {
return owner;
}
/**
* Sets owner.
*
* @param owner
* the owner
* @deprecated Do not use this method. It was added due to incomplete understanding of Jackson binding.
*/
@Deprecated
public void setOwner(GHRepository owner) {
throw new RuntimeException("Do not use this method.");
}
/**
* Is prerelease boolean.
*
* @return the boolean
*/
public boolean isPrerelease() {
return prerelease;
}
/**
* Gets published at.
*
* @return the published at
*/
public Date getPublished_at() {
return new Date(published_at.getTime());
}
/**
* Gets tag name.
*
* @return the tag name
*/
public String getTagName() {
return tag_name;
}
/**
* Gets target commitish.
*
* @return the target commitish
*/
public String getTargetCommitish() {
return target_commitish;
}
/**
* Gets upload url.
*
* @return the upload url
*/
public String getUploadUrl() {
return upload_url;
}
/**
* Gets zipball url.
*
* @return the zipball url
*/
public String getZipballUrl() {
return zipball_url;
}
/**
* Gets tarball url.
*
* @return the tarball url
*/
public String getTarballUrl() {
return tarball_url;
}
GHRelease wrap(GHRepository owner) {
this.owner = owner;
return this;
}
static GHRelease[] wrap(GHRelease[] releases, GHRepository owner) {
for (GHRelease release : releases) {
release.wrap(owner);
}
return releases;
}
/**
* Because github relies on SNI (http://en.wikipedia.org/wiki/Server_Name_Indication) this method will only work on
* Java 7 or greater. Options for fixing this for earlier JVMs can be found here
* http://stackoverflow.com/questions/12361090/server-name-indication-sni-on-java but involve more complicated
* handling of the HTTP requests to github's API.
*
* @param file
* the file
* @param contentType
* the content type
* @return the gh asset
* @throws IOException
* the io exception
*/
public GHAsset uploadAsset(File file, String contentType) throws IOException {
FileInputStream s = new FileInputStream(file);
try {
return uploadAsset(file.getName(), s, contentType);
} finally {
s.close();
}
}
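    // Usage sketch (illustrative; assumes an authenticated client and an existing repository):
    //   GHRelease release = repository.createRelease("v1.0.0").name("v1.0.0").create();
    //   release.uploadAsset(new File("dist/app.zip"), "application/zip");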
/**
* Upload asset gh asset.
*
* @param filename
* the filename
* @param stream
* the stream
* @param contentType
* the content type
* @return the gh asset
* @throws IOException
* the io exception
*/
public GHAsset uploadAsset(String filename, InputStream stream, String contentType) throws IOException {
Requester builder = owner.root().createRequest().method("POST");
String url = getUploadUrl();
// strip the helpful garbage from the url
url = url.substring(0, url.indexOf('{'));
url += "?name=" + URLEncoder.encode(filename, "UTF-8");
return builder.contentType(contentType).with(stream).withUrlPath(url).fetch(GHAsset.class).wrap(this);
}
/**
* Get the cached assets.
*
* @return the assets
*
* @deprecated This should be the default behavior of {@link #getAssets()} in a future release. This method is
* introduced in addition to enable a transition to using cached asset information while keeping the
* existing logic in place for backwards compatibility.
*/
@Deprecated
public List<GHAsset> assets() {
return Collections.unmodifiableList(assets);
}
/**
* Re-fetch the assets of this release.
*
* @return the assets
* @throws IOException
* the io exception
* @deprecated The behavior of this method will change in a future release. It will then provide cached assets as
* provided by {@link #assets()}. Use {@link #listAssets()} instead to fetch up-to-date information of
* assets.
*/
@Deprecated
public List<GHAsset> getAssets() throws IOException {
return listAssets().toList();
}
/**
* Re-fetch the assets of this release.
*
* @return the assets
* @throws IOException
* the io exception
*/
public PagedIterable<GHAsset> listAssets() throws IOException {
Requester builder = owner.root().createRequest();
return builder.withUrlPath(getApiTailUrl("assets")).toIterable(GHAsset[].class, item -> item.wrap(this));
}
/**
* Deletes this release.
*
* @throws IOException
* the io exception
*/
public void delete() throws IOException {
root().createRequest().method("DELETE").withUrlPath(owner.getApiTailUrl("releases/" + getId())).send();
}
/**
* Updates this release via a builder.
*
* @return the gh release updater
*/
public GHReleaseUpdater update() {
return new GHReleaseUpdater(this);
}
private String getApiTailUrl(String end) {
return owner.getApiTailUrl(format("releases/%s/%s", getId(), end));
}
}
|
kohsuke/github-api
|
src/main/java/org/kohsuke/github/GHRelease.java
|
Java
|
mit
| 8,107 |
import cairo
from gi.repository import Gtk
from gi.repository import Gdk
from pylsner import plugin
class Window(Gtk.Window):
def __init__(self):
super(Window, self).__init__(skip_pager_hint=True,
skip_taskbar_hint=True,
)
self.set_title('Pylsner')
screen = self.get_screen()
self.width = screen.get_width()
self.height = screen.get_height()
self.set_size_request(self.width, self.height)
self.set_position(Gtk.WindowPosition.CENTER)
rgba = screen.get_rgba_visual()
self.set_visual(rgba)
self.override_background_color(Gtk.StateFlags.NORMAL,
Gdk.RGBA(0, 0, 0, 0),
)
self.set_wmclass('pylsner', 'pylsner')
self.set_type_hint(Gdk.WindowTypeHint.DOCK)
self.stick()
self.set_keep_below(True)
drawing_area = Gtk.DrawingArea()
drawing_area.connect('draw', self.redraw)
self.refresh_cnt = 0
self.add(drawing_area)
self.connect('destroy', lambda q: Gtk.main_quit())
self.widgets = []
self.show_all()
def refresh(self, force=False):
self.refresh_cnt += 1
if self.refresh_cnt >= 60000:
self.refresh_cnt = 0
redraw_required = False
for wid in self.widgets:
if (self.refresh_cnt % wid.metric.refresh_rate == 0) or force:
wid.refresh()
redraw_required = True
if redraw_required:
self.queue_draw()
return True
def redraw(self, _, ctx):
ctx.set_antialias(cairo.ANTIALIAS_SUBPIXEL)
for wid in self.widgets:
wid.redraw(ctx)
class Widget:
def __init__(self,
name='default',
metric={'plugin': 'time'},
indicator={'plugin': 'arc'},
fill={'plugin': 'rgba_255'},
):
self.name = name
MetricPlugin = plugin.load_plugin('metrics', metric['plugin'])
self.metric = MetricPlugin(**metric)
IndicatorPlugin = plugin.load_plugin('indicators', indicator['plugin'])
self.indicator = IndicatorPlugin(**indicator)
FillPlugin = plugin.load_plugin('fills', fill['plugin'])
self.fill = FillPlugin(**fill)
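    # Example (illustrative): a widget built from the default plugins, attached to an
    # existing Window instance `win`:
    #   wid = Widget(name='clock')
    #   win.widgets.append(wid)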
def refresh(self):
self.metric.refresh()
self.fill.refresh(self.metric.value)
def redraw(self, ctx):
ctx.set_source(self.fill.pattern)
self.indicator.redraw(ctx, self.metric.value)
|
mrmrwat/pylsner
|
pylsner/gui.py
|
Python
|
mit
| 2,624 |
<?php $t = $a->getThing(); ?>
<div class="linkitem item-flavour-<?php echo $a->getFlavour() ?>" id="link-item-<?php echo $a->getId(); ?>">
<?php if (isset($nopos)): ?>
<span class="itempos"> </span>
<?php else: ?>
<span class="itempos"><?php echo $pos; ?></span>
<?php endif; ?>
<div class="votebtn">
<?php if ($sf_user->isAuthenticated()):?>
<?php $vote = $a->getThing()->getUserVote($sf_user->getId()); ?>
<?php if ($vote): ?>
<?php if ($vote['type'] == 'up'): ?>
<a href="#" onclick="voteUp(this, <?php echo $t->getId(); ?>); return false;">
<?php echo image_tag('mod_up.png', array('id' => 'link-up-' . $t->getId())) ?>
</a>
<?php else: ?>
<a href="#" onclick="voteUp(this, <?php echo $t->getId(); ?>); return false;">
<?php echo image_tag('up.png', array('id' => 'link-up-' . $t->getId())) ?>
</a>
<?php endif; ?>
<?php if ($vote['type'] == 'down'): ?>
<a href="#" onclick="voteDown(this, <?php echo $t->getId(); ?>); return false;">
<?php echo image_tag('mod_down.png', array('id' => 'link-down-' . $t->getId())) ?>
</a>
<?php else: ?>
<a href="#" onclick="voteDown(this, <?php echo $t->getId(); ?>); return false;">
<?php echo image_tag('down.png', array('id' => 'link-down-' . $t->getId())) ?>
</a>
<?php endif; ?>
<?php else: ?>
<a href="#" onclick="voteUp(this, <?php echo $t->getId(); ?>); return false;"><?php echo image_tag('up.png', array('id' => 'link-up-' . $t->getId())) ?></a>
<a href="#" onclick="voteDown(this, <?php echo $t->getId(); ?>); return false;"><?php echo image_tag('down.png', array('id' => 'link-down-' . $t->getId())) ?></a>
<?php endif; ?>
<?php else: ?>
<a href="#" onclick="return false;"><?php echo image_tag('up.png', array('id' => 'link-up-' . $t->getId())) ?></a>
<a href="#" onclick="return false;"><?php echo image_tag('down.png', array('id' => 'link-down-' . $t->getId())) ?></a>
<?php endif; ?>
</div>
<div class="item">
<?php if ($a->getFlavour() == 'link'): ?>
<p><?php echo link_to($a->getTitle(), $a->getUrl(), array('class' => 'name', 'target' => '_blank', 'rel' => 'nofollow')); ?></p>
<p class="url"><?php echo $a->getUrl(); ?></p>
<?php else: ?>
<p><?php echo link_to($a->getTitle(), $a->getViewUrl(), array('class' => 'name')); ?></p>
<?php endif;?>
<p class="link_footer">
<span class="flavour-<?php echo $a->getFlavour(); ?>"><?php echo $a->getFlavour(); ?></span>
<?php $score = $t->getScore(); ?>
<?php if ($score > 0): ?>
<?php if ($score === 1): ?>
<span id="link-score-<?php echo $t->getId(); ?>"><?php echo $t->getScore(); ?></span> point
<?php else: ?>
<span id="link-score-<?php echo $t->getId(); ?>"><?php echo $t->getScore(); ?></span> points
<?php endif; ?>
<?php endif; ?>
posted <?php echo time_ago_in_words(strtotime($a->getCreatedAt())) ?> ago
by <span class="link_author"><?php echo link_to($a->getUsername(), "@show_profile?username=" . $a->getUsername()); ?></span>
<?php $comment_count = $a->getTotalComments(); ?>
<?php if ($comment_count == 1): ?>
<a class="ctrl" href="<?php echo url_for($a->getViewUrl()); ?>">1 comment</a>
<?php else: ?>
<a class="ctrl" href="<?php echo url_for($a->getViewUrl()); ?>"><?php echo $comment_count; ?> comments</a>
<?php endif; ?>
<?php if ($sf_user->isAuthenticated() && $sf_user->isAdmin()): ?>
<?php echo link_to('delete', 'article/delete?articleid=' . $a->getId(), array('class' => 'ctrl admin', 'confirm' => 'Are you sure you want to delete this?')); ?>
<?php endif; ?>
</p>
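<?php // Flavour-specific content: snapshot thumbnail, link summary with optional preview images, syntax-highlighted code, or question body. ?>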
<?php if ($a->getFlavour() == 'snapshot'): ?>
<?php $snapshot = $a->getSnapshot(true); ?>
<a title="<?php echo $a->getTitle(); ?>" rel="lightbox" target="_blank" href="<?php echo $snapshot->getUrl(); ?>"><img class="snapshot" src="<?php echo $snapshot->getThumbnailUrl(200); ?>" /></a>
<p class="link_summary"><?php echo truncate_text($a->getSummary(), 200); ?></p>
<div class="clear"></div>
<?php endif; ?>
<?php if ($a->getFlavour() == 'link'): ?>
<p class="link_summary"><?php echo truncate_text($a->getSummary(), 200); ?></p>
<?php if ($a->getHasThumbnails()): ?>
<?php $ftas = $a->getFiles(true); ?>
<?php if (count($ftas) > 0): ?>
<div class="preview">
<?php foreach ($ftas as $fta): ?>
<img class="preview-image" src="<?php echo $fta->getFile()->getThumbnailUrl(); ?>" />
<?php endforeach; ?>
</div>
<?php endif; ?>
<?php endif; ?>
<?php endif; ?>
<?php if ($a->getFlavour() == 'code'): ?>
<pre style="display: none;" class="brush: <?php echo $a->getBrushAlias(); ?>"><?php echo htmlspecialchars($a->getCode()); ?></pre>
<?php endif; ?>
<?php if ($a->getFlavour() == 'question'): ?>
<div class="question-body"><?php echo $a->getQuestionHtml(); ?></div>
<?php endif; ?>
</div>
<div style="clear:both;"></div>
</div>
| sanjeevan/codelovely | apps/frontend/modules/article/templates/_article.php | PHP | mit | 5,279 |
import { Physics as EightBittrPhysics } from "eightbittr";
import { FullScreenPokemon } from "../FullScreenPokemon";
import { Direction } from "./Constants";
import { Character, Grass, Actor } from "./Actors";
/**
* Physics functions to move Actors around.
*/
export class Physics<Game extends FullScreenPokemon> extends EightBittrPhysics<Game> {
/**
* Determines the bordering direction from one Actor to another.
*
* @param actor The source Actor.
* @param other The destination Actor.
* @returns The direction from actor to other.
*/
public getDirectionBordering(actor: Actor, other: Actor): Direction | undefined {
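// Treat edges as bordering when they are within 4 units of each other.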
if (Math.abs(actor.top - (other.bottom - other.tolBottom)) < 4) {
return Direction.Top;
}
if (Math.abs(actor.right - other.left) < 4) {
return Direction.Right;
}
if (Math.abs(actor.bottom - other.top) < 4) {
return Direction.Bottom;
}
if (Math.abs(actor.left - other.right) < 4) {
return Direction.Left;
}
return undefined;
}
/**
* Determines the direction from one Actor to another.
*
* @param actor The source Actor.
* @param other The destination Actor.
* @returns The direction from actor to other.
* @remarks Like getDirectionBordering, but for cases where the two Actors
* aren't necessarily touching.
*/
public getDirectionBetween(actor: Actor, other: Actor): Direction {
const dx: number = this.getMidX(other) - this.getMidX(actor);
const dy: number = this.getMidY(other) - this.getMidY(actor);
if (Math.abs(dx) > Math.abs(dy)) {
return dx > 0 ? Direction.Right : Direction.Left;
}
return dy > 0 ? Direction.Bottom : Direction.Top;
}
/**
* Checks whether one Actor is overlapping another.
*
* @param actor An in-game Actor.
* @param other An in-game Actor.
* @returns Whether actor and other are overlapping.
*/
public isActorWithinOther(actor: Actor, other: Actor): boolean {
return (
actor.top >= other.top &&
actor.right <= other.right &&
actor.bottom <= other.bottom &&
actor.left >= other.left
);
}
/**
* Determines whether a Character is visually within grass.
*
* @param actor An in-game Character.
* @param other Grass that actor might be in.
* @returns Whether actor is visually within other.
*/
public isActorWithinGrass(actor: Character, other: Grass): boolean {
if (actor.right <= other.left) {
return false;
}
if (actor.left >= other.right) {
return false;
}
if (other.top > actor.top + actor.height / 2) {
return false;
}
if (other.bottom < actor.top + actor.height / 2) {
return false;
}
return true;
}
/**
* Shifts a Character according to its xvel and yvel.
*
* @param actor A Character to shift.
*/
public shiftCharacter(actor: Character): void {
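// Cancel velocity along any axis blocked by a bordering Actor, then apply the remaining velocity.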
if (actor.bordering[Direction.Top] && actor.yvel < 0) {
actor.yvel = 0;
}
if (actor.bordering[Direction.Right] && actor.xvel > 0) {
actor.xvel = 0;
}
if (actor.bordering[Direction.Bottom] && actor.yvel > 0) {
actor.yvel = 0;
}
if (actor.bordering[Direction.Left] && actor.xvel < 0) {
actor.xvel = 0;
}
this.shiftBoth(actor, actor.xvel, actor.yvel);
}
/**
* Snaps a moving Actor to a predictable grid position.
*
* @param actor An Actor to snap the position of.
*/
public snapToGrid(actor: Actor): void {
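// Round the Actor's global map position to the nearest grid multiple, then convert back to screen coordinates.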
const grid = 32;
const x: number = (this.game.mapScreener.left + actor.left) / grid;
const y: number = (this.game.mapScreener.top + actor.top) / grid;
this.setLeft(actor, Math.round(x) * grid - this.game.mapScreener.left);
this.setTop(actor, Math.round(y) * grid - this.game.mapScreener.top);
}
}
| FullScreenShenanigans/FullScreenPokemon | src/sections/Physics.ts | TypeScript | mit | 4,204 |