hexsha
stringlengths 40
40
| size
int64 3
1.05M
| ext
stringclasses 163
values | lang
stringclasses 53
values | max_stars_repo_path
stringlengths 3
945
| max_stars_repo_name
stringlengths 4
112
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
float64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
945
| max_issues_repo_name
stringlengths 4
113
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
float64 1
116k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
945
| max_forks_repo_name
stringlengths 4
113
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
float64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 3
1.05M
| avg_line_length
float64 1
966k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ed1c78cbfad985fcfcea36fcc15a4d8e66568bfd | 6,931 | pl | Perl | DBI_Excel_template.pl | jkstill/perl | c19d27563461a8daef7558d342fa987f5570f97e | [
"MIT"
]
| 1 | 2019-08-30T10:04:59.000Z | 2019-08-30T10:04:59.000Z | DBI_Excel_template.pl | jkstill/perl | c19d27563461a8daef7558d342fa987f5570f97e | [
"MIT"
]
| null | null | null | DBI_Excel_template.pl | jkstill/perl | c19d27563461a8daef7558d342fa987f5570f97e | [
"MIT"
]
| null | null | null | #!/u01/app/perl/bin/perl
# DBI_Excel_template.pl
# a template script for using DBI and Spreadsheet::WriteExcel
# Jared Still
# jkstill@gmail.com
=head1 DBI/Excel::Writer::XLSX template
This Perl program is a template for creating other scripts
that connect to Oracle and create an Excel Spreadsheet.
This script will list all roles and permissions granted to them.
The output will be an Excel Spreadsheet, with one page for each role.
Type DBI_Excel_template.pl -help for a help screen.
=cut
use warnings;
use strict;
use DBI;
use Getopt::Long;
use Excel::Writer::XLSX;
my $debug = 0;
my %optctl = ();
Getopt::Long::GetOptions(
\%optctl,
"machine=s",
"database=s",
"username=s",
"password=s",
"spreadsheet_file=s",
"sysdba!",
"sysoper!",
"z","h","help");
my( $database, $password, $username, $connectionMode);
if (
$optctl{h}
|| $optctl{z}
|| $optctl{help}
) {
Usage(0);
}
my $xlFile = defined($optctl{spreadsheet_file}) ? $optctl{spreadsheet_file} : 'roles.xlsx';
$connectionMode = 0;
if ( $optctl{sysoper} ) { $connectionMode = 4 }
if ( $optctl{sysdba} ) { $connectionMode = 2 }
if ( ! defined($optctl{username}) ) {
warn "username required\n";
Usage(2);
} else {$username=$optctl{username}}
if ( ! defined($optctl{database}) ) {
warn "database required\n";
Usage(3);
} else {$database=$optctl{database}}
my $roleWorkBook;
my %roleWorkSheets = ();
my %fonts = (
fixed => 'Courier New',
fixed_bold => 'Courier New',
text => 'Arial',
text_bold => 'Arial',
);
my %fontSizes = (
fixed => 10,
fixed_bold => 10,
text => 10,
text_bold => 10,
);
my $maxColWidth = 50;
my $counter = 0;
my $interval = 100;
# lookup the password if not on the command line
if ( defined( $optctl{password} ) ) {
$password = $optctl{password};
} else {
warn "Password required!\n";
Usage(4);
}
my $dbh = DBI->connect(
'dbi:Oracle:' . $database,
$username, $password,
{
RaiseError => 1,
AutoCommit => 0,
ora_session_mode => $connectionMode
}
);
die "Connect to $database failed \n" unless $dbh;
# apparently not a database handle attribute
# but IS a prepare handle attribute
#$dbh->{ora_check_sql} = 0;
$dbh->{RowCacheSize} = 100;
$dbh->do(q{alter session set nls_date_format='mm/dd/yyyy'},{ora_check_sql => 0});
my $rolenameSth = $dbh->prepare('select role from dba_roles order by role');
$rolenameSth->execute;
my @roles=();
# get all roles
while( my $rolename = $rolenameSth->fetchrow_hashref ) {
push @roles, $rolename->{ROLE};
}
my %roleLineCount=();
# create workbook
$roleWorkBook = Excel::Writer::XLSX->new(qq{${database}_${xlFile}});
die "Problems creating new Excel file ${database}_${xlFile}: $!\n" unless defined $roleWorkBook;
# create formats
my $stdFormat = $roleWorkBook->add_format(bold => 0,font => $fonts{fixed}, size => $fontSizes{fixed}, color => 'black');
my $boldFormat = $roleWorkBook->add_format(bold => 1,font => $fonts{fixed_bold}, size => $fontSizes{fixed_bold}, color => 'black');
my $wrapFormat = $roleWorkBook->add_format(bold => 0,font => $fonts{text}, size => $fontSizes{text}, color => 'black');
$wrapFormat->set_align('vjustify');
# create the cover page
$roleWorkSheets{CoverPage} = $roleWorkBook->add_worksheet('CoverPage');
# scalar is fine - the data section should all be one line
my $coverIntro = <DATA>;
{
my $upperDB = uc($database);
$coverIntro =~ s/<<DATABASE>>/$upperDB/g;
}
$roleLineCount{CoverPage} = 0;
$roleWorkSheets{CoverPage}->set_column($roleLineCount{CoverPage},0,60);
$roleWorkSheets{CoverPage}->write($roleLineCount{CoverPage}++,0,$coverIntro, $wrapFormat);
# create all role worksheets
# just list all roles on the cover page in alpha order, with links
foreach my $role ( sort @roles ) {
$roleWorkSheets{$role} = $roleWorkBook->add_worksheet($role);
# set column widths per each sheet
# these are commented out
# see dynamic settings in data loop below
#$roleWorkSheets{$role}->set_column(0,0,10); # privtype
#$roleWorkSheets{$role}->set_column(1,1,40); # privname
#$roleWorkSheets{$role}->set_column(2,2,20); # owner
#$roleWorkSheets{$role}->set_column(3,3,30); # table_name
#$roleWorkSheets{$role}->set_column(4,4,10); # grantable ( admin option )
# create link to cover page
$roleWorkSheets{$role}->write(0,0,"internal:CoverPage!A1", 'CoverPage');
$roleWorkSheets{CoverPage}->write($roleLineCount{CoverPage}++,0,"internal:${role}!A1", $role);
$roleLineCount{$role} = 1;
}
my $roleSql = q{
select *
from (
select 'ROLE' privtype, granted_role privname, null owner, null table_name, admin_option grantable
from dba_role_privs
where grantee = upper(?)
union
select 'SYSPRIV' privtype, privilege privname, null owner, null table_name, admin_option grantable
from dba_sys_privs
where grantee = upper(?)
union
select 'TABPRIV' privtype, privilege privname, owner, table_name, grantable
from dba_tab_privs
where grantee = upper(?)
order by 1, 2, 3, 4
)
};
# For every role, fetch its grants (nested roles, system privileges and
# table privileges via the three-way UNION in $roleSql) and write them to
# that role's worksheet.
foreach my $role ( sort @roles ) {
    my $sth = $dbh->prepare($roleSql);
    # the same role name feeds the placeholder in all three UNION branches
    $sth->execute($role,$role,$role);
    # setup column widths
    # PRECISION/SCALE come from the DBI statement-handle metadata; width is
    # precision + scale, capped at $maxColWidth characters
    my @precision = @{$sth->{PRECISION}};
    # get scale - deal with undefs
    my @scale = map { defined($_) ? $_ : 0 } @{$sth->{SCALE}};
    foreach my $el ( 0..$#precision ) {
        my $colWidth = $precision[$el]+$scale[$el] > $maxColWidth
            ? $maxColWidth
            : $precision[$el]+$scale[$el];
        $roleWorkSheets{$role}->set_column($el,$el,$colWidth);
    }
    # get column names from DBI
    my @colNames = @{$sth->{NAME_uc}};
    $roleWorkSheets{$role}->write_row($roleLineCount{$role}++,0,\@colNames,$boldFormat);
    # freeze pane at header
    $roleWorkSheets{$role}->freeze_panes($roleLineCount{$role},0);
    while( my $rolePrivs = $sth->fetchrow_arrayref ) {
        $roleWorkSheets{$role}->write_row($roleLineCount{$role},0,$rolePrivs,
            $stdFormat
        );
        # when the granted privilege is itself a role, overwrite the name
        # cell with an internal hyperlink to that role's own worksheet
        if ($rolePrivs->[0] eq 'ROLE' ) {
            $roleWorkSheets{$role}->write($roleLineCount{$role},1,
                "internal:" . $rolePrivs->[1] . "!A1", $rolePrivs->[1]
            );
        }
        $roleLineCount{$role}++;
    }
}
$dbh->disconnect;
# Usage: print the help screen to stdout and terminate the script with the
# supplied exit status (0 for -h/-z/-help, non-zero when a required option
# is missing).
sub Usage {
    my $exitval = shift;
    # 'use' happens at compile time, so File::Basename is loaded once no
    # matter how often (or whether) Usage() runs.
    use File::Basename;
    my $basename = basename($0);
    print qq{
usage: $basename Oracle System Account Lister
-database target database
-username DBA account
-password account password
use one of the following options
to connect as SYSOPER or SYSDBA
[-sysdba || -sysoper]
-spreadsheet_file Name of spreadsheet file to create. Defaults to roles.xlsx
};
    exit $exitval;
}
__DATA__
This Spreadsheet contains a separate worksheet for every role in the database <<DATABASE>>. You may go directly to the page for each role by clicking on it on this page. If there are other database roles assigned to the chosen role, you may go to those roles by clicking on the links. Try this on the DBA role, as it usually has several links. You may return to this page by clicking on 'CoverPage' in the cell at the top left of each page.
| 26.657692 | 442 | 0.683307 |
73fc79ce48c29ba4236ba4770d2ccac2375a684d | 2,561 | pm | Perl | auto-lib/Paws/Support/SeverityLevel.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
]
| 164 | 2015-01-08T14:58:53.000Z | 2022-02-20T19:16:24.000Z | auto-lib/Paws/Support/SeverityLevel.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
]
| 348 | 2015-01-07T22:08:38.000Z | 2022-01-27T14:34:44.000Z | auto-lib/Paws/Support/SeverityLevel.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
]
| 87 | 2015-04-22T06:29:47.000Z | 2021-09-29T14:45:55.000Z | # Generated by default/object.tt
# Paws shape class for the AWS Support API 'SeverityLevel' structure: an
# immutable code/name pair describing a support case severity level.
package Paws::Support::SeverityLevel;
  use Moose;
  # NameInRequest trait: serialized on the wire as lowercase 'code'/'name'.
  has Code => (is => 'ro', isa => 'Str', request_name => 'code', traits => ['NameInRequest']);
  has Name => (is => 'ro', isa => 'Str', request_name => 'name', traits => ['NameInRequest']);
1;
### main pod documentation begin ###
=head1 NAME
Paws::Support::SeverityLevel
=head1 USAGE
This class represents one of two things:
=head3 Arguments in a call to a service
Use the attributes of this class as arguments to methods. You shouldn't make instances of this class.
Each attribute should be used as a named argument in the calls that expect this type of object.
As an example, if Att1 is expected to be a Paws::Support::SeverityLevel object:
$service_obj->Method(Att1 => { Code => $value, ..., Name => $value });
=head3 Results returned from an API call
Use accessors for each attribute. If Att1 is expected to be an Paws::Support::SeverityLevel object:
$result = $service_obj->Method(...);
$result->Att1->Code
=head1 DESCRIPTION
A code and name pair that represents the severity level of a support
case. The available values depend on the support plan for the account.
For more information, see Choosing a severity
(https://docs.aws.amazon.com/awssupport/latest/user/case-management.html#choosing-severity)
in the I<AWS Support User Guide>.
=head1 ATTRIBUTES
=head2 Code => Str
The code for case severity level.
Valid values: C<low> | C<normal> | C<high> | C<urgent> | C<critical>
=head2 Name => Str
The name of the severity level that corresponds to the severity level
code.
The values returned by the API are different from the values that
appear in the AWS Support Center. For example, the API uses the code
C<low>, but the name appears as General guidance in Support Center.
The following are the API code names and how they appear in the
console:
=over
=item *
C<low> - General guidance
=item *
C<normal> - System impaired
=item *
C<high> - Production system impaired
=item *
C<urgent> - Production system down
=item *
C<critical> - Business-critical system down
=back
For more information, see Choosing a severity
(https://docs.aws.amazon.com/awssupport/latest/user/case-management.html#choosing-severity)
in the I<AWS Support User Guide>.
=head1 SEE ALSO
This class forms part of L<Paws>, describing an object used in L<Paws::Support>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
| 23.934579 | 102 | 0.732136 |
ed19eb88094d30710492db988bfee0407cd06cf5 | 1,125 | pm | Perl | PubMed/perlscripts/CSB.pm | TheVanishingMan/DrugInteractionDiscovery | 1ad96ffffd1e8e57f5ca196ff6ae09366195fb7c | [
"Xnet",
"X11"
]
| 1 | 2020-04-24T19:11:01.000Z | 2020-04-24T19:11:01.000Z | PubMed/perlscripts/CSB.pm | TheVanishingMan/DrugInteractionDiscovery | 1ad96ffffd1e8e57f5ca196ff6ae09366195fb7c | [
"Xnet",
"X11"
]
| null | null | null | PubMed/perlscripts/CSB.pm | TheVanishingMan/DrugInteractionDiscovery | 1ad96ffffd1e8e57f5ca196ff6ae09366195fb7c | [
"Xnet",
"X11"
]
| null | null | null | package CSB;
use strict;
use warnings;

require Exporter;
use EasyGet;
use BibCache;

our @ISA    = qw(Exporter);
# NOTE(review): csb_fetch is exported but no definition is visible in this
# file -- confirm it is provided elsewhere before relying on the export.
our @EXPORT = ('csb_query',
               'csb_fetch',
              );
our $VERSION = 1.0;

### URL:s ################################################################################

# Shared URL prefix:
my $csb_base_url   = 'http://liinwww.ira.uka.de';
my $csb_simple_url = "$csb_base_url/searchbib/index?";
my $csb_adv_url    = "$csb_base_url/waisbib?";

### Functions ############################################################################

# Func: csb_query
#
# In:  A hash containing the building blocks of the search
# Out: A list of ids. Results are also cached!
#
# Default query-string parameters for the CSB search CGI; individual keys
# are presumably overridden by a caller-supplied hash.
my %def_str = ('ty'        => 'Any',    # article,inproceedings,techreport,book,phdthesis,manual
               'au_i'      => 'exact',  # phonetic
               'stemming'  => 'on',     # off
               'maxhits'   => '40',     # 10,40,100,170 Why only these?
               'directget' => '1',      # 0. A '1' means return bibtex items
               'convert'   => 'bibtex', # Important: don't change.
               'sortmode'  => 'date',   # score
               'compress'  => undef,    # on. I assume we cannot handle compression in general...
              );

# TODO: csb_query is an unimplemented stub; it currently returns nothing.
sub csb_query {
}

# A module must end with a true value; without this, `use CSB;` dies with
# "CSB.pm did not return a true value".
1;
| 28.125 | 90 | 0.528 |
ed1c8b35f6841430d6064d9989efcbe2a3e54462 | 505 | pm | Perl | lib/VMOMI/VirtualCdromRemoteAtapiBackingInfo.pm | restump/p5-vmomi | e2571d72a1f552ddd0258ad289ec229d8d12a147 | [
"Apache-2.0"
]
| 1 | 2020-07-22T21:56:34.000Z | 2020-07-22T21:56:34.000Z | lib/VMOMI/VirtualCdromRemoteAtapiBackingInfo.pm | restump/p5-vmomi | e2571d72a1f552ddd0258ad289ec229d8d12a147 | [
"Apache-2.0"
]
| null | null | null | lib/VMOMI/VirtualCdromRemoteAtapiBackingInfo.pm | restump/p5-vmomi | e2571d72a1f552ddd0258ad289ec229d8d12a147 | [
"Apache-2.0"
]
| 1 | 2016-07-19T19:56:09.000Z | 2016-07-19T19:56:09.000Z | package VMOMI::VirtualCdromRemoteAtapiBackingInfo;
# Backing info for a virtual CD-ROM device backed by a remote (client-side)
# ATAPI device, as modeled by the vSphere API (VMOMI) type hierarchy.
use parent 'VMOMI::VirtualDeviceRemoteDeviceBackingInfo';
use strict;
use warnings;
# Ancestor VMOMI type names, nearest parent first; consumed by
# get_class_ancestors() below.
our @class_ancestors = (
    'VirtualDeviceRemoteDeviceBackingInfo',
    'VirtualDeviceBackingInfo',
    'DynamicData',
);
# This type declares no properties of its own.
our @class_members = ( );
# Return the ancestor type-name list declared above.
sub get_class_ancestors {
    return @class_ancestors;
}
# Return all members: those inherited through the parent chain followed by
# this class's own (empty) member list.
sub get_class_members {
    my $class = shift;
    my @super_members = $class->SUPER::get_class_members();
    return (@super_members, @class_members);
}
1;
| 19.423077 | 59 | 0.734653 |
ed0b4232f2a52b6297febc1126a560dfdeedff2d | 4,266 | pm | Perl | os/linux/local/mode/liststorages.pm | nribault/centreon-plugins | e99276ba80ba202392791e78d72b00f1306d1a99 | [
"Apache-2.0"
]
| null | null | null | os/linux/local/mode/liststorages.pm | nribault/centreon-plugins | e99276ba80ba202392791e78d72b00f1306d1a99 | [
"Apache-2.0"
]
| null | null | null | os/linux/local/mode/liststorages.pm | nribault/centreon-plugins | e99276ba80ba202392791e78d72b00f1306d1a99 | [
"Apache-2.0"
]
| null | null | null | #
# Copyright 2020 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package os::linux::local::mode::liststorages;
use base qw(centreon::plugins::mode);
use strict;
use warnings;
# Constructor: register this mode's command-line options (three optional
# regexp filters, applied later in manage_selection()).
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;
    $options{options}->add_options(arguments => {
        'filter-type:s' => { name => 'filter_type' },
        'filter-fs:s' => { name => 'filter_fs' },
        'filter-mount:s' => { name => 'filter_mount' }
    });
    return $self;
}
# Option validation: nothing mode-specific to verify, defer to the base class.
sub check_options {
    my ($self, %options) = @_;
    $self->SUPER::check_options(%options);
}
# Run `df -P -k -T` through the custom backend, parse each data line and
# return a hashref keyed by mount point: { mount => { fs => ..., type => ... } }.
# Entries failing any of the --filter-fs / --filter-type / --filter-mount
# regexps are skipped (with a debug-level long message).
sub manage_selection {
    my ($self, %options) = @_;

    my ($stdout) = $options{custom}->execute_command(
        command => 'df',
        command_options => '-P -k -T 2>&1',
        no_quit => 1
    );

    my $storages = {};
    LINE: foreach my $df_line (split /\n/, $stdout) {
        # columns: filesystem, type, 1K-blocks, used, available, use%, mount
        next LINE if ($df_line !~ /^(\S+)\s+(\S+)\s+(\d+)\s+(\d+)\s+(\d+)\s+(\S+)\s+(.*)/);
        my ($fs, $type, $size, $used, $available, $percent, $mount) = ($1, $2, $3, $4, $5, $6, $7);

        # data-driven filter checks; order and messages match the CLI options
        my @filter_specs = (
            [ 'filter_fs',    $fs,    'filesystem'      ],
            [ 'filter_type',  $type,  'filesystem type' ],
            [ 'filter_mount', $mount, 'mount point'     ],
        );
        foreach my $spec (@filter_specs) {
            my ($opt_name, $candidate, $label) = @$spec;
            if (defined($self->{option_results}->{$opt_name}) && $self->{option_results}->{$opt_name} ne '' &&
                $candidate !~ /$self->{option_results}->{$opt_name}/) {
                $self->{output}->output_add(long_msg => "skipping storage '" . $mount . "': no matching filter " . $label, debug => 1);
                next LINE;
            }
        }

        $storages->{$mount} = { fs => $fs, type => $type };
    }

    return $storages;
}
# Plugin entry point for the listing action: emit one long-output line per
# storage kept by manage_selection(), then exit with severity OK.
sub run {
    my ($self, %options) = @_;
    my $results = $self->manage_selection(custom => $options{custom});
    foreach my $name (sort(keys %$results)) {
        $self->{output}->output_add(long_msg => "'" . $name . "' [fs = " . $results->{$name}->{fs} . '] [type = ' . $results->{$name}->{type} . ']');
    }
    $self->{output}->output_add(
        severity => 'OK',
        short_msg => 'List storages:'
    );
    # raw listing: no perfdata, long output always shown
    $self->{output}->display(nolabel => 1, force_ignore_perfdata => 1, force_long_output => 1);
    $self->{output}->exit();
}
# Declare the attribute names exposed per discovery entry for this mode.
sub disco_format {
    my ($self, %options) = @_;
    $self->{output}->add_disco_format(elements => [ qw(name fs type) ]);
}
# Emit one discovery entry (name/fs/type) per storage returned by
# manage_selection(), in mount-point order.
sub disco_show {
    my ($self, %options) = @_;

    my $storages = $self->manage_selection(custom => $options{custom});
    foreach my $mount (sort keys %$storages) {
        $self->{output}->add_disco_entry(
            name => $mount,
            fs   => $storages->{$mount}->{fs},
            type => $storages->{$mount}->{type},
        );
    }
}
1;
__END__
=head1 MODE
List storages.
Command used: df -P -k -T 2>&1
=over 8
=item B<--filter-type>
Filter filesystem type (regexp can be used).
=item B<--filter-fs>
Filter filesystem (regexp can be used).
=item B<--filter-mount>
Filter mount point (regexp can be used).
=back
=cut
| 29.219178 | 149 | 0.576887 |
ed018b0855c844f9c9ecfbb236c9092e9113904d | 1,853 | pl | Perl | t/sanity_base/mt_03c.pl | logicmoo/pfc | a26290cd35d37d9c926cf5e5dd2797e019553a3d | [
"BSD-2-Clause"
]
| 1 | 2022-02-20T16:31:55.000Z | 2022-02-20T16:31:55.000Z | t/sanity_base/mt_03c.pl | logicmoo/pfc | a26290cd35d37d9c926cf5e5dd2797e019553a3d | [
"BSD-2-Clause"
]
| null | null | null | t/sanity_base/mt_03c.pl | logicmoo/pfc | a26290cd35d37d9c926cf5e5dd2797e019553a3d | [
"BSD-2-Clause"
]
| null | null | null | /* <module>
%
% PFC is a language extension for Prolog.
%
% It adds a new type of module inheritance
%
% Dec 13, 2035
% Douglas Miles
*/
% was_module(header_sane,[]).
:- include(library(logicmoo_test_header)).
%:- add_import_module(header_sane,baseKB,end).
:- set_defaultAssertMt(myMt).
:- expects_dialect(pfc).
:- mpred_trace_exec.
mtProlog(modA).
mtHybrid(modB).
modA: (codeA:- printAll('$current_source_module'(_M)),codeB).
modB: (codeB).
%:- \+ modA:codeA.
genlMt(modA,modB).
% before test, to make sure codeA was not accidentally defined in modB
:- sanity(\+ module_clause(modB:codeA,_)).
:- sanity(\+ module_clause(modA:codeB,_)).
:- sanity( module_clause(modA:codeA,_)).
:- sanity( module_clause(modB:codeB,_)).
% before test, genlMt makes the rule available and should not corrupt the modA module
:- warn_fail_TODO(clause_u(modA:codeB,_)).
% make sure genlMt didnt unassert
:- sanity(clause_u(modB:codeB,_)).
% run the test
modA: (:- codeA).
% to make codeB sure is available in modA
:- mpred_must( clause_u(modA:codeB,_)).
% to make sure codeA does not get accidentally defined in modB
:- mpred_must(\+ ((clause_u(modB:codeA,B,Ref),B\=inherit_above(modB, codeA), clause_property(Ref,module(modB))))).
% genlMt makes the rule available and should not corrupt the modA module
:- warn_fail_TODO(clause(modA:codeB,_)).
% genlMt
:- warn_fail_TODO( clause_u(modA:codeB,_)).
% ISSUE: https://github.com/logicmoo/logicmoo_workspace/issues/314
% EDIT: https://github.com/logicmoo/logicmoo_workspace/edit/master/packs_sys/pfc/t/sanity_base/mt_03c.pl
% JENKINS: https://jenkins.logicmoo.org/job/logicmoo_workspace/lastBuild/testReport/logicmoo.pfc.test.sanity_base/MT_03C/logicmoo_pfc_test_sanity_base_MT_03C_JUnit/
% ISSUE_SEARCH: https://github.com/logicmoo/logicmoo_workspace/issues?q=is%3Aissue+label%3AMT_03C
| 25.040541 | 165 | 0.739881 |
ed279aac599b8a8b953f47255843a4d007615c65 | 1,057 | pm | Perl | pdu-perl-api/Raritan/RPC/sensors/Sensor_2_0_0/Unit.pm | gregoa/raritan-pdu-json-rpc-sdk | 76df982462742b97b52872aa34630140f5df7e58 | [
"BSD-3-Clause"
]
| 1 | 2021-04-29T23:04:17.000Z | 2021-04-29T23:04:17.000Z | pdu-perl-api/Raritan/RPC/sensors/Sensor_2_0_0/Unit.pm | gregoa/raritan-pdu-json-rpc-sdk | 76df982462742b97b52872aa34630140f5df7e58 | [
"BSD-3-Clause"
]
| null | null | null | pdu-perl-api/Raritan/RPC/sensors/Sensor_2_0_0/Unit.pm | gregoa/raritan-pdu-json-rpc-sdk | 76df982462742b97b52872aa34630140f5df7e58 | [
"BSD-3-Clause"
]
| 2 | 2020-06-20T16:21:23.000Z | 2021-09-28T19:04:44.000Z | # SPDX-License-Identifier: BSD-3-Clause
#
# Copyright 2020 Raritan Inc. All rights reserved.
#
# This file was generated by IdlC from Sensor.idl.
use strict;
package Raritan::RPC::sensors::Sensor_2_0_0::Unit;

# Enumeration of the measurement units a sensor value may carry.
# Values mirror the IDL enum ordering and must not be renumbered.
use constant {
    NONE                   => 0,   # unit-less reading
    VOLT                   => 1,
    AMPERE                 => 2,
    WATT                   => 3,
    VOLT_AMP               => 4,
    WATT_HOUR              => 5,
    VOLT_AMP_HOUR          => 6,
    DEGREE_CELSIUS         => 7,
    HZ                     => 8,
    PERCENT                => 9,
    METER_PER_SEC          => 10,
    PASCAL                 => 11,
    G                      => 12,
    RPM                    => 13,
    METER                  => 14,
    HOUR                   => 15,
    MINUTE                 => 16,
    SECOND                 => 17,
    VOLT_AMP_REACTIVE      => 18,
    VOLT_AMP_REACTIVE_HOUR => 19,
    GRAM                   => 20,
    OHM                    => 21,
    LITERS_PER_HOUR        => 22,
    CANDELA                => 23,
    METER_PER_SQARE_SEC    => 24,  # (sic: generated identifier, kept verbatim)
    TESLA                  => 25,
    VOLT_PER_METER         => 26,
    VOLT_PER_AMPERE        => 27,
    DEGREE                 => 28,
};

1;
| 25.166667 | 50 | 0.727531 |
ed28740357ee5a23502ce5be7c0b751ad8090491 | 10,786 | pm | Perl | cloud/aws/elasticache/mode/commandsmemcached.pm | bmareau/centreon-plugins | 377b2e75b36aaf0ea9c80f4530d1396c7f1c0802 | [
"Apache-2.0"
]
| null | null | null | cloud/aws/elasticache/mode/commandsmemcached.pm | bmareau/centreon-plugins | 377b2e75b36aaf0ea9c80f4530d1396c7f1c0802 | [
"Apache-2.0"
]
| null | null | null | cloud/aws/elasticache/mode/commandsmemcached.pm | bmareau/centreon-plugins | 377b2e75b36aaf0ea9c80f4530d1396c7f1c0802 | [
"Apache-2.0"
]
| null | null | null | #
# Copyright 2019 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package cloud::aws::elasticache::mode::commandsmemcached;
use base qw(centreon::plugins::templates::counter);
use strict;
use warnings;
# Build the per-instance output prefix, e.g.
#   "Cluster 'front' [Node: 0001] average commands "
# The node segment appears only when a node id was supplied.
sub prefix_metric_output {
    my ($self, %options) = @_;

    my $node_part = '';
    if (defined($options{instance_value}->{node_id}) && $options{instance_value}->{node_id} ne '') {
        $node_part = "[Node: $options{instance_value}->{node_id}] ";
    }

    return sprintf("Cluster '%s' %s%s commands ",
        $options{instance_value}->{display},
        $node_part,
        $options{instance_value}->{stat});
}
# Copy one (metric, statistic) sample out of new_datas into result_values.
# Keys in new_datas are built as '<instance>_<Metric>_<stat>' by
# manage_selection(); the per-second rate is derived from the timeframe.
sub custom_metric_calc {
    my ($self, %options) = @_;

    # sampling window in seconds (set in manage_selection)
    $self->{result_values}->{timeframe} = $options{new_datas}->{$self->{instance} . '_timeframe'};
    $self->{result_values}->{value} = $options{new_datas}->{$self->{instance} . '_' . $options{extra_options}->{metric} . '_' . $options{extra_options}->{stat}};
    # raw command count divided by the window length -> commands/second
    $self->{result_values}->{value_per_sec} = $self->{result_values}->{value} / $self->{result_values}->{timeframe};
    $self->{result_values}->{stat} = $options{extra_options}->{stat};
    $self->{result_values}->{metric} = $options{extra_options}->{metric};
    $self->{result_values}->{display} = $options{new_datas}->{$self->{instance} . '_display'};
    return 0;
}
# Compare the computed value (per-second when --per-sec was given, raw count
# otherwise) against the 'warning-<metric>-<stat>' / 'critical-<metric>-<stat>'
# thresholds and return the resulting severity string.
sub custom_metric_threshold {
    my ($self, %options) = @_;

    my $exit = $self->{perfdata}->threshold_check(value => defined($self->{instance_mode}->{option_results}->{per_sec}) ? $self->{result_values}->{value_per_sec} : $self->{result_values}->{value},
                                                  threshold => [ { label => 'critical-' . lc($self->{result_values}->{metric}) . "-" . lc($self->{result_values}->{stat}), exit_litteral => 'critical' },
                                                                 { label => 'warning-' . lc($self->{result_values}->{metric}) . "-" . lc($self->{result_values}->{stat}), exit_litteral => 'warning' } ]);
    return $exit;
}
# Emit one perfdata entry named '<metric>_<stat>[_<cluster>]'; the cluster
# suffix is added only when several instances are being displayed. The unit
# and value honor the --per-sec option.
sub custom_metric_perfdata {
    my ($self, %options) = @_;

    my $extra_label = '';
    # suffix with the (lowercased) cluster name unless this is the sole instance
    $extra_label = '_' . lc($self->{result_values}->{display}) if (!defined($options{extra_instance}) || $options{extra_instance} != 0);
    $self->{output}->perfdata_add(label => lc($self->{result_values}->{metric}) . "_" . lc($self->{result_values}->{stat}) . $extra_label,
                                  unit => defined($self->{instance_mode}->{option_results}->{per_sec}) ? 'cmd/s' : 'cmd',
                                  value => sprintf("%.2f", defined($self->{instance_mode}->{option_results}->{per_sec}) ? $self->{result_values}->{value_per_sec} : $self->{result_values}->{value}),
                                  warning => $self->{perfdata}->get_perfdata_for_output(label => 'warning-' . lc($self->{result_values}->{metric}) . "-" . lc($self->{result_values}->{stat})),
                                  critical => $self->{perfdata}->get_perfdata_for_output(label => 'critical-' . lc($self->{result_values}->{metric}) . "-" . lc($self->{result_values}->{stat})),
                                  );
}
# Format the human-readable status fragment for one metric, honoring the
# --per-sec option ("<Metric>: <value> cmd" or "... cmd/s").
sub custom_metric_output {
    my ($self, %options) = @_;

    if (defined($self->{instance_mode}->{option_results}->{per_sec})) {
        return sprintf("%s: %.2f cmd/s", $self->{result_values}->{metric}, $self->{result_values}->{value_per_sec});
    }
    return sprintf("%s: %.2f cmd", $self->{result_values}->{metric}, $self->{result_values}->{value});
}
# Declare the counter tree: a single 'metric' group plus one counter per
# (metric, statistic) pair -- label '<metric>-<stat>', e.g. 'cmdget-average'.
# All counters share the custom_metric_* callbacks defined above.
sub set_counters {
    my ($self, %options) = @_;

    $self->{maps_counters_type} = [
        { name => 'metric', type => 1, cb_prefix_output => 'prefix_metric_output', message_multiple => "All commands metrics are ok", skipped_code => { -10 => 1 } },
    ];

    foreach my $statistic ('minimum', 'maximum', 'average', 'sum') {
        foreach my $metric ('CmdFlush', 'CmdGet', 'CmdSet', 'CmdConfigGet', 'CmdConfigSet', 'CmdTouch') {
            my $entry = { label => lc($metric) . '-' . lc($statistic), set => {
                                key_values => [ { name => $metric . '_' . $statistic }, { name => 'display' }, { name => 'stat' }, { name => 'timeframe' } ],
                                closure_custom_calc => $self->can('custom_metric_calc'),
                                closure_custom_calc_extra_options => { metric => $metric, stat => $statistic },
                                closure_custom_output => $self->can('custom_metric_output'),
                                closure_custom_perfdata => $self->can('custom_metric_perfdata'),
                                closure_custom_threshold_check => $self->can('custom_metric_threshold'),
                            }
                        };
            push @{$self->{maps_counters}->{metric}}, $entry;
        }
    }
}
# Constructor: register the mode's command-line options. 'name:s@' may be
# given multiple times (one cluster per occurrence).
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;

    $self->{version} = '1.0';
    $options{options}->add_options(arguments => {
        "name:s@"           => { name => 'name' },
        "node-id:s"         => { name => 'node_id' },
        "filter-metric:s"   => { name => 'filter_metric' },
        "per-sec"           => { name => 'per_sec' },
    });

    return $self;
}
# Validate and normalize options: at least one --name is required; set
# defaults for timeframe (600s) and period (60s); normalize statistics to
# ucfirst(lc(...)) form ('Average', ...); apply --filter-metric to the
# fixed memcached command-metric list.
sub check_options {
    my ($self, %options) = @_;
    $self->SUPER::check_options(%options);

    # NOTE(review): {name} is an arrayref ('name:s@'); the eq '' comparison
    # only fires for a missing/odd value -- confirm against the option layer.
    if (!defined($self->{option_results}->{name}) || $self->{option_results}->{name} eq '') {
        $self->{output}->add_option_msg(short_msg => "Need to specify --name option.");
        $self->{output}->option_exit();
    }

    # keep every non-empty --name occurrence as a cluster to query
    foreach my $instance (@{$self->{option_results}->{name}}) {
        if ($instance ne '') {
            push @{$self->{aws_instance}}, $instance;
        }
    }

    $self->{aws_timeframe} = defined($self->{option_results}->{timeframe}) ? $self->{option_results}->{timeframe} : 600;
    $self->{aws_period} = defined($self->{option_results}->{period}) ? $self->{option_results}->{period} : 60;

    # CloudWatch statistics default to Average; user values are case-normalized
    $self->{aws_statistics} = ['Average'];
    if (defined($self->{option_results}->{statistic})) {
        $self->{aws_statistics} = [];
        foreach my $stat (@{$self->{option_results}->{statistic}}) {
            if ($stat ne '') {
                push @{$self->{aws_statistics}}, ucfirst(lc($stat));
            }
        }
    }

    # restrict the queried metrics with --filter-metric (regexp)
    foreach my $metric ('CmdFlush', 'CmdGet', 'CmdSet', 'CmdConfigGet', 'CmdConfigSet', 'CmdTouch') {
        next if (defined($self->{option_results}->{filter_metric}) && $self->{option_results}->{filter_metric} ne ''
            && $metric !~ /$self->{option_results}->{filter_metric}/);

        push @{$self->{aws_metrics}}, $metric;
    }
}
# Fetch the CloudWatch datapoints for every selected cluster/metric/statistic
# and store them under $self->{metric}, keyed '<instance>_<stat>'. Missing
# datapoints are skipped unless --zeroed was given (then recorded as 0).
sub manage_selection {
    my ($self, %options) = @_;

    my %metric_results;
    foreach my $instance (@{$self->{aws_instance}}) {
        # NOTE(review): aws_dimensions is appended to on every iteration and
        # never reset, so with multiple --name values later queries carry the
        # earlier clusters' dimensions too -- confirm whether this is intended.
        push @{$self->{aws_dimensions}}, { Name => 'CacheClusterId', Value => $instance };
        if (defined($self->{option_results}->{node_id}) && $self->{option_results}->{node_id} ne '') {
            push @{$self->{aws_dimensions}}, { Name => 'CacheNodeId', Value => $self->{option_results}->{node_id} };
        }
        $metric_results{$instance} = $options{custom}->cloudwatch_get_metrics(
            region => $self->{option_results}->{region},
            namespace => 'AWS/ElastiCache',
            dimensions => $self->{aws_dimensions},
            metrics => $self->{aws_metrics},
            statistics => $self->{aws_statistics},
            timeframe => $self->{aws_timeframe},
            period => $self->{aws_period},
        );

        foreach my $metric (@{$self->{aws_metrics}}) {
            foreach my $statistic (@{$self->{aws_statistics}}) {
                next if (!defined($metric_results{$instance}->{$metric}->{lc($statistic)}) && !defined($self->{option_results}->{zeroed}));

                # keys here must match the key_values declared in set_counters()
                $self->{metric}->{$instance . "_" . lc($statistic)}->{display} = $instance;
                $self->{metric}->{$instance . "_" . lc($statistic)}->{stat} = lc($statistic);
                $self->{metric}->{$instance . "_" . lc($statistic)}->{node_id} = $self->{option_results}->{node_id};
                $self->{metric}->{$instance . "_" . lc($statistic)}->{timeframe} = $self->{aws_timeframe};
                $self->{metric}->{$instance . "_" . lc($statistic)}->{$metric . "_" . lc($statistic)} = defined($metric_results{$instance}->{$metric}->{lc($statistic)}) ? $metric_results{$instance}->{$metric}->{lc($statistic)} : 0;
            }
        }
    }

    if (scalar(keys %{$self->{metric}}) <= 0) {
        $self->{output}->add_option_msg(short_msg => 'No metrics. Check your options or use --zeroed option to set 0 on undefined values');
        $self->{output}->option_exit();
    }
}
1;
__END__
=head1 MODE
Check ElastiCache cluster received commands for Memcached backends
(flush, get and set, config get, config set and touch for Memcached 1.4.14).
Example:
perl centreon_plugins.pl --plugin=cloud::aws::elasticache::plugin --custommode=paws --mode=commands-memcached
--region='eu-west-1' --name='centreon-front' --statistic='average' --critical-cmdget-average='50' --verbose --per-sec
See 'https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/elasticache-metricscollected.html' for more informations.
Default statistic: 'average' / All satistics are valid.
=over 8
=item B<--name>
Set the cluster name (Required) (Can be multiple).
=item B<--node-id>
Set the node id (Optional).
=item B<--filter-metric>
Filter metrics (Can be: 'CmdFlush', 'CmdGet', 'CmdSet', 'CmdConfigGet', 'CmdConfigSet', 'CmdTouch')
(Can be a regexp).
=item B<--warning-$metric$-$statistic$>
Thresholds warning ($metric$ can be: 'cmdflush', 'cmdget', 'cmdset',
'cmdconfigget', 'cmdconfigset', 'cmdtouch',
$statistic$ can be: 'minimum', 'maximum', 'average', 'sum').
=item B<--critical-$metric$-$statistic$>
Thresholds critical ($metric$ can be: 'cmdflush', 'cmdget', 'cmdset',
'cmdconfigget', 'cmdconfigset', 'cmdtouch',
$statistic$ can be: 'minimum', 'maximum', 'average', 'sum').
=item B<--per-sec>
Change the data to be unit/sec.
=back
=cut
| 43.144 | 231 | 0.585481 |
73e367c2099d3a7134a91a2185eecd16948a8a3d | 5,586 | pm | Perl | lib/Cfn/Resource/AWS/EKS/FargateProfile.pm | agimenez/cfn-perl | 66eaffd2044b6a4921b43183f7b6b20aaa46b24a | [
"Apache-2.0"
]
| null | null | null | lib/Cfn/Resource/AWS/EKS/FargateProfile.pm | agimenez/cfn-perl | 66eaffd2044b6a4921b43183f7b6b20aaa46b24a | [
"Apache-2.0"
]
| null | null | null | lib/Cfn/Resource/AWS/EKS/FargateProfile.pm | agimenez/cfn-perl | 66eaffd2044b6a4921b43183f7b6b20aaa46b24a | [
"Apache-2.0"
]
| null | null | null | # AWS::EKS::FargateProfile generated from spec 20.1.0
use Moose::Util::TypeConstraints;
coerce 'Cfn::Resource::Properties::AWS::EKS::FargateProfile',
from 'HashRef',
via { Cfn::Resource::Properties::AWS::EKS::FargateProfile->new( %$_ ) };
package Cfn::Resource::AWS::EKS::FargateProfile {
use Moose;
extends 'Cfn::Resource';
has Properties => (isa => 'Cfn::Resource::Properties::AWS::EKS::FargateProfile', is => 'rw', coerce => 1);
sub AttributeList {
[ 'Arn' ]
}
sub supported_regions {
[ 'af-south-1','ap-northeast-1','ap-southeast-1','ap-southeast-2','eu-central-1','eu-south-1','eu-west-1','us-east-1','us-east-2','us-west-2' ]
}
}
subtype 'ArrayOfCfn::Resource::Properties::AWS::EKS::FargateProfile::Label',
as 'Cfn::Value',
where { $_->isa('Cfn::Value::Array') or $_->isa('Cfn::Value::Function') },
message { "$_ is not a Cfn::Value or a Cfn::Value::Function" };
coerce 'ArrayOfCfn::Resource::Properties::AWS::EKS::FargateProfile::Label',
from 'HashRef',
via {
if (my $f = Cfn::TypeLibrary::try_function($_)) {
return $f
} else {
die 'Only accepts functions';
}
},
from 'ArrayRef',
via {
Cfn::Value::Array->new(Value => [
map {
Moose::Util::TypeConstraints::find_type_constraint('Cfn::Resource::Properties::AWS::EKS::FargateProfile::Label')->coerce($_)
} @$_
]);
};
subtype 'Cfn::Resource::Properties::AWS::EKS::FargateProfile::Label',
as 'Cfn::Value';
coerce 'Cfn::Resource::Properties::AWS::EKS::FargateProfile::Label',
from 'HashRef',
via {
if (my $f = Cfn::TypeLibrary::try_function($_)) {
return $f
} else {
return Cfn::Resource::Properties::Object::AWS::EKS::FargateProfile::Label->new( %$_ );
}
};
package Cfn::Resource::Properties::Object::AWS::EKS::FargateProfile::Label {
use Moose;
use MooseX::StrictConstructor;
extends 'Cfn::Value::TypedValue';
has Key => (isa => 'Cfn::Value::String', is => 'rw', coerce => 1, required => 1, traits => [ 'CfnMutability' ], mutability => 'Immutable');
has Value => (isa => 'Cfn::Value::String', is => 'rw', coerce => 1, required => 1, traits => [ 'CfnMutability' ], mutability => 'Immutable');
}
subtype 'ArrayOfCfn::Resource::Properties::AWS::EKS::FargateProfile::Selector',
as 'Cfn::Value',
where { $_->isa('Cfn::Value::Array') or $_->isa('Cfn::Value::Function') },
message { "$_ is not a Cfn::Value or a Cfn::Value::Function" };
coerce 'ArrayOfCfn::Resource::Properties::AWS::EKS::FargateProfile::Selector',
from 'HashRef',
via {
if (my $f = Cfn::TypeLibrary::try_function($_)) {
return $f
} else {
die 'Only accepts functions';
}
},
from 'ArrayRef',
via {
Cfn::Value::Array->new(Value => [
map {
Moose::Util::TypeConstraints::find_type_constraint('Cfn::Resource::Properties::AWS::EKS::FargateProfile::Selector')->coerce($_)
} @$_
]);
};
subtype 'Cfn::Resource::Properties::AWS::EKS::FargateProfile::Selector',
as 'Cfn::Value';
coerce 'Cfn::Resource::Properties::AWS::EKS::FargateProfile::Selector',
from 'HashRef',
via {
if (my $f = Cfn::TypeLibrary::try_function($_)) {
return $f
} else {
return Cfn::Resource::Properties::Object::AWS::EKS::FargateProfile::Selector->new( %$_ );
}
};
package Cfn::Resource::Properties::Object::AWS::EKS::FargateProfile::Selector {
use Moose;
use MooseX::StrictConstructor;
extends 'Cfn::Value::TypedValue';
has Labels => (isa => 'ArrayOfCfn::Resource::Properties::AWS::EKS::FargateProfile::Label', is => 'rw', coerce => 1, traits => [ 'CfnMutability' ], mutability => 'Immutable');
has Namespace => (isa => 'Cfn::Value::String', is => 'rw', coerce => 1, required => 1, traits => [ 'CfnMutability' ], mutability => 'Immutable');
}
package Cfn::Resource::Properties::AWS::EKS::FargateProfile {
use Moose;
use MooseX::StrictConstructor;
extends 'Cfn::Resource::Properties';
has ClusterName => (isa => 'Cfn::Value::String', is => 'rw', coerce => 1, required => 1, traits => [ 'CfnMutability' ], mutability => 'Immutable');
has FargateProfileName => (isa => 'Cfn::Value::String', is => 'rw', coerce => 1, traits => [ 'CfnMutability' ], mutability => 'Immutable');
has PodExecutionRoleArn => (isa => 'Cfn::Value::String', is => 'rw', coerce => 1, required => 1, traits => [ 'CfnMutability' ], mutability => 'Immutable');
has Selectors => (isa => 'ArrayOfCfn::Resource::Properties::AWS::EKS::FargateProfile::Selector', is => 'rw', coerce => 1, required => 1, traits => [ 'CfnMutability' ], mutability => 'Immutable');
has Subnets => (isa => 'Cfn::Value::Array|Cfn::Value::Function|Cfn::DynamicValue', is => 'rw', coerce => 1, traits => [ 'CfnMutability' ], mutability => 'Immutable');
has Tags => (isa => 'ArrayOfCfn::Resource::Properties::TagType', is => 'rw', coerce => 1, traits => [ 'CfnMutability' ], mutability => 'Mutable');
}
1;
### main pod documentation begin ###
=encoding UTF-8
=head1 NAME
Cfn::Resource::AWS::EKS::FargateProfile - Cfn resource for AWS::EKS::FargateProfile
=head1 DESCRIPTION
This module implements a Perl module that represents the CloudFormation object AWS::EKS::FargateProfile.
See L<Cfn> for more information on how to use it.
=head1 AUTHOR
Jose Luis Martinez
CAPSiDE
jlmartinez@capside.com
=head1 COPYRIGHT and LICENSE
Copyright (c) 2013 by CAPSiDE
This code is distributed under the Apache 2 License. The full text of the
license can be found in the LICENSE file included with this module.
=cut
| 36.75 | 197 | 0.63874 |
ed338c5f1af2f2a270d0d3156ebd89eeb2013757 | 1,159 | pl | Perl | perl/palindrome/pal.pl | trammell/test | ccac5e1dac947032e64d813e53cb961417a58d05 | [
"Artistic-2.0"
]
| null | null | null | perl/palindrome/pal.pl | trammell/test | ccac5e1dac947032e64d813e53cb961417a58d05 | [
"Artistic-2.0"
]
| null | null | null | perl/palindrome/pal.pl | trammell/test | ccac5e1dac947032e64d813e53cb961417a58d05 | [
"Artistic-2.0"
]
| null | null | null | #!/usr/bin/perl
use strict;
use warnings FATAL => 'all';
use List::Util 'sum';
use bigint;
# get the number of palindromes with up to 99 digits
my $sum = sum map { npal($_) } 1 .. 100;
# calculate the sum of digits in the sum
print "sum = ", sum(split //, $sum);
# The trick: given a number of length "n", there are 9 * 10^(n/2 - 1)
# palindromes if "n" is even, 9 * 10^((n-1)/2) if "n" is odd.
# Number of palindromes that have exactly $ndigits digits.
# A palindrome is fixed by its first ceil(n/2) digits: the leading digit
# has 9 choices (1-9) and each of the remaining floor((n-1)/2) free
# digits has 10.  This single expression covers both the even case
# (n/2 - 1 free digits) and the odd case ((n-1)/2) of the original.
sub npal {
    my ($ndigits) = @_;
    return 9 * 10 ** int(($ndigits - 1) / 2);
}
__END__
http://nedbatchelder.com/blog/201103/two_pi_day_puzzles_from_pycon.html
A number is a palindrome if the digits read the same backwards as forwards: 1,
88, 343, 234565432, and so on. What is the sum of the digits in the number of
palindromes less than a googol (10^100)? That is, count all the palindromes
greater than zero and less than a googol, then sum all the digits in that
number, not the sum of the digits in all the palindromes. What's your answer?
They actually posed it as "write a program to compute the sum of the digits,
etc," and were interested in the shortest program, but I prefer it as a pure
math question.
| 33.114286 | 78 | 0.682485 |
ed3370222f55d2088fe39d29e55a0a49766ebd82 | 1,846 | pl | Perl | Source/boost_1_33_1/libs/graph/doc/jwebfrob.pl | spxuw/RFIM | 32b78fbb90c7008b1106b0cff4f8023ae83c9b6d | [
"MIT"
]
| 4 | 2021-07-31T13:56:01.000Z | 2021-11-13T02:55:10.000Z | Source/boost_1_33_1/libs/graph/doc/jwebfrob.pl | spxuw/RFIM | 32b78fbb90c7008b1106b0cff4f8023ae83c9b6d | [
"MIT"
]
| null | null | null | Source/boost_1_33_1/libs/graph/doc/jwebfrob.pl | spxuw/RFIM | 32b78fbb90c7008b1106b0cff4f8023ae83c9b6d | [
"MIT"
]
| 7 | 2021-08-31T14:34:23.000Z | 2022-01-19T08:25:58.000Z |
# Post-process LaTeX \newlabel lines (from STDIN or file arguments, to
# STDOUT).  Labels that share a page with another label get a letter
# suffix appended to their number ('a', 'b', ...) so page references
# become distinguishable; a label alone on its page stays unsuffixed.
# NOTE(review): sec:/class:/tab:/concept:/fig: labels pass through
# untouched -- presumably these are the "normal" (non-jweb) labels.

$lastpage = 0;          # page of the previously seen \newlabel
$thispage = 1;          # page of the current \newlabel
$counter = 1;           # per-page label count (1 => 'a', 2 => 'b', ...)
$alphabet = "\@abcdefghijklmnopqrstuvwxyz";   # index 0 ('@') is a filler
$Alphabet = "\@ABCDEFGHIJKLMNOPQRSTUVWXYZ";   # NOTE(review): unused below
$out = "";                                    # NOTE(review): unused below
$saved_full = "";       # pending line WITH its letter suffix
$saved_empty = "";      # pending line WITHOUT a suffix

while(<>) {
# These changes are so that it works when we aren't using hyperref
#  if (/(\\newlabel.*\{\{)([0-9]+)(\}\{)([0-9ivx]+)(\}.*JWebCtr\.)([0-9]+)(.*)/) {
  if (/\\newlabel\{sec:.*/) {
    # make sure not to munge normal (non jweb part) section labels
    print ;
  } elsif (/\\newlabel\{class:.*/) {
    # make sure not to munge normal (non jweb part) class labels
    print ;
  } elsif (/\\newlabel\{tab:.*/) {
    # make sure not to munge normal (non jweb part) table labels
    print ;
  } elsif (/\\newlabel\{concept:.*/) {
    # make sure not to munge normal (non jweb part) concept labels
    print ;
  } elsif (/\\newlabel\{fig:.*/) {
    # make sure not to munge normal (non jweb part) figure labels
    print ;
  } elsif (/(\\newlabel.*\{\{)([0-9\.]+)(\}\{)([0-9ivx]+)(\}.*)(.*)/) {
    # $2 = label number, $4 = page number (arabic or roman numerals)
    $thispage = $4;
    if ($thispage ne $lastpage) {
      # First label on a new page: flush the previous page's pending
      # unsuffixed line, then hold both variants of this one until we
      # know whether another label lands on the same page.
      $counter = 1;
      print $saved_empty;
#      $saved_full = "$1".substr($alphabet,$counter,1)."$3$4$5$6$7\n";
#      $saved_empty = "$1"."$3$4$5$6$7\n";
      $saved_full = "$1".substr($alphabet,$counter,1)."$3$4$5\n";
      $saved_empty = "$1"."$3$4$5\n";
    } else {
      # Another label on the same page: the pending one needs its 'a'
      # suffix after all; this one is printed with the next letter.
      print $saved_full;
#      print "$1".substr($alphabet,$counter,1)."$3$4$5$counter$7\n";
      print "$1".substr($alphabet,$counter,1)."$3$4$5\n";
      $saved_full = "";
      $saved_empty = "";
    }
    $lastpage = $thispage;
    $counter++;
  } else {
    print ;
  }
}
print $saved_empty;    # flush the final pending (unsuffixed) label, if any

# get a line
# cases
#  - ref
#    - if it is first, save off someplace
#      - if there is a first saved, dump the empty version
#    - else
#      - if there is a first saved, dump the non empty version
#  - not a ref
| 25.287671 | 82 | 0.553088 |
73d79e74414dfe9345fc3a5669c0eea6017be384 | 10,756 | pm | Perl | miRNAFinderApp/app/features/microPred/progs/miPred/bioperl-1.4/Bio/Taxonomy.pm | shyaman/miRNAFinder-web-server | 4e21ea9b77a321ae87a3e0f93abe66eda0013f62 | [
"MIT"
]
| 1 | 2021-04-27T18:17:33.000Z | 2021-04-27T18:17:33.000Z | miRNAFinderApp/app/features/microPred/progs/miPred/bioperl-1.4/Bio/Taxonomy.pm | shyaman/miRNAFinder-web-server | 4e21ea9b77a321ae87a3e0f93abe66eda0013f62 | [
"MIT"
]
| 2 | 2020-06-20T15:59:50.000Z | 2021-04-25T17:50:35.000Z | miRNAFinderApp/app/features/microPred/progs/miPred/bioperl-1.4/blib/lib/Bio/Taxonomy.pm | shyaman/miRNAFinder-web-server | 4e21ea9b77a321ae87a3e0f93abe66eda0013f62 | [
"MIT"
]
| null | null | null | # $Id: Taxonomy.pm,v 1.5 2003/11/18 06:56:31 juguang Exp $
#
# BioPerl module for Bio::Taxonomy
#
# Cared for by Juguang Xiao
#
# You may distribute this module under the same terms as perl itself
# POD documentation - main docs before the code
=head1 NAME
Bio::Taxonomy - representing Taxonomy.
=head1 SYNOPSIS
use Bio::Taxonomy;
# CREATION: You can either create an instance by assigning it,
# or fetch it through factory.
# Create the nodes first. See Bio::Taxonomy::Node for details.
my $node_species_sapiens = Bio::Taxonomy::Node->new(
-object_id => 9606, # or -ncbi_taxid. Requird tag
-names => {
'scientific' => ['sapiens'],
'common_name' => ['human']
},
-rank => 'species' # Required tag
);
my $node_genus_Homo = Bio::Taxonomy::Node->new(
-object_id => 9605,
-names => { 'scientific' => ['Homo'] },
-rank => 'genus'
);
my $node_class_Mammalia = Bio::Taxonomy::Node->new(
-object_id => 40674,
-names => {
'scientific' => ['Mammalia'],
'common' => ['mammals']
},
-rank => 'class'
);
my $taxonomy = Bio::Taxonomy->new;
$taxonomy->add_node($node_class_Mammalia);
$taxonomy->add_node($node_species_sapiens);
$taxonomy->add_node($node_genus_Homo);
# OR you can fetch it through a factory implementing
# Bio::Taxonomy::FactoryI
my $factory;
my $taxonomy = $factory->fetch_by_ncbi_taxid(40674);
# USAGE
# In this case, binomial returns a defined value.
my $binomial = $taxonomy->binomial;
# 'common_names' refers to the lowest-rank node's common names, in
# array.
my @common_names = $taxonomy->common_names;
# 'get_node', will return undef if the rank is no defined in
# taxonomy object. It will throw error if the rank string is not
# defined, say 'species lah'.
my $node = $taxonomy->get_node('class');
my @nodes = $taxonomy->get_all_nodes;
# Also, you can search for parent and children nodes, if taxonomy
# comes with factory.
my $parent_taxonomy = $taxonomy->get_parent;
=head1 DESCRIPTION
Bio::Taxonomy object represents any rank-level in taxonomy system,
rather than Bio::Species which is able to represent only
species-level.
There are two ways to create Taxonomy object, e.g.
1) instantiate an object and assign all nodes on your own code; and
2) fetch an object by factory.
=head2 Creation by instantiation
The abstraction of Taxonomy is actually a hash in data structure
term. The keys of the hash are the rank names, such as 'genus' and
'species', and the values are the instances of Bio::Taxonomy::Node.
=head2 Creation by Factory fetching
NCBI Taxonomy system is well accepted as the standard. The Taxonomy
Factories in bioperl access this system, through HTTP to NCBI Entrez,
dump file, and advanced biosql database.
Bio::Taxonomy::FactoryI defines all methods that all implementations
must obey.
$factory-E<gt>fetch is a general method to fetch Taxonomy by either
NCBI taxid or any types of names.
$factory-E<gt>fetch_parent($taxonomy), returns a Taxonomy that is
one-step higher rank of the taxonomy specified as argument.
$factory-E<gt>fetch_children($taxonomy), reports an array of Taxonomy
those are one-step lower rank of the taxonomy specified as the
argument.
=head2 Usage of Taxonomy object
##
=head1 CONTACT
Juguang Xiao, juguang@tll.org.sg
=head1 APPENDIX
The rest of the documentation details each of the object
methods. Internal methods are usually preceded with a _
=cut
# code begins...
package Bio::Taxonomy;
use vars qw(@ISA);
use strict;
# Object preamble - inherits from Bio::Root::Root Object
use Bio::Root::Root;
@ISA = qw(Bio::Root::Root);
=head2 new
Title : new
Usage : my $obj = new Bio::Taxonomy();
Function: Builds a new Bio::Taxonomy object
Returns : Bio::Taxonomy
Args : -method -> method used to decide classification
(none|trust|lookup)
-ranks -> what ranks are there
=cut
sub new {
    my ($class, @args) = @_;
    my $self = $class->SUPER::new(@args);

    # Start from an empty taxonomy: no classification method, no rank
    # list, and no nodes stored yet.
    $self->{'_method'}    = 'none';
    $self->{'_ranks'}     = [];
    $self->{'_rank_hash'} = {};
    $self->{_hierarchy}   = {};    # nodes, keyed by rank name

    my ($method, $ranks, $order) =
        $self->_rearrange([qw(METHOD RANKS ORDER)], @args);

    $self->method($method) if $method;

    if (defined $ranks && ref($ranks) eq 'ARRAY') {
        $self->ranks(@$ranks);
    }
    else {
        # Default ranks.  I think these are in the right order, but not
        # sure: some parvorder|suborder and varietas|subspecies seem to
        # be at the same level - any taxonomists?  These are not really
        # expected to be used except as a way to find what ranks there
        # are in taxonomic use.
        my @default_ranks = (
            'root',
            'superkingdom', 'kingdom',
            'superphylum', 'phylum', 'subphylum',
            'superclass', 'class', 'subclass', 'infraclass',
            'superorder', 'order', 'suborder', 'parvorder', 'infraorder',
            'superfamily', 'family', 'subfamily',
            'tribe', 'subtribe',
            'genus', 'subgenus',
            'species group', 'species subgroup', 'species', 'subspecies',
            'varietas', 'forma', 'no rank',
        );
        $self->ranks(@default_ranks);
    }

    return $self;
}
=head2 method
Title : method
Usage : $obj = taxonomy->method($method);
Function: set or return the method used to decide classification
Returns : $obj
Args : $obj
=cut
sub method {
    # Get/set the method used to decide classification.
    # Accepts only the exact values 'none', 'trust' or 'lookup'; any
    # other argument is ignored and the current setting is returned.
    my ($self, $value) = @_;
    # The original pattern /none|trust|lookup/ was unanchored, so
    # substring matches such as 'trustworthy' or 'nonexistent' were
    # accepted; anchor the alternation to require an exact match.
    if (defined $value && $value =~ /\A(?:none|trust|lookup)\z/) {
        $self->{'_method'} = $value;
    }
    return $self->{'_method'};
}
=head2 classify
Title : classify
Usage : @obj[][0-1] = taxonomy->classify($species);
Function: return a ranked classification
Returns : @obj of taxa and ranks as word pairs separated by "@"
Args : Bio::Species object
=cut
sub classify {
   my ($self,$value) = @_;
   my @ranks;

   # Only Bio::Species objects carry the lineage we need.
   if (! $value->isa('Bio::Species') ) {
      $self->throw("Trying to classify $value which is not a Bio::Species object");
   }
   # Bio::Species stores the lineage species-first; reverse it so the
   # highest (root-most) taxon comes first.
   my @classes=reverse($value->classification);

   if ($self->method eq 'none') {
      # No rank information assumed: everything above genus is 'no rank';
      # the last two entries are treated as genus and species.
      for (my $i=0; $i < @classes-2; $i++) {
         ($ranks[$i][0],$ranks[$i][1])=($classes[$i],'no rank');
      }
      push @ranks,[$classes[-2],'genus'];
      push @ranks,[$value->binomial,'species'];
   } elsif ($self->method eq 'trust') {
      # Trust that the lineage lines up one-to-one with this taxonomy's
      # rank list; otherwise the two objects cannot be reconciled.
      if (scalar(@classes)==scalar($self->ranks)) {
         for (my $i=0; $i < @classes; $i++) {
            if ($self->rank_of_number($i) eq 'species') {
               # Use the full binomial rather than the bare epithet.
               push @ranks,[$value->binomial,$self->rank_of_number($i)];
            } else {
               push @ranks,[$classes[$i],$self->rank_of_number($i)];
            }
         }
      } else {
         $self->throw("Species object and taxonomy object cannot be reconciled");
      }
   } elsif ($self->method eq 'lookup') {
      # this will lookup a DB for the rank of a taxon name
      # I imagine that some kind of Bio::DB class will be need to
      # be given to the taxonomy object to act as an DB interface
      # (I'm not sure how useful this is though - if you have a DB of
      # taxonomy - why would you be doing things this way?)
      $self->throw("Not yet implemented");
   }

   # Each element is a two-element arrayref: [taxon_name, rank_name].
   return @ranks;
}
=head2 level_of
 Title   : level_of
 Usage   : $level = $taxonomy->level_of($rank_name);
 Function: returns the numeric level of a rank name
 Returns : an integer level (undef for unknown or 'no rank' names)
 Args    : a rank name string
=cut
sub level_of {
    my ($self, $rank_name) = @_;
    # Look up the numeric level recorded for this rank name in the
    # index built by ranks(); unknown names yield undef.
    return $self->{'_rank_hash'}->{$rank_name};
}
=head2 rank_of_number
Title : rank_of_number
Usage : $obj = taxonomy->rank_of_number($obj);
Function: returns the rank name of a rank level
Returns : $obj
Args : $obj
=cut
sub rank_of_number {
    my ($self, $level) = @_;
    # Reverse lookup: the rank name stored at this numeric level.
    return $self->{'_ranks'}->[$level];
}
=head2 ranks
Title : ranks
Usage : @obj = taxonomy->ranks(@obj);
Function: set or return all ranks
Returns : @obj
Args : @obj
=cut
sub ranks {
   my ($self,@value) = @_;

   # currently this makes no uniqueness sanity check (this should be done)
   # One idea would be to convert multiple 'no rank' ranks into unique
   # 'no rank #' entries so that the level of a 'no rank' is abstracted
   # away from the user - the value of that is unclear, so it is left as-is.
   if (@value) {
      $self->{'_ranks'}=\@value;
   }
   # Rebuild the name->level index used by level_of(); 'no rank'
   # entries are deliberately skipped because they carry no unique level.
   for (my $i=0; $i <= @{$self->{'_ranks'}}-1; $i++) {
      $self->{'_rank_hash'}{$self->{'_ranks'}[$i]}=$i unless $self->{'_ranks'}[$i] eq 'no rank';
   }
   return @{$self->{'_ranks'}};
}
=head2 add_node
Title: add_node
Usage: $obj->add_node($node[, $node2, ...]);
Function: add one or more Bio::Taxonomy::Node objects
Returns: None
Args: any number of Bio::Taxonomy::Node(s)
=cut
sub add_node {
    my ($self, @nodes) = @_;
    foreach(@nodes){
        $self->throw("A Bio::Taxonomy::Node object needed")
            unless($_->isa('Bio::Taxonomy::Node'));
        my ($node, $rank) = ($_, $_->rank);
        # A node whose rank is already present silently REPLACES the
        # existing one; the duplicate-rank guard below was deliberately
        # disabled (left here to show the earlier behaviour).
        if(exists $self->{_hierarchy}->{$rank}){
#            $self->throw("$rank has been defined");
#            print STDERR "RANK:$rank\n";
#            return;
        }
        # Nodes are indexed by their rank name.
        $self->{_hierarchy}->{$rank} = $node;
    }
}
=head2 binomial
Title : binomial
Usage : my $val = $obj->binomial;
Function: returns the binomial name if this taxonomy reachs species level
Returns : the binomial name
OR undef if taxonmy does not reach species level
Args : [No arguments]
=cut
sub binomial {
    # Return the scientific name of the species-level node.
    # The statements that used to follow the return (building
    # "$species $genus" from two nodes) were unreachable dead code from
    # an earlier implementation and have been removed.
    # NOTE(review): the POD promises undef when no species node exists,
    # but get_node() throws in that case -- confirm the intended contract.
    my $self = shift;
    return $self->get_node('species')->scientific_name;
}
=head2 get_node
Title : get_node
Usage : $node = $taxonomy->get_node('species');
Function: get a Bio::Taxonomy::Node object according to rank name
Returns : a Bio::Taxonomy::Node object or undef if null
 Args    : a valid rank name
=cut
sub get_node {
    # Fetch the Bio::Taxonomy::Node stored for a rank name.
    # Throws when the rank is not present in this taxonomy.
    my ($self, $rank) = @_;
    # Use an exact membership test.  The original used grep /$rank/,
    # a regex substring match: get_node('species') could be satisfied
    # by a 'species group' key alone (then returned undef), and rank
    # names containing regex metacharacters broke the pattern outright.
    unless (grep { $_ eq $rank } keys %{ $self->{_hierarchy} }) {
        $self->throw("'$rank' is not in the rank list");
    }
    return $self->{_hierarchy}->{$rank};
}
=head2 classification
Title : classification
Usage : @names = $taxonomy->classification;
Function: get the classification names of one taxonomy
Returns : array of names
Args : [No arguments]
=cut
sub classification {
    my $self = shift;
    my $level_of  = $self->{_rank_hash};
    my $hierarchy = $self->{_hierarchy};
    # Order the stored ranks from highest (root-most, lowest level
    # number) to lowest, then report each node's scientific name.
    my @ranks_in_order =
        sort { $level_of->{$a} <=> $level_of->{$b} } keys %$hierarchy;
    return map { $hierarchy->{$_}->scientific_name } @ranks_in_order;
}
1;
| 25.918072 | 96 | 0.627557 |
ed2236dc30c8de3cfdb4d10f004e908b612ebe02 | 10,917 | pm | Perl | lib/App/dropboxapi.pm | lwhsu/dropbox-api-command | 035f3d41d41027a8d382c1e3cb1c7af231acee7c | [
"MIT"
]
| 49 | 2015-01-30T02:00:57.000Z | 2020-08-07T06:52:48.000Z | lib/App/dropboxapi.pm | lwhsu/dropbox-api-command | 035f3d41d41027a8d382c1e3cb1c7af231acee7c | [
"MIT"
]
| 19 | 2015-02-27T04:11:50.000Z | 2020-01-13T02:18:23.000Z | lib/App/dropboxapi.pm | lwhsu/dropbox-api-command | 035f3d41d41027a8d382c1e3cb1c7af231acee7c | [
"MIT"
]
| 5 | 2015-08-12T06:29:17.000Z | 2020-03-29T07:23:48.000Z | package App::dropboxapi;
use strict;
use warnings;
our $VERSION = '2.13';
=head1 NAME
App::dropboxapi - command line interface to access Dropbox API
=head1 SYNOPSIS
dropbox-api put /tmp/foo.txt dropbox:/Public/
Run C<dropbox-api help> for more options.
=head1 DESCRIPTION
dropbox-api is a command line interface to access Dropbox API.
=over 4
=item ls
=item find
=item du
=item sync
=item cp
=item mv
=item rm
=item mkdir
=item get
=item put
=back
=head1 Install and Setup
=head2 1. Install
=head3 1-a) FreeBSD
pkg_add -r dropbox-api-command
=head3 1-b) Ubuntu
sudo apt-get install make gcc libssl-dev wget
wget https://raw.github.com/miyagawa/cpanminus/master/cpanm
sudo perl cpanm App::dropboxapi
=head3 1-c) CentOS
# CentOS 5
sudo yum install gcc gcc-c++ openssl-devel wget
# CentOS 6
sudo yum install gcc gcc-c++ openssl-devel wget perl-devel
wget https://raw.github.com/miyagawa/cpanminus/master/cpanm
sudo perl cpanm App::dropboxapi
=head3 1-d) OS X
# Install Command Line Tools for Xcode
open https://www.google.com/search?q=Command+Line+Tools+for+Xcode
curl -O https://raw.github.com/miyagawa/cpanminus/master/cpanm
sudo perl cpanm App::dropboxapi
=head2 2. Get API Key and API Secret
https://www.dropbox.com/developers
My Apps => Create an App
=head2 3. Get Access Token and Access Secret
> dropbox-api setup
Please Input API Key: ***************
Please Input API Secret: ***************
1. Open the Login URL: https://www.dropbox.com/oauth2/authorize?client_id=*****&response_type=code
2. Input code and press Enter: ***************
success! try
> dropbox-api ls
> dropbox-api find /
=head2 4. How to use Proxy
Please use -e option.
> HTTP_PROXY="http://127.0.0.1:8888" dropbox-api setup -e
=head1 Sub Commands
=head2 help
Display help.
=over 4
=item syntax
dropbox-api help [<command>]
=back
=head3 Example
> dropbox-api help
Usage: dropbox-api <command> [args] [options]
Available commands:
setup get access_key and access_secret
ls list directory contents
find walk a file hierarchy
du disk usage statistics
cp copy file or directory
mv move file or directory
mkdir make directory (Create intermediate directories as required)
rm remove file or directory (Attempt to remove the file hierarchy rooted in each file argument)
put upload file
get download file
sync sync directory (local => dropbox or dropbox => local)
Common Options
-e enable env_proxy ( HTTP_PROXY, NO_PROXY )
-D enable debug
-v verbose
-s sandbox mode, but this option has been removed.
See 'dropbox-api help <command>' for more information on a specific command.
=head3 Example ( command help )
> dropbox-api help ls
Name
dropbox-api-ls - list directory contents
SYNOPSIS
dropbox-api ls <dropbox_path> [options]
Example
dropbox-api ls Public
dropbox-api ls Public -h
dropbox-api ls Public -p "%d\t%s\t%TY/%Tm/%Td %TH:%TM:%TS\t%p\n"
Options
-h print sizes in human readable format (e.g., 1K 234M 2G)
-p print format.
%d ... is_dir ( d: dir, -: file )
%i ... id
%n ... name
%p ... path_display
%P ... path_lower
%b ... bytes
%s ... size (e.g., 1K 234M 2G)
%t ... server_modified
%c ... client_modified
%r ... rev
%Tk ... DateTime 'strftime' function (server_modified)
%Ck ... DateTime 'strftime' function (client_modified)
L<http://search.cpan.org/dist/DateTime/lib/DateTime.pm#strftime_Patterns>
=head2 ls
file list view.
=over 4
=item alias
list
=item syntax
dropbox-api ls <dropbox_path>
=back
=head3 Example
> dropbox-api list /product
d - Thu, 24 Feb 2011 06:58:00 +0000 /product/chrome-extentions
- 294557 Sun, 26 Dec 2010 21:55:59 +0000 /product/ex.zip
=head3 human readable option ( -h )
print sizes in human readable format (e.g., 1K 234M 2G)
> dropbox-api ls /product -h
d - Thu, 24 Feb 2011 06:58:00 +0000 /product/chrome-extentions
- 287.7KB Sun, 26 Dec 2010 21:55:59 +0000 /product/ex.zip
=head3 printf option ( -p )
print format.
> dropbox-api ls /product -p "%d\t%s\t%TY/%Tm/%Td %TH:%TM:%TS\t%p\n"
d - 2011/02/24 06:58:00 /product/chrome-extentions
- 287.7KB 2010/12/26 21:55:59 /product/ex.zip
%d ... is_dir ( d: dir, -: file )
%i ... id
%n ... name
%p ... path_display
%P ... path_lower
%b ... bytes
%s ... size (e.g., 1K 234M 2G)
%t ... server_modified
%c ... client_modified
%r ... rev
%Tk ... DateTime 'strftime' function (server_modified)
%Ck ... DateTime 'strftime' function (client_modified)
L<http://search.cpan.org/dist/DateTime/lib/DateTime.pm#strftime_Patterns>
=head2 find
recursive file list view.
=over 4
=item syntax
dropbox-api find <dropbox_path> [options]
=back
=head3 Example
> dropbox-api find /product/google-tasks-checker-plus
/product/chrome-extentions/google-tasks-checker-plus/README.md
/product/chrome-extentions/google-tasks-checker-plus/src
/product/chrome-extentions/google-tasks-checker-plus/src/background.html
/product/chrome-extentions/google-tasks-checker-plus/src/external.png
/product/chrome-extentions/google-tasks-checker-plus/src/icon-32.png
/product/chrome-extentions/google-tasks-checker-plus/src/icon-128.png
/product/chrome-extentions/google-tasks-checker-plus/src/icon.gif
/product/chrome-extentions/google-tasks-checker-plus/src/jquery-1.4.2.min.js
/product/chrome-extentions/google-tasks-checker-plus/src/main.js
/product/chrome-extentions/google-tasks-checker-plus/src/manifest.json
/product/chrome-extentions/google-tasks-checker-plus/src/options.html
/product/chrome-extentions/google-tasks-checker-plus/src/popup.html
/product/chrome-extentions/google-tasks-checker-plus/src/reset.css
=head3 printf option ( -p )
see also list command's printf option.
=head2 du
display disk usage statistics.
=over 4
=item syntax
dropbox-api du <dropbox_path> [options]
=back
=head3 Example
> dropbox-api du /product -h -d 1
1.1M /product
1.1M /product/chrome-extensions
0B /product/work
=head3 human readable option ( -h )
print sizes in human readable format (e.g., 1K 234M 2G)
=head3 depth option ( -d )
Display an entry for all files and directories depth directories deep.
=head2 sync ( rsync )
recursive file synchronization.
=head3 sync from dropbox
dropbox-api sync dropbox:<source_dir> <target_dir> [options]
> dropbox-api sync dropbox:/product/google-tasks-checker-plus/src /tmp/product
download /private/tmp/product/external.png
download /private/tmp/product/icon-32.png
download /private/tmp/product/icon-128.png
=head3 sync to dropbox
dropbox-api sync <source_dir> dropbox:<target_dir> [options]
> dropbox-api sync /tmp/product dropbox:/work/src
upload background.html /work/src/background.html
upload external.png /work/src/external.png
upload icon-128.png /work/src/icon-128.png
=head3 delete option ( -d )
> dropbox-api sync dropbox:/product/google-tasks-checker-plus/src /tmp/product -d
download /private/tmp/product/external.png
download /private/tmp/product/icon-32.png
download /private/tmp/product/icon-128.png
remove background.html.tmp
=head3 dry run option ( -n )
> dropbox-api sync dropbox:/product/google-tasks-checker-plus/src /tmp/product -dn
!! enable dry run !!
download /private/tmp/product/external.png
download /private/tmp/product/icon-32.png
download /private/tmp/product/icon-128.png
remove background.html.tmp
=head3 verbose option ( -v )
> dropbox-api sync dropbox:/product/google-tasks-checker-plus/src /tmp/product -dnv
remote_base: /product/chrome-extentions/google-tasks-checker-plus/src
local_base: /private/tmp/product
** download **
skip background.html
download /private/tmp/product/external.png
download /private/tmp/product/icon-32.png
download /private/tmp/product/icon-128.png
skip icon.gif
skip jquery-1.4.2.min.js
skip main.js
skip manifest.json
skip options.html
skip popup.html
skip reset.css
** delete **
skip background.html
remove background.html.tmp
skip icon.gif
skip jquery-1.4.2.min.js
skip main.js
skip manifest.json
skip options.html
skip popup.html
skip reset.css
=head2 cp
copy file or directory.
=over 4
=item alias
copy
=item syntax
dropbox-api cp <source_file> <target_file>
=back
=head3 Example
dropbox-api cp memo.txt memo.txt.bak
=head2 mv
move file or directory.
=over 4
=item alias
move
=item syntax
dropbox-api mv <source_file> <target_file>
=back
=head3 Example
dropbox-api mv memo.txt memo.txt.bak
=head2 mkdir
make directory.
*no error if existing, make parent directories as needed.*
=over 4
=item alias
mkpath
=item syntax
dropbox-api mkdir <directory>
=back
=head3 Example
dropbox-api mkdir product/src
=head2 rm
remove file or directory.
*remove the contents of directories recursively.*
=over 4
=item alias
rmtree
=item syntax
dropbox-api rm <file_or_directory>
=back
=head3 Example
dropbox-api rm product/src
=head2 get
download file from dropbox.
=over 4
=item alias
dl, download
=item syntax
dropbox-api get dropbox:<dropbox_file> <file>
=back
=head3 Example
dropbox-api get dropbox:/Public/foo.txt /tmp/foo.txt
=head2 put
upload file to dropbox.
=over 4
=item alias
up, upload
=item syntax
dropbox-api put <file> dropbox:<dropbox_dir>
=back
=head3 Example
dropbox-api put /tmp/foo.txt dropbox:/Public/
=head3 verbose option ( -v )
A progress bar is displayed.
dropbox-api put /tmp/1GB.dat dropbox:/Public/ -v
100% [=====================================================================================>]
=head2 Tips
=head3 Retry
#!/bin/bash
command='dropbox-api sync dropbox:/test/ /Users/aska/test/ -vde'
NEXT_WAIT_TIME=0
EXIT_CODE=0
until $command || [ $NEXT_WAIT_TIME -eq 4 ]; do
EXIT_CODE=$?
sleep $NEXT_WAIT_TIME
let NEXT_WAIT_TIME=NEXT_WAIT_TIME+1
done
exit $EXIT_CODE
=head1 COPYRIGHT
Copyright 2012- Shinichiro Aska
The standalone executable contains the following modules embedded.
=head1 LICENSE
Released under the MIT license. http://creativecommons.org/licenses/MIT/
=head1 COMMUNITY
=over 4
=item L<https://github.com/s-aska/dropbox-api-command> - source code repository, issue tracker
=back
=cut
1;
| 21.07529 | 106 | 0.671247 |
73d39145c1e606835efafa3ed35f6775dc169793 | 1,543 | pl | Perl | brightness.pl | tenguix/perls | d4fdf2adc667dc56d1f04ab3bf9021888fdf5c3b | [
"MIT"
]
| null | null | null | brightness.pl | tenguix/perls | d4fdf2adc667dc56d1f04ab3bf9021888fdf5c3b | [
"MIT"
]
| null | null | null | brightness.pl | tenguix/perls | d4fdf2adc667dc56d1f04ab3bf9021888fdf5c3b | [
"MIT"
]
| null | null | null | #!/usr/bin/perl -s
sub read_value_from($)
{
    # Read and return the first line of a small sysfs/text file,
    # stripped of its trailing newline.  Dies with this script's usual
    # "!: " prefix on any file error.
    my $fname = shift;

    # Only readable text files make sense here.
    unless(-r -T $fname) {
        die qq(!: "$fname": $!);
    }

    # Three-arg open so characters in $fname can never act as a mode.
    open(my $fh, '<', $fname) or die qq(!: "$fname": $!);
    my $ret = <$fh>;
    # The original closed the handle only when NOT at eof, which for a
    # one-line file meant it never closed explicitly; always close and
    # check for errors instead.
    close($fh) or die qq(!: "$fname": $!);

    chomp $ret if defined $ret;
    return $ret;
}
sub actual_value($)
{
    # Convert a percentage (of max brightness) into the raw integer
    # value the sysfs interface expects.
    my ($percent) = @_;
    return sprintf("%d", $levels{"max"} * $percent / 100);
}
sub relative_level($) {
    # Express a raw brightness value as an integer percentage of max.
    my ($raw) = @_;
    return sprintf("%d", 100 * $raw / $levels{"max"});
}
# Sysfs directory for the backlight device (hardware specific).
$sys_dir = "/sys/class/backlight/radeon_bl0";

# Snapshot the hardware maximum and the current raw value before any change.
%levels = (
    max => read_value_from("$sys_dir/max_brightness"),
    old => read_value_from("$sys_dir/brightness"),
    new => undef
);

if(1 < scalar @ARGV) {
    die qq(!: Usage: $0 [-f] [1-100]\n);
}
elsif($new = shift) {
    # NOTE(review): a requested level of "0" is false in Perl, so it falls
    # through to the status-report branch below instead of being rejected.
    if($new =~ /^[0-9]+$/) {
        if($new < 1 || $new > 100) {
            die qq(!: $new: Invalid brightness level.);
        }
        elsif($new <= 10) {
            # Levels 1-10 are shorthand for 10%-100%, unless -f (set by
            # perl -s into $f) asks for the literal small percentage.
            unless($f) { #force.
                $new *= 10;
            }
        }
        $levels{"new"} = actual_value($new);
    }
    else {
        die qq(!: "$new": What the fuck is this);
    }

    # Writing the sysfs file needs root, hence the pipe through sudo tee.
    if(open(my $fh, "|sudo tee $sys_dir/brightness")) {
        printf $fh ("%d" => $levels{"new"});
        close($fh) or die qq(!: "$sys_dir/brightness": $!);
    }
    else {
        die qq(!: Pipe to "$sys_dir/brightness": $!);
    }

    # Report the transition as percentages of the hardware maximum.
    printf("\n%d%% => %d%%\n", relative_level($levels{"old"}), $new);
    # TODO: the old value is never right
    # NOTE(review): "old" is read before the new value is written, so it
    # should hold the pre-change raw level; presumably truncation in
    # relative_level() is what makes it look wrong -- confirm.
}
else {
    printf "Current level: %3d%%\n", relative_level($levels{"old"});
}
| 20.573333 | 69 | 0.475697 |
ed260c4c091fe2f084d487827f8f91cc6ce89e58 | 2,863 | pl | Perl | src/main/prolog/game_state_space.pl | joaoraf/gameyard | fb7b23528ecbfbbd51874302323797be234c8f60 | [
"Apache-2.0"
]
| null | null | null | src/main/prolog/game_state_space.pl | joaoraf/gameyard | fb7b23528ecbfbbd51874302323797be234c8f60 | [
"Apache-2.0"
]
| null | null | null | src/main/prolog/game_state_space.pl | joaoraf/gameyard | fb7b23528ecbfbbd51874302323797be234c8f60 | [
"Apache-2.0"
]
| null | null | null | :- module(game_state_space,[
game_state_space/1,
default_game_state_space/1,
state_space_param_check/2,
state_space_param_create/3,
state_space_initial_state/3,
state_space_players/3,
state_space_current_player/3,
state_space_player_view/4,
state_space_view_transitions/3,
state_space_transitions/3,
state_space_state_render/3,
state_space_view_render/3
]).
game_state_space(SS) :-
is_dict(SS,state_space),
callable(SS.state_space_param_check),
callable(SS.state_space_param_create),
callable(SS.state_space_initial_state),
callable(SS.state_space_players),
callable(SS.state_space_current_player),
callable(SS.state_space_player_view),
callable(SS.state_space_view_transitions),
callable(SS.state_space_transitions),
callable(SS.state_space_state_render),
callable(SS.state_space_view_render).
default_game_state_space(SS) :-
M=game_state_space,
SS = state_space{
state_space_param_check: M:default_game_state_param_check,
state_space_current_player: M:default_game_state_current_player,
state_space_player_view: M:default_state_space_player_view,
state_space_view_transitions: M:state_space_transitions,
state_space_state_render: M:default_game_state_render,
state_space_view_render: M:default_game_state_render
}.
default_game_state_param_check(_SS,_Param).
default_game_state_current_player(_SS,_State,0).
default_state_space_player_view(_SS,State,_PlayerNum,State).
default_game_state_end_state(_SS,_State) :- fail.
default_game_state_render(_SS,State,[RenderedState]) :-
sformat(RenderedState,"~k",State).
state_space_param_check(SS,Param) :-
F = SS.state_space_param_check,
call(F,SS,Param).
state_space_param_create(SS,PreParams,Params) :-
F = SS.state_space_param_create,
call(F,SS,Params1),
Params = Params1.put(PreParams).
state_space_initial_state(SS,Params,InitialState) :-
F = SS.state_space_initial_state,
call(F,SS,Params,InitialState).
state_space_players(SS,State,Players) :-
F = SS.state_space_players,
call(F,SS,State,Players).
state_space_current_player(SS,State,Player) :-
F = SS.state_space_current_player,
call(F,SS,State,Player).
state_space_player_view(SS,State,PlayerNum,PlayerView) :-
F = SS.state_space_player_view,
call(F,SS,State,PlayerNum,PlayerView).
state_space_view_transitions(SS,View,MoveEndViewPairs) :-
F = SS.state_space_view_transitions,
call(F,SS,View,MoveEndViewPairs).
state_space_transitions(SS,View,MoveEndStatePairs) :-
F = SS.state_space_transitions,
call(F,SS,View,MoveEndStatePairs).
state_space_state_render(SS,State,RenderedLines) :-
F = SS.state_space_state_render,
call(F,SS,State,RenderedLines).
state_space_view_render(SS,View,RenderedLines) :-
F = SS.state_space_view_render,
call(F,SS,View,RenderedLines).
| 30.784946 | 68 | 0.78519 |
ed11d4a3d2129e62bb41ab1cf16717b827740f77 | 2,489 | pm | Perl | auto-lib/Paws/Neptune/DescribeEventCategories.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
]
| 164 | 2015-01-08T14:58:53.000Z | 2022-02-20T19:16:24.000Z | auto-lib/Paws/Neptune/DescribeEventCategories.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
]
| 348 | 2015-01-07T22:08:38.000Z | 2022-01-27T14:34:44.000Z | auto-lib/Paws/Neptune/DescribeEventCategories.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
]
| 87 | 2015-04-22T06:29:47.000Z | 2021-09-29T14:45:55.000Z |
# Auto-generated Paws request class: arguments for the Neptune
# DescribeEventCategories API call (see the POD below for usage).
package Paws::Neptune::DescribeEventCategories;
use Moose;
# Optional request parameters; both are documented as not currently
# supported / free-form by the service.
has Filters => (is => 'ro', isa => 'ArrayRef[Paws::Neptune::Filter]');
has SourceType => (is => 'ro', isa => 'Str');
use MooseX::ClassAttribute;
# Wire-level metadata used by the Paws caller machinery: the API operation
# name, the class of the response object, and the result wrapper key.
class_has _api_call => (isa => 'Str', is => 'ro', default => 'DescribeEventCategories');
class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::Neptune::EventCategoriesMessage');
class_has _result_key => (isa => 'Str', is => 'ro', default => 'DescribeEventCategoriesResult');
1;
### main pod documentation begin ###
=head1 NAME
Paws::Neptune::DescribeEventCategories - Arguments for method DescribeEventCategories on L<Paws::Neptune>
=head1 DESCRIPTION
This class represents the parameters used for calling the method DescribeEventCategories on the
L<Amazon Neptune|Paws::Neptune> service. Use the attributes of this class
as arguments to method DescribeEventCategories.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to DescribeEventCategories.
=head1 SYNOPSIS
my $rds = Paws->service('Neptune');
my $EventCategoriesMessage = $rds->DescribeEventCategories(
Filters => [
{
Name => 'MyString',
Values => [ 'MyString', ... ],
},
...
], # OPTIONAL
SourceType => 'MyString', # OPTIONAL
);
# Results:
my $EventCategoriesMapList =
$EventCategoriesMessage->EventCategoriesMapList;
# Returns a L<Paws::Neptune::EventCategoriesMessage> object.
Values for attributes that are native types (Int, String, Float, etc) can be passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instance the underlying object.
For the AWS API documentation, see L<https://docs.aws.amazon.com/goto/WebAPI/rds/DescribeEventCategories>
=head1 ATTRIBUTES
=head2 Filters => ArrayRef[L<Paws::Neptune::Filter>]
This parameter is not currently supported.
=head2 SourceType => Str
The type of source that is generating the events.
Valid values: db-instance | db-parameter-group | db-security-group |
db-snapshot
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method DescribeEventCategories in L<Paws::Neptune>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
| 29.987952 | 249 | 0.709924 |
73db65da76df80e96329a8b84b21035844f9263a | 470 | pl | Perl | cmd/show/badip.pl | latchdevel/DXspider | e61ab5eeea22241ea8d8f1f6d072f5249901d788 | [
"Artistic-1.0-Perl",
"Artistic-2.0"
]
| null | null | null | cmd/show/badip.pl | latchdevel/DXspider | e61ab5eeea22241ea8d8f1f6d072f5249901d788 | [
"Artistic-1.0-Perl",
"Artistic-2.0"
]
| null | null | null | cmd/show/badip.pl | latchdevel/DXspider | e61ab5eeea22241ea8d8f1f6d072f5249901d788 | [
"Artistic-1.0-Perl",
"Artistic-2.0"
]
| null | null | null | #
#
# show the list of bad IP (CIDR) entries, optionally filtered by one or
# more (case-insensitive) regexp patterns given on the command line
#
# Copyright (c) 2021 - Dirk Koopman G1TLH
#
#

my ($self, $line) = @_;

# console-only command
return (1, $self->msg('e5')) if $self->remotecmd;
# are we permitted?
return (1, $self->msg('e5')) if $self->priv < 6;

# any arguments act as filters against the stored CIDR entries.
# NOTE(review): the user's text is interpolated into a regexp unescaped;
# an invalid pattern will die here - assumed intentional so that full
# regexps can be used as filters.
my @filters = split /\s+/, $line;

my @out;
foreach my $entry (DXCIDR::list()) {
	if (@filters) {
		for my $pat (@filters) {
			if ($entry =~ /$pat/i) {
				push @out, $entry;
				last;       # one match is enough for this entry
			}
		}
	}
	else {
		push @out, $entry;  # no filters: show everything
	}
}
return (1, @out);
ed05d3a5bbf29673db6c3d829c15b85f18744ea6 | 28,612 | pm | Perl | src/BioPerl/Bio/Structure/SecStr/STRIDE/Res.pm | EuPathDB-Infra/GBrowse | 250030f6ab615fffb348eb37641d240e589c225a | [
"Apache-2.0"
]
| 3 | 2019-06-12T15:21:58.000Z | 2021-07-16T00:48:21.000Z | src/BioPerl/Bio/Structure/SecStr/STRIDE/Res.pm | EuPathDB-Infra/GBrowse | 250030f6ab615fffb348eb37641d240e589c225a | [
"Apache-2.0"
]
| 3 | 2016-12-26T15:47:06.000Z | 2019-06-28T01:29:47.000Z | src/BioPerl/Bio/Structure/SecStr/STRIDE/Res.pm | EuPathDB-Infra/GBrowse | 250030f6ab615fffb348eb37641d240e589c225a | [
"Apache-2.0"
]
| 3 | 2018-10-26T21:45:32.000Z | 2020-03-08T21:35:13.000Z | # $id $
#
# bioperl module for Bio::Structure::SecStr::STRIDE::Res.pm
#
# Please direct questions and support issues to <bioperl-l@bioperl.org>
#
# Cared for by Ed Green <ed@compbio.berkeley.edu>
#
# Copyright Univ. of California
#
# You may distribute this module under the same terms as perl itself
#
# POD documentation - main docs before the code
=head1 NAME
Bio::Structure::SecStr::STRIDE::Res - Module for parsing/accessing stride output
=head1 SYNOPSIS
my $stride_obj = Bio::Structure::SecStr::STRIDE::Res->new( '-file' => 'filename.stride' );
# or
my $stride_obj = Bio::Structure::SecStr::STRIDE::Res->new( '-fh' => \*STDOUT );
# Get secondary structure assignment for PDB residue 20 of chain A
$sec_str = $stride_obj->resSecStr( '20:A' );
# same
$sec_str = $stride_obj->resSecStr( 20, 'A' )
=head1 DESCRIPTION
STRIDE::Res is a module for objectifying STRIDE output. STRIDE is a
program (similar to DSSP) for assigning secondary structure to
individual residues of a pdb structure file.
( Knowledge-Based Protein Secondary Structure Assignment,
PROTEINS: Structure, Function, and Genetics 23:566-579 (1995) )
STRIDE is available here:
http://webclu.bio.wzw.tum.de/stride/
Methods are then available for extracting all of the information
present within the output or convenient subsets of it.
Although they are very similar in function, DSSP and STRIDE differ
somewhat in output format. Thes differences are reflected in the
return value of some methods of these modules. For example, both
the STRIDE and DSSP parsers have resSecStr() methods for returning
the secondary structure of a given residue. However, the range of
return values for DSSP is ( H, B, E, G, I, T, and S ) whereas the
range of values for STRIDE is ( H, G, I, E, B, b, T, and C ). See
individual methods for details.
The methods are roughly divided into 3 sections:
1. Global features of this structure (PDB ID, total surface area,
etc.). These methods do not require an argument.
2. Residue specific features ( amino acid, secondary structure,
solvent exposed surface area, etc. ). These methods do require an
arguement. The argument is supposed to uniquely identify a
residue described within the structure. It can be of any of the
following forms:
('#A:B') or ( #, 'A', 'B' )
|| |
|| - Chain ID (blank for single chain)
|--- Insertion code for this residue. Blank for most residues.
|--- Numeric portion of residue ID.
(#)
|
--- Numeric portion of residue ID. If there is only one chain and
it has no ID AND there is no residue with an insertion code at this
number, then this can uniquely specify a residue.
('#:C') or ( #, 'C' )
| |
| -Chain ID
---Numeric portion of residue ID.
If a residue is incompletely specified then the first residue that
fits the arguments is returned. For example, if 19 is the argument
and there are three chains, A, B, and C with a residue whose number
is 19, then 19:A will be returned (assuming its listed first).
Since neither DSSP nor STRIDE correctly handle alt-loc codes, they
are not supported by these modules.
3. Value-added methods. Return values are not verbatim strings
parsed from DSSP or STRIDE output.
=head1 FEEDBACK
=head2 MailingLists
User feedback is an integral part of the evolution of this and other
Bioperl modules. Send your comments and suggestions preferably to one
of the Bioperl mailing lists. Your participation is much appreciated.
bioperl-l@bioperl.org - General discussion
http://bioperl.org/wiki/Mailing_lists - About the mailing lists
=head2 Support
Please direct usage questions or support issues to the mailing list:
I<bioperl-l@bioperl.org>
rather than to the module maintainer directly. Many experienced and
responsive experts will be able to look at the problem and quickly
address it. Please include a thorough description of the problem
with code and data examples if at all possible.
=head2 Reporting Bugs
Report bugs to the Bioperl bug tracking system to help us keep track
the bugs and their resolution. Bug reports can be submitted via the
web:
http://bugzilla.open-bio.org/
=head1 AUTHOR - Ed Green
Email ed@compbio.berkeley.edu
=head1 APPENDIX
The Rest of the documentation details each method.
Internal methods are preceded with a _.
=cut
package Bio::Structure::SecStr::STRIDE::Res;

use strict;

use Bio::Root::IO;
use Bio::PrimarySeq;

use base qw(Bio::Root::Root);

# Column index of each per-residue field inside the rows built by
# _parseASG().  Slot 0 of every per-chain row array is unused because
# STRIDE ordinals are 1-based.
our %ASGTable = (
    'aa'       => 0,    # three-letter residue name
    'resNum'   => 1,    # PDB residue number (may carry an insertion code)
    'ssAbbr'   => 2,    # one-letter secondary-structure code
    'ssName'   => 3,    # full secondary-structure name
    'phi'      => 4,    # phi backbone dihedral angle
    'psi'      => 5,    # psi backbone dihedral angle
    'surfArea' => 6,    # solvent-accessible surface area
);

# Three-letter to one-letter amino-acid code mapping.
our %AATable = (
    'ALA' => 'A', 'ARG' => 'R', 'ASN' => 'N', 'ASP' => 'D',
    'CYS' => 'C', 'GLN' => 'Q', 'GLU' => 'E', 'GLY' => 'G',
    'HIS' => 'H', 'ILE' => 'I', 'LEU' => 'L', 'LYS' => 'K',
    'MET' => 'M', 'PHE' => 'F', 'PRO' => 'P', 'SER' => 'S',
    'THR' => 'T', 'TRP' => 'W', 'TYR' => 'Y', 'VAL' => 'V',
);
=head2 new
Title : new
Usage : makes new object of this class
Function : Constructor
Example : $stride_obj = Bio::Structure::SecStr::STRIDE:Res->new( '-file' => filename
# or
'-fh' => FILEHANDLE )
Returns : object (ref)
Args : filename or filehandle( must be proper STRIDE output )
=cut
sub new {
    # Constructor: parses a STRIDE output file/handle into this object.
    my ($class, @args) = @_;
    my $self = $class->SUPER::new(@args);
    # hand the raw STRIDE stream to the parser, then release the handle
    my $io = Bio::Root::IO->new(@args);
    $self->_parse($io);
    $io->close();
    return $self;
}
# GLOBAL FEATURES / INFO / STATS
=head2 totSurfArea
Title : totSurfArea
Usage : returns sum of surface areas of all residues of all
chains considered. Result is memoized.
Function :
Example : $tot_SA = $stride_obj->totSurfArea();
Returns : scalar
Args : none
=cut
sub totSurfArea {
    # Sum of the per-residue solvent-accessible surface areas over all
    # chains.  The result is memoized in $self->{'SurfArea'}.
    my $self = shift;

    # memoized: a 'defined' check (rather than truthiness) avoids
    # recomputing when the cached total happens to be 0
    return $self->{'SurfArea'} if defined $self->{'SurfArea'};

    my $total = 0;
    foreach my $chain ( keys %{ $self->{'ASG'} } ) {
        # slot 0 of each per-chain array is unused (ordinals are 1-based)
        for ( my $i = 1; $i <= $#{ $self->{'ASG'}->{$chain} }; $i++ ) {
            $total +=
                $self->{'ASG'}->{$chain}->[$i]->[ $ASGTable{'surfArea'} ];
        }
    }
    $self->{'SurfArea'} = $total;
    return $self->{'SurfArea'};
}
=head2 numResidues
Title : numResidues
Usage : returns total number of residues in all chains or
just the specified chain
Function :
Example : $tot_res = $stride_obj->numResidues();
Returns : scalar int
Args : none or chain id
=cut
sub numResidues {
    # Count of residues over every chain, or over just the named chain.
    my ($self, $chain) = @_;
    my $count = 0;
    foreach my $id ( keys %{ $self->{'ASG'} } ) {
        next if $chain && $id ne $chain;
        # $#{...} equals the residue count: slot 0 of each array is unused
        $count += $#{ $self->{'ASG'}{$id} };
    }
    return $count;
}
# STRAIGHT FROM THE PDB ENTRY
=head2 pdbID
Title : pdbID
Usage : returns pdb identifier ( 1FJM, e.g. )
Function :
Example : $pdb_id = $stride_obj->pdbID();
Returns : scalar string
Args : none
=cut
sub pdbID {
    # The 4-character PDB identifier captured from the HDR line.
    my ($self) = @_;
    return $self->{'PDB'};
}
=head2 pdbAuthor
Title : pdbAuthor
Usage : returns author of this PDB entry
Function :
Example : $auth = $stride_obj->pdbAuthor()
Returns : scalar string
Args : none
=cut
sub pdbAuthor {
    # The AUT lines collected by _parseHead(), joined into one string.
    my ($self) = @_;
    return join q{ }, @{ $self->{'HEAD'}->{'AUT'} };
}
=head2 pdbCompound
Title : pdbCompound
Usage : returns string of what was found on the
CMP lines
Function :
Example : $cmp = $stride_obj->pdbCompound();
Returns : string
Args : none
=cut
sub pdbCompound {
    # The CMP lines collected by _parseHead(), joined into one string.
    my ($self) = @_;
    return join q{ }, @{ $self->{'HEAD'}->{'CMP'} };
}
=head2 pdbDate
Title : pdbDate
Usage : returns date given in PDB file
Function :
Example : $pdb_date = $stride_obj->pdbDate();
Returns : scalar
Args : none
=cut
sub pdbDate {
    # Deposition date captured from the HDR line.
    my ($self) = @_;
    return $self->{'DATE'};
}
=head2 pdbHeader
Title : pdbHeader
Usage : returns string of characters found on the PDB header line
Function :
Example : $head = $stride_obj->pdbHeader();
Returns : scalar
Args : none
=cut
sub pdbHeader {
    # Free-text portion of the PDB HDR line.
    my ($self) = @_;
    return $self->{'HEAD'}->{'HEADER'};
}
=head2 pdbSource
Title : pdbSource
Usage : returns string of what was found on SRC lines
Function :
Example : $src = $stride_obj->pdbSource();
Returns : scalar
Args : none
=cut
sub pdbSource {
    # The SRC lines collected by _parseHead(), joined into one string.
    my ($self) = @_;
    return join q{ }, @{ $self->{'HEAD'}->{'SRC'} };
}
# RESIDUE SPECIFIC ACCESSORS
=head2 resAA
Title : resAA
Usage : returns 1 letter abbr. of the amino acid specified by
the arguments
Function :
Examples : $aa = $stride_obj->resAA( RESIDUE_ID );
Returns : scalar character
Args : RESIDUE_ID
=cut
sub resAA {
    # One-letter amino-acid code of the residue named by @res_id
    # (any RESIDUE_ID form accepted by _toOrdChain).
    my ($self, @res_id) = @_;
    my ($ord, $chain) = $self->_toOrdChain(@res_id);
    my $three_letter = $self->{'ASG'}{$chain}[$ord][ $ASGTable{'aa'} ];
    return $AATable{$three_letter};
}
=head2 resPhi
Title : resPhi
Usage : returns phi angle of specified residue
Function :
Example : $phi = $stride_obj->resPhi( RESIDUE_ID );
 Returns  : scalar
Args : RESIDUE_ID
=cut
sub resPhi {
    # Phi backbone dihedral angle of the specified residue.
    my ($self, @res_id) = @_;
    my ($ord, $chain) = $self->_toOrdChain(@res_id);
    return $self->{'ASG'}{$chain}[$ord][ $ASGTable{'phi'} ];
}
=head2 resPsi
Title : resPsi
Usage : returns psi angle of specified residue
Function :
Example : $psi = $stride_obj->resPsi( RESIDUE_ID );
Returns : scalar
Args : RESIDUE_ID
=cut
sub resPsi {
    # Psi backbone dihedral angle of the specified residue.
    my ($self, @res_id) = @_;
    my ($ord, $chain) = $self->_toOrdChain(@res_id);
    return $self->{'ASG'}{$chain}[$ord][ $ASGTable{'psi'} ];
}
=head2 resSolvAcc
Title : resSolvAcc
Usage : returns stride calculated surface area of specified residue
Function :
Example : $sa = $stride_obj->resSolvAcc( RESIDUE_ID );
Returns : scalar
Args : RESIDUE_ID
=cut
sub resSolvAcc {
    # STRIDE-calculated solvent-accessible surface area of the residue.
    # (Same field as resSurfArea(); both names are kept for API reasons.)
    my ($self, @res_id) = @_;
    my ($ord, $chain) = $self->_toOrdChain(@res_id);
    return $self->{'ASG'}{$chain}[$ord][ $ASGTable{'surfArea'} ];
}
=head2 resSurfArea
Title : resSurfArea
Usage : returns stride calculated surface area of specified residue
Function :
Example : $sa = $stride_obj->resSurfArea( RESIDUE_ID );
Returns : scalar
Args : RESIDUE_ID
=cut
sub resSurfArea {
    # STRIDE-calculated solvent-accessible surface area of the residue.
    my ($self, @res_id) = @_;
    my ($ord, $chain) = $self->_toOrdChain(@res_id);
    return $self->{'ASG'}{$chain}[$ord][ $ASGTable{'surfArea'} ];
}
=head2 resSecStr
Title : resSecStr
Usage : gives one letter abbr. of stride determined secondary
structure of specified residue
Function :
Example : $ss = $stride_obj->resSecStr( RESIDUE_ID );
Returns : one of: 'H' => Alpha Helix
'G' => 3-10 helix
'I' => PI-helix
'E' => Extended conformation
'B' or 'b' => Isolated bridge
'T' => Turn
'C' => Coil
' ' => None
# NOTE: This range is slightly DIFFERENT from the
# DSSP method of the same name
Args : RESIDUE_ID
=cut
sub resSecStr {
    # One-letter STRIDE secondary-structure code for the residue
    # (one of H, G, I, E, B, b, T, C or ' ').
    my ($self, @res_id) = @_;
    my ($ord, $chain) = $self->_toOrdChain(@res_id);
    return $self->{'ASG'}{$chain}[$ord][ $ASGTable{'ssAbbr'} ];
}
=head2 resSecStrSum
Title : resSecStrSum
Usage : gives one letter summary of secondary structure of
specified residue. More general than secStruc()
Function :
Example : $ss_sum = $stride_obj->resSecStrSum( RESIDUE_ID );
Returns : one of: 'H' (helix), 'B' (beta), 'T' (turn), or 'C' (coil)
Args : residue identifier(s) ( SEE INTRO NOTE )
=cut
sub resSecStrSum {
    # Coarse secondary-structure summary for a residue: helical classes
    # collapse to 'H', extended/bridge to 'B', turn stays 'T', and
    # everything else (including coil and blank) becomes 'C'.
    my ($self, @res_id) = @_;
    my $abbr = $self->resSecStr(@res_id);
    my %summary_of = (
        'H' => 'H', 'G' => 'H', 'I' => 'H',    # alpha / 3-10 / pi helix
        'E' => 'B', 'B' => 'B', 'b' => 'B',    # extended / isolated bridge
        'T' => 'T',                            # turn
    );
    return exists $summary_of{$abbr} ? $summary_of{$abbr} : 'C';
}
# STRIDE SPECIFIC
=head2 resSecStrName
Title : resSecStrName
Usage : gives full name of the secondary structural element
classification of the specified residue
Function :
Example : $ss_name = $stride_obj->resSecStrName( RESIDUE_ID );
Returns : scalar string
Args : RESIDUE_ID
=cut
sub resSecStrName {
    # Full STRIDE name of the residue's secondary-structure element.
    my ($self, @res_id) = @_;
    my ($ord, $chain) = $self->_toOrdChain(@res_id);
    return $self->{'ASG'}{$chain}[$ord][ $ASGTable{'ssName'} ];
}
=head2 strideLocs
Title : strideLocs
Usage : returns stride determined contiguous secondary
structural elements as specified on the LOC lines
Function :
Example : $loc_pnt = $stride_obj->strideLocs();
Returns : pointer to array of 5 element arrays.
0 => stride name of structural element
1 => first residue pdb key (including insertion code, if app.)
2 => first residue chain id
3 => last residue pdb key (including insertion code, if app.)
4 => last residue chain id
NOTE the differences between this range and the range of SecBounds()
Args : none
=cut
sub strideLocs {
    # The contiguous secondary-structure elements parsed from the LOC
    # lines: a ref to an array of [name, first num, first chain,
    # last num, last chain] rows.
    my ($self) = @_;
    return $self->{'LOC'};
}
# VALUE ADDED METHODS (NOT JUST PARSE/REPORT)
=head2 secBounds
Title : secBounds
Usage : gets residue ids of boundary residues in each
contiguous secondary structural element of specified
chain
Function :
Example : $ss_bound_pnt = $stride_obj->secBounds( 'A' );
Returns : pointer to array of 3 element arrays. First two elements
are the PDB IDs of the start and end points, respectively
and inclusively. The last element is the STRIDE secondary
structural element code (same range as resSecStr).
Args : chain identifier ( one character ). If none, '-' is assumed
=cut
sub secBounds {
    # Boundaries (inclusive STRIDE ordinals) of each contiguous
    # secondary-structure segment of a chain, as [start, end, ss-code]
    # triples.  A missing/blank chain name is normalised to '-'.
    my $self  = shift;
    my $chain = shift;

    $chain = '-' if ( !( $chain ) || $chain eq ' ' || $chain eq '-' );

    # memoized per chain
    return $self->{'SecBounds'}->{$chain} if $self->{'SecBounds'}->{$chain};

    $self->throw("No such chain: $chain\n")
        unless $self->{'ASG'}->{$chain};

    my $asg  = $self->{'ASG'}->{$chain};
    my $last = $#{$asg};                 # highest ordinal (slot 0 unused)

    my @SecBounds;
    my $cur_element = $asg->[1]->[ $ASGTable{'ssAbbr'} ];
    my $beg = 1;

    for ( my $i = 2; $i <= $last; $i++ ) {
        my $this = $asg->[$i]->[ $ASGTable{'ssAbbr'} ];
        if ( $this ne $cur_element ) {
            # segment ended at the previous residue
            push @SecBounds, [ $beg, $i - 1, $cur_element ];
            $beg = $i;
            $cur_element = $this;
        }
    }
    # Flush the final segment.  (The previous implementation examined the
    # non-existent element at index $last + 1 here, which autovivified a
    # phantom row and appended a spurious [N+1, N+1, undef] segment.)
    push @SecBounds, [ $beg, $last, $cur_element ];

    $self->{'SecBounds'}->{$chain} = \@SecBounds;
    return $self->{'SecBounds'}->{$chain};
}
=head2 chains
Title : chains
Usage : gives array chain I.D.s (characters)
Function :
Example : @chains = $stride_obj->chains();
Returns : array of characters
Args : none
=cut
sub chains {
    # Ref to an array of the chain identifiers present in this structure.
    my ($self) = @_;
    my @chain_ids = keys %{ $self->{'ASG'} };
    return \@chain_ids;
}
=head2 getSeq
Title : getSeq
Usage : returns a Bio::PrimarySeq object which represents an
approximation at the sequence of the specified chain.
Function : For most chain of most entries, the sequence returned by
this method will be very good. However, it it inherently
unsafe to rely on STRIDE to extract sequence information about
a PDB entry. More reliable information can be obtained from
the PDB entry itself. If a second option is given
(and evaluates to true), the sequence generated will
have 'X' in spaces where the pdb residue numbers are
discontinuous. In some cases this results in a
better sequence object (when the discontinuity is
due to regions which were present, but could not be
resolved). In other cases, it will result in a WORSE
sequence object (when the discontinuity is due to
historical sequence numbering and all sequence is
actually resolved).
Example : $pso = $dssp_obj->getSeq( 'A' );
Returns : (pointer to) a PrimarySeq object
Args : Chain identifier. If none given, '-' is assumed.
=cut
sub getSeq {
    # Approximate sequence of one chain as a Bio::PrimarySeq object,
    # memoized per chain.  When $fill_in is true, gaps in the PDB residue
    # numbering are padded with 'X' characters.
    my ($self, $chain, $fill_in) = @_;

    $chain = '-' unless $chain;
    return $self->{'Seq'}{$chain} if $self->{'Seq'}{$chain};

    my $seq      = '';
    my $num_res  = $self->numResidues($chain);
    my $prev_num = $self->_pdbNum( 1, $chain );
    my $cur_num;

    for my $ord ( 1 .. $num_res ) {
        if ($fill_in) {
            $cur_num = $self->_pdbNum( $ord, $chain );
            my $gap = $cur_num - $prev_num;
            # pad numbering discontinuities with unknown residues
            $seq .= 'X' x ( $gap - 1 ) if $gap > 1;
        }
        $seq .= $self->_resAA( $ord, $chain );
        $prev_num = $cur_num;
    }

    my $id = $self->pdbID() . "$chain";
    # NOTE(review): -moltype looks like the older Bio::PrimarySeq argument
    # (newer BioPerl uses -alphabet) -- confirm against the BioPerl version
    # this module targets before changing it.
    $self->{'Seq'}{$chain} = Bio::PrimarySeq->new(
        -seq     => $seq,
        -id      => $id,
        -moltype => 'protein'
    );
    return $self->{'Seq'}{$chain};
}
=head1 INTERNAL METHODS
=head2 _pdbNum
Title : _pdbNum
Usage : fetches the numeric portion of the identifier for a given
residue as reported by the pdb entry. Note, this DOES NOT
uniquely specify a residue. There may be an insertion code
and/or chain identifier differences.
Function :
Example : $pdbNum = $self->pdbNum( 3, 'A' );
Returns : a scalar
Args : valid ordinal num / chain combination
=cut
sub _pdbNum {
    # Numeric portion of the PDB residue identifier for a given
    # (ordinal, chain) pair; any insertion code is dropped.  This does
    # NOT uniquely identify a residue on its own.
    my ($self, $ord, $chain) = @_;
    $self->throw("No such ordinal $ord in chain $chain.\n")
        unless $self->{'ASG'}->{$chain}->[$ord];
    my $res_id = $self->{'ASG'}->{$chain}->[$ord]->[ $ASGTable{'resNum'} ];
    # keep the (possibly negative) leading number only
    my ($num) = ( $res_id =~ /(-*\d+).*/ );
    return $num;
}
=head2 _resAA
Title : _resAA
Usage : returns 1 letter abbr. of the amino acid specified by
the arguments
Function :
Examples : $aa = $stride_obj->_resAA( 3, '-' );
Returns : scalar character
Args : ( ord. num, chain )
=cut
sub _resAA {
    # One-letter amino-acid code for a residue addressed directly by
    # (ordinal, chain) - the internal fast path behind resAA().
    my ($self, $ord, $chain) = @_;
    $self->throw("No such ordinal $ord in chain $chain.\n")
        unless $self->{'ASG'}->{$chain}->[$ord];
    my $three_letter = $self->{'ASG'}->{$chain}->[$ord]->[ $ASGTable{'aa'} ];
    return $AATable{$three_letter};
}
=head2 _pdbInsCo
Title : _pdbInsCo
Usage : fetches the Insertion code for this residue.
Function :
Example : $pdb_ins_co = $self->_pdb_ins_co( 15, 'B' );
Returns : a scalar
Args : ordinal number and chain
=cut
sub _pdbInsCo {
    # Insertion code of the residue at (ordinal, chain); undef when the
    # residue identifier carries no insertion code.
    my ($self, $ord, $chain) = @_;
    $self->throw("No such ordinal $ord in chain $chain.\n")
        unless $self->{'ASG'}->{$chain}->[$ord];
    my $res_id = $self->{'ASG'}->{$chain}->[$ord]->[ $ASGTable{'resNum'} ];
    # everything after the digits is the insertion code (any non-digits)
    my ($ins) = ( $res_id =~ /\d+(\D+)/ );
    return $ins;
}
=head2 _toOrdChain
Title : _toOrdChain
Usage : takes any set of residue identifying parameters and
wrestles them into a two element array: the chain and the ordinal
number of this residue. This two element array can then be
efficiently used as keys in many of the above accessor methods
('#A:B') or ( #, 'A', 'B' )
|| |
|| - Chain ID (blank for single chain)
|--- Insertion code for this residue. Blank for most residues.
|--- Numeric portion of residue ID.
(#)
|
--- Numeric portion of residue ID. If there is only one chain and
it has no ID AND there is no residue with an insertion code at this
number, then this can uniquely specify a residue.
# ('#:C) or ( #, 'C' )
| |
| -Chain ID
---Numeric portion of residue ID.
If a residue is incompletely specified then the first residue that
fits the arguments is returned. For example, if 19 is the argument
and there are three chains, A, B, and C with a residue whose number
is 19, then 19:A will be returned (assuming its listed first).
Function :
Example : my ( $ord, $chain ) = $self->_toOrdChain( @args );
Returns : two element array
Args : valid set of residue identifier(s) ( SEE NOTE ABOVE )
=cut
sub _toOrdChain {
    # Normalise any accepted residue identifier into an (ordinal, chain)
    # pair usable as keys into $self->{'ASG'}.  Accepted forms:
    #   (num), (num, chain), (num, ins_code, chain),
    #   'num', 'numIns', 'num:chain', 'numIns:chain'
    # The first residue matching an incomplete specification wins.
    my $self = shift;
    my ( $key_num, $ins_code, $chain_id );

    if ( @_ > 1 ) {
        # separate arguments
        $key_num = shift;
        if ( @_ > 1 ) {
            $ins_code = shift;
            $chain_id = shift;
        }
        else {
            $chain_id = shift;
        }
    }
    else {
        # single string argument; peel off chain and insertion code
        my $arg_str = shift;
        if ( $arg_str =~ /:/ ) {
            ( $chain_id ) = ( $arg_str =~ /:(.)/ );
            $arg_str =~ s/:.//;
        }
        if ( $arg_str =~ /[A-Za-z]/ ) {
            ( $ins_code ) = ( $arg_str =~ /([A-Za-z])/ );
            $arg_str =~ s/[A-Za-z]//g;
        }
        $key_num = $arg_str;
    }

    # the lookup key is number plus (optional) insertion code
    my $key = "$key_num$ins_code";
    $chain_id = '-' if !$chain_id || $chain_id eq ' ';

    $self->throw("No such chain: $chain_id")
        unless $self->{'ASG'}->{$chain_id};

    for my $ord ( 1 .. $#{ $self->{'ASG'}->{$chain_id} } ) {
        return ( $ord, $chain_id )
            if $self->{'ASG'}->{$chain_id}->[$ord]->[ $ASGTable{'resNum'} ]
               eq $key;
    }
    $self->throw("No such key: $key");
}
=head2 _parse
Title : _parse
Usage : as name suggests, parses stride output, creating object
Function :
Example : $self->_parse( $io );
Returns :
Args : valid Bio::Root::IO object
=cut
sub _parse {
    # Drive the section parsers over a STRIDE output stream.  Order
    # matters: each helper consumes its own section of the stream.
    # (The previous version fetched the filehandle here into an unused
    # variable; each helper obtains it itself.)
    my ($self, $io) = @_;

    # first line must identify the stream as STRIDE output
    $self->throw("Not stride output") if $self->_parseTop($io);

    $self->_parseHead($io);       # HDR, CMP, SRC and AUT lines
    $self->_parseSummary($io);    # CHN, SEQ, STR and LOC lines
    $self->_parseASG($io);        # per-residue ASG lines
}
=head2 _parseTop
Title : _parseTop
Usage : makes sure this looks like stride output
Function :
Example :
Returns :
Args :
=cut
sub _parseTop {
    # Sanity check: STRIDE output is expected to open with a 'REM ---'
    # ruler line.  Returns 0 when the stream looks like STRIDE output,
    # 1 otherwise.  Consumes exactly one line.
    my ($self, $io) = @_;
    my $fh = $io->_fh();
    my $first_line = <$fh>;
    return ( $first_line =~ /^REM ---/ ) ? 0 : 1;
}
=head2 _parseHead
Title : _parseHead
Usage : parses
Function : HDR, CMP, SRC, and AUT lines
Example :
Returns :
Args :
=cut
sub _parseHead {
    # Parse the HDR, CMP, SRC and AUT lines into $self->{'PDB'},
    # $self->{'DATE'} and $self->{'HEAD'}.
    # Fixes: %head was initialised with a hash REFERENCE ('my %head = {};'),
    # which inserted a junk stringified-ref key and triggered a "Reference
    # found where even-sized list expected" warning; an unused $still_head
    # flag has also been dropped.
    my ($self, $io) = @_;
    my $file = $io->_fh();

    my %head;
    my ( @cmp, @src, @aut );

    my $cur = <$file>;
    # skip the leading REM (remark) lines
    while ( $cur =~ /^REM / ) {
        $cur = <$file>;
    }

    if ( $cur =~ /^HDR / ) {
        my @elements = split( /\s+/, $cur );
        shift @elements;                   # drop the 'HDR' tag
        pop @elements;                     # drop the trailing PDB id column
        $self->{'PDB'}  = pop @elements;
        $self->{'DATE'} = pop @elements;
        # whatever remains is the free-text header
        $head{'HEADER'} = join( ' ', @elements );
    }

    $cur = <$file>;
    # each of CMP/SRC/AUT: strip the tag and the trailing 4-char PDB id
    while ( $cur =~ /^CMP / ) {
        ( $cur ) = ( $cur =~ /^CMP\s+(.+?)\s*\w{4}$/ );
        push @cmp, $cur;
        $cur = <$file>;
    }
    while ( $cur =~ /^SRC / ) {
        ( $cur ) = ( $cur =~ /^SRC\s+(.+?)\s*\w{4}$/ );
        push @src, $cur;
        $cur = <$file>;
    }
    while ( $cur =~ /^AUT / ) {
        ( $cur ) = ( $cur =~ /^AUT\s+(.+?)\s*\w{4}$/ );
        push @aut, $cur;
        $cur = <$file>;
    }

    $head{'CMP'} = \@cmp;
    $head{'SRC'} = \@src;
    $head{'AUT'} = \@aut;
    $self->{'HEAD'} = \%head;
}
=head2 _parseSummary
Title : _parseSummary
Usage : parses LOC lines
Function :
Example :
Returns :
Args :
=cut
sub _parseSummary {
    # Parse the LOC lines (contiguous secondary-structure elements) into
    # $self->{'LOC'}; the CHN, SEQ and STR summary lines are skipped.
    my ($self, $io) = @_;
    my $fh = $io->_fh();

    # (offset, length) column ranges of the fields we keep, in order
    my @LOC_lookup = ( [  5, 12 ],   # element name
                       # residue names at (18,3)/(35,3) are redundant
                       [ 22,  5 ],   # first residue PDB number
                       [ 28,  1 ],   # first residue chain id
                       [ 40,  5 ],   # last residue PDB number
                       [ 46,  1 ] ); # last residue chain id

    my @records;
    my $line = <$fh>;
    # ignore the other summary sections
    $line = <$fh> while $line =~ /^REM |^STR |^SEQ |^CHN /;

    while ( $line =~ /^LOC / ) {
        my @fields = map {
            my $f = substr( $line, $_->[0], $_->[1] );
            $f =~ s/\s//g;     # fields are fixed-width; strip padding
            $f;
        } @LOC_lookup;
        push @records, \@fields;
        $line = <$fh>;
    }
    $self->{'LOC'} = \@records;
}
=head2 _parseASG
Title : _parseASG
Usage : parses ASG lines
Function :
Example :
Returns :
Args :
=cut
sub _parseASG {
    # Parse the per-residue ASG lines into $self->{'ASG'}:
    # chain id -> array of rows indexed by STRIDE ordinal (slot 0 unused),
    # each row laid out according to %ASGTable.
    # Fix: the final flush used to run unconditionally, so input with no
    # ASG lines at all keyed the hash on an undefined chain id.
    my ($self, $io) = @_;
    my $file = $io->_fh();

    # (offset, length) column ranges of the fields we keep, in row order
    my @ASG_lookup = ( [  5,  3 ],   # residue name
                       [ 10,  5 ],   # PDB residue number (w/ ins. code)
                       [ 24,  1 ],   # one-letter sec. struct. abbreviation
                       [ 26, 13 ],   # full sec. struct. name
                       [ 42,  7 ],   # phi angle
                       [ 52,  7 ],   # psi angle
                       [ 64,  5 ] ); # residue solvent accessibility

    my %ASG;
    my @elements;
    my ( $chain, $last_chain );

    my $cur = <$file>;
    while ( $cur =~ /^REM / ) {      # skip remark lines
        $cur = <$file>;
    }

    while ( $cur =~ /^ASG / ) {
        # the STRIDE ordinal is this row's index in the per-chain array
        my $ord_num = substr( $cur, 16, 4 );
        $ord_num =~ s/\s//g;

        $chain = substr( $cur, 9, 1 );
        if ( $last_chain && ( $chain ne $last_chain ) ) {
            # chain changed: flush the rows collected for the previous one
            $ASG{$last_chain} = [ @elements ];
            @elements = ();
        }

        my @row;
        foreach my $bound_set ( @ASG_lookup ) {
            my $element = substr( $cur, $bound_set->[0], $bound_set->[1] );
            $element =~ s/\s//g;     # fixed-width fields; strip padding
            push @row, $element;
        }
        $elements[$ord_num] = [ @row ];

        $cur = <$file>;
        $last_chain = $chain;
    }

    # flush the last chain, guarding against input with no ASG lines
    $ASG{$chain} = [ @elements ] if defined $chain;
    $self->{'ASG'} = \%ASG;
}
1;
| 25.455516 | 93 | 0.551447 |
73eef3caedad99d7261c7697999ae5a416164a78 | 2,462 | t | Perl | t/88async-multi-stmts.t | janchochol/DBD-MariaDB | 54164507cc99408e0ba91f47b676df76f7137b90 | [
"Artistic-1.0"
]
| null | null | null | t/88async-multi-stmts.t | janchochol/DBD-MariaDB | 54164507cc99408e0ba91f47b676df76f7137b90 | [
"Artistic-1.0"
]
| null | null | null | t/88async-multi-stmts.t | janchochol/DBD-MariaDB | 54164507cc99408e0ba91f47b676df76f7137b90 | [
"Artistic-1.0"
]
| null | null | null | use strict;
use warnings;
use Test::More;
use DBI;
use vars qw($test_dsn $test_user $test_password);
use lib 't', '.';
require 'lib.pl';
my $dbh = DbiTestConnect($test_dsn, $test_user, $test_password,
{ RaiseError => 0, PrintError => 0, AutoCommit => 0, mariadb_multi_statements => 1 });
plan tests => 45;
$dbh->do(<<SQL);
CREATE TEMPORARY TABLE async_test (
value INTEGER AUTO_INCREMENT PRIMARY KEY
);
SQL
my $sth0 = $dbh->prepare('INSERT INTO async_test VALUES(1)', { mariadb_async => 1 });
my $sth1 = $dbh->prepare('INSERT INTO async_test VALUES(2)', { mariadb_async => 1 });
my $sth2 = $dbh->prepare('INSERT INTO async_test VALUES(3); INSERT INTO async_test VALUES(4);', { mariadb_async => 1 });
$sth0->execute;
ok !defined($sth1->mariadb_async_ready);
ok $sth1->errstr;
ok !defined($sth1->mariadb_async_result);
ok $sth1->errstr;
ok defined($sth0->mariadb_async_ready);
ok !$sth1->errstr;
ok defined($sth0->mariadb_async_result);
ok !$sth1->errstr;
is($sth0->last_insert_id(), 1);
is($dbh->last_insert_id(undef, undef, undef, undef), 1);
$sth2->execute;
ok !defined($sth1->mariadb_async_ready);
ok $sth1->err;
ok !defined($sth1->mariadb_async_result);
ok $sth1->err;
is($sth0->last_insert_id(), 1);
is($dbh->last_insert_id(undef, undef, undef, undef), 1);
ok defined($sth2->mariadb_async_ready);
ok !$sth2->err;
is($sth0->last_insert_id(), 1);
is($dbh->last_insert_id(undef, undef, undef, undef), 1);
ok defined($sth2->mariadb_async_result);
ok !$sth2->err;
is($sth0->last_insert_id(), 1);
is($sth2->last_insert_id(), 3);
is($dbh->last_insert_id(undef, undef, undef, undef), 3);
ok $sth2->more_results;
ok defined($sth2->mariadb_async_result);
ok !$sth2->err;
is($sth0->last_insert_id(), 1);
is($sth2->last_insert_id(), 4);
is($dbh->last_insert_id(undef, undef, undef, undef), 4);
ok !$sth2->more_results;
$dbh->do('INSERT INTO async_test VALUES(5)', { mariadb_async => 1 });
is($sth0->last_insert_id(), 1);
is($sth2->last_insert_id(), 4);
is($dbh->last_insert_id(undef, undef, undef, undef), 4);
ok defined($dbh->mariadb_async_ready);
ok !$dbh->err;
is($sth0->last_insert_id(), 1);
is($sth2->last_insert_id(), 4);
is($dbh->last_insert_id(undef, undef, undef, undef), 4);
ok defined($dbh->mariadb_async_result);
ok !$sth2->err;
is($sth0->last_insert_id(), 1);
is($sth2->last_insert_id(), 4);
is($dbh->last_insert_id(undef, undef, undef, undef), 5);
undef $sth0;
undef $sth1;
undef $sth2;
$dbh->disconnect;
| 25.645833 | 120 | 0.68684 |
ed16edf572d1ad44433372cb9ab11b3d18a6db8c | 1,234 | t | Perl | t/basic.t | zostay/perl6-Email-Address | 1ca60e39889b4043451d77e662d9229d5c1ea702 | [
"Artistic-2.0"
]
| 1 | 2021-05-14T15:57:54.000Z | 2021-05-14T15:57:54.000Z | t/basic.t | zostay/perl6-Email-Address | 1ca60e39889b4043451d77e662d9229d5c1ea702 | [
"Artistic-2.0"
]
| null | null | null | t/basic.t | zostay/perl6-Email-Address | 1ca60e39889b4043451d77e662d9229d5c1ea702 | [
"Artistic-2.0"
]
| null | null | null | #!/usr/bin/env perl6
use v6;
use Test;
use Email::Address;
my $str = q[Brotherhood: "Winston Smith" <winston.smith@recdep.minitrue> (Records Department), Julia <julia@ficdep.minitrue>;, user <user@oceania>];
my @email = Email::Address.parse($str, :addresses);
is @email.elems, 2;
does-ok @email[0], Email::Address::Group;
is @email[0].display-name, 'Brotherhood';
is @email[0].mailbox-list.elems, 2;
does-ok @email[0].mailbox-list[0], Email::Address::Mailbox;
is @email[0].mailbox-list[0].display-name, 'Winston Smith';
is @email[0].mailbox-list[0].address.local-part, 'winston.smith';
is @email[0].mailbox-list[0].address.domain, 'recdep.minitrue';
is @email[0].mailbox-list[0].comment, 'Records Department';
does-ok @email[0].mailbox-list[1], Email::Address::Mailbox;
is @email[0].mailbox-list[1].display-name, 'Julia';
is @email[0].mailbox-list[1].address.local-part, 'julia';
is @email[0].mailbox-list[1].address.domain, 'ficdep.minitrue';
is @email[0].mailbox-list[1].comment, Str;
does-ok @email[1], Email::Address::Mailbox;
is @email[1].display-name, 'user';
is @email[1].address.local-part, 'user';
is @email[1].address.domain, 'oceania';
is @email[1].comment, Str;
is Email::Address.format(@email), $str;
done-testing;
| 36.294118 | 148 | 0.708266 |
ed18c6a93f6ccd9e1412aedb654ecbbd1aac3b04 | 1,351 | pm | Perl | t/response/TestModperl/local_env.pm | randolf/mod_perl | 561f0672e8da450158b34fdac45a949845f0dc0d | [
"Apache-2.0"
]
| 23 | 2015-03-17T11:43:34.000Z | 2022-02-19T13:39:01.000Z | t/response/TestModperl/local_env.pm | randolf/mod_perl | 561f0672e8da450158b34fdac45a949845f0dc0d | [
"Apache-2.0"
]
| 3 | 2019-11-21T03:45:00.000Z | 2021-12-27T23:44:28.000Z | t/response/TestModperl/local_env.pm | randolf/mod_perl | 561f0672e8da450158b34fdac45a949845f0dc0d | [
"Apache-2.0"
]
| 19 | 2015-02-18T21:23:25.000Z | 2022-02-13T16:27:14.000Z | # please insert nothing before this line: -*- mode: cperl; cperl-indent-level: 4; cperl-continued-statement-offset: 4; indent-tabs-mode: nil -*-
package TestModperl::local_env;
use strict;
use warnings FATAL => 'all';
use Apache2::RequestRec ();
use Apache2::RequestIO ();
use Apache2::RequestUtil ();
use Apache::Test;
use Apache::TestUtil;
use Apache2::Const -compile => 'OK';
# local %ENV used to cause segfaults
# Report: http://thread.gmane.org/gmane.comp.apache.mod-perl/22236
# Fixed in: http://svn.apache.org/viewcvs.cgi?rev=357236&view=rev
# Response handler exercising `local %ENV` under mod_perl.  Historical
# regression: localizing %ENV used to segfault the interpreter (see the
# report/fix links above); this verifies the local works, that assignments
# inside the scope take effect, and that the environment is fully restored
# once the scope is left.  Returns Apache2::Const::OK.
sub handler {
    my $r = shift;

    plan $r, tests => 6;

    my %copy_ENV = %ENV; ## this is not a deep copy;

    ok t_cmp($ENV{MOD_PERL_API_VERSION}, 2,
             "\$ENV{MOD_PERL_API_VERSION} is 2 before local \%ENV");

    {
        local %ENV;

        # Inside the scope the localized hash starts out empty.
        ok t_cmp($ENV{MOD_PERL_API_VERSION}, undef,
                 "\$ENV{MOD_PERL_API_VERSION} is undef after local \%ENV");
        ok t_cmp(scalar keys %ENV, 0,
                 "\%ENV has 0 keys after local");

        # Assignments are visible while the scope is still active.
        $ENV{LOCAL} = 1;
        ok t_cmp($ENV{LOCAL}, 1,
                 "can set value after local, but still in block");
    }

    # Fixed typo in the test description: was "valuee".
    ok t_cmp($ENV{LOCAL}, undef,
             "value set in local {} block is gone after leaving scope");
    ok t_cmp(\%copy_ENV, \%ENV, "\%ENV was restored correctly");

    Apache2::Const::OK;
}
1;
__END__
SetHandler perl-script
| 24.563636 | 144 | 0.643967 |
73fd5c5afbf8533e2461c502e09c53b2f4d9b87a | 7 | al | Perl | paip/jim/attolisp/tsym.al | CompSciCabal/SMRTYPRTY | a8e2c5049199635fecce7b7f70a2225cda6558d8 | [
"Unlicense"
]
| 60 | 2015-02-04T13:02:10.000Z | 2021-11-16T12:54:44.000Z | paip/jim/attolisp/tsym.al | CompSciCabal/SMRTYPRTY | a8e2c5049199635fecce7b7f70a2225cda6558d8 | [
"Unlicense"
]
| 80 | 2015-02-20T07:23:41.000Z | 2018-03-13T02:30:42.000Z | paip/jim/attolisp/tsym.al | CompSciCabal/SMRTYPRTY | a8e2c5049199635fecce7b7f70a2225cda6558d8 | [
"Unlicense"
]
| 11 | 2015-02-20T04:48:06.000Z | 2019-11-17T03:19:36.000Z | '(ABC)
| 3.5 | 6 | 0.428571 |
73fc168577a11515095d0c9da6ecf4ed6b62b841 | 1,980 | pm | Perl | auto-lib/Paws/ELBv2/Limit.pm | shogo82148/aws-sdk-perl | a87555a9d30dd1415235ebacd2715b2f7e5163c7 | [
"Apache-2.0"
]
| null | null | null | auto-lib/Paws/ELBv2/Limit.pm | shogo82148/aws-sdk-perl | a87555a9d30dd1415235ebacd2715b2f7e5163c7 | [
"Apache-2.0"
]
| null | null | null | auto-lib/Paws/ELBv2/Limit.pm | shogo82148/aws-sdk-perl | a87555a9d30dd1415235ebacd2715b2f7e5163c7 | [
"Apache-2.0"
]
| null | null | null | # Generated by default/object.tt
package Paws::ELBv2::Limit;
# Auto-generated Paws value class (see the template note above): describes
# one Elastic Load Balancing v2 account limit.  Both attributes are
# read-only strings -- Name is the limit's key, Max its value; details in
# the POD below.
  use Moose;
  has Max => (is => 'ro', isa => 'Str');
  has Name => (is => 'ro', isa => 'Str');
1;
### main pod documentation begin ###
=head1 NAME
Paws::ELBv2::Limit
=head1 USAGE
This class represents one of two things:
=head3 Arguments in a call to a service
Use the attributes of this class as arguments to methods. You shouldn't make instances of this class.
Each attribute should be used as a named argument in the calls that expect this type of object.
As an example, if Att1 is expected to be a Paws::ELBv2::Limit object:
$service_obj->Method(Att1 => { Max => $value, ..., Name => $value });
=head3 Results returned from an API call
Use accessors for each attribute. If Att1 is expected to be a Paws::ELBv2::Limit object:
$result = $service_obj->Method(...);
$result->Att1->Max
=head1 DESCRIPTION
Information about an Elastic Load Balancing resource limit for your AWS
account.
=head1 ATTRIBUTES
=head2 Max => Str
The maximum value of the limit.
=head2 Name => Str
The name of the limit. The possible values are:
=over
=item *
application-load-balancers
=item *
listeners-per-application-load-balancer
=item *
listeners-per-network-load-balancer
=item *
network-load-balancers
=item *
rules-per-application-load-balancer
=item *
target-groups
=item *
target-groups-per-action-on-application-load-balancer
=item *
target-groups-per-action-on-network-load-balancer
=item *
target-groups-per-application-load-balancer
=item *
targets-per-application-load-balancer
=item *
targets-per-availability-zone-per-network-load-balancer
=item *
targets-per-network-load-balancer
=back
=head1 SEE ALSO
This class forms part of L<Paws>, describing an object used in L<Paws::ELBv2>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
| 16.638655 | 102 | 0.724747 |
ed35c3b398007711e63b11e8097b64bd03e95d20 | 1,282 | t | Perl | t/insert_after.t | cafe01/html-jquery | 80994af8970e0d7f32ae117e470066b054687efa | [
"Artistic-1.0"
]
| 1 | 2018-04-04T15:08:04.000Z | 2018-04-04T15:08:04.000Z | t/insert_after.t | cafe01/html-jquery | 80994af8970e0d7f32ae117e470066b054687efa | [
"Artistic-1.0"
]
| 1 | 2016-07-09T09:43:40.000Z | 2016-07-11T23:48:00.000Z | t/insert_after.t | cafe01/html-jquery | 80994af8970e0d7f32ae117e470066b054687efa | [
"Artistic-1.0"
]
| null | null | null | #!/usr/bin/env perl
use strict;
use warnings;
use lib 'lib';
use Test::More;
use XML::LibXML::jQuery;
# Forward declaration so the (&@) prototype applies to the block-style
# calls below before the sub itself is defined at the bottom of the file.
sub test (&@);
# Shared fixture: every scenario starts from a fresh copy of this document.
my $html = '<div class="container"><h2>Greetings</h2><div class="inner">Hello</div><div class="inner">Goodbye</div></div>';
# Each case inserts a new <p/> (or moves the existing <h2>) after the
# matched nodes; when no expected string is given, the common default
# inside test() is used.
test { $_->new('<p/>')->insert_after($_->find('h2, .inner')->{nodes}) } 'insert_after(arrayref)';
test { $_->new('<p/>')->insert_after($_->find('.inner')->add('h2')) } 'insert_after(jQuery)';
test { $_->new('<p/>')->insert_after('h2, .inner') } 'insert_after(selector)';
test { $_->find('h2')->insert_after('.inner:last-child') }
    'insert_after(selector) (move)',
    '<div class="container"><div class="inner">Hello</div><div class="inner">Goodbye</div><h2>Greetings</h2></div>';
test { $_->find('h2')->insert_after($_->find('.inner:last-child')->get(0)) }
    'insert_after(element)',
    '<div class="container"><div class="inner">Hello</div><div class="inner">Goodbye</div><h2>Greetings</h2></div>';
done_testing;
# Run one insert_after scenario: build a fresh document from $html, expose
# it to the code block both as $_ and as the first argument, then compare
# the serialized result against $want (or the common default below when the
# caller passed no expected string).  The (&@) prototype enables the
# `test { ... } 'name';` call style used above.
sub test (&@) {
    my ($block, $label, $want) = @_;
    $want ||= '<div class="container"><h2>Greetings</h2><p></p><div class="inner">Hello</div><p></p><div class="inner">Goodbye</div><p></p></div>';
    my $doc = j($html);
    local $_ = $doc;
    $block->($doc);
    is $doc->as_html, $want, $label;
}
| 30.52381 | 151 | 0.599064 |
ed2431aff071e484340e30fc9aa5c50a9b59cae3 | 1,567 | pm | Perl | apps/monitoring/ntopng/restapi/plugin.pm | ponchoh/centreon-plugins | 51316a40af192eb29188814e9c7b7c0776aee493 | [
"Apache-2.0"
]
| null | null | null | apps/monitoring/ntopng/restapi/plugin.pm | ponchoh/centreon-plugins | 51316a40af192eb29188814e9c7b7c0776aee493 | [
"Apache-2.0"
]
| null | null | null | apps/monitoring/ntopng/restapi/plugin.pm | ponchoh/centreon-plugins | 51316a40af192eb29188814e9c7b7c0776aee493 | [
"Apache-2.0"
]
| null | null | null | #
# Copyright 2022 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package apps::monitoring::ntopng::restapi::plugin;

use strict;
use warnings;
use base qw(centreon::plugins::script_custom);

# Plugin entry point: registers the monitoring modes available for NtopNG
# and the REST API custom backend used to reach it.
sub new {
    my ($class, %options) = @_;

    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;

    # Map of user-facing mode names to their implementing packages.
    my %modes = (
        'alerts'         => 'apps::monitoring::ntopng::restapi::mode::alerts',
        'host-flows'     => 'apps::monitoring::ntopng::restapi::mode::hostflows',
        'probe-health'   => 'apps::monitoring::ntopng::restapi::mode::probehealth',
        'netflow-health' => 'apps::monitoring::ntopng::restapi::mode::netflowhealth',
    );
    $self->{modes} = \%modes;

    $self->{custom_modes}{api} = 'apps::monitoring::ntopng::restapi::custom::api';

    return $self;
}

1;
__END__
=head1 PLUGIN DESCRIPTION
Check NtopNG using the REST API.
=cut
| 30.134615 | 84 | 0.694959 |
ed0ab03b2dafc431da9b087583ac966fe58442b6 | 15,756 | pm | Perl | src/contrib/thriftfs/gen-perl/Types.pm | moyue/hadoop | 947640284fd9b7cfa967b82fa08f755416f42db5 | [
"Apache-2.0"
]
| 194 | 2015-01-07T11:12:52.000Z | 2022-03-14T09:19:24.000Z | src/contrib/thriftfs/gen-perl/Types.pm | moyue/hadoop | 947640284fd9b7cfa967b82fa08f755416f42db5 | [
"Apache-2.0"
]
| 10 | 2019-11-13T06:03:05.000Z | 2021-08-02T17:05:22.000Z | src/contrib/thriftfs/gen-perl/Types.pm | moyue/hadoop | 947640284fd9b7cfa967b82fa08f755416f42db5 | [
"Apache-2.0"
]
| 172 | 2015-01-14T19:25:48.000Z | 2022-02-24T02:42:02.000Z | #
# Autogenerated by Thrift
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
require 5.6.0;
use strict;
use warnings;
use Thrift;
# Thrift-generated binding for the `ThriftHandle` struct: a single I64
# field `id` carried under field id -1.  The logic below is emitted by the
# Thrift compiler (see the file header) and is rewritten on regeneration.
package ThriftHandle;
use base('Class::Accessor');
ThriftHandle->mk_accessors( qw( id ) );
# Constructor; accepts an optional hashref supplying initial field values.
sub new {
my $classname = shift;
my $self = {};
my $vals = shift || {};
$self->{id} = undef;
if (UNIVERSAL::isa($vals,'HASH')) {
if (defined $vals->{id}) {
$self->{id} = $vals->{id};
}
}
return bless($self,$classname);
}
# Struct name as used by the Thrift protocol layer.
sub getName {
return 'ThriftHandle';
}
# Deserialize from a Thrift protocol object; unknown fields are skipped.
# Returns the accumulated transfer count reported by the protocol calls.
sub read {
my $self = shift;
my $input = shift;
my $xfer = 0;
my $fname;
my $ftype = 0;
my $fid = 0;
$xfer += $input->readStructBegin(\$fname);
while (1)
{
$xfer += $input->readFieldBegin(\$fname, \$ftype, \$fid);
if ($ftype == TType::STOP) {
last;
}
SWITCH: for($fid)
{
/^-1$/ && do{ if ($ftype == TType::I64) {
$xfer += $input->readI64(\$self->{id});
} else {
$xfer += $input->skip($ftype);
}
last; };
$xfer += $input->skip($ftype);
}
$xfer += $input->readFieldEnd();
}
$xfer += $input->readStructEnd();
return $xfer;
}
# Serialize to a Thrift protocol object; undef fields are omitted.
sub write {
my $self = shift;
my $output = shift;
my $xfer = 0;
$xfer += $output->writeStructBegin('ThriftHandle');
if (defined $self->{id}) {
$xfer += $output->writeFieldBegin('id', TType::I64, -1);
$xfer += $output->writeI64($self->{id});
$xfer += $output->writeFieldEnd();
}
$xfer += $output->writeFieldStop();
$xfer += $output->writeStructEnd();
return $xfer;
}
# Thrift-generated binding for the `Pathname` struct: a single STRING
# field `pathname` under field id -1.  Same generated read/write pattern
# as the other structs in this file; regenerated by the Thrift compiler.
package Pathname;
use base('Class::Accessor');
Pathname->mk_accessors( qw( pathname ) );
# Constructor; accepts an optional hashref supplying initial field values.
sub new {
my $classname = shift;
my $self = {};
my $vals = shift || {};
$self->{pathname} = undef;
if (UNIVERSAL::isa($vals,'HASH')) {
if (defined $vals->{pathname}) {
$self->{pathname} = $vals->{pathname};
}
}
return bless($self,$classname);
}
# Struct name as used by the Thrift protocol layer.
sub getName {
return 'Pathname';
}
# Deserialize from a Thrift protocol object; unknown fields are skipped.
sub read {
my $self = shift;
my $input = shift;
my $xfer = 0;
my $fname;
my $ftype = 0;
my $fid = 0;
$xfer += $input->readStructBegin(\$fname);
while (1)
{
$xfer += $input->readFieldBegin(\$fname, \$ftype, \$fid);
if ($ftype == TType::STOP) {
last;
}
SWITCH: for($fid)
{
/^-1$/ && do{ if ($ftype == TType::STRING) {
$xfer += $input->readString(\$self->{pathname});
} else {
$xfer += $input->skip($ftype);
}
last; };
$xfer += $input->skip($ftype);
}
$xfer += $input->readFieldEnd();
}
$xfer += $input->readStructEnd();
return $xfer;
}
# Serialize to a Thrift protocol object; undef fields are omitted.
sub write {
my $self = shift;
my $output = shift;
my $xfer = 0;
$xfer += $output->writeStructBegin('Pathname');
if (defined $self->{pathname}) {
$xfer += $output->writeFieldBegin('pathname', TType::STRING, -1);
$xfer += $output->writeString($self->{pathname});
$xfer += $output->writeFieldEnd();
}
$xfer += $output->writeFieldStop();
$xfer += $output->writeStructEnd();
return $xfer;
}
# Thrift-generated binding for the `FileStatus` struct (HDFS file
# metadata): path/permission/owner/group (STRING), length/blocksize/
# modification_time (I64), isdir (BOOL), block_replication (I16), with
# field ids 1..9.  Generated by the Thrift compiler; do not hand-edit.
package FileStatus;
use base('Class::Accessor');
FileStatus->mk_accessors( qw( path length isdir block_replication blocksize modification_time permission owner group ) );
# Constructor; accepts an optional hashref supplying initial field values.
sub new {
my $classname = shift;
my $self = {};
my $vals = shift || {};
$self->{path} = undef;
$self->{length} = undef;
$self->{isdir} = undef;
$self->{block_replication} = undef;
$self->{blocksize} = undef;
$self->{modification_time} = undef;
$self->{permission} = undef;
$self->{owner} = undef;
$self->{group} = undef;
if (UNIVERSAL::isa($vals,'HASH')) {
if (defined $vals->{path}) {
$self->{path} = $vals->{path};
}
if (defined $vals->{length}) {
$self->{length} = $vals->{length};
}
if (defined $vals->{isdir}) {
$self->{isdir} = $vals->{isdir};
}
if (defined $vals->{block_replication}) {
$self->{block_replication} = $vals->{block_replication};
}
if (defined $vals->{blocksize}) {
$self->{blocksize} = $vals->{blocksize};
}
if (defined $vals->{modification_time}) {
$self->{modification_time} = $vals->{modification_time};
}
if (defined $vals->{permission}) {
$self->{permission} = $vals->{permission};
}
if (defined $vals->{owner}) {
$self->{owner} = $vals->{owner};
}
if (defined $vals->{group}) {
$self->{group} = $vals->{group};
}
}
return bless($self,$classname);
}
# Struct name as used by the Thrift protocol layer.
sub getName {
return 'FileStatus';
}
# Deserialize from a Thrift protocol object, dispatching on field id;
# fields with unexpected types and unknown field ids are skipped.
sub read {
my $self = shift;
my $input = shift;
my $xfer = 0;
my $fname;
my $ftype = 0;
my $fid = 0;
$xfer += $input->readStructBegin(\$fname);
while (1)
{
$xfer += $input->readFieldBegin(\$fname, \$ftype, \$fid);
if ($ftype == TType::STOP) {
last;
}
SWITCH: for($fid)
{
/^1$/ && do{ if ($ftype == TType::STRING) {
$xfer += $input->readString(\$self->{path});
} else {
$xfer += $input->skip($ftype);
}
last; };
/^2$/ && do{ if ($ftype == TType::I64) {
$xfer += $input->readI64(\$self->{length});
} else {
$xfer += $input->skip($ftype);
}
last; };
/^3$/ && do{ if ($ftype == TType::BOOL) {
$xfer += $input->readBool(\$self->{isdir});
} else {
$xfer += $input->skip($ftype);
}
last; };
/^4$/ && do{ if ($ftype == TType::I16) {
$xfer += $input->readI16(\$self->{block_replication});
} else {
$xfer += $input->skip($ftype);
}
last; };
/^5$/ && do{ if ($ftype == TType::I64) {
$xfer += $input->readI64(\$self->{blocksize});
} else {
$xfer += $input->skip($ftype);
}
last; };
/^6$/ && do{ if ($ftype == TType::I64) {
$xfer += $input->readI64(\$self->{modification_time});
} else {
$xfer += $input->skip($ftype);
}
last; };
/^7$/ && do{ if ($ftype == TType::STRING) {
$xfer += $input->readString(\$self->{permission});
} else {
$xfer += $input->skip($ftype);
}
last; };
/^8$/ && do{ if ($ftype == TType::STRING) {
$xfer += $input->readString(\$self->{owner});
} else {
$xfer += $input->skip($ftype);
}
last; };
/^9$/ && do{ if ($ftype == TType::STRING) {
$xfer += $input->readString(\$self->{group});
} else {
$xfer += $input->skip($ftype);
}
last; };
$xfer += $input->skip($ftype);
}
$xfer += $input->readFieldEnd();
}
$xfer += $input->readStructEnd();
return $xfer;
}
# Serialize to a Thrift protocol object; undef fields are omitted.
sub write {
my $self = shift;
my $output = shift;
my $xfer = 0;
$xfer += $output->writeStructBegin('FileStatus');
if (defined $self->{path}) {
$xfer += $output->writeFieldBegin('path', TType::STRING, 1);
$xfer += $output->writeString($self->{path});
$xfer += $output->writeFieldEnd();
}
if (defined $self->{length}) {
$xfer += $output->writeFieldBegin('length', TType::I64, 2);
$xfer += $output->writeI64($self->{length});
$xfer += $output->writeFieldEnd();
}
if (defined $self->{isdir}) {
$xfer += $output->writeFieldBegin('isdir', TType::BOOL, 3);
$xfer += $output->writeBool($self->{isdir});
$xfer += $output->writeFieldEnd();
}
if (defined $self->{block_replication}) {
$xfer += $output->writeFieldBegin('block_replication', TType::I16, 4);
$xfer += $output->writeI16($self->{block_replication});
$xfer += $output->writeFieldEnd();
}
if (defined $self->{blocksize}) {
$xfer += $output->writeFieldBegin('blocksize', TType::I64, 5);
$xfer += $output->writeI64($self->{blocksize});
$xfer += $output->writeFieldEnd();
}
if (defined $self->{modification_time}) {
$xfer += $output->writeFieldBegin('modification_time', TType::I64, 6);
$xfer += $output->writeI64($self->{modification_time});
$xfer += $output->writeFieldEnd();
}
if (defined $self->{permission}) {
$xfer += $output->writeFieldBegin('permission', TType::STRING, 7);
$xfer += $output->writeString($self->{permission});
$xfer += $output->writeFieldEnd();
}
if (defined $self->{owner}) {
$xfer += $output->writeFieldBegin('owner', TType::STRING, 8);
$xfer += $output->writeString($self->{owner});
$xfer += $output->writeFieldEnd();
}
if (defined $self->{group}) {
$xfer += $output->writeFieldBegin('group', TType::STRING, 9);
$xfer += $output->writeString($self->{group});
$xfer += $output->writeFieldEnd();
}
$xfer += $output->writeFieldStop();
$xfer += $output->writeStructEnd();
return $xfer;
}
# Thrift-generated binding for the `BlockLocation` struct: hosts and names
# (lists of STRING, field ids 1 and 2) plus offset and length (I64, field
# ids 3 and 4).  Generated by the Thrift compiler; do not hand-edit.
package BlockLocation;
use base('Class::Accessor');
BlockLocation->mk_accessors( qw( hosts names offset length ) );
# Constructor; accepts an optional hashref supplying initial field values.
sub new {
my $classname = shift;
my $self = {};
my $vals = shift || {};
$self->{hosts} = undef;
$self->{names} = undef;
$self->{offset} = undef;
$self->{length} = undef;
if (UNIVERSAL::isa($vals,'HASH')) {
if (defined $vals->{hosts}) {
$self->{hosts} = $vals->{hosts};
}
if (defined $vals->{names}) {
$self->{names} = $vals->{names};
}
if (defined $vals->{offset}) {
$self->{offset} = $vals->{offset};
}
if (defined $vals->{length}) {
$self->{length} = $vals->{length};
}
}
return bless($self,$classname);
}
# Struct name as used by the Thrift protocol layer.
sub getName {
return 'BlockLocation';
}
# Deserialize from a Thrift protocol object.  The generated temporaries
# (_size*/_etype*/_i*/elem*) drive the element-by-element list reads;
# unknown or mistyped fields are skipped.
sub read {
my $self = shift;
my $input = shift;
my $xfer = 0;
my $fname;
my $ftype = 0;
my $fid = 0;
$xfer += $input->readStructBegin(\$fname);
while (1)
{
$xfer += $input->readFieldBegin(\$fname, \$ftype, \$fid);
if ($ftype == TType::STOP) {
last;
}
SWITCH: for($fid)
{
/^1$/ && do{ if ($ftype == TType::LIST) {
{
my $_size0 = 0;
$self->{hosts} = [];
my $_etype3 = 0;
$xfer += $input->readListBegin(\$_etype3, \$_size0);
for (my $_i4 = 0; $_i4 < $_size0; ++$_i4)
{
my $elem5 = undef;
$xfer += $input->readString(\$elem5);
push(@{$self->{hosts}},$elem5);
}
$xfer += $input->readListEnd();
}
} else {
$xfer += $input->skip($ftype);
}
last; };
/^2$/ && do{ if ($ftype == TType::LIST) {
{
my $_size6 = 0;
$self->{names} = [];
my $_etype9 = 0;
$xfer += $input->readListBegin(\$_etype9, \$_size6);
for (my $_i10 = 0; $_i10 < $_size6; ++$_i10)
{
my $elem11 = undef;
$xfer += $input->readString(\$elem11);
push(@{$self->{names}},$elem11);
}
$xfer += $input->readListEnd();
}
} else {
$xfer += $input->skip($ftype);
}
last; };
/^3$/ && do{ if ($ftype == TType::I64) {
$xfer += $input->readI64(\$self->{offset});
} else {
$xfer += $input->skip($ftype);
}
last; };
/^4$/ && do{ if ($ftype == TType::I64) {
$xfer += $input->readI64(\$self->{length});
} else {
$xfer += $input->skip($ftype);
}
last; };
$xfer += $input->skip($ftype);
}
$xfer += $input->readFieldEnd();
}
$xfer += $input->readStructEnd();
return $xfer;
}
# Serialize to a Thrift protocol object; undef fields are omitted.
sub write {
my $self = shift;
my $output = shift;
my $xfer = 0;
$xfer += $output->writeStructBegin('BlockLocation');
if (defined $self->{hosts}) {
$xfer += $output->writeFieldBegin('hosts', TType::LIST, 1);
{
$output->writeListBegin(TType::STRING, scalar(@{$self->{hosts}}));
{
foreach my $iter12 (@{$self->{hosts}})
{
$xfer += $output->writeString($iter12);
}
}
$output->writeListEnd();
}
$xfer += $output->writeFieldEnd();
}
if (defined $self->{names}) {
$xfer += $output->writeFieldBegin('names', TType::LIST, 2);
{
$output->writeListBegin(TType::STRING, scalar(@{$self->{names}}));
{
foreach my $iter13 (@{$self->{names}})
{
$xfer += $output->writeString($iter13);
}
}
$output->writeListEnd();
}
$xfer += $output->writeFieldEnd();
}
if (defined $self->{offset}) {
$xfer += $output->writeFieldBegin('offset', TType::I64, 3);
$xfer += $output->writeI64($self->{offset});
$xfer += $output->writeFieldEnd();
}
if (defined $self->{length}) {
$xfer += $output->writeFieldBegin('length', TType::I64, 4);
$xfer += $output->writeI64($self->{length});
$xfer += $output->writeFieldEnd();
}
$xfer += $output->writeFieldStop();
$xfer += $output->writeStructEnd();
return $xfer;
}
# Thrift-generated exception type `MalformedInputException`: a single
# STRING field `message` (field id -1).  Inherits Thrift::TException so it
# can be thrown across the RPC boundary.  Generated code; do not hand-edit.
package MalformedInputException;
use base('Thrift::TException');
use base('Class::Accessor');
MalformedInputException->mk_accessors( qw( message ) );
# Constructor; accepts an optional hashref supplying initial field values.
sub new {
my $classname = shift;
my $self = {};
my $vals = shift || {};
$self->{message} = undef;
if (UNIVERSAL::isa($vals,'HASH')) {
if (defined $vals->{message}) {
$self->{message} = $vals->{message};
}
}
return bless($self,$classname);
}
# Struct name as used by the Thrift protocol layer.
sub getName {
return 'MalformedInputException';
}
# Deserialize from a Thrift protocol object; unknown fields are skipped.
sub read {
my $self = shift;
my $input = shift;
my $xfer = 0;
my $fname;
my $ftype = 0;
my $fid = 0;
$xfer += $input->readStructBegin(\$fname);
while (1)
{
$xfer += $input->readFieldBegin(\$fname, \$ftype, \$fid);
if ($ftype == TType::STOP) {
last;
}
SWITCH: for($fid)
{
/^-1$/ && do{ if ($ftype == TType::STRING) {
$xfer += $input->readString(\$self->{message});
} else {
$xfer += $input->skip($ftype);
}
last; };
$xfer += $input->skip($ftype);
}
$xfer += $input->readFieldEnd();
}
$xfer += $input->readStructEnd();
return $xfer;
}
# Serialize to a Thrift protocol object; undef fields are omitted.
sub write {
my $self = shift;
my $output = shift;
my $xfer = 0;
$xfer += $output->writeStructBegin('MalformedInputException');
if (defined $self->{message}) {
$xfer += $output->writeFieldBegin('message', TType::STRING, -1);
$xfer += $output->writeString($self->{message});
$xfer += $output->writeFieldEnd();
}
$xfer += $output->writeFieldStop();
$xfer += $output->writeStructEnd();
return $xfer;
}
# Thrift-generated exception type `ThriftIOException`: a single STRING
# field `message` (field id -1).  Inherits Thrift::TException so it can be
# thrown across the RPC boundary.  Generated code; do not hand-edit.
package ThriftIOException;
use base('Thrift::TException');
use base('Class::Accessor');
ThriftIOException->mk_accessors( qw( message ) );
# Constructor; accepts an optional hashref supplying initial field values.
sub new {
my $classname = shift;
my $self = {};
my $vals = shift || {};
$self->{message} = undef;
if (UNIVERSAL::isa($vals,'HASH')) {
if (defined $vals->{message}) {
$self->{message} = $vals->{message};
}
}
return bless($self,$classname);
}
# Struct name as used by the Thrift protocol layer.
sub getName {
return 'ThriftIOException';
}
# Deserialize from a Thrift protocol object; unknown fields are skipped.
sub read {
my $self = shift;
my $input = shift;
my $xfer = 0;
my $fname;
my $ftype = 0;
my $fid = 0;
$xfer += $input->readStructBegin(\$fname);
while (1)
{
$xfer += $input->readFieldBegin(\$fname, \$ftype, \$fid);
if ($ftype == TType::STOP) {
last;
}
SWITCH: for($fid)
{
/^-1$/ && do{ if ($ftype == TType::STRING) {
$xfer += $input->readString(\$self->{message});
} else {
$xfer += $input->skip($ftype);
}
last; };
$xfer += $input->skip($ftype);
}
$xfer += $input->readFieldEnd();
}
$xfer += $input->readStructEnd();
return $xfer;
}
# Serialize to a Thrift protocol object; undef fields are omitted.
sub write {
my $self = shift;
my $output = shift;
my $xfer = 0;
$xfer += $output->writeStructBegin('ThriftIOException');
if (defined $self->{message}) {
$xfer += $output->writeFieldBegin('message', TType::STRING, -1);
$xfer += $output->writeString($self->{message});
$xfer += $output->writeFieldEnd();
}
$xfer += $output->writeFieldStop();
$xfer += $output->writeStructEnd();
return $xfer;
}
1;
| 25.787234 | 121 | 0.526149 |
73de1a7b65bfb61b39e4e6ca9c130d9c43a87508 | 260 | pl | Perl | libtap/tests/fail/test.pl | simark/libtap-prev | 934d6a0dac027c00ba413446b79a9261085025cf | [
"BSD-2-Clause"
]
| null | null | null | libtap/tests/fail/test.pl | simark/libtap-prev | 934d6a0dac027c00ba413446b79a9261085025cf | [
"BSD-2-Clause"
]
| 3 | 2019-08-01T23:40:11.000Z | 2021-08-12T14:51:11.000Z | libtap/tests/fail/test.pl | simark/libtap-prev | 934d6a0dac027c00ba413446b79a9261085025cf | [
"BSD-2-Clause"
]
| 4 | 2019-09-01T01:34:46.000Z | 2020-12-15T14:24:22.000Z | #!/usr/bin/perl
use warnings;
use strict;
use Test::More;
# NOTE(review): this file lives under tests/fail/ -- it deliberately emits
# two failing TAP tests, presumably so the harness can compare libtap's
# output against Test::More's; confirm against the test driver before
# "fixing" the failures.
my $rc = 0;
# Capture and report the return values of plan() and fail() themselves.
$rc = plan tests => 2;
diag("Returned: " . sprintf('%d', $rc));
$rc = fail('test to fail');
diag("Returned: $rc");
$rc = fail('test to fail with extra string');
diag("Returned: $rc");
| 14.444444 | 45 | 0.596154 |
ed0e0700e677b7c6d82a39fda5705e0022e2b347 | 516 | pl | Perl | egs/babel/s5b/g2p/g2p_onc2syl.pl | Shuang777/kaldi-2016 | 5373fe4bd80857b53134db566cad48b8445cf3b9 | [
"Apache-2.0"
]
| null | null | null | egs/babel/s5b/g2p/g2p_onc2syl.pl | Shuang777/kaldi-2016 | 5373fe4bd80857b53134db566cad48b8445cf3b9 | [
"Apache-2.0"
]
| null | null | null | egs/babel/s5b/g2p/g2p_onc2syl.pl | Shuang777/kaldi-2016 | 5373fe4bd80857b53134db566cad48b8445cf3b9 | [
"Apache-2.0"
]
| null | null | null | #!/usr/bin/perl
# Convert an O/N/C-tagged pronunciation (onset/nucleus/coda phones) into a
# tab-delimited syllable representation.
#
# Input (one word per line):  WORD<TAB>ph1/O ph2/N ph3/C ...
# Output:                     WORD<TAB> ph1 ph2 ph3<TAB> ph4 ...
# A tab (new syllable) is emitted before an onset that follows other
# phones, and before a nucleus that directly follows a nucleus/coda.
#
# Changes vs. the original: strict/warnings enabled, the unused $DELIMITER
# global and a redundant second /O strip removed, and the conversion
# extracted into a testable function; the stream behavior is unchanged.
use strict;
use warnings;

# Convert one chomped input line; returns the converted line without a
# trailing newline.
sub onc_to_syllables {
    my ($line) = @_;
    my ($word, $pron) = split /\t/, $line;
    $word = '' unless defined $word;   # tolerate empty lines
    $pron = '' unless defined $pron;   # tolerate missing pronunciation
    my $out = "$word\t";
    my $in_syllable = 0;   # true once a nucleus or coda has been emitted
    for my $phone (split /\s/, $pron) {
        if ($phone =~ s{/O\z}{}) {        # onset: may open a new syllable
            $out .= "\t" if $in_syllable;
            $in_syllable = 0;
        }
        elsif ($phone =~ s{/N\z}{}) {     # nucleus
            $out .= "\t" if $in_syllable;
            $in_syllable = 1;
        }
        else {                            # coda (or untagged phone)
            $in_syllable = 1;
            $phone =~ s{/C\z}{};
        }
        $out .= " $phone";
    }
    return $out;
}

# Filter STDIN/ARGV to STDOUT when run as a script (modulino pattern).
sub main {
    while (my $line = <>) {
        chomp $line;
        print onc_to_syllables($line), "\n";
    }
}

main() unless caller();
| 15.636364 | 38 | 0.358527 |
ed3bdf168c8992250692c99467a3fe0c29553eee | 4,088 | plx | Perl | test/TestExec-regression-test/plans/Increment-test.plx | morxa/plexil-4 | 890e92aa259881dd944d573d6ec519341782a5f2 | [
"BSD-3-Clause"
]
| 1 | 2020-02-27T03:35:50.000Z | 2020-02-27T03:35:50.000Z | test/TestExec-regression-test/plans/Increment-test.plx | morxa/plexil-4 | 890e92aa259881dd944d573d6ec519341782a5f2 | [
"BSD-3-Clause"
]
| null | null | null | test/TestExec-regression-test/plans/Increment-test.plx | morxa/plexil-4 | 890e92aa259881dd944d573d6ec519341782a5f2 | [
"BSD-3-Clause"
]
| null | null | null | <?xml version="1.0" encoding="UTF-8"?><PlexilPlan xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:tr="extended-plexil-translator" FileName="Increment-test.ple"><GlobalDeclarations LineNo="2" ColNo="0"><LibraryNodeDeclaration LineNo="2" ColNo="0"><Name>Increment</Name><Interface LineNo="2" ColNo="25"><In><DeclareVariable LineNo="1" ColNo="25"><Name>x</Name><Type>Integer</Type></DeclareVariable></In><InOut><DeclareVariable LineNo="1" ColNo="39"><Name>result</Name><Type>Integer</Type></DeclareVariable></InOut></Interface></LibraryNodeDeclaration><CommandDeclaration LineNo="3" ColNo="0"><Name>pprint</Name><AnyParameters/></CommandDeclaration></GlobalDeclarations><Node NodeType="NodeList" epx="Sequence" LineNo="7" ColNo="2"><NodeId>LibraryCallTest</NodeId><VariableDeclarations><DeclareVariable LineNo="6" ColNo="2"><Name>result</Name><Type>Integer</Type></DeclareVariable></VariableDeclarations><InvariantCondition><NOT><OR><AND><EQInternal><NodeOutcomeVariable><NodeRef dir="child">LibraryCall__0</NodeRef></NodeOutcomeVariable><NodeOutcomeValue>FAILURE</NodeOutcomeValue></EQInternal><EQInternal><NodeStateVariable><NodeRef dir="child">LibraryCall__0</NodeRef></NodeStateVariable><NodeStateValue>FINISHED</NodeStateValue></EQInternal></AND><AND><EQInternal><NodeOutcomeVariable><NodeRef dir="child">COMMAND__1</NodeRef></NodeOutcomeVariable><NodeOutcomeValue>FAILURE</NodeOutcomeValue></EQInternal><EQInternal><NodeStateVariable><NodeRef dir="child">COMMAND__1</NodeRef></NodeStateVariable><NodeStateValue>FINISHED</NodeStateValue></EQInternal></AND><AND><EQInternal><NodeOutcomeVariable><NodeRef dir="child">LibraryCall__2</NodeRef></NodeOutcomeVariable><NodeOutcomeValue>FAILURE</NodeOutcomeValue></EQInternal><EQInternal><NodeStateVariable><NodeRef dir="child">LibraryCall__2</NodeRef></NodeStateVariable><NodeStateValue>FINISHED</NodeStateValue></EQInternal></AND><AND><EQInternal><NodeOutcomeVariable><NodeRef 
dir="child">COMMAND__3</NodeRef></NodeOutcomeVariable><NodeOutcomeValue>FAILURE</NodeOutcomeValue></EQInternal><EQInternal><NodeStateVariable><NodeRef dir="child">COMMAND__3</NodeRef></NodeStateVariable><NodeStateValue>FINISHED</NodeStateValue></EQInternal></AND></OR></NOT></InvariantCondition><NodeBody><NodeList><Node NodeType="LibraryNodeCall"><NodeId>LibraryCall__0</NodeId><NodeBody><LibraryNodeCall><NodeId>Increment</NodeId><Alias><NodeParameter>x</NodeParameter><IntegerValue>1</IntegerValue></Alias><Alias><NodeParameter>result</NodeParameter><IntegerVariable>result</IntegerVariable></Alias></LibraryNodeCall></NodeBody></Node><Node NodeType="Command" LineNo="8" ColNo="2"><NodeId>COMMAND__1</NodeId><StartCondition><EQInternal><NodeStateVariable><NodeRef dir="sibling">LibraryCall__0</NodeRef></NodeStateVariable><NodeStateValue>FINISHED</NodeStateValue></EQInternal></StartCondition><NodeBody><Command><Name><StringValue>pprint</StringValue></Name><Arguments LineNo="9" ColNo="10"><StringValue>Increment(1) =</StringValue><IntegerVariable>result</IntegerVariable></Arguments></Command></NodeBody></Node><Node NodeType="LibraryNodeCall"><NodeId>LibraryCall__2</NodeId><StartCondition><EQInternal><NodeStateVariable><NodeRef dir="sibling">COMMAND__1</NodeRef></NodeStateVariable><NodeStateValue>FINISHED</NodeStateValue></EQInternal></StartCondition><NodeBody><LibraryNodeCall><NodeId>Increment</NodeId><Alias><NodeParameter>x</NodeParameter><IntegerVariable>result</IntegerVariable></Alias><Alias><NodeParameter>result</NodeParameter><IntegerVariable>result</IntegerVariable></Alias></LibraryNodeCall></NodeBody></Node><Node NodeType="Command" LineNo="10" ColNo="2"><NodeId>COMMAND__3</NodeId><StartCondition><EQInternal><NodeStateVariable><NodeRef dir="sibling">LibraryCall__2</NodeRef></NodeStateVariable><NodeStateValue>FINISHED</NodeStateValue></EQInternal></StartCondition><NodeBody><Command><Name><StringValue>pprint</StringValue></Name><Arguments LineNo="11" 
ColNo="10"><StringValue>Increment(2) =</StringValue><IntegerVariable>result</IntegerVariable></Arguments></Command></NodeBody></Node></NodeList></NodeBody></Node></PlexilPlan> | 4,088 | 4,088 | 0.804305 |
ed0b8c8739090ea67eb50f0c7d4cffbdc1b4659c | 3,236 | t | Perl | S17-supply/throttle.t | SirBogman/roast | 0a0835a20951c93fea57a39dec1b2b8789d81fc5 | [
"Artistic-2.0"
]
| 1 | 2019-11-06T05:07:10.000Z | 2019-11-06T05:07:10.000Z | S17-supply/throttle.t | SirBogman/roast | 0a0835a20951c93fea57a39dec1b2b8789d81fc5 | [
"Artistic-2.0"
]
| null | null | null | S17-supply/throttle.t | SirBogman/roast | 0a0835a20951c93fea57a39dec1b2b8789d81fc5 | [
"Artistic-2.0"
]
| null | null | null | use v6;
use Test;
plan 18;
# Supply.throttle is an instance method: calling it on the type object dies.
dies-ok { Supply.throttle(1,1) }, 'can not be called as a class method';
# Record which scheduler runs this file; the timing checks below depend on it.
diag "**** scheduling with {$*SCHEDULER.WHAT.perl}";
{
    # Throttle 1..10 to one emission per 0.5s, recording inter-arrival
    # times; after 6s all ten values must have arrived, spaced roughly by
    # the throttle period.
    my $min = 0;
    my $max = 10;
    my @seen;
    my $before = now;
    (1..10).Supply.throttle(1,.5).tap: {
        @seen.push($_);
        my $now = now;
        my $diff = $now - $before;
        $max = $max min $now - $before;
        $min = $min max $now - $before;
        $before = $now;
    };
    sleep 6;
    is @seen, (1..10), 'did we see all of the element';
    # NOTE(review): given the updates above, $min ends up holding the
    # LARGEST gap and $max the SMALLEST -- the names are swapped relative
    # to their contents; both assertions still hold for correct throttling.
    ok $min > .5, 'difference between each at least .5 seconds';
    ok $max < .9, 'difference between each at most .8 seconds';
}
{
    # Start with a limit of 0 (everything queues); after 1s raise the
    # limit to 2 via the control supply and check that all ten values then
    # drain, in order, within the remaining 3s.
    my $min = 0;
    my $max = 10;
    my @seen;
    my $control = Supplier.new;
    my $before = now;
    (1..10).Supply.throttle(0,.5,:$control).tap: {
        @seen.push($_);
        my $diff = now - $before;
        $max = $max min now - $before;
        $min = $min max now - $before;
        $before = now;
    };
    sleep 1;
    $control.emit( "limit:2" );
    sleep 3;
    is @seen, (1..10), 'did we see all of the element';
    ok $min > 0, 'difference between each at least something';
    ok $max < .5, 'difference between each at most .5 seconds';
}
# cannot get this to reliably work everywhere before 6.c
#for 1..10 -> $n {
# my @a[10]; # pre-size array to allow seamless multi-thread updates
# (^10).Supply.throttle( $n, { @a[$_] = 1 } ).wait;
# is @a.sum, 10, "ok with $n at a time";
#}
#for 1..10 -> $n {
# my @a[10]; # pre-size array to allow seamless multi-thread updates
# my $before = now;
# (^10).Supply.throttle( $n, { sleep rand; @a[$_] = 1 } ).wait;
# ok now < $before + ((12 - $n) * .6), "parallelism as expected with $n";
# is @a.sum, 10, "ok with $n at a time with random delay";
#}
{
    my $status = Supplier.new;
    my @statuses;
    # Collect every status report the throttle emits on :$status.
    $status.Supply.tap: { @statuses.push: $_ };
    my $control = Supplier.new;
    my @seen;
    # limit 0 plus a code block: results arrive as objects whose .result
    # carries the block's return value (here: the element itself, after a
    # proportional sleep).
    (1..4).Supply.throttle(
      0,
      { sleep $_ / 4; $_ },
      :$control,
      :$status,
    ).act: { @seen.push($_.result) };
    is +@seen, 0, 'Nothing should be seen yet';
    $control.emit( "limit: 2" );
    sleep 3;  # no way to wait yet :-(
    is @seen, (1,2,3,4), 'did we see all in the right order?';
    is +@statuses, 1, 'did we get the final status?';
    # Inspect the single "done" status hash; last makes this a one-shot loop.
    for @statuses -> $s {
        is $s<allowed>, 2, 'currently allowed to run';
        is $s<bled>, 0, 'no bleeds done';
        is $s<buffered>, 0, 'none left in buffer';
        is $s<emitted>, 4, 'number of results emitted';
        is $s<id>, "done", 'correct status ID';
        is $s<limit>, 2, 'maximally allowed to run';
        is $s<running>, 0, 'none are running still';
        last;
    }
}
# Fudge directives: skipped on the JVM backend, counts as one test.
#?rakudo.jvm skip 'Unhandled exception; category = 1'
#?DOES 1
{
    subtest {
        react {
            # $++ counts up from 0; elements must arrive in order while the
            # throttled supply is still live.  LAST fires when it is done.
            whenever Supply.from-list(^4).throttle(4, .1) {
                is $_, $++, "throttle isnt done yet";
                LAST { done }
            }
            # Safety valve so a wedged throttle cannot hang the test file.
            whenever Promise.in(10) {
                flunk "throttle timed out";
                done
            }
        }
    }, "Supply.throttle(\$, \$second) should become done when the source supply become done";
}
# vim: ft=perl6 expandtab sw=4
| 27.65812 | 91 | 0.528739 |
73f94833b1f53e36076829b4d1e0de8ca366ecfe | 1,068 | pm | Perl | lib/Markdent/Event/EndEmphasis.pm | clayne/Markdent | df3268c53968d7dbd38d45aa91db1e7bce3a771b | [
"Artistic-1.0"
]
| 7 | 2017-03-25T10:23:02.000Z | 2021-03-14T20:21:25.000Z | lib/Markdent/Event/EndEmphasis.pm | clayne/Markdent | df3268c53968d7dbd38d45aa91db1e7bce3a771b | [
"Artistic-1.0"
]
| 20 | 2017-11-09T15:39:21.000Z | 2021-03-21T15:16:31.000Z | lib/Markdent/Event/EndEmphasis.pm | clayne/Markdent | df3268c53968d7dbd38d45aa91db1e7bce3a771b | [
"Artistic-1.0"
]
| 10 | 2018-02-18T15:40:05.000Z | 2021-02-04T19:29:26.000Z | package Markdent::Event::EndEmphasis;
use strict;
use warnings;
use namespace::autoclean;
our $VERSION = '0.41';
use Markdent::Types;
use Moose;
use MooseX::StrictConstructor;
# The delimiter text that closed the emphasis span (e.g. "*" or "_").
# Read-only and required; t('Str') is the Markdent::Types string constraint.
has delimiter => (
    is       => 'ro',
    isa      => t('Str'),
    required => 1,
);
# Role wiring: Event registers this class as an event type,
# BalancedEvent pairs it with the matching start event by comparing the
# delimiter, and EventAsText requires the as_text method below.
with(
    'Markdent::Role::Event' => { event_class => __PACKAGE__ },
    'Markdent::Role::BalancedEvent' => { compare => ['delimiter'] },
    'Markdent::Role::EventAsText'
);
# The textual form of this event is simply its delimiter.
sub as_text { $_[0]->delimiter }
__PACKAGE__->meta->make_immutable;
1;
# ABSTRACT: An event for the end of an emphasis span
__END__
=pod
=head1 DESCRIPTION
This class represents the end of an emphasis span.
=head1 ATTRIBUTES
This class has the following attributes:
=head2 delimiter
The delimiter for the emphasis span.
=head1 METHODS
This class has the following methods:
=head2 $event->as_text
Returns the event's delimiter.
=head1 ROLES
This class does the L<Markdent::Role::Event> and
L<Markdent::Role::BalancedEvent> roles.
=head1 BUGS
See L<Markdent> for bug reporting details.
=cut
| 15.705882 | 72 | 0.687266 |
73e75f363f2041872bed3fea80a900f8711ecfb9 | 13,267 | pl | Perl | crypto/sha/asm/keccak1600-avx512vl.pl | c4rlo/openssl | ec9135a62320c861ab17f7179ebe470686360c64 | [
"Apache-2.0"
]
| 19,127 | 2015-01-01T18:26:43.000Z | 2022-03-31T21:50:00.000Z | crypto/sha/asm/keccak1600-avx512vl.pl | c4rlo/openssl | ec9135a62320c861ab17f7179ebe470686360c64 | [
"Apache-2.0"
]
| 17,222 | 2015-01-04T19:36:01.000Z | 2022-03-31T23:50:53.000Z | crypto/sha/asm/keccak1600-avx512vl.pl | c4rlo/openssl | ec9135a62320c861ab17f7179ebe470686360c64 | [
"Apache-2.0"
]
| 9,313 | 2015-01-01T21:37:44.000Z | 2022-03-31T23:08:27.000Z | #!/usr/bin/env perl
# Copyright 2017-2020 The OpenSSL Project Authors. All Rights Reserved.
#
# Licensed under the Apache License 2.0 (the "License"). You may not use
# this file except in compliance with the License. You can obtain a copy
# in the file LICENSE in the source distribution or at
# https://www.openssl.org/source/license.html
#
# ====================================================================
# Written by Andy Polyakov <appro@openssl.org> for the OpenSSL
# project. The module is, however, dual licensed under OpenSSL and
# CRYPTOGAMS licenses depending on where you obtain it. For further
# details see http://www.openssl.org/~appro/cryptogams/.
# ====================================================================
#
# Keccak-1600 for AVX512VL.
#
# December 2017.
#
# This is an adaptation of AVX2 module that reuses register data
# layout, but utilizes new 256-bit AVX512VL instructions. See AVX2
# module for further information on layout.
#
########################################################################
# Numbers are cycles per processed byte out of large message.
#
# r=1088(*)
#
# Skylake-X 6.4/+47%
#
# (*) Corresponds to SHA3-256. Percentage after slash is improvement
# coefficient in comparison to scalar keccak1600-x86_64.pl.
# Digits in variables' names denote right-most coordinates:
# One %ymm register packs four 64-bit lanes of the 5x5 Keccak state; the
# bracketed pairs name the [row][column] lane carried in each slot.
my ($A00,	# [0][0] [0][0] [0][0] [0][0] # %ymm0
    $A01,	# [0][4] [0][3] [0][2] [0][1] # %ymm1
    $A20,	# [3][0] [1][0] [4][0] [2][0] # %ymm2
    $A31,	# [2][4] [4][3] [1][2] [3][1] # %ymm3
    $A21,	# [3][4] [1][3] [4][2] [2][1] # %ymm4
    $A41,	# [1][4] [2][3] [3][2] [4][1] # %ymm5
    $A11) =	# [4][4] [3][3] [2][2] [1][1] # %ymm6
    map("%ymm$_",(0..6));
# We also need to map the magic order into offsets within structure:
my @A_jagged = ([0,0], [1,0], [1,1], [1,2], [1,3],	# [0][0..4]
		[2,2], [6,0], [3,1], [4,2], [5,3],	# [1][0..4]
		[2,0], [4,0], [6,1], [5,2], [3,3],	# [2][0..4]
		[2,3], [3,0], [5,1], [6,2], [4,3],	# [3][0..4]
		[2,1], [5,0], [4,1], [3,2], [6,3]);	# [4][0..4]
@A_jagged   = map(8*($$_[0]*4+$$_[1]), @A_jagged);	# ... and now linear
# Scratch registers; the C/D aliases name the Theta-step temporaries, and
# the R* registers hold per-lane rotation amounts (loaded from
# rhotates_left) consumed by vprolvq in the Rho step.
my @T = map("%ymm$_",(7..15));
my ($C14,$C00,$D00,$D14) = @T[5..8];
my ($R20,$R01,$R31,$R21,$R41,$R11) = map("%ymm$_",(16..21));
$code.=<<___;
.text
.type __KeccakF1600,\@function
.align 32
__KeccakF1600:
lea iotas(%rip),%r10
mov \$24,%eax
jmp .Loop_avx512vl
.align 32
.Loop_avx512vl:
######################################### Theta
vpshufd \$0b01001110,$A20,$C00
vpxor $A31,$A41,$C14
vpxor $A11,$A21,@T[2]
vpternlogq \$0x96,$A01,$T[2],$C14 # C[1..4]
vpxor $A20,$C00,$C00
vpermq \$0b01001110,$C00,@T[0]
vpermq \$0b10010011,$C14,@T[4]
vprolq \$1,$C14,@T[1] # ROL64(C[1..4],1)
vpermq \$0b00111001,@T[1],$D14
vpxor @T[4],@T[1],$D00
vpermq \$0b00000000,$D00,$D00 # D[0..0] = ROL64(C[1],1) ^ C[4]
vpternlogq \$0x96,@T[0],$A00,$C00 # C[0..0]
vprolq \$1,$C00,@T[1] # ROL64(C[0..0],1)
vpxor $D00,$A00,$A00 # ^= D[0..0]
vpblendd \$0b11000000,@T[1],$D14,$D14
vpblendd \$0b00000011,$C00,@T[4],@T[0]
######################################### Rho + Pi + pre-Chi shuffle
vpxor $D00,$A20,$A20 # ^= D[0..0] from Theta
vprolvq $R20,$A20,$A20
vpternlogq \$0x96,@T[0],$D14,$A31 # ^= D[1..4] from Theta
vprolvq $R31,$A31,$A31
vpternlogq \$0x96,@T[0],$D14,$A21 # ^= D[1..4] from Theta
vprolvq $R21,$A21,$A21
vpternlogq \$0x96,@T[0],$D14,$A41 # ^= D[1..4] from Theta
vprolvq $R41,$A41,$A41
vpermq \$0b10001101,$A20,@T[3] # $A20 -> future $A31
vpermq \$0b10001101,$A31,@T[4] # $A31 -> future $A21
vpternlogq \$0x96,@T[0],$D14,$A11 # ^= D[1..4] from Theta
vprolvq $R11,$A11,@T[1] # $A11 -> future $A01
vpermq \$0b00011011,$A21,@T[5] # $A21 -> future $A41
vpermq \$0b01110010,$A41,@T[6] # $A41 -> future $A11
vpternlogq \$0x96,@T[0],$D14,$A01 # ^= D[1..4] from Theta
vprolvq $R01,$A01,@T[2] # $A01 -> future $A20
######################################### Chi
vpblendd \$0b00001100,@T[6],@T[2],$A31 # [4][4] [2][0]
vpblendd \$0b00001100,@T[2],@T[4],@T[8] # [4][0] [2][1]
vpblendd \$0b00001100,@T[4],@T[3],$A41 # [4][2] [2][4]
vpblendd \$0b00001100,@T[3],@T[2],@T[7] # [4][3] [2][0]
vpblendd \$0b00110000,@T[4],$A31,$A31 # [1][3] [4][4] [2][0]
vpblendd \$0b00110000,@T[5],@T[8],@T[8] # [1][4] [4][0] [2][1]
vpblendd \$0b00110000,@T[2],$A41,$A41 # [1][0] [4][2] [2][4]
vpblendd \$0b00110000,@T[6],@T[7],@T[7] # [1][1] [4][3] [2][0]
vpblendd \$0b11000000,@T[5],$A31,$A31 # [3][2] [1][3] [4][4] [2][0]
vpblendd \$0b11000000,@T[6],@T[8],@T[8] # [3][3] [1][4] [4][0] [2][1]
vpblendd \$0b11000000,@T[6],$A41,$A41 # [3][3] [1][0] [4][2] [2][4]
vpblendd \$0b11000000,@T[4],@T[7],@T[7] # [3][4] [1][1] [4][3] [2][0]
vpternlogq \$0xC6,@T[8],@T[3],$A31 # [3][1] [1][2] [4][3] [2][4]
vpternlogq \$0xC6,@T[7],@T[5],$A41 # [3][2] [1][4] [4][1] [2][3]
vpsrldq \$8,@T[1],@T[0]
vpandn @T[0],@T[1],@T[0] # tgting [0][0] [0][0] [0][0] [0][0]
vpblendd \$0b00001100,@T[2],@T[5],$A11 # [4][0] [2][3]
vpblendd \$0b00001100,@T[5],@T[3],@T[8] # [4][1] [2][4]
vpblendd \$0b00110000,@T[3],$A11,$A11 # [1][2] [4][0] [2][3]
vpblendd \$0b00110000,@T[4],@T[8],@T[8] # [1][3] [4][1] [2][4]
vpblendd \$0b11000000,@T[4],$A11,$A11 # [3][4] [1][2] [4][0] [2][3]
vpblendd \$0b11000000,@T[2],@T[8],@T[8] # [3][0] [1][3] [4][1] [2][4]
vpternlogq \$0xC6,@T[8],@T[6],$A11 # [3][3] [1][1] [4][4] [2][2]
vpermq \$0b00011110,@T[1],$A21 # [0][1] [0][2] [0][4] [0][3]
vpblendd \$0b00110000,$A00,$A21,@T[8] # [0][1] [0][0] [0][4] [0][3]
vpermq \$0b00111001,@T[1],$A01 # [0][1] [0][4] [0][3] [0][2]
vpblendd \$0b11000000,$A00,$A01,$A01 # [0][0] [0][4] [0][3] [0][2]
vpblendd \$0b00001100,@T[5],@T[4],$A20 # [4][1] [2][1]
vpblendd \$0b00001100,@T[4],@T[6],@T[7] # [4][2] [2][2]
vpblendd \$0b00110000,@T[6],$A20,$A20 # [1][1] [4][1] [2][1]
vpblendd \$0b00110000,@T[3],@T[7],@T[7] # [1][2] [4][2] [2][2]
vpblendd \$0b11000000,@T[3],$A20,$A20 # [3][1] [1][1] [4][1] [2][1]
vpblendd \$0b11000000,@T[5],@T[7],@T[7] # [3][2] [1][2] [4][2] [2][2]
vpternlogq \$0xC6,@T[7],@T[2],$A20 # [3][0] [1][0] [4][0] [2][0]
vpermq \$0b00000000,@T[0],@T[0] # [0][0] [0][0] [0][0] [0][0]
vpermq \$0b00011011,$A31,$A31 # post-Chi shuffle
vpermq \$0b10001101,$A41,$A41
vpermq \$0b01110010,$A11,$A11
vpblendd \$0b00001100,@T[3],@T[6],$A21 # [4][3] [2][2]
vpblendd \$0b00001100,@T[6],@T[5],@T[7] # [4][4] [2][3]
vpblendd \$0b00110000,@T[5],$A21,$A21 # [1][4] [4][3] [2][2]
vpblendd \$0b00110000,@T[2],@T[7],@T[7] # [1][0] [4][4] [2][3]
vpblendd \$0b11000000,@T[2],$A21,$A21 # [3][0] [1][4] [4][3] [2][2]
vpblendd \$0b11000000,@T[3],@T[7],@T[7] # [3][1] [1][0] [4][4] [2][3]
vpternlogq \$0xC6,@T[8],@T[1],$A01 # [0][4] [0][3] [0][2] [0][1]
vpternlogq \$0xC6,@T[7],@T[4],$A21 # [3][4] [1][3] [4][2] [2][1]
######################################### Iota
vpternlogq \$0x96,(%r10),@T[0],$A00
lea 32(%r10),%r10
dec %eax
jnz .Loop_avx512vl
ret
.size __KeccakF1600,.-__KeccakF1600
___
my ($A_flat,$inp,$len,$bsz) = ("%rdi","%rsi","%rdx","%rcx");
my $out = $inp; # in squeeze
$code.=<<___;
.globl SHA3_absorb
.type SHA3_absorb,\@function
.align 32
SHA3_absorb:
mov %rsp,%r11
lea -240(%rsp),%rsp
and \$-32,%rsp
lea 96($A_flat),$A_flat
lea 96($inp),$inp
lea 96(%rsp),%r10
lea rhotates_left(%rip),%r8
vzeroupper
vpbroadcastq -96($A_flat),$A00 # load A[5][5]
vmovdqu 8+32*0-96($A_flat),$A01
vmovdqu 8+32*1-96($A_flat),$A20
vmovdqu 8+32*2-96($A_flat),$A31
vmovdqu 8+32*3-96($A_flat),$A21
vmovdqu 8+32*4-96($A_flat),$A41
vmovdqu 8+32*5-96($A_flat),$A11
vmovdqa64 0*32(%r8),$R20 # load "rhotate" indices
vmovdqa64 1*32(%r8),$R01
vmovdqa64 2*32(%r8),$R31
vmovdqa64 3*32(%r8),$R21
vmovdqa64 4*32(%r8),$R41
vmovdqa64 5*32(%r8),$R11
vpxor @T[0],@T[0],@T[0]
vmovdqa @T[0],32*2-96(%r10) # zero transfer area on stack
vmovdqa @T[0],32*3-96(%r10)
vmovdqa @T[0],32*4-96(%r10)
vmovdqa @T[0],32*5-96(%r10)
vmovdqa @T[0],32*6-96(%r10)
.Loop_absorb_avx512vl:
mov $bsz,%rax
sub $bsz,$len
jc .Ldone_absorb_avx512vl
shr \$3,%eax
vpbroadcastq 0-96($inp),@T[0]
vmovdqu 8-96($inp),@T[1]
sub \$4,%eax
___
for(my $i=5; $i<25; $i++) {
$code.=<<___
dec %eax
jz .Labsorved_avx512vl
mov 8*$i-96($inp),%r8
mov %r8,$A_jagged[$i]-96(%r10)
___
}
$code.=<<___;
.Labsorved_avx512vl:
lea ($inp,$bsz),$inp
vpxor @T[0],$A00,$A00
vpxor @T[1],$A01,$A01
vpxor 32*2-96(%r10),$A20,$A20
vpxor 32*3-96(%r10),$A31,$A31
vpxor 32*4-96(%r10),$A21,$A21
vpxor 32*5-96(%r10),$A41,$A41
vpxor 32*6-96(%r10),$A11,$A11
call __KeccakF1600
lea 96(%rsp),%r10
jmp .Loop_absorb_avx512vl
.Ldone_absorb_avx512vl:
vmovq %xmm0,-96($A_flat)
vmovdqu $A01,8+32*0-96($A_flat)
vmovdqu $A20,8+32*1-96($A_flat)
vmovdqu $A31,8+32*2-96($A_flat)
vmovdqu $A21,8+32*3-96($A_flat)
vmovdqu $A41,8+32*4-96($A_flat)
vmovdqu $A11,8+32*5-96($A_flat)
vzeroupper
lea (%r11),%rsp
lea ($len,$bsz),%rax # return value
ret
.size SHA3_absorb,.-SHA3_absorb
.globl SHA3_squeeze
.type SHA3_squeeze,\@function
.align 32
SHA3_squeeze:
mov %rsp,%r11
lea 96($A_flat),$A_flat
lea rhotates_left(%rip),%r8
shr \$3,$bsz
vzeroupper
vpbroadcastq -96($A_flat),$A00
vpxor @T[0],@T[0],@T[0]
vmovdqu 8+32*0-96($A_flat),$A01
vmovdqu 8+32*1-96($A_flat),$A20
vmovdqu 8+32*2-96($A_flat),$A31
vmovdqu 8+32*3-96($A_flat),$A21
vmovdqu 8+32*4-96($A_flat),$A41
vmovdqu 8+32*5-96($A_flat),$A11
vmovdqa64 0*32(%r8),$R20 # load "rhotate" indices
vmovdqa64 1*32(%r8),$R01
vmovdqa64 2*32(%r8),$R31
vmovdqa64 3*32(%r8),$R21
vmovdqa64 4*32(%r8),$R41
vmovdqa64 5*32(%r8),$R11
mov $bsz,%rax
.Loop_squeeze_avx512vl:
mov @A_jagged[$i]-96($A_flat),%r8
___
for (my $i=0; $i<25; $i++) {
$code.=<<___;
sub \$8,$len
jc .Ltail_squeeze_avx512vl
mov %r8,($out)
lea 8($out),$out
je .Ldone_squeeze_avx512vl
dec %eax
je .Lextend_output_avx512vl
mov @A_jagged[$i+1]-120($A_flat),%r8
___
}
$code.=<<___;
.Lextend_output_avx512vl:
call __KeccakF1600
vmovq %xmm0,-96($A_flat)
vmovdqu $A01,8+32*0-96($A_flat)
vmovdqu $A20,8+32*1-96($A_flat)
vmovdqu $A31,8+32*2-96($A_flat)
vmovdqu $A21,8+32*3-96($A_flat)
vmovdqu $A41,8+32*4-96($A_flat)
vmovdqu $A11,8+32*5-96($A_flat)
mov $bsz,%rax
jmp .Loop_squeeze_avx512vl
.Ltail_squeeze_avx512vl:
add \$8,$len
.Loop_tail_avx512vl:
mov %r8b,($out)
lea 1($out),$out
shr \$8,%r8
dec $len
jnz .Loop_tail_avx512vl
.Ldone_squeeze_avx512vl:
vzeroupper
lea (%r11),%rsp
ret
.size SHA3_squeeze,.-SHA3_squeeze
.align 64
rhotates_left:
.quad 3, 18, 36, 41 # [2][0] [4][0] [1][0] [3][0]
.quad 1, 62, 28, 27 # [0][1] [0][2] [0][3] [0][4]
.quad 45, 6, 56, 39 # [3][1] [1][2] [4][3] [2][4]
.quad 10, 61, 55, 8 # [2][1] [4][2] [1][3] [3][4]
.quad 2, 15, 25, 20 # [4][1] [3][2] [2][3] [1][4]
.quad 44, 43, 21, 14 # [1][1] [2][2] [3][3] [4][4]
iotas:
.quad 0x0000000000000001, 0x0000000000000001, 0x0000000000000001, 0x0000000000000001
.quad 0x0000000000008082, 0x0000000000008082, 0x0000000000008082, 0x0000000000008082
.quad 0x800000000000808a, 0x800000000000808a, 0x800000000000808a, 0x800000000000808a
.quad 0x8000000080008000, 0x8000000080008000, 0x8000000080008000, 0x8000000080008000
.quad 0x000000000000808b, 0x000000000000808b, 0x000000000000808b, 0x000000000000808b
.quad 0x0000000080000001, 0x0000000080000001, 0x0000000080000001, 0x0000000080000001
.quad 0x8000000080008081, 0x8000000080008081, 0x8000000080008081, 0x8000000080008081
.quad 0x8000000000008009, 0x8000000000008009, 0x8000000000008009, 0x8000000000008009
.quad 0x000000000000008a, 0x000000000000008a, 0x000000000000008a, 0x000000000000008a
.quad 0x0000000000000088, 0x0000000000000088, 0x0000000000000088, 0x0000000000000088
.quad 0x0000000080008009, 0x0000000080008009, 0x0000000080008009, 0x0000000080008009
.quad 0x000000008000000a, 0x000000008000000a, 0x000000008000000a, 0x000000008000000a
.quad 0x000000008000808b, 0x000000008000808b, 0x000000008000808b, 0x000000008000808b
.quad 0x800000000000008b, 0x800000000000008b, 0x800000000000008b, 0x800000000000008b
.quad 0x8000000000008089, 0x8000000000008089, 0x8000000000008089, 0x8000000000008089
.quad 0x8000000000008003, 0x8000000000008003, 0x8000000000008003, 0x8000000000008003
.quad 0x8000000000008002, 0x8000000000008002, 0x8000000000008002, 0x8000000000008002
.quad 0x8000000000000080, 0x8000000000000080, 0x8000000000000080, 0x8000000000000080
.quad 0x000000000000800a, 0x000000000000800a, 0x000000000000800a, 0x000000000000800a
.quad 0x800000008000000a, 0x800000008000000a, 0x800000008000000a, 0x800000008000000a
.quad 0x8000000080008081, 0x8000000080008081, 0x8000000080008081, 0x8000000080008081
.quad 0x8000000000008080, 0x8000000000008080, 0x8000000000008080, 0x8000000000008080
.quad 0x0000000080000001, 0x0000000080000001, 0x0000000080000001, 0x0000000080000001
.quad 0x8000000080008008, 0x8000000080008008, 0x8000000080008008, 0x8000000080008008
.asciz "Keccak-1600 absorb and squeeze for AVX512VL, CRYPTOGAMS by <appro\@openssl.org>"
___
# Emit the generated code.  The optional last command-line argument names an
# output file; redirect STDOUT there before printing.  Three-arg open avoids
# the 2-arg form's mode injection via a crafted filename, and a failed open
# now dies instead of silently writing to the inherited STDOUT.
if ($output = pop @ARGV) {
    open STDOUT, '>', $output or die "can't open $output: $!";
}
print $code;
close STDOUT or die "error closing STDOUT: $!";
| 33.844388 | 88 | 0.59908 |
73d64dc7400418806ab188b80260b2102e80a278 | 405 | t | Perl | t/utils-iterator.t | labdsf/amusewiki | c9be8dd0784c96d99efef0fa550ea2532fc2f749 | [
"Artistic-1.0"
]
| null | null | null | t/utils-iterator.t | labdsf/amusewiki | c9be8dd0784c96d99efef0fa550ea2532fc2f749 | [
"Artistic-1.0"
]
| 39 | 2020-03-19T23:35:06.000Z | 2020-05-07T20:22:01.000Z | t/utils-iterator.t | labdsf/amusewiki | c9be8dd0784c96d99efef0fa550ea2532fc2f749 | [
"Artistic-1.0"
]
| 1 | 2020-04-01T21:10:25.000Z | 2020-04-01T21:10:25.000Z | #!perl
use strict;
use warnings;
use Test::More tests => 12;
use AmuseWikiFarm::Utils::Iterator;

# Exercise the basic iterator contract: count, sequential next,
# exhaustion, and reset.
my $iter = AmuseWikiFarm::Utils::Iterator->new([1,2,3]);

# count reports the number of wrapped elements.
is $iter->count, 3, 'count is 3';

# Test::More::is() takes (got, expected): the value under test goes first
# so failure diagnostics label got/expected correctly (was swapped).
foreach my $expected (1..3) {
    is $iter->next, $expected, "next returns $expected";
}

# Once exhausted, next keeps returning a false value.
ok !$iter->next, 'next is false after exhaustion';
ok !$iter->next, 'next stays false after exhaustion';

# count is unaffected by iteration.
is $iter->count, 3, 'count still 3 after iteration';

# reset rewinds the iterator to the beginning.
$iter->reset;
foreach my $expected (1..3) {
    is $iter->next, $expected, "next returns $expected again after reset";
}

ok !$iter->next, 'exhausted again after reset walk';
ok !$iter->next, 'still exhausted';
| 17.608696 | 56 | 0.632099 |
ed24602e41ff72ecdc23395ee6a3e9478864a1ce | 1,319 | t | Perl | gnu/usr.bin/perl/cpan/version/t/06noop.t | ArrogantWombatics/openbsd-src | 75721e1d44322953075b7c4b89337b163a395291 | [
"BSD-3-Clause"
]
| 1 | 2019-02-16T13:29:23.000Z | 2019-02-16T13:29:23.000Z | gnu/usr.bin/perl/cpan/version/t/06noop.t | ArrogantWombatics/openbsd-src | 75721e1d44322953075b7c4b89337b163a395291 | [
"BSD-3-Clause"
]
| 1 | 2018-08-21T03:56:33.000Z | 2018-08-21T03:56:33.000Z | gnu/usr.bin/perl/cpan/version/t/06noop.t | ArrogantWombaticus/openbsd-src | 75721e1d44322953075b7c4b89337b163a395291 | [
"BSD-3-Clause"
]
| null | null | null | #! /usr/local/perl -w
# Before `make install' is performed this script should be runnable with
# `make test'. After `make install' it should work as `perl test.pl'

#########################

use Test::More qw/no_plan/;

BEGIN {
    use_ok('version', 0.9909);
}

# Every arithmetic operator must refuse to operate on a version object.
my $v1 = version->new('1.2');

# One closure per attempted operation; each is expected to die inside
# the eval with the "operation not supported" overload error.
my @attempts = (
    sub { $v1 = $v1 + 1 },
    sub { $v1 = $v1 - 1 },
    sub { $v1 = $v1 / 1 },
    sub { $v1 = $v1 * 1 },
    sub { $v1 = abs($v1) },
    sub { $v1 += 1 },
    sub { $v1 -= 1 },
    sub { $v1 /= 1 },
    sub { $v1 *= 1 },
);

for my $attempt (@attempts) {
    eval { $attempt->() };
    like $@, qr/operation not supported with version object/,
        'No math ops with version objects';
}
| 39.969697 | 93 | 0.67627 |
ed2e548e679106db826f148bc615b9a790a78ae3 | 2,555 | pl | Perl | fs/usr/lib/perl5/5.20.1/Unicode/Collate/Locale/ee.pl | AlanyTan/jor1k-sysroot | c80026ca284aa6a0e8edaafc45490e7c86e6e656 | [
"MIT"
]
| 21 | 2015-07-06T21:12:40.000Z | 2022-03-18T02:44:13.000Z | fs/usr/lib/perl5/5.20.1/Unicode/Collate/Locale/ee.pl | AlanyTan/jor1k-sysroot | c80026ca284aa6a0e8edaafc45490e7c86e6e656 | [
"MIT"
]
| 1 | 2018-08-21T03:56:33.000Z | 2018-08-21T03:56:33.000Z | fs/usr/lib/perl5/5.20.1/Unicode/Collate/Locale/ee.pl | AlanyTan/jor1k-sysroot | c80026ca284aa6a0e8edaafc45490e7c86e6e656 | [
"MIT"
]
| 30 | 2015-01-11T14:06:10.000Z | 2022-03-05T06:10:44.000Z | +{
   locale_version => 1.01,
# d-tail, open-e, f-hook, gamma, eng, open-o, v-hook don't require tailoring
# The entries below tailor the Ewe digraphs (dz, gb, kp, ny, ts), the
# letter x, and circumflexed vowels; each line uses the DUCET allkeys
# collation-element syntax.
entry => <<'ENTRY', # for DUCET v6.3.0
0302 ; [.0000.0029.0002] # COMBINING CIRCUMFLEX ACCENT
00E2 ; [.15EB.0020.0002][.0000.0029.0002] # LATIN SMALL LETTER A WITH CIRCUMFLEX
00C2 ; [.15EB.0020.0008][.0000.0029.0002] # LATIN CAPITAL LETTER A WITH CIRCUMFLEX
00EA ; [.1648.0020.0002][.0000.0029.0002] # LATIN SMALL LETTER E WITH CIRCUMFLEX
00CA ; [.1648.0020.0008][.0000.0029.0002] # LATIN CAPITAL LETTER E WITH CIRCUMFLEX
00EE ; [.16C9.0020.0002][.0000.0029.0002] # LATIN SMALL LETTER I WITH CIRCUMFLEX
00CE ; [.16C9.0020.0008][.0000.0029.0002] # LATIN CAPITAL LETTER I WITH CIRCUMFLEX
00F4 ; [.176D.0020.0002][.0000.0029.0002] # LATIN SMALL LETTER O WITH CIRCUMFLEX
00D4 ; [.176D.0020.0008][.0000.0029.0002] # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
00FB ; [.1832.0020.0002][.0000.0029.0002] # LATIN SMALL LETTER U WITH CIRCUMFLEX
00DB ; [.1832.0020.0008][.0000.0029.0002] # LATIN CAPITAL LETTER U WITH CIRCUMFLEX
0177 ; [.187C.0020.0002][.0000.0029.0002] # LATIN SMALL LETTER Y WITH CIRCUMFLEX
0176 ; [.187C.0020.0008][.0000.0029.0002] # LATIN CAPITAL LETTER Y WITH CIRCUMFLEX
0064 007A ; [.162E.0020.0002] # <LATIN SMALL LETTER D, LATIN SMALL LETTER Z>
0044 007A ; [.162E.0020.0007] # <LATIN CAPITAL LETTER D, LATIN SMALL LETTER Z>
0044 005A ; [.162E.0020.0008] # <LATIN CAPITAL LETTER D, LATIN CAPITAL LETTER Z>
0067 0062 ; [.168E.0020.0002] # <LATIN SMALL LETTER G, LATIN SMALL LETTER B>
0047 0062 ; [.168E.0020.0007] # <LATIN CAPITAL LETTER G, LATIN SMALL LETTER B>
0047 0042 ; [.168E.0020.0008] # <LATIN CAPITAL LETTER G, LATIN CAPITAL LETTER B>
0078 ; [.16B1.0020.0002] # LATIN SMALL LETTER X
0058 ; [.16B1.0020.0008] # LATIN CAPITAL LETTER X
006B 0070 ; [.16FC.0020.0002] # <LATIN SMALL LETTER K, LATIN SMALL LETTER P>
004B 0070 ; [.16FC.0020.0007] # <LATIN CAPITAL LETTER K, LATIN SMALL LETTER P>
004B 0050 ; [.16FC.0020.0008] # <LATIN CAPITAL LETTER K, LATIN CAPITAL LETTER P>
006E 0079 ; [.174C.0020.0002] # <LATIN SMALL LETTER N, LATIN SMALL LETTER Y>
004E 0079 ; [.174C.0020.0007] # <LATIN CAPITAL LETTER N, LATIN SMALL LETTER Y>
004E 0059 ; [.174C.0020.0008] # <LATIN CAPITAL LETTER N, LATIN CAPITAL LETTER Y>
0074 0073 ; [.1813.0020.0002] # <LATIN SMALL LETTER T, LATIN SMALL LETTER S>
0054 0073 ; [.1813.0020.0007] # <LATIN CAPITAL LETTER T, LATIN SMALL LETTER S>
0054 0053 ; [.1813.0020.0008] # <LATIN CAPITAL LETTER T, LATIN CAPITAL LETTER S>
ENTRY
};
| 69.054054 | 87 | 0.691194 |
73d28da3596bbe7c306940983a3f418bf596f539 | 1,872 | pl | Perl | Benchmarks/Recomputation/specOMP_install/bin/lib/unicore/lib/Sc/Beng.pl | sqsq87/NVC | 1ed478788978e3e85c219313cd55564d4037e242 | [
"MIT"
]
| null | null | null | Benchmarks/Recomputation/specOMP_install/bin/lib/unicore/lib/Sc/Beng.pl | sqsq87/NVC | 1ed478788978e3e85c219313cd55564d4037e242 | [
"MIT"
]
| null | null | null | Benchmarks/Recomputation/specOMP_install/bin/lib/unicore/lib/Sc/Beng.pl | sqsq87/NVC | 1ed478788978e3e85c219313cd55564d4037e242 | [
"MIT"
]
| null | null | null | # !!!!!!! DO NOT EDIT THIS FILE !!!!!!!
# This file is machine-generated by lib/unicore/mktables from the Unicode
# database, Version 5.2.0. Any changes made here will be lost!
# !!!!!!! INTERNAL PERL USE ONLY !!!!!!!
# This file is for internal use by the Perl program only. The format and even
# the name or existence of this file are subject to change without notice.
# Don't use it directly.
# This file returns the 92 code points in Unicode Version 5.2.0 that match
# any of the following regular expression constructs:
#
# \p{Script=Bengali}
# \p{Sc=Beng}
# \p{Is_Script=Bengali}
# \p{Is_Sc=Beng}
#
# \p{Bengali}
# \p{Is_Bengali}
# \p{Beng}
# \p{Is_Beng}
#
# Note that contrary to what you might expect, the above is NOT the same
# as \p{Block=Bengali}
#
# perluniprops.pod should be consulted for the syntax rules for any of these,
# including if adding or subtracting white space, underscore, and hyphen
# characters matters or doesn't matter, and other permissible syntactic
# variants. Upper/lower case distinctions never matter.
#
# A colon can be substituted for the equals sign, and anything to the left of
# the equals (or colon) can be combined with anything to the right. Thus,
# for example,
# \p{Is_Sc: Bengali}
# is also valid.
#
# The format of the lines of this file is: START\tSTOP\twhere START is the
# starting code point of the range, in hex; STOP is the ending point, or if
# omitted, the range has just one code point. Numbers in comments in
# [brackets] indicate how many code points are in the range.
return <<'END';
0981 0983 # [3]
0985 098C # [8]
098F 0990 # [2]
0993 09A8 # [22]
09AA 09B0 # [7]
09B2
09B6 09B9 # [4]
09BC 09C4 # [9]
09C7 09C8 # [2]
09CB 09CE # [4]
09D7
09DC 09DD # [2]
09DF 09E3 # [5]
09E6 09FB # [22]
END
| 32.275862 | 78 | 0.666132 |
ed253206bdcdbab0a547ba78dfc650b52565451c | 3,805 | pm | Perl | tests/installation/upgrade_select.pm | acerv/os-autoinst-distri-opensuse | 0e0cfca02f3a86323682c511a1efa926c7f0df3a | [
"FSFAP"
]
| 84 | 2015-02-10T16:01:52.000Z | 2022-03-10T21:20:14.000Z | tests/installation/upgrade_select.pm | acerv/os-autoinst-distri-opensuse | 0e0cfca02f3a86323682c511a1efa926c7f0df3a | [
"FSFAP"
]
| 8,065 | 2015-01-07T07:44:02.000Z | 2022-03-31T12:02:06.000Z | tests/installation/upgrade_select.pm | acerv/os-autoinst-distri-opensuse | 0e0cfca02f3a86323682c511a1efa926c7f0df3a | [
"FSFAP"
]
| 404 | 2015-01-14T14:42:44.000Z | 2022-03-30T07:38:08.000Z | # SUSE's openQA tests
#
# Copyright 2009-2013 Bernhard M. Wiedemann
# Copyright 2012-2020 SUSE LLC
# SPDX-License-Identifier: FSFAP
# Summary: Select existing partition(s) for upgrade
# Maintainer: QA SLE YaST team <qa-sle-yast@suse.de>
use base 'y2_installbase';
use strict;
use warnings;
use testapi;
use utils 'assert_screen_with_soft_timeout';
use version_utils qw(is_sle is_opensuse);
sub run {
    # Encrypted disks must be unlocked before the installed systems can be
    # probed for an upgrade target.
    if (get_var('ENCRYPT')) {
        assert_screen [qw(upgrade-unlock-disk upgrade-enter-password)];
        # New Storage NG dialog already contains the password entry.
        # The old dialog needed another click to proceed to the password entry:
        if (!match_has_tag("upgrade-enter-password")) {
            send_key 'alt-p';    # provide password
            assert_screen "upgrade-enter-password";
        }
        type_password;
        send_key $cmd{ok};
    }
    # hardware detection and waiting for updates from suse.com can take a while
    # Add tag 'all-partition' for poo#54050 - Need to show all Partition or the base partition for continous migration from SLE11SP4 won't be shown
    assert_screen_with_soft_timeout([qw(select-for-update all-partition)], timeout => 500, soft_timeout => 100, bugref => 'bsc#1028774');
    if (match_has_tag("all-partition")) {
        # 'alt-s': show all partitions, then continue.
        send_key 'alt-s';
        send_key $cmd{next};
    }
    if (match_has_tag("select-for-update")) {
        # Needle name is architecture specific, e.g. select-for-update-x86_64.
        my $arch = get_var("ARCH");
        assert_screen('select-for-update-' . "$arch");
        send_key $cmd{next};
    }
    # The SLE15-SP2 license page moved after registration.
    if (get_var('MEDIA_UPGRADE') || is_sle('<15-SP2') || is_opensuse) {
        assert_screen [qw(remove-repository license-agreement license-agreement-accepted)], 240;
        if (match_has_tag("license-agreement")) {
            send_key 'alt-a';    # accept the license agreement
            assert_screen('license-agreement-accepted');
            send_key $cmd{next};
            assert_screen "remove-repository";
        }
        send_key $cmd{next};
    }
    # Select migration target in sle15 upgrade
    if (is_sle '15+') {
        if (get_var('MEDIA_UPGRADE')) {
            # No 'unregistered system' warning message shown when using Full installation image on SLE15SP2
            if (is_sle('<15-SP2')) {
                assert_screen 'upgrade-unregistered-system';
                send_key $cmd{ok};
            }
        }
        else {
            # Ensure we are in 'Select the Migration Target' page
            assert_screen 'select-migration-target', 120;
            send_key 'alt-p';
            # Confirm default migration target matches correct base product
            my $migration_target_base = 'migration_target_' . lc(get_var('SLE_PRODUCT', 'sles')) . lc(get_var('VERSION'));
            # Scroll to the end to assert target base product if the text is longer than box
            assert_screen ["$migration_target_base", 'migration_target_hscrollbar'];
            if (match_has_tag 'migration_target_hscrollbar') {
                assert_and_click 'migration_target_hscrollbar';
                assert_screen "$migration_target_base";
            }
            # Confirm other migration targets match the same base product
            # Assume no more than 6 possible migration targets
            for (1 .. 5) {
                send_key 'down';
                unless (check_screen $migration_target_base, 30) {
                    record_info 'Likely error detected', 'Incorrect migration target? See https://fate.suse.com/323165', result => 'fail';
                    last;
                }
            }
            # Back to default migration target
            wait_screen_change {
                send_key 'home';
            };
            save_screenshot;
            send_key $cmd{next};
        }
    }
}
1;
| 40.052632 | 147 | 0.615769 |
73f3b6fadb1f10370980835322b247ca4b45242f | 788 | t | Perl | t/controller/author.t | omega/metacpan-web | b9f4fc52f395c6267d18c00da1efd14bfec290c3 | [
"Artistic-1.0"
]
| null | null | null | t/controller/author.t | omega/metacpan-web | b9f4fc52f395c6267d18c00da1efd14bfec290c3 | [
"Artistic-1.0"
]
| null | null | null | t/controller/author.t | omega/metacpan-web | b9f4fc52f395c6267d18c00da1efd14bfec290c3 | [
"Artistic-1.0"
]
| null | null | null | use strict;
use warnings;
use Test::More;
use MetaCPAN::Web::Test;

# Author-page controller checks: unknown author, case-normalising
# redirect, rendered page content, and that a listed release resolves.
test_psgi app, sub {
    my ($request) = @_;

    # Unknown author -> 404.
    my $response = $request->( GET '/author/DOESNTEXIST' );
    ok( $response, 'GET /author/DOESNTEXIST' );
    is( $response->code, 404, 'code 404' );

    # Lower-case name redirects to the canonical upper-case URL.
    $response = $request->( GET '/author/perler' );
    ok( $response, 'GET /author/perler' );
    is( $response->code, 301, 'code 301' );

    # Canonical URL renders the author page.
    $response = $request->( GET '/author/PERLER' );
    ok( $response, 'GET /author/PERLER' );
    is( $response->code, 200, 'code 200' );

    my $page = tx($response);
    $page->like( '/html/head/title', qr/PERLER/, 'title includes author name' );

    # The first release link in the releases table must itself resolve.
    my $release = $page->find_value('//table[1]//tbody/tr[1]/td[1]//a/@href');
    ok( $release, 'found a release' );

    $response = $request->( GET $release );
    ok( $response, "GET $release" );
    is( $response->code, 200, 'code 200' );
};

done_testing;
| 31.52 | 78 | 0.55203 |
73d0f54878a41484c89c7c126283cda3a5f58cf7 | 3,211 | pl | Perl | intranet/cgi-bin/admin/clone-rules.pl | cse-library/koha | 84c0968e5a43328817d5eee912763797949b9efb | [
"CECILL-B"
]
| null | null | null | intranet/cgi-bin/admin/clone-rules.pl | cse-library/koha | 84c0968e5a43328817d5eee912763797949b9efb | [
"CECILL-B"
]
| null | null | null | intranet/cgi-bin/admin/clone-rules.pl | cse-library/koha | 84c0968e5a43328817d5eee912763797949b9efb | [
"CECILL-B"
]
| null | null | null | #!/usr/bin/perl
# vim: et ts=4 sw=4
# Copyright BibLibre
#
# This file is part of Koha.
#
# Koha is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# Koha is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Koha; if not, see <http://www.gnu.org/licenses>.
# This script clones issuing rules from a library to another
# parameters :
# - frombranch : the branch we want to clone issuing rules from
# - tobranch : the branch we want to clone issuing rules to
#
# The script can be called with one of the parameters, both or none
use Modern::Perl;
use CGI qw ( -utf8 );
use C4::Context;
use C4::Output;
use C4::Auth;
use C4::Koha;
use C4::Debug;
my $input = CGI->new;    # arrow form; avoids ambiguous indirect-object "new CGI"
my $dbh   = C4::Context->dbh;

# Check authorisation and fetch the page template.
my ($template, $loggedinuser, $cookie)
    = get_template_and_user({template_name => "admin/clone-rules.tt",
                            query => $input,
                            type => "intranet",
                            authnotrequired => 0,
                            flagsrequired => {parameters => 'parameters_remaining_permissions'},
                            debug => 1,
                            });

# Source and target libraries; either, both or none may be supplied.
my $frombranch = $input->param("frombranch");
my $tobranch   = $input->param("tobranch");

$template->param(frombranch => $frombranch) if ($frombranch);
$template->param(tobranch => $tobranch) if ($tobranch);

# Only clone when both ends are known.
# NOTE(review): C4::Context->dbh may be configured with RaiseError, in
# which case a failing execute() dies instead of returning false; the
# $error bookkeeping below only matters when it does not.
if ($frombranch && $tobranch) {
    my $error;

    # First, we create a temporary table with the rules we want to clone.
    # (TEMPORARY tables are session-local, so concurrent requests cannot clash.)
    my $query = "CREATE TEMPORARY TABLE tmpissuingrules ENGINE=memory SELECT * FROM issuingrules WHERE branchcode=?";
    my $sth = $dbh->prepare($query);
    my $res = $sth->execute($frombranch);
    $error = 1 unless ($res);

    if (!$error) {
        # We modify these rules according to the new branchcode
        $query = "UPDATE tmpissuingrules SET branchcode=? WHERE branchcode=?";
        $sth = $dbh->prepare($query);
        $res = $sth->execute($tobranch, $frombranch);
        $error = 1 unless ($res);
    }
    if (!$error) {
        # We delete the rules for the existing branchcode
        $query = "DELETE FROM issuingrules WHERE branchcode=?";
        $sth = $dbh->prepare($query);
        $res = $sth->execute($tobranch);
        $error = 1 unless ($res);
    }
    if (!$error) {
        # We insert the new rules from our temporary table
        $query = "INSERT INTO issuingrules SELECT * FROM tmpissuingrules WHERE branchcode=?";
        $sth = $dbh->prepare($query);
        $res = $sth->execute($tobranch);
        $error = 1 unless ($res);
    }

    # Finally, we drop our temporary table (best effort; it would be
    # discarded at the end of the connection anyway).
    $query = "DROP TABLE tmpissuingrules";
    $sth = $dbh->prepare($query);
    $res = $sth->execute();

    $template->param(result => "1");
    $template->param(error => $error);
}

output_html_with_http_headers $input, $cookie, $template->output;
| 31.480392 | 117 | 0.639676 |
ed208263d3a1427355c2fdfd5517c7d284b8024a | 3,520 | t | Perl | t/node/healthcheck-stop-checker.t | wangxiaochuang/apisix | 4efc1202dc7a97e928411f883ea42a5ab446ce03 | [
"ECL-2.0",
"Apache-2.0"
]
| 1 | 2019-10-29T02:04:21.000Z | 2019-10-29T02:04:21.000Z | t/node/healthcheck-stop-checker.t | wangxiaochuang/apisix | 4efc1202dc7a97e928411f883ea42a5ab446ce03 | [
"ECL-2.0",
"Apache-2.0"
]
| null | null | null | t/node/healthcheck-stop-checker.t | wangxiaochuang/apisix | 4efc1202dc7a97e928411f883ea42a5ab446ce03 | [
"ECL-2.0",
"Apache-2.0"
]
| 1 | 2020-02-01T13:28:35.000Z | 2020-02-01T13:28:35.000Z | BEGIN {
if ($ENV{TEST_NGINX_CHECK_LEAK}) {
$SkipReason = "unavailable for the hup tests";
} else {
$ENV{TEST_NGINX_USE_HUP} = 1;
undef $ENV{TEST_NGINX_USE_STAP};
}
}
use t::APISIX 'no_plan';

# Global Test::Nginx configuration shared by every case in this file.
master_on();
repeat_each(1);
log_level('info');
no_root_location();
# Keep the declared order: later cases reuse routes created by earlier ones.
no_shuffle();
worker_connections(256);

run_tests();
__DATA__
=== TEST 1: set route(two healthy upstream nodes)
--- request
PUT /apisix/admin/routes/1
{"uri":"/server_port","upstream":{"type":"roundrobin","nodes":{"127.0.0.1:1980":1,"127.0.0.1:1981":1},"checks":{"active":{"http_path":"/status","host":"foo.com","healthy":{"interval":1,"successes":1},"unhealthy":{"interval":1,"http_failures":2}}}}}
--- error_code_like: ^20\d$
--- no_error_log
[error]
=== TEST 2: update + delete
--- config
location /t {
content_by_lua_block {
local t = require("lib.test_admin").test
local code, status, body = t('/apisix/admin/routes/1',
"PUT",
[[{"uri":"/server_port","upstream":{"type":"roundrobin","nodes":{"127.0.0.1:1980":1,"127.0.0.1:1981":1},"checks":{"active":{"http_path":"/status","healthy":{"interval":1,"successes":1},"unhealthy":{"interval":1,"http_failures":2}}}}}]]
)
if code < 300 then
code = 200
end
ngx.say("1 code: ", code)
ngx.sleep(0.2)
local code, body = t('/server_port', "GET")
ngx.say("2 code: ", code)
ngx.sleep(0.2)
code = t('/apisix/admin/routes/1', "DELETE")
ngx.say("3 code: ", code)
ngx.sleep(0.2)
local code, body = t('/server_port', "GET")
ngx.say("4 code: ", code)
}
}
--- request
GET /t
--- response_body
1 code: 200
2 code: 200
3 code: 200
4 code: 404
--- grep_error_log eval
qr/create new checker: table: 0x|try to release checker: table: 0x/
--- grep_error_log_out
create new checker: table: 0x
try to release checker: table: 0x
=== TEST 3: set route(two healthy upstream nodes)
--- request
PUT /apisix/admin/routes/1
{"uri":"/server_port","upstream":{"type":"roundrobin","nodes":{"127.0.0.1:1980":1,"127.0.0.1:1981":1},"checks":{"active":{"http_path":"/status","host":"foo.com","healthy":{"interval":1,"successes":1},"unhealthy":{"interval":1,"http_failures":2}}}}}
--- error_code: 201
--- no_error_log
[error]
=== TEST 4: update
--- config
location /t {
content_by_lua_block {
local t = require("lib.test_admin").test
local code, body = t('/server_port', "GET")
ngx.say("1 code: ", code)
local code, status, body = t('/apisix/admin/routes/1',
"PUT",
[[{"uri":"/server_port","upstream":{"type":"roundrobin","nodes":{"127.0.0.1:1980":1,"127.0.0.1:1981":1},"checks":{"active":{"http_path":"/status","healthy":{"interval":1,"successes":1},"unhealthy":{"interval":1,"http_failures":2}}}}}]]
)
if code < 300 then
code = 200
end
ngx.say("2 code: ", code)
ngx.sleep(0.2)
local code, body = t('/server_port', "GET")
ngx.say("3 code: ", code)
}
}
--- request
GET /t
--- response_body
1 code: 200
2 code: 200
3 code: 200
--- grep_error_log eval
qr/create new checker: table: 0x|try to release checker: table: 0x/
--- grep_error_log_out
create new checker: table: 0x
try to release checker: table: 0x
create new checker: table: 0x
| 28.387097 | 251 | 0.571023 |
ed0d84b89856696cd7c2daa18ed928464b8d6159 | 1,892 | pm | Perl | auto-lib/Paws/CloudWatchLogs/DisassociateKmsKey.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
]
| 164 | 2015-01-08T14:58:53.000Z | 2022-02-20T19:16:24.000Z | auto-lib/Paws/CloudWatchLogs/DisassociateKmsKey.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
]
| 348 | 2015-01-07T22:08:38.000Z | 2022-01-27T14:34:44.000Z | auto-lib/Paws/CloudWatchLogs/DisassociateKmsKey.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
]
| 87 | 2015-04-22T06:29:47.000Z | 2021-09-29T14:45:55.000Z |
# Auto-generated Paws request class describing the CloudWatch Logs
# "DisassociateKmsKey" API call; see the POD below for usage details.
package Paws::CloudWatchLogs::DisassociateKmsKey;
use Moose;
# Required name of the log group whose KMS key association is removed;
# serialized into the request body as "logGroupName".
has LogGroupName => (is => 'ro', isa => 'Str', traits => ['NameInRequest'], request_name => 'logGroupName' , required => 1);
use MooseX::ClassAttribute;
# Class-level call metadata consumed by the Paws request dispatcher.
class_has _api_call => (isa => 'Str', is => 'ro', default => 'DisassociateKmsKey');
class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::API::Response');
class_has _result_key => (isa => 'Str', is => 'ro');
1;
### main pod documentation begin ###
=head1 NAME
Paws::CloudWatchLogs::DisassociateKmsKey - Arguments for method DisassociateKmsKey on L<Paws::CloudWatchLogs>
=head1 DESCRIPTION
This class represents the parameters used for calling the method DisassociateKmsKey on the
L<Amazon CloudWatch Logs|Paws::CloudWatchLogs> service. Use the attributes of this class
as arguments to method DisassociateKmsKey.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to DisassociateKmsKey.
=head1 SYNOPSIS
my $logs = Paws->service('CloudWatchLogs');
$logs->DisassociateKmsKey(
LogGroupName => 'MyLogGroupName',
);
Values for attributes that are native types (Int, String, Float, etc) can passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instance the underlying object.
For the AWS API documentation, see L<https://docs.aws.amazon.com/goto/WebAPI/logs/DisassociateKmsKey>
=head1 ATTRIBUTES
=head2 B<REQUIRED> LogGroupName => Str
The name of the log group.
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method DisassociateKmsKey in L<Paws::CloudWatchLogs>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
| 31.533333 | 249 | 0.736786 |
ed0a7a89ecb616a06369bb457573ccbe5c4a5d19 | 1,051 | t | Perl | test/blackbox-tests/test-cases/dynamic-dependencies/read-macro-produces-dyn-deps.t | mndrix/dune | 30b84ff370351b13f05db34fd952dfe5d0249bcb | [
"MIT"
]
| 245 | 2016-12-02T14:13:32.000Z | 2018-01-14T20:00:40.000Z | test/blackbox-tests/test-cases/dynamic-dependencies/read-macro-produces-dyn-deps.t | mndrix/dune | 30b84ff370351b13f05db34fd952dfe5d0249bcb | [
"MIT"
]
| 380 | 2017-01-28T18:46:52.000Z | 2018-01-17T13:34:51.000Z | test/blackbox-tests/test-cases/dynamic-dependencies/read-macro-produces-dyn-deps.t | mndrix/dune | 30b84ff370351b13f05db34fd952dfe5d0249bcb | [
"MIT"
]
| 59 | 2016-12-02T13:58:19.000Z | 2018-01-06T18:23:02.000Z | Tests for dynamic dependencies computed from the `%{read:...}` family of macros
$ cat > dune-project <<EOF
> (lang dune 3.0)
> EOF
Define rules have dynamic file dependencies
$ cat > dune <<EOF
> (rule
> (target deps.d)
> (action
> (system "echo 'depA\ndepB' > %{target}")))
>
> (rule
> (target depA)
> (action
> (system "echo contentsA > %{target}")))
>
> (rule
> (target depB)
> (action
> (system "echo contentsB > %{target}")))
> EOF
Now we define a rule that reads `deps.d` to figure out what to build.
$ cat >> dune <<EOF
> (rule
> (target output)
> (deps %{read-lines:./deps.d})
> (action
> (progn
> (system "cat %{deps}")
> (system "echo %{deps} > %{target}"))))
> EOF
Building `./output` should now produce a file with contents "depA depB"
$ dune build ./output --display=short
sh deps.d
sh depA
sh depB
sh output
contentsA
contentsB
sh output
$ cat ./_build/default/output
depA depB
| 20.607843 | 79 | 0.556613 |
ed1db08f9f759039061438ebdb5c5b9cbcac9fe5 | 1,305 | al | Perl | benchmark/benchmarks/FASP-benchmarks/data/small-world-2/smallworld2-0189-100-90-359.al | krzysg/FaspHeuristic | 1929c40e3fbc49e68b04acfc5522539a18758031 | [
"MIT"
]
| null | null | null | benchmark/benchmarks/FASP-benchmarks/data/small-world-2/smallworld2-0189-100-90-359.al | krzysg/FaspHeuristic | 1929c40e3fbc49e68b04acfc5522539a18758031 | [
"MIT"
]
| null | null | null | benchmark/benchmarks/FASP-benchmarks/data/small-world-2/smallworld2-0189-100-90-359.al | krzysg/FaspHeuristic | 1929c40e3fbc49e68b04acfc5522539a18758031 | [
"MIT"
]
| null | null | null | 1 12 21 38 76
2 1 10 55 78
3 12 66 69 75 78
4 20 46
5 45 70
6 12 18 29 34 52 53 54
7 25 26 53 57 80
8 15 60
9 44 50 82
10 44 60 68 80
11 66
12 9 32 36 68 69
13 51 52 58 64 76
14 20 23 52 56 68
15 2 35 48 73
16 14 28 56 62
17 24 36 51 60 62
18 15 21 53 55 55 65 87
19 2 23 24 44 64 66 68 76
20 8 17 40 51
21 26 27 70 72 86 90
22 1 15 23 26 44 50 55 62 76 84
23 3 56 70 70 73 78 82
24 21 60
25 82
26 20 78
27 9 16 31
28 29 31 47 60 75 89
29 14 20 59
30 32 51 79 87
31 17 44 80
32 58
33 11 19 74 82
34 14 28 45 55
35 13 28 48 52 68
36 11 50 57 67 71 83 87 89
37 15 61
38 11 43 65 66 73 81
39 5 31 66 72
40 12
41 62 70 84
42 13 85
43 2 19 28 40 71
44 11 11 45 56
45 15 43 64 68 76
46 13 20 21 24 47 59
47 21 33 65
48 1 17 36 66 77
49 16 29 39 64 84
50 5 14 19 39 47 64
51 52 66 82 85
52 16 21 57
53 8 14 21 29 45 47 78
54 28 39 68 87
55 4 57 87 87
56 47
57 5 20 45 50 79
58 32 49 60 74 83
59 46 50 80 89
60 16 37 42 59 77
61 30
62 38 67
63 16 79 83 88
64 71
65 3 14 77
67 6 69 90
68 15
69 12 36 52 82
70 8 23 29 32 69
71 8 17 69 82
72 52 54 55 57
73 59 68 86 89 90 90
74 49 63 83
75 7 22 54 54
76 11 61 66
77 2 11 75
78 6 6 10 13 20 29 31 32 59
79 20 29 31 33 39
80 4 35 35 37 66 87
81 12 54 72
82 69
83 7 74 76
84 19 23 62 81
85 4 8 19 29 53 78
86 80
87 16 62
88 73 78 80 81 83
89 14 17 35 48 88
90 4 9 54 69 | 14.662921 | 31 | 0.657471 |
ed2bfef0606fe3e4d2b74a2a1690ff4429a99b8b | 5,887 | t | Perl | swig/perl/t/03.t | tjcorona/gdal-svn | bdaf4b0d16a0803548e68ee8617b796782208743 | [
"BSD-3-Clause"
]
| null | null | null | swig/perl/t/03.t | tjcorona/gdal-svn | bdaf4b0d16a0803548e68ee8617b796782208743 | [
"BSD-3-Clause"
]
| null | null | null | swig/perl/t/03.t | tjcorona/gdal-svn | bdaf4b0d16a0803548e68ee8617b796782208743 | [
"BSD-3-Clause"
]
| null | null | null | use strict;
use Scalar::Util 'blessed';
use Test::More qw(no_plan);
BEGIN { use_ok('Geo::GDAL') };
# Geo::GDAL::Band
my $dataset = Geo::GDAL::Driver('GTiff')->Create(Name => '/vsimem/test.gtiff', Width => 4, Height => 6);
my $band = $dataset->Band;
$band->CategoryNames('a','b');
my @list = $band->CategoryNames;
ok(($list[0] eq 'a' and $list[1] eq 'b'), "CategoryNames");
@list = $band->GetBlockSize;
ok(($list[0] == 4 and $list[1] == 6), "GetBlockSize");
@list = $band->Size;
ok(($list[0] == 4 and $list[1] == 6), "Size");
my $ds = $band->GetDataset;
ok((defined($ds) and blessed($ds) and $ds->isa('Geo::GDAL::Dataset')), "GetDataset");
$band->Unit('metri');
$ds = $band->Unit();
ok($ds eq 'metri', "Unit");
$band->ScaleAndOffset(0.1, 5);
@list = $band->ScaleAndOffset();
ok(($list[0] == 0.1 and $list[1] == 5), "ScaleAndOffset");
my $nr = $band->GetBandNumber;
ok($nr == 1, "GetBandNumber");
my $rat = Geo::GDAL::RasterAttributeTable->new;
$band->AttributeTable($rat);
$rat = $band->AttributeTable();
ok((defined($rat) and blessed($rat) and $rat->isa('Geo::GDAL::RasterAttributeTable')), "RasterAttributeTable");
my $c = $band->ColorInterpretation;
my %c = map {$_=>1} Geo::GDAL::Band::ColorInterpretations;
ok($c{$c}, "Get ColorInterpretation");
$c = (keys %c)[0];
$band->ColorInterpretation($c);
ok($band->ColorInterpretation eq $c, "Set ColorInterpretation");
@list = $band->Domains;
ok(@list > 1, "Domains");
$c = Geo::GDAL::ColorTable->new;
$c->ColorEntry(0, 100, 50, 150, 300);
@list = $c->ColorTable;
ok($list[0][0] == 100, "Colortable");
$band->SetColorTable($c);
$c = $band->GetColorTable();
ok((defined($c) and blessed($c) and $c->isa('Geo::GDAL::ColorTable')), "Get ColorTable");
@list = $c->ColorTable;
ok($list[0][0] == 100, "Set and Get Colortable");
$dataset = Geo::GDAL::Driver('MEM')->Create(Width => 4, Height => 4);
$dataset->AddBand('Int32');
$band = $dataset->Band(2);
$band-> Fill(123);
my $data = $band->ReadTile;
ok($data->[0][0] == 123, "Fill with integer");
for my $row (@$data) {
# print "@$row\n";
}
$dataset->AddBand('Float64');
$band = $dataset->Band(3);
$band-> Fill(123.45);
$data = $band->ReadTile;
ok($data->[0][0] == 123.45, "Fill with real");
for my $row (@$data) {
# print "@$row\n";
}
#$dataset->AddBand('CFloat64');
#$band = $dataset->Band(4);
#$band->Fill(123.45, 10);
#$data = $band->ReadTile;
#for my $row (@$data) {
# print "@$row\n";
#}
#use Statistics::Descriptive;
#my $stat = Statistics::Descriptive::Full->new();
$band = $dataset->Band(3);
for my $y (0..3) {
for my $x (0..3) {
$data->[$y][$x] = rand 10;
#$stat->add_data($data->[$y][$x]);
}
}
$band->WriteTile($data);
for my $row (@$data) {
#print "@$row\n";
}
my $x;
my ($min, $max, $mean, $stddev);
#print $stat->mean()," ",$stat->standard_deviation(),"\n";
@list = $band->ComputeRasterMinMax;
ok(@list == 2, "ComputeRasterMinMax");
$x = $band->GetMinimum;
ok(!defined($x), "GetMinimum");
@list = $band->GetMinimum;
ok(@list == 2, "GetMinimum");
$x = $band->GetMaximum;
ok(!defined($x), "GetMaximum");
@list = $band->GetMaximum;
ok(@list == 2, "GetMaximum");
@list = $band->ComputeBandStats;
ok(@list == 2, "ComputeBandStats");
$band->ComputeStatistics(1);
$x = $band->GetMaximum;
ok(defined($x), "GetMaximum");
@list = $band->GetStatistics(1,0);
ok(@list == 4, "GetStatistics");
$band->SetStatistics(0, 1, 2, 3);
@list = $band->GetStatistics(0,0);
ok($list[3] == 3, "SetStatistics");
@list = $band->ComputeBandStats;
ok(@list == 2, "ComputeBandStats");
my $foo;
my $n = 0;
@list = $band->ComputeStatistics(0, sub {$foo = $_[2] unless $foo; $n++; 1}, 'foo');
ok(@list == 4, "ComputeStatistics");
ok(($n > 0 and $foo eq 'foo'), "ComputeStatistics callback");
Geo::GDAL::VSIF::Unlink('/vsimem/test.gtiff');
$dataset = Geo::GDAL::Driver('GTiff')->Create(Name => '/vsimem/test.gtiff', Width => 4, Height => 6);
$band = $dataset->Band;
$c = $band->Checksum;
ok($c == 0, "Checksum");
$c = $band->NoDataValue;
ok(!defined($c), "Get NoDataValue");
$band->NoDataValue(10);
$c = $band->NoDataValue;
ok($c == 10, "Set NoDataValue");
# set one pixel no data
$data = $band->ReadTile;
$data->[2][2] = 10;
$band->WriteTile($data);
my @f = $band->MaskFlags;
ok(@f > 0, "MaskFlags");
@f = $band->GetMaskFlags;
ok($f[0] eq 'NoData', "GetMaskFlags");
# fill the one pixel
$band->FillNodata();
$data = $band->ReadTile;
ok($data->[2][2] == 0, "FillNodata, got $data->[2][2]");
$band->CreateMaskBand('PerDataset');
@f = $band->GetMaskFlags;
ok($f[0] eq 'PerDataset', "CreateMaskBand");
#@list = Geo::GDAL::VSIF::ReadDir('/vsimem/');
#print "files @list\n"; # includes .msk
# $m is not valid here any more, how to test?
Geo::GDAL::VSIF::Unlink('/vsimem/test.gtiff');
$dataset = Geo::GDAL::Driver('GTiff')->Create(Name => '/vsimem/test.gtiff', Bands => 2);
$dataset->BuildOverviews('average', [2,4]);
my $band1 = $dataset->Band(1);
my $band2 = $dataset->Band(2);
$band1->RegenerateOverviews([$band2]); #scalar resampling, subref callback, scalar callback_data
$band1->RegenerateOverview($band2); #scalar resampling, subref callback, scalar callback_data
my $c = $band1->GetOverviewCount;
ok($c == 2, "GetOverviewCount, got $c");
my $o = $band1->GetOverview(1);
ok(defined($o), "GetOverview");
my $b = $band1->HasArbitraryOverviews;
ok(!$b, "HasArbitraryOverviews");
__END__
public Geo::OGR::Layer Contours (scalar DataSource, hashref LayerConstructor, scalar ContourInterval, scalar ContourBase, arrayref FixedLevels, scalar NoDataValue, scalar IDField, scalar ElevField, subref callback, scalar callback_data)
public list SetDefaultHistogram (scalar min, scalar max, scalar histogram)
public list GetDefaultHistogram (scalar force=1, subref callback=undef, scalar callback_data=undef)
public list GetHistogram (hash parameters)
public scalar ReadRaster (hash params)
public method WriteRaster (hash params)
| 28.439614 | 237 | 0.638356 |
73fe4d461dc1ba99a4cac0817c9f226ceaeeba89 | 221 | pm | Perl | lib/BP/Loader/Mapper/Autoload/DocumentationGenerator.pm | inab/BP-Schema-tools | 40d28957a436b3637051aea0936b45476e58f4e2 | [
"Apache-2.0"
]
| null | null | null | lib/BP/Loader/Mapper/Autoload/DocumentationGenerator.pm | inab/BP-Schema-tools | 40d28957a436b3637051aea0936b45476e58f4e2 | [
"Apache-2.0"
]
| null | null | null | lib/BP/Loader/Mapper/Autoload/DocumentationGenerator.pm | inab/BP-Schema-tools | 40d28957a436b3637051aea0936b45476e58f4e2 | [
"Apache-2.0"
]
| null | null | null | #!/usr/bin/perl -w
use strict;

use BP::Loader::Mapper;

# Stub mapper module: its only job is to register itself with
# BP::Loader::Mapper under the 'gendoc' section name, so the
# documentation generator can be selected by that key.
package BP::Loader::Mapper::DocumentationGenerator;

# Configuration section name this mapper is registered under.
our $SECTION;

BEGIN {
	$SECTION = 'gendoc';
	# Make BP::Loader::Mapper dispatch the 'gendoc' section to this package.
	$BP::Loader::Mapper::storage_names{$SECTION}=__PACKAGE__;
};

1;
| 13.8125 | 58 | 0.696833 |
ed1cc6a9e8039342bb9632e4c04dbf00d4cffac9 | 2,071 | t | Perl | Moose-t-failing/060_compat/003_foreign_inheritence.t | gluesys/p5-Mouse | b3805f0444a98a4c746e427ebf668a8189ca0f3c | [
"Artistic-1.0"
]
| 21 | 2015-05-13T04:45:53.000Z | 2019-07-25T09:43:23.000Z | Moose-t-failing/060_compat/003_foreign_inheritence.t | gluesys/p5-Mouse | b3805f0444a98a4c746e427ebf668a8189ca0f3c | [
"Artistic-1.0"
]
| 48 | 2015-01-19T11:01:58.000Z | 2019-08-13T09:48:13.000Z | Moose-t-failing/060_compat/003_foreign_inheritence.t | gluesys/p5-Mouse | b3805f0444a98a4c746e427ebf668a8189ca0f3c | [
"Artistic-1.0"
]
| 20 | 2015-03-02T04:21:52.000Z | 2019-08-14T03:02:00.000Z | #!/usr/bin/perl
# This is automatically generated by author/import-moose-test.pl.
# DO NOT EDIT THIS FILE. ANY CHANGES WILL BE LOST!!!
use t::lib::MooseCompat;
use strict;
use warnings;
use Test::More;
$TODO = q{Mouse is not yet completed};
use Test::Exception;
{
package Elk;
use strict;
use warnings;
sub new {
my $class = shift;
bless { no_moose => "Elk" } => $class;
}
sub no_moose { $_[0]->{no_moose} }
package Foo::Mouse;
use Mouse;
extends 'Elk';
has 'moose' => ( is => 'ro', default => 'Foo' );
sub new {
my $class = shift;
my $super = $class->SUPER::new(@_);
return $class->meta->new_object( '__INSTANCE__' => $super, @_ );
}
__PACKAGE__->meta->make_immutable( inline_constructor => 0, debug => 0 );
package Bucket;
use metaclass 'Mouse::Meta::Class';
__PACKAGE__->meta->add_attribute(
'squeegee' => ( accessor => 'squeegee' ) );
package Old::Bucket::Nose;
# see http://www.moosefoundation.org/moose_facts.htm
use Mouse;
extends 'Bucket';
package MyBase;
sub foo { }
package Custom::Meta1;
use base qw(Mouse::Meta::Class);
package Custom::Meta2;
use base qw(Mouse::Meta::Class);
package SubClass1;
use metaclass 'Custom::Meta1';
use Mouse;
extends 'MyBase';
package SubClass2;
use metaclass 'Custom::Meta2';
use Mouse;
# XXX FIXME subclassing meta-attrs and immutable-ing the subclass fails
}
# A Mouse class (Foo::Mouse) inheriting from a plain Perl class (Elk):
# both the foreign method and the Mouse-defined attribute must work on
# the same instance.
my $foo_moose = Foo::Mouse->new();
isa_ok( $foo_moose, 'Foo::Mouse' );
isa_ok( $foo_moose, 'Elk' );
is( $foo_moose->no_moose, 'Elk',
    '... got the right value from the Elk method' );
is( $foo_moose->moose, 'Foo',
    '... got the right value from the Foo::Mouse method' );

# Making a class immutable must not die when it extends a class built
# directly with Mouse::Meta::Class.
lives_ok {
    Old::Bucket::Nose->meta->make_immutable( debug => 0 );
}
'Immutability on Mouse class extending Mouse::Meta class ok';

# Re-assigning superclasses at runtime must also work with a custom
# metaclass.
lives_ok {
    SubClass2->meta->superclasses('MyBase');
}
'Can subclass the same non-Mouse class twice with different metaclasses';

done_testing;
| 21.350515 | 77 | 0.623853 |
73e7fd82152e41b14387466368ce1e9625669ed2 | 338 | pm | Perl | pdu-perl-api/Raritan/RPC/auth/Type.pm | gregoa/raritan-pdu-json-rpc-sdk | 76df982462742b97b52872aa34630140f5df7e58 | [
"BSD-3-Clause"
]
| 1 | 2021-04-29T23:04:17.000Z | 2021-04-29T23:04:17.000Z | pdu-perl-api/Raritan/RPC/auth/Type.pm | gregoa/raritan-pdu-json-rpc-sdk | 76df982462742b97b52872aa34630140f5df7e58 | [
"BSD-3-Clause"
]
| null | null | null | pdu-perl-api/Raritan/RPC/auth/Type.pm | gregoa/raritan-pdu-json-rpc-sdk | 76df982462742b97b52872aa34630140f5df7e58 | [
"BSD-3-Clause"
]
| 2 | 2020-06-20T16:21:23.000Z | 2021-09-28T19:04:44.000Z | # SPDX-License-Identifier: BSD-3-Clause
#
# Copyright 2020 Raritan Inc. All rights reserved.
#
# This file was generated by IdlC from AuthManager.idl.
use strict;
package Raritan::RPC::auth::Type;
use constant LOCAL => 0;
use constant RADIUS => 1;
use constant KERBEROS => 2;
use constant TACACS_PLUS => 3;
use constant LDAP => 4;
1;
| 18.777778 | 55 | 0.721893 |
ed0fc444b168120ec4828c85f949b9c2a8847b0e | 3,558 | pl | Perl | LPKviz/bin/Debug/swipl/xpce/prolog/lib/doc/url_fetch.pl | mmacinkov/Quiz | 53443f83cc599e494cf2883001c60d9f3e926e8b | [
"MIT"
]
| 1 | 2021-02-12T12:33:01.000Z | 2021-02-12T12:33:01.000Z | LPKviz/bin/x64/Debug/swipl/xpce/prolog/lib/doc/url_fetch.pl | mmacinkov/Quiz | 53443f83cc599e494cf2883001c60d9f3e926e8b | [
"MIT"
]
| 1 | 2021-02-12T13:11:42.000Z | 2021-02-12T13:11:42.000Z | LPKviz/bin/x64/Debug/swipl/xpce/prolog/lib/doc/url_fetch.pl | mmacinkov/Quiz | 53443f83cc599e494cf2883001c60d9f3e926e8b | [
"MIT"
]
| null | null | null | /* $Id$
Part of XPCE --- The SWI-Prolog GUI toolkit
Author: Jan Wielemaker and Anjo Anjewierden
E-mail: jan@swi.psy.uva.nl
WWW: http://www.swi.psy.uva.nl/projects/xpce/
Copyright (C): 1985-2002, University of Amsterdam
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
As a special exception, if you link this library with other files,
compiled with a Free Software compiler, to produce an executable, this
library does not by itself cause the resulting executable to be covered
by the GNU General Public License. This exception does not however
invalidate any other reasons why the executable file might be covered by
the GNU General Public License.
*/
:- module(url_fetch,
[ get_url_to_file/2, % +URL, -File
get_url_to_file/3 % +Base, +Url, -File
]).
:- use_module(library(url)).
:- use_module(library(http_client)).
:- use_module(util).
%%	get_url_to_file(+URL, -File)
%
%	Fetch URL and unify File with the name of a local file holding
%	its data.  Prints a warning and fails if the fetch fails.

get_url_to_file(URL, File) :-
	parse_url(URL, Parts),
	get_data(Parts, File), !.
get_url_to_file(URL, _) :-
	print_message(warning, url_load_failed(URL)),
	fail.

%%	get_url_to_file(+Base, +URL, -File)
%
%	As get_url_to_file/2, but URL may be relative to Base.

get_url_to_file(Base, URL, File) :-
	parse_url(Base, URL, Parts),
	get_data(Parts, File), !.
get_url_to_file(Base, URL, _) :-
	print_message(warning, url_load_failed(Base, URL)),
	fail.
/*******************************
* FETCH DATA *
*******************************/
%%	get_data(+Parts, -File)
%
%	Fetch the document described by the parsed URL Parts into File.
%	file:// URLs are used in place; http:// URLs are downloaded
%	into the page cache using the XPCE http_client object.

get_data(Parts, File) :-			% file
	memberchk(protocol(file), Parts), !,
	memberchk(path(File), Parts).
get_data(Parts, File) :-			% HTTP
	memberchk(protocol(http), Parts), !,
	cache_file(Parts, File),
	new(F, file(File)),
	send(F, kind, binary),		% exact byte copy; no newline translation
	send(F, open, write),
	new(Client, http_client(Parts)),
	send(Client, fetch_data, F),
	send(F, close),
	free(Client).
/*******************************
* PAGE CACHE *
*******************************/
%%	cache_dir(-Dir)
%
%	Directory of the page cache: ~/.http_cache on Unix, the local
%	directory `cache' elsewhere.

cache_dir(Dir) :-
	current_prolog_flag(unix, true), !,
	expand_file_name('~/.http_cache', [Dir]).
cache_dir(cache).

%%	cache_file(+ParsedURL, -File)
%
%	Map a parsed URL onto its cache file
%	<dir>/<protocol>/<host>:<port><path>, appending `.index' for
%	directory URLs, and ensure the parent directory exists.

cache_file(ParsedURL, File) :-
	cache_dir(Dir),
	option(protocol(Protocol), ParsedURL),
	option(host(Host), ParsedURL),
	option(port(Port), ParsedURL, 80),	% default HTTP port
	option(path(Path0), ParsedURL),
	(   sub_atom(Path0, _, _, 0, /)
	->  atom_concat(Path0, '.index', Path)
	;   Path = Path0
	),
	sformat(S, '~w/~w/~w:~w~w', [Dir, Protocol, Host, Port, Path]),
	string_to_atom(S, File),
	ensure_dir_for_file(File),
	debug(cache, 'Cache file is \'~w\'~n', File).
%%	ensure_dir_for_file(+File)
%
%	Create the directory that must hold File (and its parents).

ensure_dir_for_file(File) :-
	file_directory_name(File, Dir),
	ensure_dir(Dir).

%	ensure_dir(+Dir): create Dir recursively, stopping as soon as an
%	existing ancestor is found.
ensure_dir(Dir) :-
	exists_directory(Dir), !.
ensure_dir(Dir) :-
	file_directory_name(Dir, Parent),
	ensure_dir(Parent),
	send(directory(Dir), make). % should be Prolog
% Translate the url_load_failed/1,2 warning terms raised above into
% human-readable messages.
:- multifile
	prolog:message/3.

prolog:message(url_load_failed(Base, URL)) -->
	[ 'Failed to get data from ~p (base=~p)'-[URL,Base] ].
prolog:message(url_load_failed(URL)) -->
	[ 'Failed to get data from ~p'-[URL] ].
| 30.672414 | 82 | 0.664699 |
73dd37b17445dac90a53af54c9ccf6a43d61ef65 | 3,146 | pl | Perl | ext/CURT/bb0/startClientCurt.pl | TeamSPoon/logicmoo_nlu | 5c3e5013a3048da7d68a8a43476ad84d3ea4bb47 | [
"MIT"
]
| 6 | 2020-01-27T12:08:02.000Z | 2020-02-28T19:30:28.000Z | pack/logicmoo_nlu/prolog/CURT/bb0/startClientCurt.pl | logicmoo/old_logicmoo_workspace | 44025b6e389e2f2f7d86b46c1301cab0604bba26 | [
"MIT"
]
| 1 | 2020-02-02T13:12:34.000Z | 2020-02-02T13:12:34.000Z | pack/logicmoo_nlu/prolog/CURT/bb0/startClientCurt.pl | logicmoo/old_logicmoo_workspace | 44025b6e389e2f2f7d86b46c1301cab0604bba26 | [
"MIT"
]
| null | null | null | /*************************************************************************
name: startClientCurt.pl (Volume 1, Chapter 6)
version: April 27, 2001
description: Wrapper that starts all clients and servers for clientCurt
author: Patrick Blackburn & Johan Bos
*************************************************************************/
:- use_module(library(system),[exec/3,kill/2,sleep/1]).
:- use_module(library('linda/client')).
:- dynamic startedpid/1.
/*========================================================================
Start Module
========================================================================*/
%%	startModule(+Command, +Sleep)
%
%	Start one client with shell command Command, remember both the
%	exec'd pid and the pid the client posts on the blackboard, then
%	wait Sleep seconds for it to settle.

startModule(E,Sleep):-
	exec(E,[std,std,std],Pid1),
	assert(startedpid(Pid1)),
	in(pid(Pid2)),		% blocks until the client publishes pid/1
	assert(startedpid(Pid2)),
	sleep(Sleep).
/*========================================================================
Init Linda Client
========================================================================*/
%%	init_client
%
%	Read the Linda server's host and port from the file written by
%	curtServer.pl and connect to the blackboard as a client.

init_client:-
	open('/tmp/curtHostPort',read,Stream),
	read(Stream,host_port(Host,Port)),
	close(Stream),
	linda_client(Host:Port).
/*========================================================================
Print Proces IDs
========================================================================*/
%%	printPIDs
%
%	Print every recorded process id.  Failure-driven loop over the
%	startedpid/1 facts; the final clause makes the call succeed.

printPIDs:-
	startedpid(Pid),
	format('~nProces ID: ~p',[Pid]),
	fail.
printPIDs.
/*========================================================================
Main Predicate
========================================================================*/
%%	start
%
%	Launch the Linda (blackboard) server, read back its pid, connect
%	as a client, then start the CURT client plus the model builder
%	(MACE) and theorem provers (BLIKSEM, OTTER) in their own xterms.

start:-
	exec('sicstus -l curtServer.pl &',[std,std,std],Pid1),
	sleep(1),		% give the server time to write its pid/port files
	assert(startedpid(Pid1)),
	open('/tmp/curtServerPid',read,Stream),
	read(Stream,pid(Pid2)),
	close(Stream),
	assert(startedpid(Pid2)),
	init_client,
	startModule('xterm -T CURT -sb -e sicstus -l clientCurt.pl &',1),
	startModule('xterm -T MACE -sb -e sicstus -l clientMace.pl &',1),
	startModule('xterm -T BLIKSEM -sb -e sicstus -l clientBliksem.pl &',1),
	startModule('xterm -T OTTER -sb -e sicstus -l clientOtter.pl &',1),
	printPIDs,
	nl, write('READY!').
/*========================================================================
Kill Processes
========================================================================*/
%%	kill
%
%	Kill every process recorded in startedpid/1 (the first clause is
%	retried until retract/1 fails), then close the Linda connection.

kill:-
	retract(startedpid(Pid)),
	format('~nKilling process ~p.',[Pid]),
	on_exception(_,
	kill(Pid,1),
	format('~nNot able to kill process Pid ~p.',[Pid])
	),
	kill.
kill:-
	close_client.
/*========================================================================
Info
========================================================================*/
info:-
format('~n * CURT (LINDA VERSION), by Patrick Blackburn & Johan Bos *',[]),
format('~n * Type "start." to launch Linda server and all clients *',[]),
format('~n * Type "kill." to kill the server and clients. *~n~n',[]).
/*========================================================================
Print info when starting
========================================================================*/
:- info.
| 28.6 | 82 | 0.382708 |
73ec01c5a388bd30604e49ec729c361a81108abb | 14,230 | pl | Perl | perl/lib/unicore/To/Sc.pl | JyothsnaMididoddi26/xampp | 8f34d7fa7c2e6cc37fe4ece5e6886dc4e5c0757b | [
"Apache-2.0"
]
| 1 | 2017-01-31T08:49:16.000Z | 2017-01-31T08:49:16.000Z | xampp/perl/lib/unicore/To/Sc.pl | silent88/Biographies-du-Fontenay | af4567cb6b78003daa72c37b5ac9f5611a360a9f | [
"MIT"
]
| 2 | 2020-07-17T00:13:41.000Z | 2021-05-08T17:01:54.000Z | perl/lib/unicore/To/Sc.pl | Zolhyp/Plan | 05dbf6a650cd54f855d1731dee70098c5c587339 | [
"Apache-2.0"
]
| null | null | null | # !!!!!!! DO NOT EDIT THIS FILE !!!!!!!
# This file is machine-generated by mktables from the Unicode
# database, Version 6.1.0. Any changes made here will be lost!
# !!!!!!! INTERNAL PERL USE ONLY !!!!!!!
# This file is for internal use by core Perl only. The format and even the
# name or existence of this file are subject to change without notice. Don't
# use it directly.
# The name this swash is to be known by, with the format of the mappings in
# the main body of the table, and what all code points missing from this file
# map to.
$utf8::SwashInfo{'ToSc'}{'format'} = 's'; # string
$utf8::SwashInfo{'ToSc'}{'missing'} = 'Unknown';
return <<'END';
0000 0040 Common
0041 005A Latin
005B 0060 Common
0061 007A Latin
007B 00A9 Common
00AA Latin
00AB 00B9 Common
00BA Latin
00BB 00BF Common
00C0 00D6 Latin
00D7 Common
00D8 00F6 Latin
00F7 Common
00F8 02B8 Latin
02B9 02DF Common
02E0 02E4 Latin
02E5 02E9 Common
02EA 02EB Bopomofo
02EC 02FF Common
0300 036F Inherited
0370 0373 Greek
0374 Common
0375 0377 Greek
037A 037D Greek
037E Common
0384 Greek
0385 Common
0386 Greek
0387 Common
0388 038A Greek
038C Greek
038E 03A1 Greek
03A3 03E1 Greek
03E2 03EF Coptic
03F0 03FF Greek
0400 0484 Cyrillic
0485 0486 Inherited
0487 0527 Cyrillic
0531 0556 Armenian
0559 055F Armenian
0561 0587 Armenian
0589 Common
058A Armenian
058F Armenian
0591 05C7 Hebrew
05D0 05EA Hebrew
05F0 05F4 Hebrew
0600 0604 Arabic
0606 060B Arabic
060C Common
060D 061A Arabic
061B Common
061E Arabic
061F Common
0620 063F Arabic
0640 Common
0641 064A Arabic
064B 0655 Inherited
0656 065E Arabic
065F Inherited
0660 0669 Common
066A 066F Arabic
0670 Inherited
0671 06DC Arabic
06DD Common
06DE 06FF Arabic
0700 070D Syriac
070F 074A Syriac
074D 074F Syriac
0750 077F Arabic
0780 07B1 Thaana
07C0 07FA Nko
0800 082D Samaritan
0830 083E Samaritan
0840 085B Mandaic
085E Mandaic
08A0 Arabic
08A2 08AC Arabic
08E4 08FE Arabic
0900 0950 Devanagari
0951 0952 Inherited
0953 0963 Devanagari
0964 0965 Common
0966 0977 Devanagari
0979 097F Devanagari
0981 0983 Bengali
0985 098C Bengali
098F 0990 Bengali
0993 09A8 Bengali
09AA 09B0 Bengali
09B2 Bengali
09B6 09B9 Bengali
09BC 09C4 Bengali
09C7 09C8 Bengali
09CB 09CE Bengali
09D7 Bengali
09DC 09DD Bengali
09DF 09E3 Bengali
09E6 09FB Bengali
0A01 0A03 Gurmukhi
0A05 0A0A Gurmukhi
0A0F 0A10 Gurmukhi
0A13 0A28 Gurmukhi
0A2A 0A30 Gurmukhi
0A32 0A33 Gurmukhi
0A35 0A36 Gurmukhi
0A38 0A39 Gurmukhi
0A3C Gurmukhi
0A3E 0A42 Gurmukhi
0A47 0A48 Gurmukhi
0A4B 0A4D Gurmukhi
0A51 Gurmukhi
0A59 0A5C Gurmukhi
0A5E Gurmukhi
0A66 0A75 Gurmukhi
0A81 0A83 Gujarati
0A85 0A8D Gujarati
0A8F 0A91 Gujarati
0A93 0AA8 Gujarati
0AAA 0AB0 Gujarati
0AB2 0AB3 Gujarati
0AB5 0AB9 Gujarati
0ABC 0AC5 Gujarati
0AC7 0AC9 Gujarati
0ACB 0ACD Gujarati
0AD0 Gujarati
0AE0 0AE3 Gujarati
0AE6 0AF1 Gujarati
0B01 0B03 Oriya
0B05 0B0C Oriya
0B0F 0B10 Oriya
0B13 0B28 Oriya
0B2A 0B30 Oriya
0B32 0B33 Oriya
0B35 0B39 Oriya
0B3C 0B44 Oriya
0B47 0B48 Oriya
0B4B 0B4D Oriya
0B56 0B57 Oriya
0B5C 0B5D Oriya
0B5F 0B63 Oriya
0B66 0B77 Oriya
0B82 0B83 Tamil
0B85 0B8A Tamil
0B8E 0B90 Tamil
0B92 0B95 Tamil
0B99 0B9A Tamil
0B9C Tamil
0B9E 0B9F Tamil
0BA3 0BA4 Tamil
0BA8 0BAA Tamil
0BAE 0BB9 Tamil
0BBE 0BC2 Tamil
0BC6 0BC8 Tamil
0BCA 0BCD Tamil
0BD0 Tamil
0BD7 Tamil
0BE6 0BFA Tamil
0C01 0C03 Telugu
0C05 0C0C Telugu
0C0E 0C10 Telugu
0C12 0C28 Telugu
0C2A 0C33 Telugu
0C35 0C39 Telugu
0C3D 0C44 Telugu
0C46 0C48 Telugu
0C4A 0C4D Telugu
0C55 0C56 Telugu
0C58 0C59 Telugu
0C60 0C63 Telugu
0C66 0C6F Telugu
0C78 0C7F Telugu
0C82 0C83 Kannada
0C85 0C8C Kannada
0C8E 0C90 Kannada
0C92 0CA8 Kannada
0CAA 0CB3 Kannada
0CB5 0CB9 Kannada
0CBC 0CC4 Kannada
0CC6 0CC8 Kannada
0CCA 0CCD Kannada
0CD5 0CD6 Kannada
0CDE Kannada
0CE0 0CE3 Kannada
0CE6 0CEF Kannada
0CF1 0CF2 Kannada
0D02 0D03 Malayalam
0D05 0D0C Malayalam
0D0E 0D10 Malayalam
0D12 0D3A Malayalam
0D3D 0D44 Malayalam
0D46 0D48 Malayalam
0D4A 0D4E Malayalam
0D57 Malayalam
0D60 0D63 Malayalam
0D66 0D75 Malayalam
0D79 0D7F Malayalam
0D82 0D83 Sinhala
0D85 0D96 Sinhala
0D9A 0DB1 Sinhala
0DB3 0DBB Sinhala
0DBD Sinhala
0DC0 0DC6 Sinhala
0DCA Sinhala
0DCF 0DD4 Sinhala
0DD6 Sinhala
0DD8 0DDF Sinhala
0DF2 0DF4 Sinhala
0E01 0E3A Thai
0E3F Common
0E40 0E5B Thai
0E81 0E82 Lao
0E84 Lao
0E87 0E88 Lao
0E8A Lao
0E8D Lao
0E94 0E97 Lao
0E99 0E9F Lao
0EA1 0EA3 Lao
0EA5 Lao
0EA7 Lao
0EAA 0EAB Lao
0EAD 0EB9 Lao
0EBB 0EBD Lao
0EC0 0EC4 Lao
0EC6 Lao
0EC8 0ECD Lao
0ED0 0ED9 Lao
0EDC 0EDF Lao
0F00 0F47 Tibetan
0F49 0F6C Tibetan
0F71 0F97 Tibetan
0F99 0FBC Tibetan
0FBE 0FCC Tibetan
0FCE 0FD4 Tibetan
0FD5 0FD8 Common
0FD9 0FDA Tibetan
1000 109F Myanmar
10A0 10C5 Georgian
10C7 Georgian
10CD Georgian
10D0 10FA Georgian
10FB Common
10FC 10FF Georgian
1100 11FF Hangul
1200 1248 Ethiopic
124A 124D Ethiopic
1250 1256 Ethiopic
1258 Ethiopic
125A 125D Ethiopic
1260 1288 Ethiopic
128A 128D Ethiopic
1290 12B0 Ethiopic
12B2 12B5 Ethiopic
12B8 12BE Ethiopic
12C0 Ethiopic
12C2 12C5 Ethiopic
12C8 12D6 Ethiopic
12D8 1310 Ethiopic
1312 1315 Ethiopic
1318 135A Ethiopic
135D 137C Ethiopic
1380 1399 Ethiopic
13A0 13F4 Cherokee
1400 167F Canadian_Aboriginal
1680 169C Ogham
16A0 16EA Runic
16EB 16ED Common
16EE 16F0 Runic
1700 170C Tagalog
170E 1714 Tagalog
1720 1734 Hanunoo
1735 1736 Common
1740 1753 Buhid
1760 176C Tagbanwa
176E 1770 Tagbanwa
1772 1773 Tagbanwa
1780 17DD Khmer
17E0 17E9 Khmer
17F0 17F9 Khmer
1800 1801 Mongolian
1802 1803 Common
1804 Mongolian
1805 Common
1806 180E Mongolian
1810 1819 Mongolian
1820 1877 Mongolian
1880 18AA Mongolian
18B0 18F5 Canadian_Aboriginal
1900 191C Limbu
1920 192B Limbu
1930 193B Limbu
1940 Limbu
1944 194F Limbu
1950 196D Tai_Le
1970 1974 Tai_Le
1980 19AB New_Tai_Lue
19B0 19C9 New_Tai_Lue
19D0 19DA New_Tai_Lue
19DE 19DF New_Tai_Lue
19E0 19FF Khmer
1A00 1A1B Buginese
1A1E 1A1F Buginese
1A20 1A5E Tai_Tham
1A60 1A7C Tai_Tham
1A7F 1A89 Tai_Tham
1A90 1A99 Tai_Tham
1AA0 1AAD Tai_Tham
1B00 1B4B Balinese
1B50 1B7C Balinese
1B80 1BBF Sundanese
1BC0 1BF3 Batak
1BFC 1BFF Batak
1C00 1C37 Lepcha
1C3B 1C49 Lepcha
1C4D 1C4F Lepcha
1C50 1C7F Ol_Chiki
1CC0 1CC7 Sundanese
1CD0 1CD2 Inherited
1CD3 Common
1CD4 1CE0 Inherited
1CE1 Common
1CE2 1CE8 Inherited
1CE9 1CEC Common
1CED Inherited
1CEE 1CF3 Common
1CF4 Inherited
1CF5 1CF6 Common
1D00 1D25 Latin
1D26 1D2A Greek
1D2B Cyrillic
1D2C 1D5C Latin
1D5D 1D61 Greek
1D62 1D65 Latin
1D66 1D6A Greek
1D6B 1D77 Latin
1D78 Cyrillic
1D79 1DBE Latin
1DBF Greek
1DC0 1DE6 Inherited
1DFC 1DFF Inherited
1E00 1EFF Latin
1F00 1F15 Greek
1F18 1F1D Greek
1F20 1F45 Greek
1F48 1F4D Greek
1F50 1F57 Greek
1F59 Greek
1F5B Greek
1F5D Greek
1F5F 1F7D Greek
1F80 1FB4 Greek
1FB6 1FC4 Greek
1FC6 1FD3 Greek
1FD6 1FDB Greek
1FDD 1FEF Greek
1FF2 1FF4 Greek
1FF6 1FFE Greek
2000 200B Common
200C 200D Inherited
200E 2064 Common
206A 2070 Common
2071 Latin
2074 207E Common
207F Latin
2080 208E Common
2090 209C Latin
20A0 20B9 Common
20D0 20F0 Inherited
2100 2125 Common
2126 Greek
2127 2129 Common
212A 212B Latin
212C 2131 Common
2132 Latin
2133 214D Common
214E Latin
214F 215F Common
2160 2188 Latin
2189 Common
2190 23F3 Common
2400 2426 Common
2440 244A Common
2460 26FF Common
2701 27FF Common
2800 28FF Braille
2900 2B4C Common
2B50 2B59 Common
2C00 2C2E Glagolitic
2C30 2C5E Glagolitic
2C60 2C7F Latin
2C80 2CF3 Coptic
2CF9 2CFF Coptic
2D00 2D25 Georgian
2D27 Georgian
2D2D Georgian
2D30 2D67 Tifinagh
2D6F 2D70 Tifinagh
2D7F Tifinagh
2D80 2D96 Ethiopic
2DA0 2DA6 Ethiopic
2DA8 2DAE Ethiopic
2DB0 2DB6 Ethiopic
2DB8 2DBE Ethiopic
2DC0 2DC6 Ethiopic
2DC8 2DCE Ethiopic
2DD0 2DD6 Ethiopic
2DD8 2DDE Ethiopic
2DE0 2DFF Cyrillic
2E00 2E3B Common
2E80 2E99 Han
2E9B 2EF3 Han
2F00 2FD5 Han
2FF0 2FFB Common
3000 3004 Common
3005 Han
3006 Common
3007 Han
3008 3020 Common
3021 3029 Han
302A 302D Inherited
302E 302F Hangul
3030 3037 Common
3038 303B Han
303C 303F Common
3041 3096 Hiragana
3099 309A Inherited
309B 309C Common
309D 309F Hiragana
30A0 Common
30A1 30FA Katakana
30FB 30FC Common
30FD 30FF Katakana
3105 312D Bopomofo
3131 318E Hangul
3190 319F Common
31A0 31BA Bopomofo
31C0 31E3 Common
31F0 31FF Katakana
3200 321E Hangul
3220 325F Common
3260 327E Hangul
327F 32CF Common
32D0 32FE Katakana
3300 3357 Katakana
3358 33FF Common
3400 4DB5 Han
4DC0 4DFF Common
4E00 9FCC Han
A000 A48C Yi
A490 A4C6 Yi
A4D0 A4FF Lisu
A500 A62B Vai
A640 A697 Cyrillic
A69F Cyrillic
A6A0 A6F7 Bamum
A700 A721 Common
A722 A787 Latin
A788 A78A Common
A78B A78E Latin
A790 A793 Latin
A7A0 A7AA Latin
A7F8 A7FF Latin
A800 A82B Syloti_Nagri
A830 A839 Common
A840 A877 Phags_Pa
A880 A8C4 Saurashtra
A8CE A8D9 Saurashtra
A8E0 A8FB Devanagari
A900 A92F Kayah_Li
A930 A953 Rejang
A95F Rejang
A960 A97C Hangul
A980 A9CD Javanese
A9CF A9D9 Javanese
A9DE A9DF Javanese
AA00 AA36 Cham
AA40 AA4D Cham
AA50 AA59 Cham
AA5C AA5F Cham
AA60 AA7B Myanmar
AA80 AAC2 Tai_Viet
AADB AADF Tai_Viet
AAE0 AAF6 Meetei_Mayek
AB01 AB06 Ethiopic
AB09 AB0E Ethiopic
AB11 AB16 Ethiopic
AB20 AB26 Ethiopic
AB28 AB2E Ethiopic
ABC0 ABED Meetei_Mayek
ABF0 ABF9 Meetei_Mayek
AC00 D7A3 Hangul
D7B0 D7C6 Hangul
D7CB D7FB Hangul
F900 FA6D Han
FA70 FAD9 Han
FB00 FB06 Latin
FB13 FB17 Armenian
FB1D FB36 Hebrew
FB38 FB3C Hebrew
FB3E Hebrew
FB40 FB41 Hebrew
FB43 FB44 Hebrew
FB46 FB4F Hebrew
FB50 FBC1 Arabic
FBD3 FD3D Arabic
FD3E FD3F Common
FD50 FD8F Arabic
FD92 FDC7 Arabic
FDF0 FDFC Arabic
FDFD Common
FE00 FE0F Inherited
FE10 FE19 Common
FE20 FE26 Inherited
FE30 FE52 Common
FE54 FE66 Common
FE68 FE6B Common
FE70 FE74 Arabic
FE76 FEFC Arabic
FEFF Common
FF01 FF20 Common
FF21 FF3A Latin
FF3B FF40 Common
FF41 FF5A Latin
FF5B FF65 Common
FF66 FF6F Katakana
FF70 Common
FF71 FF9D Katakana
FF9E FF9F Common
FFA0 FFBE Hangul
FFC2 FFC7 Hangul
FFCA FFCF Hangul
FFD2 FFD7 Hangul
FFDA FFDC Hangul
FFE0 FFE6 Common
FFE8 FFEE Common
FFF9 FFFD Common
10000 1000B Linear_B
1000D 10026 Linear_B
10028 1003A Linear_B
1003C 1003D Linear_B
1003F 1004D Linear_B
10050 1005D Linear_B
10080 100FA Linear_B
10100 10102 Common
10107 10133 Common
10137 1013F Common
10140 1018A Greek
10190 1019B Common
101D0 101FC Common
101FD Inherited
10280 1029C Lycian
102A0 102D0 Carian
10300 1031E Old_Italic
10320 10323 Old_Italic
10330 1034A Gothic
10380 1039D Ugaritic
1039F Ugaritic
103A0 103C3 Old_Persian
103C8 103D5 Old_Persian
10400 1044F Deseret
10450 1047F Shavian
10480 1049D Osmanya
104A0 104A9 Osmanya
10800 10805 Cypriot
10808 Cypriot
1080A 10835 Cypriot
10837 10838 Cypriot
1083C Cypriot
1083F Cypriot
10840 10855 Imperial_Aramaic
10857 1085F Imperial_Aramaic
10900 1091B Phoenician
1091F Phoenician
10920 10939 Lydian
1093F Lydian
10980 1099F Meroitic_Hieroglyphs
109A0 109B7 Meroitic_Cursive
109BE 109BF Meroitic_Cursive
10A00 10A03 Kharoshthi
10A05 10A06 Kharoshthi
10A0C 10A13 Kharoshthi
10A15 10A17 Kharoshthi
10A19 10A33 Kharoshthi
10A38 10A3A Kharoshthi
10A3F 10A47 Kharoshthi
10A50 10A58 Kharoshthi
10A60 10A7F Old_South_Arabian
10B00 10B35 Avestan
10B39 10B3F Avestan
10B40 10B55 Inscriptional_Parthian
10B58 10B5F Inscriptional_Parthian
10B60 10B72 Inscriptional_Pahlavi
10B78 10B7F Inscriptional_Pahlavi
10C00 10C48 Old_Turkic
10E60 10E7E Arabic
11000 1104D Brahmi
11052 1106F Brahmi
11080 110C1 Kaithi
110D0 110E8 Sora_Sompeng
110F0 110F9 Sora_Sompeng
11100 11134 Chakma
11136 11143 Chakma
11180 111C8 Sharada
111D0 111D9 Sharada
11680 116B7 Takri
116C0 116C9 Takri
12000 1236E Cuneiform
12400 12462 Cuneiform
12470 12473 Cuneiform
13000 1342E Egyptian_Hieroglyphs
16800 16A38 Bamum
16F00 16F44 Miao
16F50 16F7E Miao
16F8F 16F9F Miao
1B000 Katakana
1B001 Hiragana
1D000 1D0F5 Common
1D100 1D126 Common
1D129 1D166 Common
1D167 1D169 Inherited
1D16A 1D17A Common
1D17B 1D182 Inherited
1D183 1D184 Common
1D185 1D18B Inherited
1D18C 1D1A9 Common
1D1AA 1D1AD Inherited
1D1AE 1D1DD Common
1D200 1D245 Greek
1D300 1D356 Common
1D360 1D371 Common
1D400 1D454 Common
1D456 1D49C Common
1D49E 1D49F Common
1D4A2 Common
1D4A5 1D4A6 Common
1D4A9 1D4AC Common
1D4AE 1D4B9 Common
1D4BB Common
1D4BD 1D4C3 Common
1D4C5 1D505 Common
1D507 1D50A Common
1D50D 1D514 Common
1D516 1D51C Common
1D51E 1D539 Common
1D53B 1D53E Common
1D540 1D544 Common
1D546 Common
1D54A 1D550 Common
1D552 1D6A5 Common
1D6A8 1D7CB Common
1D7CE 1D7FF Common
1EE00 1EE03 Arabic
1EE05 1EE1F Arabic
1EE21 1EE22 Arabic
1EE24 Arabic
1EE27 Arabic
1EE29 1EE32 Arabic
1EE34 1EE37 Arabic
1EE39 Arabic
1EE3B Arabic
1EE42 Arabic
1EE47 Arabic
1EE49 Arabic
1EE4B Arabic
1EE4D 1EE4F Arabic
1EE51 1EE52 Arabic
1EE54 Arabic
1EE57 Arabic
1EE59 Arabic
1EE5B Arabic
1EE5D Arabic
1EE5F Arabic
1EE61 1EE62 Arabic
1EE64 Arabic
1EE67 1EE6A Arabic
1EE6C 1EE72 Arabic
1EE74 1EE77 Arabic
1EE79 1EE7C Arabic
1EE7E Arabic
1EE80 1EE89 Arabic
1EE8B 1EE9B Arabic
1EEA1 1EEA3 Arabic
1EEA5 1EEA9 Arabic
1EEAB 1EEBB Arabic
1EEF0 1EEF1 Arabic
1F000 1F02B Common
1F030 1F093 Common
1F0A0 1F0AE Common
1F0B1 1F0BE Common
1F0C1 1F0CF Common
1F0D1 1F0DF Common
1F100 1F10A Common
1F110 1F12E Common
1F130 1F16B Common
1F170 1F19A Common
1F1E6 1F1FF Common
1F200 Hiragana
1F201 1F202 Common
1F210 1F23A Common
1F240 1F248 Common
1F250 1F251 Common
1F300 1F320 Common
1F330 1F335 Common
1F337 1F37C Common
1F380 1F393 Common
1F3A0 1F3C4 Common
1F3C6 1F3CA Common
1F3E0 1F3F0 Common
1F400 1F43E Common
1F440 Common
1F442 1F4F7 Common
1F4F9 1F4FC Common
1F500 1F53D Common
1F540 1F543 Common
1F550 1F567 Common
1F5FB 1F640 Common
1F645 1F64F Common
1F680 1F6C5 Common
1F700 1F773 Common
20000 2A6D6 Han
2A700 2B734 Han
2B740 2B81D Han
2F800 2FA1D Han
E0001 Common
E0020 E007F Common
E0100 E01EF Inherited
END
| 19.049531 | 78 | 0.778848 |
ed252f52f8ce7475e203a6dc7440499e37df102b | 566 | t | Perl | generators/new/view/view.ejs.t | waplet/beep-vue-app | 4699190ff3f4973954322d15f173041576f717c3 | [
"MIT"
]
| 1 | 2021-11-25T19:21:20.000Z | 2021-11-25T19:21:20.000Z | generators/new/view/view.ejs.t | waplet/beep-vue-app | 4699190ff3f4973954322d15f173041576f717c3 | [
"MIT"
]
| 43 | 2020-06-09T13:46:26.000Z | 2021-10-01T05:33:49.000Z | generators/new/view/view.ejs.t | beepnl/beep-vue-app-legacy-api | da27878c09aeb110e23c25d848b19002049b1261 | [
"MIT"
]
| 2 | 2021-11-01T17:33:34.000Z | 2021-11-25T18:27:02.000Z | ---
to: "src/router/views/<%= h.changeCase.pascal(name) %>.vue"
---
<%
const fileName = h.changeCase.pascal(name)
const importName = h.changeCase.pascal(fileName)
const titleName = h.changeCase.title(name)
%><script>
import Layout from '@layouts/main.vue'
export default {
page: {
title: '<%= titleName %>',
meta: [{ name: 'description', content: 'The <%= titleName %> page.' }],
},
components: { Layout }
}
</script>
<template>
<Layout>
<%= titleName %>
</Layout>
</template>
<%
<style lang="scss" module>
@import '@design';
</style>
| 18.866667 | 75 | 0.621908 |
73fd6fd7fea345cd3e81b1c459d757f5e798d0cf | 5,194 | pl | Perl | plugins/mountdev2.pl | jgru/RegRipper3.0 | c61d937b392c73f7719357bd39b7fefe1e8b48f2 | [
"MIT"
]
| 239 | 2020-05-28T18:15:21.000Z | 2022-03-15T12:19:27.000Z | plugins/mountdev2.pl | jgru/RegRipper3.0 | c61d937b392c73f7719357bd39b7fefe1e8b48f2 | [
"MIT"
]
| 26 | 2020-06-25T14:48:21.000Z | 2021-11-24T18:10:34.000Z | plugins/mountdev2.pl | jgru/RegRipper3.0 | c61d937b392c73f7719357bd39b7fefe1e8b48f2 | [
"MIT"
]
| 65 | 2020-06-01T10:35:43.000Z | 2022-03-31T03:47:24.000Z | #-----------------------------------------------------------
# mountdev2.pl
# Plugin for Registry Ripper; Access System hive file to get the
# MountedDevices
#
# Change history
# 20200517 - updated date output format
# 20140721 - update provided by Espen Øyslebø <eoyslebo@gmail.com>
# 20130530 - updated to output Disk Signature in correct format, thanks to
# info provided by Tom Yarrish (see ref.)
# 20120403 - commented out time stamp info from volume GUIDs, added
# listing of unique MAC addresses
# 20120330 - updated to parse the Volume GUIDs to get the time stamps
# 20091116 - changed output
#
# References
# http://blogs.technet.com/b/markrussinovich/archive/2011/11/08/3463572.aspx
#
# copyright 2020 QAR, LLC
# Author: H. Carvey, keydet89@yahoo.com
#-----------------------------------------------------------
package mountdev2;
use strict;
# Required for 32-bit versions of perl that don't support unpack Q
# update provided by Espen Øyslebø <eoyslebo@gmail.com>
my $little;
BEGIN { $little= unpack "C", pack "S", 1; }
sub squad {
my $str = @_;
my $big;
if(! eval { $big= unpack( "Q", $str ); 1; }) {
my($lo, $hi)= unpack $little ? "Ll" : "lL", $str;
($hi, $lo)= ($lo, $hi) if (!$little);
if ($hi < 0) {
$hi = ~$hi;
$lo = ~$lo;
$big = -1 -$lo - $hi*(1 + ~0);
}
else {
$big = $lo + $hi*(1 + ~0);
}
if($big+1 == $big) {
warn "Forced to approximate!\n";
}
}
return $big;
}
my %config = (hive => "System",
hasShortDescr => 1,
hasDescr => 0,
hasRefs => 0,
osmask => 22,
version => 20200517);
sub getConfig{return %config}
sub getShortDescr {
return "Return contents of System hive MountedDevices key";
}
sub getDescr{}
sub getRefs {}
sub getHive {return $config{hive};}
sub getVersion {return $config{version};}
my $VERSION = getVersion();
sub pluginmain {
my $class = shift;
my $hive = shift;
::logMsg("Launching mountdev2 v.".$VERSION);
::rptMsg("");
::rptMsg("mountdev2 v.".$VERSION); # banner
::rptMsg("(".getHive().") ".getShortDescr()."\n"); # banner
my $reg = Parse::Win32Registry->new($hive);
my $root_key = $reg->get_root_key;
my $key_path = 'MountedDevices';
my $key;
my (%md,%dos,%vol,%offset,%macs);
if ($key = $root_key->get_subkey($key_path)) {
::rptMsg($key_path);
::rptMsg("LastWrite time = ".::getDateFromEpoch($key->get_timestamp())."Z");
::rptMsg("");
my @vals = $key->get_list_of_values();
if (scalar(@vals) > 0) {
foreach my $v (@vals) {
my $data = $v->get_data();
my $len = length($data);
if ($len == 12) {
my $sig = _translateBinary(substr($data,0,4));
# Section added by Espen Øyslebø <eoyslebo@gmail.com>
# gets the offset, which can be a value larger than what
# can be handled by 32-bit Perl
my $o; #offset
eval {
$o = ( unpack ("Q", substr($data,4,8)) );
};
if ($@) {
$o = (squad(substr($data,4,8)));
}
$vol{$v->get_name()} = $sig;
$offset{$v->get_name()} = $o;
}
elsif ($len > 12) {
$data =~ s/\00//g;
push(@{$md{$data}},$v->get_name());
}
else {
::logMsg("mountdev2 v.".$VERSION."\tData length = $len");
}
}
::rptMsg(sprintf "%-50s %-20s %20s","Volume","Disk Sig","Offset");
::rptMsg(sprintf "%-50s %-20s %20s","-------","--------","--------");
foreach my $v (sort keys %vol) {
my $str = sprintf "%-50s %-20s %20s",$v,$vol{$v},$offset{$v};
::rptMsg($str);
}
::rptMsg("");
foreach my $v (sort keys %vol) {
next unless ($v =~ m/^\\\?\?\\Volume\{/);
my $id = $v;
$id =~ s/^\\\?\?\\Volume\{//;
$id =~ s/\}$//;
$id =~ s/-//g;
my $l = hex(substr($id,0,8));
my $m = hex(substr($id,8,4));
my $h = hex(substr($id,12,4)) & 0x0fff;
my $h = $m | $h << 16;
my $t = (::getTime($l,$h) - 574819200);
::rptMsg($v);
::rptMsg(" ".gmtime($t));
}
::rptMsg("");
foreach my $m (sort keys %md) {
::rptMsg("Device: ".$m);
foreach my $item (@{$md{$m}}) {
if ($item =~ m/^\\\?\?\\Volume/) {
my $id = $item;
$id =~ s/^\\\?\?\\Volume\{//;
$id =~ s/\}$//;
# $id =~ s/-//g;
# my $l = hex(substr($id,0,8));
# my $m = hex(substr($id,8,4));
# my $h = hex(substr($id,12,4)) & 0x0fff;
# my $h = $m | $h << 16;
# my $t = (::getTime($l,$h) - 574819200);
# $item .= " ".gmtime($t);
my $m = (split(/-/,$id,5))[4];
$m = uc($m);
$m = join(':',unpack("(A2)*",$m));
$macs{$m} = 1;
}
::rptMsg(" ".$item);
}
::rptMsg("");
}
::rptMsg("");
::rptMsg("Unique MAC Addresses:");
foreach (keys %macs) {
::rptMsg($_);
}
}
else {
::rptMsg($key_path." has no values.");
::logMsg($key_path." has no values.");
}
}
else {
::rptMsg($key_path." not found.");
::logMsg($key_path." not found.");
}
}
sub _translateBinary {
my $str = unpack("H*",$_[0]);
my $len = length($str);
my @nstr = split(//,$str,$len);
my @list = ();
foreach (0..($len/2)) {
push(@list,$nstr[$_*2].$nstr[($_*2)+1]);
}
return join(' ',reverse @list);
}
1; | 27.052083 | 78 | 0.510974 |
ed2ad7ee5f4048149470c8bc7c862df515f945e4 | 1,597 | pm | Perl | tests/x11/thunderbird/thunderbird_imap.pm | skriesch/os-autoinst-distri-opensuse | 64271513af053c2ad6abb83234e8151064e389bd | [
"FSFAP"
]
| null | null | null | tests/x11/thunderbird/thunderbird_imap.pm | skriesch/os-autoinst-distri-opensuse | 64271513af053c2ad6abb83234e8151064e389bd | [
"FSFAP"
]
| null | null | null | tests/x11/thunderbird/thunderbird_imap.pm | skriesch/os-autoinst-distri-opensuse | 64271513af053c2ad6abb83234e8151064e389bd | [
"FSFAP"
]
| null | null | null | # SUSE's openQA tests
#
# Copyright 2019 SUSE LLC
# SPDX-License-Identifier: FSFAP
# Package: MozillaThunderbird
# Summary: send an email using SMTP and receive it using IMAP
# - Kill thunderbird, erase all config files
# - Launch thunderbird
# - Create a imap account
# - Send and email to the created mail acount
# - Fetch emails, search for the sent email
# - Check that email was well received, delete the message
# - Exit thunderbird
# Maintainer: Paolo Stivanin <pstivanin@suse.com>
use warnings;
use strict;
use testapi;
use utils;
use lockapi qw(mutex_wait);
use base "thunderbird_common";
use x11utils qw(ensure_unlocked_desktop turn_off_gnome_screensaver turn_off_gnome_suspend);
sub run {
my $self = shift;
my $account = "internal_account";
my $hostname = get_var('HOSTNAME') // '';
mutex_wait('service_setup_done') if get_var('QAM_MAIL_THUNDERBIRD');
if ($hostname eq 'client') {
$account = "internal_account_C";
}
else {
$account = "internal_account_A";
}
mouse_hide(1);
# clean up and start thunderbird
x11_start_program("xterm -e \"killall -9 thunderbird; find ~ -name *thunderbird | xargs rm -rf;\"", valid => 0);
my $success = eval { x11_start_program("thunderbird", match_timeout => 120); 1 };
unless ($success) {
force_soft_failure "bsc#1131306";
} else {
$self->tb_setup_account('imap', $account);
my $mail_subject = $self->tb_send_message('imap', $account);
$self->tb_check_email($mail_subject);
# exit Thunderbird
send_key "ctrl-q";
}
}
1;
| 27.067797 | 116 | 0.678773 |
ed04793c7740919c0b1fdcbd561de223000f5492 | 5,824 | pm | Perl | lib/Cfn/Resource/AWS/Transfer/User.pm | agimenez/cfn-perl | 66eaffd2044b6a4921b43183f7b6b20aaa46b24a | [
"Apache-2.0"
]
| null | null | null | lib/Cfn/Resource/AWS/Transfer/User.pm | agimenez/cfn-perl | 66eaffd2044b6a4921b43183f7b6b20aaa46b24a | [
"Apache-2.0"
]
| null | null | null | lib/Cfn/Resource/AWS/Transfer/User.pm | agimenez/cfn-perl | 66eaffd2044b6a4921b43183f7b6b20aaa46b24a | [
"Apache-2.0"
]
| null | null | null | # AWS::Transfer::User generated from spec 20.1.0
use Moose::Util::TypeConstraints;
coerce 'Cfn::Resource::Properties::AWS::Transfer::User',
from 'HashRef',
via { Cfn::Resource::Properties::AWS::Transfer::User->new( %$_ ) };
package Cfn::Resource::AWS::Transfer::User {
use Moose;
extends 'Cfn::Resource';
has Properties => (isa => 'Cfn::Resource::Properties::AWS::Transfer::User', is => 'rw', coerce => 1);
sub AttributeList {
[ 'Arn','ServerId','UserName' ]
}
sub supported_regions {
[ 'ap-northeast-1','ap-northeast-2','ap-south-1','ap-southeast-1','ap-southeast-2','ca-central-1','eu-central-1','eu-north-1','eu-west-1','eu-west-2','eu-west-3','sa-east-1','us-east-1','us-east-2','us-gov-east-1','us-gov-west-1','us-west-1','us-west-2' ]
}
}
subtype 'ArrayOfCfn::Resource::Properties::AWS::Transfer::User::SshPublicKey',
as 'Cfn::Value',
where { $_->isa('Cfn::Value::Array') or $_->isa('Cfn::Value::Function') },
message { "$_ is not a Cfn::Value or a Cfn::Value::Function" };
coerce 'ArrayOfCfn::Resource::Properties::AWS::Transfer::User::SshPublicKey',
from 'HashRef',
via {
if (my $f = Cfn::TypeLibrary::try_function($_)) {
return $f
} else {
die 'Only accepts functions';
}
},
from 'ArrayRef',
via {
Cfn::Value::Array->new(Value => [
map {
Moose::Util::TypeConstraints::find_type_constraint('Cfn::Resource::Properties::AWS::Transfer::User::SshPublicKey')->coerce($_)
} @$_
]);
};
subtype 'Cfn::Resource::Properties::AWS::Transfer::User::SshPublicKey',
as 'Cfn::Value';
coerce 'Cfn::Resource::Properties::AWS::Transfer::User::SshPublicKey',
from 'HashRef',
via {
if (my $f = Cfn::TypeLibrary::try_function($_)) {
return $f
} else {
return Cfn::Resource::Properties::Object::AWS::Transfer::User::SshPublicKey->new( %$_ );
}
};
package Cfn::Resource::Properties::Object::AWS::Transfer::User::SshPublicKey {
use Moose;
use MooseX::StrictConstructor;
extends 'Cfn::Value::TypedValue';
}
subtype 'ArrayOfCfn::Resource::Properties::AWS::Transfer::User::HomeDirectoryMapEntry',
as 'Cfn::Value',
where { $_->isa('Cfn::Value::Array') or $_->isa('Cfn::Value::Function') },
message { "$_ is not a Cfn::Value or a Cfn::Value::Function" };
coerce 'ArrayOfCfn::Resource::Properties::AWS::Transfer::User::HomeDirectoryMapEntry',
from 'HashRef',
via {
if (my $f = Cfn::TypeLibrary::try_function($_)) {
return $f
} else {
die 'Only accepts functions';
}
},
from 'ArrayRef',
via {
Cfn::Value::Array->new(Value => [
map {
Moose::Util::TypeConstraints::find_type_constraint('Cfn::Resource::Properties::AWS::Transfer::User::HomeDirectoryMapEntry')->coerce($_)
} @$_
]);
};
subtype 'Cfn::Resource::Properties::AWS::Transfer::User::HomeDirectoryMapEntry',
as 'Cfn::Value';
coerce 'Cfn::Resource::Properties::AWS::Transfer::User::HomeDirectoryMapEntry',
from 'HashRef',
via {
if (my $f = Cfn::TypeLibrary::try_function($_)) {
return $f
} else {
return Cfn::Resource::Properties::Object::AWS::Transfer::User::HomeDirectoryMapEntry->new( %$_ );
}
};
package Cfn::Resource::Properties::Object::AWS::Transfer::User::HomeDirectoryMapEntry {
use Moose;
use MooseX::StrictConstructor;
extends 'Cfn::Value::TypedValue';
has Entry => (isa => 'Cfn::Value::String', is => 'rw', coerce => 1, required => 1, traits => [ 'CfnMutability' ], mutability => 'Mutable');
has Target => (isa => 'Cfn::Value::String', is => 'rw', coerce => 1, required => 1, traits => [ 'CfnMutability' ], mutability => 'Mutable');
}
package Cfn::Resource::Properties::AWS::Transfer::User {
use Moose;
use MooseX::StrictConstructor;
extends 'Cfn::Resource::Properties';
has HomeDirectory => (isa => 'Cfn::Value::String', is => 'rw', coerce => 1, traits => [ 'CfnMutability' ], mutability => 'Mutable');
has HomeDirectoryMappings => (isa => 'ArrayOfCfn::Resource::Properties::AWS::Transfer::User::HomeDirectoryMapEntry', is => 'rw', coerce => 1, traits => [ 'CfnMutability' ], mutability => 'Mutable');
has HomeDirectoryType => (isa => 'Cfn::Value::String', is => 'rw', coerce => 1, traits => [ 'CfnMutability' ], mutability => 'Mutable');
has Policy => (isa => 'Cfn::Value::String', is => 'rw', coerce => 1, traits => [ 'CfnMutability' ], mutability => 'Mutable');
has Role => (isa => 'Cfn::Value::String', is => 'rw', coerce => 1, required => 1, traits => [ 'CfnMutability' ], mutability => 'Mutable');
has ServerId => (isa => 'Cfn::Value::String', is => 'rw', coerce => 1, required => 1, traits => [ 'CfnMutability' ], mutability => 'Immutable');
has SshPublicKeys => (isa => 'ArrayOfCfn::Resource::Properties::AWS::Transfer::User::SshPublicKey', is => 'rw', coerce => 1, traits => [ 'CfnMutability' ], mutability => 'Mutable');
has Tags => (isa => 'ArrayOfCfn::Resource::Properties::TagType', is => 'rw', coerce => 1, traits => [ 'CfnMutability' ], mutability => 'Mutable');
has UserName => (isa => 'Cfn::Value::String', is => 'rw', coerce => 1, required => 1, traits => [ 'CfnMutability' ], mutability => 'Immutable');
}
1;
### main pod documentation begin ###
=encoding UTF-8
=head1 NAME
Cfn::Resource::AWS::Transfer::User - Cfn resource for AWS::Transfer::User
=head1 DESCRIPTION
This module implements a Perl module that represents the CloudFormation object AWS::Transfer::User.
See L<Cfn> for more information on how to use it.
=head1 AUTHOR
Jose Luis Martinez
CAPSiDE
jlmartinez@capside.com
=head1 COPYRIGHT and LICENSE
Copyright (c) 2013 by CAPSiDE
This code is distributed under the Apache 2 License. The full text of the
license can be found in the LICENSE file included with this module.
=cut
| 38.065359 | 259 | 0.638221 |
ed3e252e0994ef5d2fde1a361536d9dbc4722388 | 1,620 | pm | Perl | auto-lib/Paws/EC2/AccountAttribute.pm | torrentalle/aws-sdk-perl | 70cc5c7b7a494e422f8412da619161a99de1f1ec | [
"Apache-2.0"
]
| null | null | null | auto-lib/Paws/EC2/AccountAttribute.pm | torrentalle/aws-sdk-perl | 70cc5c7b7a494e422f8412da619161a99de1f1ec | [
"Apache-2.0"
]
| 1 | 2021-05-26T19:13:58.000Z | 2021-05-26T19:13:58.000Z | auto-lib/Paws/EC2/AccountAttribute.pm | torrentalle/aws-sdk-perl | 70cc5c7b7a494e422f8412da619161a99de1f1ec | [
"Apache-2.0"
]
| null | null | null | package Paws::EC2::AccountAttribute;
use Moose;
has AttributeName => (is => 'ro', isa => 'Str', request_name => 'attributeName', traits => ['NameInRequest']);
has AttributeValues => (is => 'ro', isa => 'ArrayRef[Paws::EC2::AccountAttributeValue]', request_name => 'attributeValueSet', traits => ['NameInRequest']);
1;
### main pod documentation begin ###
=head1 NAME
Paws::EC2::AccountAttribute
=head1 USAGE
This class represents one of two things:
=head3 Arguments in a call to a service
Use the attributes of this class as arguments to methods. You shouldn't make instances of this class.
Each attribute should be used as a named argument in the calls that expect this type of object.
As an example, if Att1 is expected to be a Paws::EC2::AccountAttribute object:
$service_obj->Method(Att1 => { AttributeName => $value, ..., AttributeValues => $value });
=head3 Results returned from an API call
Use accessors for each attribute. If Att1 is expected to be an Paws::EC2::AccountAttribute object:
$result = $service_obj->Method(...);
$result->Att1->AttributeName
=head1 DESCRIPTION
This class has no description
=head1 ATTRIBUTES
=head2 AttributeName => Str
The name of the account attribute.
=head2 AttributeValues => ArrayRef[L<Paws::EC2::AccountAttributeValue>]
The values for the account attribute.
=head1 SEE ALSO
This class forms part of L<Paws>, describing an object used in L<Paws::EC2>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
| 26.129032 | 157 | 0.730864 |
ed19916936ef1d02b29549be924baca8764a2060 | 664 | al | Perl | benchmark/benchmarks/FASP-benchmarks/data/small-world/smallworld-0116-50-60-174.al | krzysg/FaspHeuristic | 1929c40e3fbc49e68b04acfc5522539a18758031 | [
"MIT"
]
| null | null | null | benchmark/benchmarks/FASP-benchmarks/data/small-world/smallworld-0116-50-60-174.al | krzysg/FaspHeuristic | 1929c40e3fbc49e68b04acfc5522539a18758031 | [
"MIT"
]
| null | null | null | benchmark/benchmarks/FASP-benchmarks/data/small-world/smallworld-0116-50-60-174.al | krzysg/FaspHeuristic | 1929c40e3fbc49e68b04acfc5522539a18758031 | [
"MIT"
]
| null | null | null | 1 9 21 43 60
2 18 22
3 36 54
4 40 43 56
5 33
6 8 10 29
7 2 27 29 30
8 22 45 51 56
9 5 18 20 59
10 3 19 37 47 49
11 22 37 54
12 3 52 60
13 21 38
14 51 53 56
15 5 24 36 48
16 7 8 11 26 33 46 48
17 14 24 52 54
18 14 34 47 52 57
19 21 35 53
20 3
21 18 42 45
22
23 43 60
24 33 35 57
25 9 13 35 43
26 19 53 54 58
27 30
28 3 18 20 58
29 42 47 50
30 26 31 44 45
31 13 25 41 53
32 1 5 15 28 33 59
33 4 7 20 23
34 21
35 9 59
36 4 6 33 45
37 7 12 16 35
38 25 37 42
39 20 56
40 1 33
41 14 29
42 9 10
43 14
44 10
45 2 19
46 37 55
47 13 37 49 52
48 39
49 8 14 16 34 37
50 22
51 9 10
52 23 24 46 58
53 6 11
54 34 37 46
55 11 29
56 5 23 48
57 23 50
58 8 10
59 16
60 24 33 34 41 50 | 11.066667 | 21 | 0.649096 |
73ff770457588c74c706f7c6837273c91c2073cd | 2,591 | pl | Perl | Perl STH/learn/DBIx-DataModel/training.update.pl | lust4life/quick.dirty.perl | 2e279f9ab6fe1bfed78641911597d9b5edce62bf | [
"BSD-2-Clause-FreeBSD"
]
| null | null | null | Perl STH/learn/DBIx-DataModel/training.update.pl | lust4life/quick.dirty.perl | 2e279f9ab6fe1bfed78641911597d9b5edce62bf | [
"BSD-2-Clause-FreeBSD"
]
| null | null | null | Perl STH/learn/DBIx-DataModel/training.update.pl | lust4life/quick.dirty.perl | 2e279f9ab6fe1bfed78641911597d9b5edce62bf | [
"BSD-2-Clause-FreeBSD"
]
| null | null | null | use 5.18.2;
use strict;
use warnings;
use diagnostics;
use DBI;
use Smart::Comments;
use Data::Printer colored => 1;
use Carp;
use GJDataSource;
=sample4cwd
use Cwd;
my $origin_cwd = cwd;
chdir "c:/Program Files/MySQL/MySQL Server 5.5/bin";
END{
chdir $origin_cwd;
say "\nend\n";
}
=cut
my $ds = GJ::DataSource->new(0);
my $tc_db =
DBI->connect( $ds->tc, GJ::DataSource::User, GJ::DataSource::Pwd,
{ mysql_enable_utf8 => 1, 'RaiseError' => 1 } )
or die qq(unable to connect $GJ::DataSource::tc \n);
$tc_db->do("set group_concat_max_len = 1024*1024;");
my $generate_sql = q();
my $biz_table_basic = 'biz_balance_user_';
foreach(0..9){
my $biz_table = $biz_table_basic . $_;
my $query_sql = qq{SELECT CONCAT(
"UPDATE `trading_center`.`$biz_table` b SET b.`amount` = 6 ,b.`amount_left` = 6 WHERE b.`order_id` IN(",
s.ids, ") AND b.`product_code` = 'pd_post_minor_num' ;
UPDATE `trading_center`.`$biz_table` b SET b.`amount` = 8 ,b.`amount_left` = 8 WHERE b.`order_id` IN( ",
s.ids, ") AND b.`product_code` = 'pd_manual_refresh' ;
"
)
FROM
(SELECT
GROUP_CONCAT(DISTINCT b.`order_id`) AS ids
FROM
`$biz_table` b
WHERE b.`category_type` = 9
AND b.`product_code` = 'pd_post_minor_num'
AND b.`amount` = 3
AND b.`status` = 1
AND b.`end_at` > UNIX_TIMESTAMP()) s ;
};
$generate_sql .= $tc_db->selectrow_array($query_sql) // q();
=nouse
$query_sql = qq{SELECT CONCAT("insert into `trading_center`.`$biz_table` (`order_id`,`order_item_id`,`user_id`,`balance_id`,`amount`,`amount_left`,`begin_at`,`end_at`,`status`,`created_at`,`duration_modified_at`,`product_code`,`city_id`,`category_type`,`extension`,`old_key`,`old_deposit_id`,`refund_at`,`package_id`,`package_type`,`log_id`,`source_type`,`usage_json`) values('",b.`order_id`,"','",b.`order_item_id`,"','",b.`user_id`,"','",b.`balance_id`,"','",500,"','",500,"','",b.`begin_at`,"','",b.`end_at`,"','",b.`status`,"','",b.`created_at`,"','",b.`duration_modified_at`,"','",'pd_refresh_point',"','",b.`city_id`,"','",b.`category_type`,"','",b.`extension`,"','",b.`old_key`,"','",b.`old_deposit_id`,"','",b.`refund_at`,"','",b.`package_id`,"','",b.`package_type`,"','",b.`log_id`,"','",b.`source_type`,"',",IFNULL(b.`usage_json`,'NULL'),");")
FROM (
SELECT * FROM `$biz_table` b WHERE b.`category_type` = 9 AND b.`product_code` = 'pd_post_minor_num' and b.`amount` = 3 AND b.`status` = 1 AND b.`end_at` > UNIX_TIMESTAMP()
) b;
};
foreach my $row_ref (@{$tc_db->selectall_arrayref($query_sql)}){
$generate_sql .= $$row_ref[0] . qq(\r);
}
=cut
$generate_sql .= qq(\r\n);
}
say $generate_sql;
| 37.014286 | 855 | 0.646469 |
ed16387842527ead81e3078840bd9102de2e6f32 | 492 | pm | Perl | auto-lib/Paws/EC2/DeleteEgressOnlyInternetGatewayResult.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
]
| 164 | 2015-01-08T14:58:53.000Z | 2022-02-20T19:16:24.000Z | auto-lib/Paws/EC2/DeleteEgressOnlyInternetGatewayResult.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
]
| 348 | 2015-01-07T22:08:38.000Z | 2022-01-27T14:34:44.000Z | auto-lib/Paws/EC2/DeleteEgressOnlyInternetGatewayResult.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
]
| 87 | 2015-04-22T06:29:47.000Z | 2021-09-29T14:45:55.000Z |
package Paws::EC2::DeleteEgressOnlyInternetGatewayResult;
  use Moose;

  # Outcome flag returned by EC2 for the delete call; travels on the wire
  # under the name "returnCode".
  has ReturnCode => (
    is           => 'ro',
    isa          => 'Bool',
    request_name => 'returnCode',
    traits       => ['NameInRequest',],
  );

  # AWS request identifier attached to every API response.
  has _request_id => (is => 'ro', isa => 'Str');
1;

### main pod documentation begin ###

=head1 NAME

Paws::EC2::DeleteEgressOnlyInternetGatewayResult

=head1 ATTRIBUTES

=head2 ReturnCode => Bool

Returns C<true> if the request succeeds; otherwise, it returns an
error.

=head2 _request_id => Str

=cut
| 16.965517 | 108 | 0.691057 |
ed1c9f2f226fc15ab2dbdc9579006ce0b8cc4f3f | 3,789 | t | Perl | 3rdparty/github/marpaESLIF/3rdparty/github/marpaWrapper/3rdparty/github/Marpa--R2/cpan/t/sl_wall.t | jddurand/c-marpaESLIF-JSON | 21d3fc1bfa8415ddf1bb7bb3c060a8f07b2e9b67 | [
"MIT"
]
| null | null | null | 3rdparty/github/marpaESLIF/3rdparty/github/marpaWrapper/3rdparty/github/Marpa--R2/cpan/t/sl_wall.t | jddurand/c-marpaESLIF-JSON | 21d3fc1bfa8415ddf1bb7bb3c060a8f07b2e9b67 | [
"MIT"
]
| null | null | null | 3rdparty/github/marpaESLIF/3rdparty/github/marpaWrapper/3rdparty/github/Marpa--R2/cpan/t/sl_wall.t | jddurand/c-marpaESLIF-JSON | 21d3fc1bfa8415ddf1bb7bb3c060a8f07b2e9b67 | [
"MIT"
]
| null | null | null | #!perl
# Copyright 2015 Jeffrey Kegler
# This file is part of Marpa::R2. Marpa::R2 is free software: you can
# redistribute it and/or modify it under the terms of the GNU Lesser
# General Public License as published by the Free Software Foundation,
# either version 3 of the License, or (at your option) any later version.
#
# Marpa::R2 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser
# General Public License along with Marpa::R2. If not, see
# http://www.gnu.org/licenses/.
#
use 5.010;
use strict;
use warnings;
# The Wall Series: a sequence of numbers generated by an especially
# ambiguous section of Perl syntax, relaxed to ignore precedence
# and lvalue restricitons.
# This produces numbers in the series A052952 in the literature.
# It's a kind of ragtime Fibonacci series. My proof that the
# parse counts generated by this grammar and A052952 are identical
# is at perlmonks.org: http://perlmonks.org/?node_id=649892
use Test::More tests => 12;
use lib 'inc';
use Marpa::R2::Test;
use Marpa::R2;
# The inefficiency (at least some of it) is deliberate.
# Passing up a duples of [ string, value ] and then
# assembling a final string at the top would be better
# than assembling the string then taking it
# apart at each step. But I wanted to test having
# a start symbol that appears repeatedly on the RHS.
## no critic (Subroutines::RequireArgUnpacking)
sub My_Actions::minus {
    # Args: (parse object, left operand, minus token, right operand).
    # Operands are "STRING==VALUE" duples; we rebuild both halves.
    my ( undef, $left_arg, undef, $right_arg ) = @_;
    my ( $left_str,  $left_val )  = ( $left_arg  =~ /^(.*)==(.*)$/xms );
    my ( $right_str, $right_val ) = ( $right_arg =~ /^(.*)==(.*)$/xms );
    my $difference = $left_val - $right_val;
    return "($left_str-$right_str)==$difference";
}
sub My_Actions::postfix_decr {
    # Args: (parse object, operand "STRING==VALUE", two minus tokens).
    my ( undef, $operand ) = @_;
    my ( $str, $val ) = ( $operand =~ /^(.*)==(.*)$/xms );
    # Postfix decrement evaluates to the *pre*-decrement value.
    return "($str--)==$val";
}
sub My_Actions::prefix_decr {
    # Args: (parse object, two minus tokens, operand "STRING==VALUE").
    my ( undef, undef, undef, $operand ) = @_;
    my ( $str, $val ) = ( $operand =~ /^(.*)==(.*)$/xms );
    # Prefix decrement evaluates to the value *after* decrementing.
    my $decremented = $val - 1;
    return "(--$str)==$decremented";
}
sub My_Actions::negation {
    # Args: (parse object, minus token, operand "STRING==VALUE").
    my ( undef, undef, $operand ) = @_;
    my ( $str, $val ) = ( $operand =~ /^(.*)==(.*)$/xms );
    return '(-' . $str . ')==' . ( -$val );
}
sub My_Actions::number {
    # A bare digit is its own string and its own value.
    my ( undef, $digit ) = @_;
    return $digit . '==' . $digit;
}
sub My_Actions::default_action {
    # Fallback rule action: join the child values with ';' inside parens.
    my ( undef, @children ) = @_;
    return q{}          if !@children;
    return $children[0] if @children == 1;
    return '(' . join( q{;}, @children ) . ')';
}
## use critic
# Build the SLIF grammar once; it is shared by every recognizer in the
# loop below.  The grammar is deliberately ambiguous: minus can be a
# binary operator, a prefix/postfix decrement, or a unary negation, with
# no precedence to disambiguate.  Each alternative names the semantic
# action (in package My_Actions) that assembles its "STRING==VALUE" duple.
my $g = Marpa::R2::Scanless::G->new(
    { source => \(<<'END_OF_SOURCE'),
:start ::= E
:default ::= action => default_action
E ::=
       E Minus E action => minus
     | E Minus Minus action => postfix_decr
     | Minus Minus E action => prefix_decr
     | Minus E action => negation
    | Number action => number
Number ~ [0-9]
Minus ~ '-'
END_OF_SOURCE
    }
);
# Expected parse counts for the Wall series; $expected[$n] is the count
# for input "6", n minus signs, "6".  These match OEIS A052952 (see the
# header comments).  Index 0 is unused by the loop below.
my @expected = qw(0 1 1 3 4 8 12 21 33 55 88 144 232 );
# For each n, feed the recognizer the digit '6', then the '-' character
# n times, then '6' again (read/resume re-scan offsets 0 and 1 of the
# two-character string '6-'), and count how many distinct parses the
# ambiguous grammar produces.
for my $n ( 1 .. 12 ) {
    # Set max_parses just in case there's an infinite loop.
    # This is for debugging, after all
    my $recce = Marpa::R2::Scanless::R->new(
        { grammar => $g,
          semantics_package => 'My_Actions',
          max_parses => 300
        }
    );
    $recce->read( \'6-', 0, 1 );
    $recce->resume( 1, 1 ) for 1 .. $n;
    $recce->resume( 0, 1 );
    # Exhaust the parse series; each value() call is one distinct parse.
    my $parse_count = 0;
    while ( $recce->value() ) { $parse_count++; }
    Marpa::R2::Test::is( $expected[$n], $parse_count,
        "Wall Series Number $n" );
} ## end for my $n ( 1 .. 12 )
1;    # In case used as "do" file
# vim: expandtab shiftwidth=4:
| 30.071429 | 73 | 0.610715 |
73d638fc3a9ea2317bdd1955364c464df22fb136 | 531 | t | Perl | xt/649-lhost-yandex.t | aderumier/p5-Sisimai | b2f2c73a70c8cf3747eb4e9a154e6a00855b855b | [
"BSD-2-Clause"
]
| null | null | null | xt/649-lhost-yandex.t | aderumier/p5-Sisimai | b2f2c73a70c8cf3747eb4e9a154e6a00855b855b | [
"BSD-2-Clause"
]
| null | null | null | xt/649-lhost-yandex.t | aderumier/p5-Sisimai | b2f2c73a70c8cf3747eb4e9a154e6a00855b855b | [
"BSD-2-Clause"
]
| null | null | null | use strict;
use warnings;
use Test::More;
use lib qw(./lib ./blib/lib);
# Shared driver that knows how to exercise a Sisimai::Lhost engine
# against the sample mailbox set.
require './t/600-lhost-code';
# Engine under test; also selects the private sample directory below.
my $enginename = 'Yandex';
# Private (not distributed) bounce samples for this engine.
my $samplepath = sprintf("./set-of-emails/private/email-%s", lc $enginename);
my $enginetest = Sisimai::Lhost::Code->maketest;
# Each entry: sample file number 'n' and regex 'r' that the detected
# bounce reason for that sample must match.
my $isexpected = [
    { 'n' => '01001', 'r' => qr/userunknown/ },
    { 'n' => '01002', 'r' => qr/(?:userunknown|mailboxfull)/ },
];
# The private sample set is optional; skip the whole file when absent.
plan 'skip_all', sprintf("%s not found", $samplepath) unless -d $samplepath;
$enginetest->($enginename, $isexpected, 1, 0);
done_testing;
| 27.947368 | 77 | 0.640301 |
ed24dd67c14f275b07e42c9b0cd85c41ede7f4de | 438 | t | Perl | t/legacy/regression/validators/02-max_length.t | Htbaa/Validation-Class | 4da907295fc4dda85cc2dd99e526f3b4a054b645 | [
"Artistic-1.0"
]
| null | null | null | t/legacy/regression/validators/02-max_length.t | Htbaa/Validation-Class | 4da907295fc4dda85cc2dd99e526f3b4a054b645 | [
"Artistic-1.0"
]
| null | null | null | t/legacy/regression/validators/02-max_length.t | Htbaa/Validation-Class | 4da907295fc4dda85cc2dd99e526f3b4a054b645 | [
"Artistic-1.0"
]
| null | null | null | use Test::More tests => 3;
# Minimal Validation::Class consumer used as the fixture class.
package MyVal;
use Validation::Class;
package main;
# Field 'foobar' allows at most 5 characters; the param 'apple' is
# exactly 5, so the first validation should pass.
my $r = MyVal->new(
    fields => {foobar => {max_length => 5}},
    params => {foobar => 'apple'}
);
ok $r->validate(), 'foobar validates';
# Tighten the limit below the param length and expect failure.
$r->fields->{foobar}->{max_length} = 4;
ok !$r->validate(), 'foobar doesnt validate';
# The error message should quote the configured maximum.
ok $r->errors_to_string() =~ /must not contain more than 4/,
    'displays proper error message';
#warn $r->errors_to_string();
| 20.857143 | 60 | 0.636986 |
73f7905581cd677c60bfc5cd61715206504b2dbf | 1,410 | pm | Perl | lib/Module/Install/TestBase.pm | tokuhirom/test-base-pm | 252f93c077bcee9f08ca786cac2978e0d7fd77b0 | [
"Net-SNMP",
"Xnet"
]
| 1 | 2015-11-09T01:23:13.000Z | 2015-11-09T01:23:13.000Z | lib/Module/Install/TestBase.pm | tokuhirom/test-base-pm | 252f93c077bcee9f08ca786cac2978e0d7fd77b0 | [
"Net-SNMP",
"Xnet"
]
| null | null | null | lib/Module/Install/TestBase.pm | tokuhirom/test-base-pm | 252f93c077bcee9f08ca786cac2978e0d7fd77b0 | [
"Net-SNMP",
"Xnet"
]
| null | null | null | package Module::Install::TestBase;
use strict;
use warnings;
use Module::Install::Base;
# $VERSION and @ISA are package globals; 'use vars' predates 'our'.
use vars qw($VERSION @ISA);
# Runs at compile time so Module::Install's extension loader sees the
# version and this extension inherits the Module::Install::Base plumbing
# before anything else in the file executes.
BEGIN {
    $VERSION = '0.60';
    @ISA = 'Module::Install::Base';
}
sub use_test_base {
    my $self = shift;

    # Copy Test::Base and everything it needs into inc/ so the module's
    # installer does not need Test::Base installed.
    my @bundled_modules = qw(
        Test::Base
        Test::Base::Filter
        Spiffy
        Test::More
        Test::Builder
        Test::Builder::Module
    );
    $self->include($_) for @bundled_modules;

    # Declared as an ordinary prerequisite rather than bundled.
    $self->requires('Filter::Util::Call');
}
1;
=encoding utf8
=head1 NAME
Module::Install::TestBase - Module::Install Support for Test::Base
=head1 SYNOPSIS
use inc::Module::Install;
name 'Foo';
all_from 'lib/Foo.pm';
use_test_base;
WriteAll;
=head1 DESCRIPTION
This module adds the C<use_test_base> directive to Module::Install.
Now you can get full Test-Base support for your module with no external
dependency on Test::Base.
Just add this line to your Makefile.PL:
use_test_base;
That's it. Really. Now Test::Base is bundled into your module, so that
it is no longer any burden on the person installing your module.
=head1 AUTHOR
Ingy döt Net <ingy@cpan.org>
=head1 COPYRIGHT
Copyright (c) 2006, 2008, 2011. Ingy döt Net.
This program is free software; you can redistribute it and/or modify it
under the same terms as Perl itself.
See L<http://www.perl.com/perl/misc/Artistic.html>
=cut
| 19.859155 | 71 | 0.679433 |
ed2aedd9254e63a9b0e77b8c1101c511fbc5b975 | 8,547 | pl | Perl | perl/lib/unicore/To/Lc.pl | DDMoReFoundation/PortableNonmem | 7e40b30887537f24fed12421935b58325ba2e5c3 | [
"BSD-3-Clause-Clear"
]
| null | null | null | perl/lib/unicore/To/Lc.pl | DDMoReFoundation/PortableNonmem | 7e40b30887537f24fed12421935b58325ba2e5c3 | [
"BSD-3-Clause-Clear"
]
| null | null | null | perl/lib/unicore/To/Lc.pl | DDMoReFoundation/PortableNonmem | 7e40b30887537f24fed12421935b58325ba2e5c3 | [
"BSD-3-Clause-Clear"
]
| null | null | null | # !!!!!!! DO NOT EDIT THIS FILE !!!!!!!
# This file is machine-generated by mktables from the Unicode
# database, Version 6.3.0. Any changes made here will be lost!
# !!!!!!! INTERNAL PERL USE ONLY !!!!!!!
# This file is for internal use by core Perl only. The format and even the
# name or existence of this file are subject to change without notice. Don't
# use it directly. Use Unicode::UCD to access the Unicode character data
# base.
# The mappings in the non-hash portion of this file must be modified to get the
# correct values by adding the code point ordinal number to each one that is
# numeric.
# The name this swash is to be known by, with the format of the mappings in
# the main body of the table, and what all code points missing from this file
# map to.
$utf8::SwashInfo{'ToLc'}{'format'} = 'ax'; # mapped value in hex; some entries need adjustment
$utf8::SwashInfo{'ToLc'}{'specials_name'} = 'utf8::ToSpecLc'; # Name of hash of special mappings
$utf8::SwashInfo{'ToLc'}{'missing'} = '0'; # code point maps to itself
# Some code points require special handling because their mappings are each to
# multiple code points. These do not appear in the main body, but are defined
# in the hash below.
# Each key is the string of N bytes that together make up the UTF-8 encoding
# for the code point. (i.e. the same as looking at the code point's UTF-8
# under "use bytes"). Each value is the UTF-8 of the translation, for speed.
%utf8::ToSpecLc = (
"\xC4\xB0" => "\x{0069}\x{0307}", # U+0130 => 0069 0307
);
return <<'END';
41 5A 61
C0 D6 E0
D8 DE F8
100 101
102 103
104 105
106 107
108 109
10A 10B
10C 10D
10E 10F
110 111
112 113
114 115
116 117
118 119
11A 11B
11C 11D
11E 11F
120 121
122 123
124 125
126 127
128 129
12A 12B
12C 12D
12E 12F
130 69
132 133
134 135
136 137
139 13A
13B 13C
13D 13E
13F 140
141 142
143 144
145 146
147 148
14A 14B
14C 14D
14E 14F
150 151
152 153
154 155
156 157
158 159
15A 15B
15C 15D
15E 15F
160 161
162 163
164 165
166 167
168 169
16A 16B
16C 16D
16E 16F
170 171
172 173
174 175
176 177
178 FF
179 17A
17B 17C
17D 17E
181 253
182 183
184 185
186 254
187 188
189 18A 256
18B 18C
18E 1DD
18F 259
190 25B
191 192
193 260
194 263
196 269
197 268
198 199
19C 26F
19D 272
19F 275
1A0 1A1
1A2 1A3
1A4 1A5
1A6 280
1A7 1A8
1A9 283
1AC 1AD
1AE 288
1AF 1B0
1B1 1B2 28A
1B3 1B4
1B5 1B6
1B7 292
1B8 1B9
1BC 1BD
1C4 1C6
1C5 1C6
1C7 1C9
1C8 1C9
1CA 1CC
1CB 1CC
1CD 1CE
1CF 1D0
1D1 1D2
1D3 1D4
1D5 1D6
1D7 1D8
1D9 1DA
1DB 1DC
1DE 1DF
1E0 1E1
1E2 1E3
1E4 1E5
1E6 1E7
1E8 1E9
1EA 1EB
1EC 1ED
1EE 1EF
1F1 1F3
1F2 1F3
1F4 1F5
1F6 195
1F7 1BF
1F8 1F9
1FA 1FB
1FC 1FD
1FE 1FF
200 201
202 203
204 205
206 207
208 209
20A 20B
20C 20D
20E 20F
210 211
212 213
214 215
216 217
218 219
21A 21B
21C 21D
21E 21F
220 19E
222 223
224 225
226 227
228 229
22A 22B
22C 22D
22E 22F
230 231
232 233
23A 2C65
23B 23C
23D 19A
23E 2C66
241 242
243 180
244 289
245 28C
246 247
248 249
24A 24B
24C 24D
24E 24F
370 371
372 373
376 377
386 3AC
388 38A 3AD
38C 3CC
38E 38F 3CD
391 3A1 3B1
3A3 3AB 3C3
3CF 3D7
3D8 3D9
3DA 3DB
3DC 3DD
3DE 3DF
3E0 3E1
3E2 3E3
3E4 3E5
3E6 3E7
3E8 3E9
3EA 3EB
3EC 3ED
3EE 3EF
3F4 3B8
3F7 3F8
3F9 3F2
3FA 3FB
3FD 3FF 37B
400 40F 450
410 42F 430
460 461
462 463
464 465
466 467
468 469
46A 46B
46C 46D
46E 46F
470 471
472 473
474 475
476 477
478 479
47A 47B
47C 47D
47E 47F
480 481
48A 48B
48C 48D
48E 48F
490 491
492 493
494 495
496 497
498 499
49A 49B
49C 49D
49E 49F
4A0 4A1
4A2 4A3
4A4 4A5
4A6 4A7
4A8 4A9
4AA 4AB
4AC 4AD
4AE 4AF
4B0 4B1
4B2 4B3
4B4 4B5
4B6 4B7
4B8 4B9
4BA 4BB
4BC 4BD
4BE 4BF
4C0 4CF
4C1 4C2
4C3 4C4
4C5 4C6
4C7 4C8
4C9 4CA
4CB 4CC
4CD 4CE
4D0 4D1
4D2 4D3
4D4 4D5
4D6 4D7
4D8 4D9
4DA 4DB
4DC 4DD
4DE 4DF
4E0 4E1
4E2 4E3
4E4 4E5
4E6 4E7
4E8 4E9
4EA 4EB
4EC 4ED
4EE 4EF
4F0 4F1
4F2 4F3
4F4 4F5
4F6 4F7
4F8 4F9
4FA 4FB
4FC 4FD
4FE 4FF
500 501
502 503
504 505
506 507
508 509
50A 50B
50C 50D
50E 50F
510 511
512 513
514 515
516 517
518 519
51A 51B
51C 51D
51E 51F
520 521
522 523
524 525
526 527
531 556 561
10A0 10C5 2D00
10C7 2D27
10CD 2D2D
1E00 1E01
1E02 1E03
1E04 1E05
1E06 1E07
1E08 1E09
1E0A 1E0B
1E0C 1E0D
1E0E 1E0F
1E10 1E11
1E12 1E13
1E14 1E15
1E16 1E17
1E18 1E19
1E1A 1E1B
1E1C 1E1D
1E1E 1E1F
1E20 1E21
1E22 1E23
1E24 1E25
1E26 1E27
1E28 1E29
1E2A 1E2B
1E2C 1E2D
1E2E 1E2F
1E30 1E31
1E32 1E33
1E34 1E35
1E36 1E37
1E38 1E39
1E3A 1E3B
1E3C 1E3D
1E3E 1E3F
1E40 1E41
1E42 1E43
1E44 1E45
1E46 1E47
1E48 1E49
1E4A 1E4B
1E4C 1E4D
1E4E 1E4F
1E50 1E51
1E52 1E53
1E54 1E55
1E56 1E57
1E58 1E59
1E5A 1E5B
1E5C 1E5D
1E5E 1E5F
1E60 1E61
1E62 1E63
1E64 1E65
1E66 1E67
1E68 1E69
1E6A 1E6B
1E6C 1E6D
1E6E 1E6F
1E70 1E71
1E72 1E73
1E74 1E75
1E76 1E77
1E78 1E79
1E7A 1E7B
1E7C 1E7D
1E7E 1E7F
1E80 1E81
1E82 1E83
1E84 1E85
1E86 1E87
1E88 1E89
1E8A 1E8B
1E8C 1E8D
1E8E 1E8F
1E90 1E91
1E92 1E93
1E94 1E95
1E9E DF
1EA0 1EA1
1EA2 1EA3
1EA4 1EA5
1EA6 1EA7
1EA8 1EA9
1EAA 1EAB
1EAC 1EAD
1EAE 1EAF
1EB0 1EB1
1EB2 1EB3
1EB4 1EB5
1EB6 1EB7
1EB8 1EB9
1EBA 1EBB
1EBC 1EBD
1EBE 1EBF
1EC0 1EC1
1EC2 1EC3
1EC4 1EC5
1EC6 1EC7
1EC8 1EC9
1ECA 1ECB
1ECC 1ECD
1ECE 1ECF
1ED0 1ED1
1ED2 1ED3
1ED4 1ED5
1ED6 1ED7
1ED8 1ED9
1EDA 1EDB
1EDC 1EDD
1EDE 1EDF
1EE0 1EE1
1EE2 1EE3
1EE4 1EE5
1EE6 1EE7
1EE8 1EE9
1EEA 1EEB
1EEC 1EED
1EEE 1EEF
1EF0 1EF1
1EF2 1EF3
1EF4 1EF5
1EF6 1EF7
1EF8 1EF9
1EFA 1EFB
1EFC 1EFD
1EFE 1EFF
1F08 1F0F 1F00
1F18 1F1D 1F10
1F28 1F2F 1F20
1F38 1F3F 1F30
1F48 1F4D 1F40
1F59 1F51
1F5B 1F53
1F5D 1F55
1F5F 1F57
1F68 1F6F 1F60
1F88 1F8F 1F80
1F98 1F9F 1F90
1FA8 1FAF 1FA0
1FB8 1FB9 1FB0
1FBA 1FBB 1F70
1FBC 1FB3
1FC8 1FCB 1F72
1FCC 1FC3
1FD8 1FD9 1FD0
1FDA 1FDB 1F76
1FE8 1FE9 1FE0
1FEA 1FEB 1F7A
1FEC 1FE5
1FF8 1FF9 1F78
1FFA 1FFB 1F7C
1FFC 1FF3
2126 3C9
212A 6B
212B E5
2132 214E
2160 216F 2170
2183 2184
24B6 24CF 24D0
2C00 2C2E 2C30
2C60 2C61
2C62 26B
2C63 1D7D
2C64 27D
2C67 2C68
2C69 2C6A
2C6B 2C6C
2C6D 251
2C6E 271
2C6F 250
2C70 252
2C72 2C73
2C75 2C76
2C7E 2C7F 23F
2C80 2C81
2C82 2C83
2C84 2C85
2C86 2C87
2C88 2C89
2C8A 2C8B
2C8C 2C8D
2C8E 2C8F
2C90 2C91
2C92 2C93
2C94 2C95
2C96 2C97
2C98 2C99
2C9A 2C9B
2C9C 2C9D
2C9E 2C9F
2CA0 2CA1
2CA2 2CA3
2CA4 2CA5
2CA6 2CA7
2CA8 2CA9
2CAA 2CAB
2CAC 2CAD
2CAE 2CAF
2CB0 2CB1
2CB2 2CB3
2CB4 2CB5
2CB6 2CB7
2CB8 2CB9
2CBA 2CBB
2CBC 2CBD
2CBE 2CBF
2CC0 2CC1
2CC2 2CC3
2CC4 2CC5
2CC6 2CC7
2CC8 2CC9
2CCA 2CCB
2CCC 2CCD
2CCE 2CCF
2CD0 2CD1
2CD2 2CD3
2CD4 2CD5
2CD6 2CD7
2CD8 2CD9
2CDA 2CDB
2CDC 2CDD
2CDE 2CDF
2CE0 2CE1
2CE2 2CE3
2CEB 2CEC
2CED 2CEE
2CF2 2CF3
A640 A641
A642 A643
A644 A645
A646 A647
A648 A649
A64A A64B
A64C A64D
A64E A64F
A650 A651
A652 A653
A654 A655
A656 A657
A658 A659
A65A A65B
A65C A65D
A65E A65F
A660 A661
A662 A663
A664 A665
A666 A667
A668 A669
A66A A66B
A66C A66D
A680 A681
A682 A683
A684 A685
A686 A687
A688 A689
A68A A68B
A68C A68D
A68E A68F
A690 A691
A692 A693
A694 A695
A696 A697
A722 A723
A724 A725
A726 A727
A728 A729
A72A A72B
A72C A72D
A72E A72F
A732 A733
A734 A735
A736 A737
A738 A739
A73A A73B
A73C A73D
A73E A73F
A740 A741
A742 A743
A744 A745
A746 A747
A748 A749
A74A A74B
A74C A74D
A74E A74F
A750 A751
A752 A753
A754 A755
A756 A757
A758 A759
A75A A75B
A75C A75D
A75E A75F
A760 A761
A762 A763
A764 A765
A766 A767
A768 A769
A76A A76B
A76C A76D
A76E A76F
A779 A77A
A77B A77C
A77D 1D79
A77E A77F
A780 A781
A782 A783
A784 A785
A786 A787
A78B A78C
A78D 265
A790 A791
A792 A793
A7A0 A7A1
A7A2 A7A3
A7A4 A7A5
A7A6 A7A7
A7A8 A7A9
A7AA 266
FF21 FF3A FF41
10400 10427 10428
END
| 13.068807 | 97 | 0.651457 |
73fb83f7e19696c03b849642215527e59a2a6887 | 508 | pm | Perl | lib/VMOMI/ClusterDasAdvancedRuntimeInfoVmcpCapabilityInfo.pm | stumpr/p5-vmomi | e2571d72a1f552ddd0258ad289ec229d8d12a147 | [
"Apache-2.0"
]
| 1 | 2017-06-22T21:26:24.000Z | 2017-06-22T21:26:24.000Z | lib/VMOMI/ClusterDasAdvancedRuntimeInfoVmcpCapabilityInfo.pm | restump/p5-vmomi | e2571d72a1f552ddd0258ad289ec229d8d12a147 | [
"Apache-2.0"
]
| null | null | null | lib/VMOMI/ClusterDasAdvancedRuntimeInfoVmcpCapabilityInfo.pm | restump/p5-vmomi | e2571d72a1f552ddd0258ad289ec229d8d12a147 | [
"Apache-2.0"
]
| 1 | 2016-07-19T19:56:09.000Z | 2016-07-19T19:56:09.000Z | package VMOMI::ClusterDasAdvancedRuntimeInfoVmcpCapabilityInfo;
use parent 'VMOMI::DynamicData';
use strict;
use warnings;
our @class_ancestors = (
'DynamicData',
);
our @class_members = (
['storageAPDSupported', 'boolean', 0, ],
['storagePDLSupported', 'boolean', 0, ],
);
sub get_class_ancestors {
return @class_ancestors;
}
sub get_class_members {
my $class = shift;
my @super_members = $class->SUPER::get_class_members();
return (@super_members, @class_members);
}
1;
| 18.814815 | 63 | 0.694882 |
ed0aab58f04c7a995c9c6701682402047a3caad2 | 1,563 | t | Perl | t/Test-consistency05-A.t | quinot/zonemaster-engine | 6babc58b49e3e0875c96f676fa1b52a3630ec11c | [
"CC-BY-4.0"
]
| null | null | null | t/Test-consistency05-A.t | quinot/zonemaster-engine | 6babc58b49e3e0875c96f676fa1b52a3630ec11c | [
"CC-BY-4.0"
]
| 1 | 2018-06-29T15:14:02.000Z | 2018-06-29T15:14:02.000Z | t/Test-consistency05-A.t | quinot/zonemaster-engine | 6babc58b49e3e0875c96f676fa1b52a3630ec11c | [
"CC-BY-4.0"
]
| null | null | null | use Test::More;
BEGIN {
    use_ok( q{Zonemaster::Engine} );
    use_ok( q{Zonemaster::Engine::Test::Consistency} );
    use_ok( q{Zonemaster::Engine::Util} );
}
# Recorded DNS traffic for this case; replayed unless re-recording.
my $datafile = q{t/Test-consistency05-A.data};
if ( not $ENV{ZONEMASTER_RECORD} ) {
    die q{Stored data file missing} if not -r $datafile;
    # Replay the saved answers and forbid any real network traffic.
    Zonemaster::Engine::Nameserver->restore( $datafile );
    Zonemaster::Engine->profile->set( q{no_network}, 1 );
}
# Fake the parent-side delegation so the zone resolves from fixed glue.
Zonemaster::Engine->add_fake_delegation(
    'a.consistency05.exempelvis.se' => {
        'ns1.a.consistency05.exempelvis.se' => [ '46.21.97.97', '2a02:750:12:77::97' ],
        'ns2.a.consistency05.exempelvis.se' => [ '37.123.169.91', '2001:9b0:1:1c13::53' ],
    }
);
my $zone = Zonemaster::Engine->zone( q{a.consistency05.exempelvis.se} );
# Index the emitted log entries by message tag for the checks below.
my %res = map { $_->tag => $_ } Zonemaster::Engine::Test::Consistency->consistency05( $zone );
ok( !$res{CHILD_NS_FAILED}, q{should not emit CHILD_NS_FAILED} );
ok( !$res{NO_RESPONSE}, q{should not emit NO_RESPONSE} );
ok( !$res{CHILD_ZONE_LAME}, q{should not emit CHILD_ZONE_LAME} );
ok( !$res{IN_BAILIWICK_ADDR_MISMATCH}, q{should not emit IN_BAILIWICK_ADDR_MISMATCH} );
ok( !$res{OUT_OF_BAILIWICK_ADDR_MISMATCH}, q{should not emit OUT_OF_BAILIWICK_ADDR_MISMATCH} );
ok( !$res{EXTRA_ADDRESS_CHILD}, q{should not emit EXTRA_ADDRESS_CHILD} );
ok( $res{ADDRESSES_MATCH}, q{should emit ADDRESSES_MATCH} );
# With ZONEMASTER_RECORD set, capture live traffic for later replay.
if ( $ENV{ZONEMASTER_RECORD} ) {
    Zonemaster::Engine::Nameserver->save( $datafile );
}
done_testing;
| 38.121951 | 95 | 0.654511 |
ed3b767aa0b199a010454aab9934d671f78f0f3e | 1,033 | pm | Perl | KOST-Val/src/main/resources_notJar/resources/ExifTool-10.15/Perl/site/lib/XML/Parser/Style/Debug.pm | rebplu/KOST-VAL | 1537125425068d5faec3bc4f5263df715956ae76 | [
"BSD-3-Clause-No-Nuclear-Warranty"
]
| 1 | 2022-03-24T19:57:00.000Z | 2022-03-24T19:57:00.000Z | scoring/XML/Parser/Style/Debug.pm | BBN-E/serif | 1e2662d82fb1c377ec3c79355a5a9b0644606cb4 | [
"Apache-2.0"
]
| null | null | null | scoring/XML/Parser/Style/Debug.pm | BBN-E/serif | 1e2662d82fb1c377ec3c79355a5a9b0644606cb4 | [
"Apache-2.0"
]
| null | null | null | # $Id: Debug.pm,v 1.1 2003/07/27 16:07:49 matt Exp $
package XML::Parser::Style::Debug;
use strict;
sub Start {
my $expat = shift;
my $tag = shift;
print STDERR "@{$expat->{Context}} \\\\ (@_)\n";
}
sub End {
my $expat = shift;
my $tag = shift;
print STDERR "@{$expat->{Context}} //\n";
}
sub Char {
my $expat = shift;
my $text = shift;
$text =~ s/([\x80-\xff])/sprintf "#x%X;", ord $1/eg;
$text =~ s/([\t\n])/sprintf "#%d;", ord $1/eg;
print STDERR "@{$expat->{Context}} || $text\n";
}
sub Proc {
my $expat = shift;
my $target = shift;
my $text = shift;
my @foo = @{$expat->{Context}};
print STDERR "@foo $target($text)\n";
}
1;
__END__
=head1 NAME
XML::Parser::Style::Debug - Debug style for XML::Parser
=head1 SYNOPSIS
use XML::Parser;
my $p = XML::Parser->new(Style => 'Debug');
$p->parsefile('foo.xml');
=head1 DESCRIPTION
This just prints out the document in outline form to STDERR. Nothing special is
returned by parse.
=cut | 19.865385 | 80 | 0.569216 |
73dc6cefe83687fdf2ef2d2bc0d128285d827a28 | 6,412 | al | Perl | Apps/NO/ElectronicVATSubmission/app/src/Setup/ElectronicVATInstallation.Codeunit.al | hemisphera/ALAppExtensions | 74565f397471303585145d51f12df5596678244d | [
"MIT"
]
| null | null | null | Apps/NO/ElectronicVATSubmission/app/src/Setup/ElectronicVATInstallation.Codeunit.al | hemisphera/ALAppExtensions | 74565f397471303585145d51f12df5596678244d | [
"MIT"
]
| null | null | null | Apps/NO/ElectronicVATSubmission/app/src/Setup/ElectronicVATInstallation.Codeunit.al | hemisphera/ALAppExtensions | 74565f397471303585145d51f12df5596678244d | [
"MIT"
]
| null | null | null | codeunit 10681 "Electronic VAT Installation"
{
Subtype = Install;
var
AssistedSetupTxt: Label 'Set up an electronic VAT submission';
AssistedSetupDescriptionTxt: Label 'Connect to the ID-porten integration point and submit your VAT return to Skatteetaten.';
AssistedSetupHelpTxt: Label 'https://go.microsoft.com/fwlink/?linkid=2181211', Locked = true;
AuthenticationURLTxt: Label 'https://oidc.difi.no/idporten-oidc-provider', Locked = true;
ValidateVATReturnUrlLbl: Label 'https://idporten.api.skatteetaten.no/api/mva/grensesnittstoette/mva-melding/valider', Locked = true;
ExchangeIDPortenToAltinnUrlLbl: Label 'https://platform.altinn.no/authentication/api/v1/exchange/id-porten', Locked = true;
SubmissionEnvironmentUrlLbl: Label 'https://skd.apps.altinn.no/', Locked = true;
SubmissionAppUrlLbl: Label 'skd/mva-melding-innsending-v1/', Locked = true;
ElectronicVATLbl: Label 'ELEC VAT', Locked = true;
trigger OnInstallAppPerCompany()
var
AppInfo: ModuleInfo;
begin
NavApp.GetCurrentModuleInfo(AppInfo);
if (AppInfo.DataVersion() <> Version.Create('0.0.0.0')) then
exit;
RunExtensionSetup();
end;
[EventSubscriber(ObjectType::Codeunit, Codeunit::"Company-Initialize", 'OnCompanyInitialize', '', false, false)]
local procedure CompanyInitialize()
var
UpgradeTag: Codeunit "Upgrade Tag";
begin
RunExtensionSetup();
UpgradeTag.SetAllUpgradeTags();
end;
procedure RunExtensionSetup()
begin
InsertElectronicVATSetup();
UpdateVATReportSetup();
CreateVATReportsConfiguration();
ApplyEvaluationClassificationsForPrivacy();
end;
procedure InsertElectronicVATSetup()
var
ElecVATSetup: Record "Elec. VAT Setup";
OAuth20: Codeunit OAuth2;
RedirectUrl: Text;
begin
if ElecVATSetup.Get() then
exit;
ElecVATSetup.Init();
ElecVATSetup.Insert(true);
ElecVATSetup.Validate("OAuth Feature GUID", CreateGuid());
ElecVATSetup.Validate("Authentication URL", AuthenticationURLTxt);
OAuth20.GetDefaultRedirectURL(RedirectUrl);
ElecVATSetup.Validate("Redirect URL", CopyStr(RedirectUrl, 1, MaxStrLen(ElecVATSetup."Redirect URL")));
ElecVATSetup.Validate("Validate VAT Return Url", ValidateVATReturnUrlLbl);
ElecVATSetup.Validate("Exchange ID-Porten Token Url", ExchangeIDPortenToAltinnUrlLbl);
ElecVATSetup.Validate("Submission Environment URL", SubmissionEnvironmentUrlLbl);
ElecVATSetup.Validate("Submission App URL", SubmissionAppUrlLbl);
ElecVATSetup.Modify(true);
end;
local procedure UpdateVATReportSetup()
var
VATReportSetup: Record "VAT Report Setup";
begin
VATReportSetup.Get();
VATReportSetup.Validate("Report VAT Base", true);
VATReportSetup.Modify(true);
end;
local procedure CreateVATReportsConfiguration()
var
VATReportsConfiguration: Record "VAT Reports Configuration";
begin
if VATReportsConfiguration.Get(VATReportsConfiguration."VAT Report Type"::"VAT Return", ElectronicVATLbl) then
exit;
VATReportsConfiguration.Validate("VAT Report Type", VATReportsConfiguration."VAT Report Type"::"VAT Return");
VATReportsConfiguration.validate("VAT Report Version", ElectronicVATLbl);
VATReportsConfiguration.Validate("Suggest Lines Codeunit ID", Codeunit::"VAT Report Suggest Lines");
VATReportsConfiguration.Validate("Content Codeunit ID", Codeunit::"Elec. VAT Create Content");
VATReportsConfiguration.Validate("Submission Codeunit ID", Codeunit::"Elec. VAT Submit Return");
VATReportsConfiguration.Validate("Validate Codeunit ID", Codeunit::"Elec. VAT Validate Return");
VATReportsConfiguration.Validate("Response Handler Codeunit ID", Codeunit::"Elec. VAT Get Response");
VATReportsConfiguration.Insert(true);
end;
local procedure ApplyEvaluationClassificationsForPrivacy()
var
Company: Record Company;
VATCode: Record "VAT Code";
OAuth20Setup: Record "OAuth 2.0 Setup";
DataClassificationMgt: Codeunit "Data Classification Mgt.";
begin
Company.Get(CompanyName());
if not Company."Evaluation Company" then
exit;
DataClassificationMgt.SetTableFieldsToNormal(Database::"Elec. VAT Setup");
DataClassificationMgt.SetFieldToNormal(Database::"VAT Code", VATCode.FieldNo("VAT Rate For Reporting"));
DataClassificationMgt.SetFieldToNormal(Database::"VAT Code", VATCode.FieldNo("Report VAT Rate"));
DataClassificationMgt.SetFieldToNormal(Database::"OAuth 2.0 Setup", OAuth20Setup.FieldNo("Altinn Token"));
end;
[EventSubscriber(ObjectType::Codeunit, Codeunit::"Guided Experience", 'OnRegisterAssistedSetup', '', true, true)]
local procedure InsertIntoAssistedSetup()
var
ElecVATSetup: Record "Elec. VAT Setup";
GuidedExperience: Codeunit "Guided Experience";
AssistedSetupGroup: Enum "Assisted Setup Group";
VideoCategory: Enum "Video Category";
begin
GuidedExperience.InsertAssistedSetup(AssistedSetupTxt, CopyStr(AssistedSetupTxt, 1, 50), AssistedSetupDescriptionTxt, 5, ObjectType::Page, Page::"Elec. VAT Submission Wizard", AssistedSetupGroup::Connect,
'', VideoCategory::ReadyForBusiness, AssistedSetupHelpTxt);
if ElecVATSetup.Get() and ElecVATSetup.Enabled then
GuidedExperience.CompleteAssistedSetup(ObjectType::Page, Page::"Elec. VAT Submission Wizard");
end;
[EventSubscriber(ObjectType::Codeunit, Codeunit::"Guided Experience", 'OnAfterRunAssistedSetup', '', true, true)]
local procedure UpdateAssistedSetupStatus(ExtensionID: Guid; ObjectType: ObjectType; ObjectID: Integer)
var
ElecVATSetup: Record "Elec. VAT Setup";
GuidedExperience: Codeunit "Guided Experience";
BaseAppID: Codeunit "BaseApp ID";
begin
if ExtensionId <> BaseAppID.Get() then
exit;
if ObjectID <> Page::"Elec. VAT Submission Wizard" then
exit;
if ElecVATSetup.Get() and ElecVATSetup.Enabled then
GuidedExperience.CompleteAssistedSetup(ObjectType, ObjectID);
end;
}
| 46.463768 | 212 | 0.698846 |
ed3d36d5b5005f20152f3fc35ff2a3daf20022e2 | 10,110 | pm | Perl | modules/Bio/EnsEMBL/Funcgen/PipeConfig/PeakCalling/OtarBackbone_conf.pm | duartemolha/ensembl-funcgen | 24f4d3c6fe11b2e14472eec151198aa4e831fd8d | [
"Apache-2.0"
]
| 8 | 2016-03-30T09:18:17.000Z | 2021-01-27T15:34:29.000Z | modules/Bio/EnsEMBL/Funcgen/PipeConfig/PeakCalling/OtarBackbone_conf.pm | duartemolha/ensembl-funcgen | 24f4d3c6fe11b2e14472eec151198aa4e831fd8d | [
"Apache-2.0"
]
| 22 | 2015-02-24T14:40:31.000Z | 2019-02-13T10:17:38.000Z | modules/Bio/EnsEMBL/Funcgen/PipeConfig/PeakCalling/OtarBackbone_conf.pm | duartemolha/ensembl-funcgen | 24f4d3c6fe11b2e14472eec151198aa4e831fd8d | [
"Apache-2.0"
]
| 19 | 2015-02-06T16:49:16.000Z | 2021-12-08T13:19:13.000Z | package Bio::EnsEMBL::Funcgen::PipeConfig::PeakCalling::OtarBackbone_conf;
use strict;
use warnings;
use base ('Bio::EnsEMBL::Hive::PipeConfig::HiveGeneric_conf');
use Bio::EnsEMBL::Hive::PipeConfig::HiveGeneric_conf;
sub beekeeper_extra_cmdline_options {
    my ($self) = @_;

    # Always pass the registry file; keep the beekeeper alive with a
    # short sleep and let workers respecialize between analyses.
    my $registry_file = $self->o('reg_conf');
    return "-reg_conf $registry_file -keep_alive -can_respecialize 1 -sleep 0.2";
}
sub default_options {
    my ($self) = @_;

    # Inherit every Hive default, overriding only the pipeline name.
    my %options = %{ $self->SUPER::default_options };
    $options{pipeline_name} = 'otar';
    return \%options;
}
sub pipeline_wide_parameters {
    my ($self) = @_;

    # Expose the configured pipeline name to every analysis on top of
    # the standard Hive-wide parameters.
    my %parameters = %{ $self->SUPER::pipeline_wide_parameters };
    $parameters{pipeline_name} = $self->o('pipeline_name');
    return \%parameters;
}
sub pipeline_analyses {
    my ($self) = @_;

    my $DUMMY = 'Bio::EnsEMBL::Hive::RunnableDB::Dummy';

    # Backbone stages in execution order.  Each stage expands into a
    # 'backbone_fire_<stage>' analysis that fans out ('1->A') to a
    # matching 'start_<stage>' seed analysis -- which downstream
    # sub-pipelines attach to -- and funnels ('A->1') into the next
    # stage's fire analysis once everything under the semaphore is done.
    my @stage_names = qw(
        pre_pipeline_checks
        populate_read_file_stats
        fastqc
        fastqc_report
        alignments
        alignment_hc
        write_bigwig
        alignment_qc
        peak_calling
        peak_calling_hc
        frip
        cleanup
        quality_check_reports
        segmentation
        segmentation_statistics
        regulatory_build_hc
        regulatory_build_statistics
        regulatory_build_stable_id_mapping
        stable_id_mapping_hc
        ftp_export
    );

    # Entry point: seeds the first backbone stage.
    my @analyses = (
        {   -logic_name => 'start',
            -module     => $DUMMY,
            -flow_into  => { MAIN => 'backbone_fire_' . $stage_names[0] },
        },
    );

    for my $index ( 0 .. $#stage_names ) {
        my $stage  = $stage_names[$index];
        my $funnel = $index < $#stage_names
            ? 'backbone_fire_' . $stage_names[ $index + 1 ]
            : 'backbone_pipeline_finished';

        push @analyses,
            {   -logic_name => "backbone_fire_$stage",
                -module     => $DUMMY,
                -flow_into  => {
                    '1->A' => "start_$stage",
                    'A->1' => $funnel,
                },
            },
            {   -logic_name => "start_$stage",
                -module     => $DUMMY,
            };
    }

    # The last seed, start_ftp_export, additionally kicks off the export
    # analyses.
    $analyses[-1]{-flow_into} = { MAIN => 'backbone_fire_exports' };

    push @analyses,
        {   -logic_name => 'backbone_fire_exports',
            -module     => $DUMMY,
        },
        {   -logic_name => 'backbone_pipeline_finished',
            -module     => $DUMMY,
        };

    return \@analyses;
}
1;
| 34.271186 | 93 | 0.481503 |
ed23e5e141ce5b7a93cc437d19ccce398f819501 | 1,015 | pm | Perl | lib/MusicBrainz/Server/Entity/URL/TheDanceGypsy.pm | qls0ulp/musicbrainz-server | ebe8a45bf6f336352cd5c56e2e825d07679c0e45 | [
"BSD-2-Clause"
]
| null | null | null | lib/MusicBrainz/Server/Entity/URL/TheDanceGypsy.pm | qls0ulp/musicbrainz-server | ebe8a45bf6f336352cd5c56e2e825d07679c0e45 | [
"BSD-2-Clause"
]
| null | null | null | lib/MusicBrainz/Server/Entity/URL/TheDanceGypsy.pm | qls0ulp/musicbrainz-server | ebe8a45bf6f336352cd5c56e2e825d07679c0e45 | [
"BSD-2-Clause"
]
| 1 | 2021-02-24T13:14:25.000Z | 2021-02-24T13:14:25.000Z | package MusicBrainz::Server::Entity::URL::TheDanceGypsy;
use Moose;
extends 'MusicBrainz::Server::Entity::URL';
with 'MusicBrainz::Server::Entity::URL::Sidebar';
sub sidebar_name {
my $self = shift;
return "The Dance Gypsy";
}
__PACKAGE__->meta->make_immutable;
no Moose;
1;
=head1 COPYRIGHT
Copyright (C) 2015 MetaBrainz Foundation
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
=cut
| 27.432432 | 68 | 0.771429 |
ed327d968f3eaadfe33cb4d2ca4f31818236a4cc | 132 | t | Perl | tests/fails/overload4.t | nie-game/terra | 36a544595e59c6066ab9e5b5fa923b82b4be0c41 | [
"MIT"
]
| 1,575 | 2015-01-01T13:40:05.000Z | 2019-10-24T22:08:08.000Z | tests/fails/overload4.t | nie-game/terra | 36a544595e59c6066ab9e5b5fa923b82b4be0c41 | [
"MIT"
]
| 304 | 2015-01-02T22:35:30.000Z | 2019-10-23T20:43:18.000Z | tests/fails/overload4.t | nie-game/terra | 36a544595e59c6066ab9e5b5fa923b82b4be0c41 | [
"MIT"
]
| 150 | 2015-01-06T07:18:01.000Z | 2019-10-24T22:08:10.000Z | if not require("fail") then return end
terra foo(a : int)
end
terra foo(a : &int8)
end
terra doit()
var a = foo(3)
end
doit()
| 8.8 | 38 | 0.636364 |
73d4974346dae583079bdf6ab405401af6e40f8a | 667 | t | Perl | t/02_graceful.t | syohex/Gazelle | 1c73070c0d4aeb5fc4a3a127ebb36df63711fec0 | [
"Artistic-1.0"
]
| null | null | null | t/02_graceful.t | syohex/Gazelle | 1c73070c0d4aeb5fc4a3a127ebb36df63711fec0 | [
"Artistic-1.0"
]
| null | null | null | t/02_graceful.t | syohex/Gazelle | 1c73070c0d4aeb5fc4a3a127ebb36df63711fec0 | [
"Artistic-1.0"
]
| null | null | null | use strict;
use warnings;
use HTTP::Request::Common;
use Plack::Test;
use Test::More;

# Exercise the Gazelle server's graceful-shutdown behaviour: run the PSGI
# app under a real server process and send it SIGTERM while a request is
# still in flight.
$Plack::Test::Impl = 'Server';
$ENV{PLACK_SERVER} = 'Gazelle';

test_psgi
  app => sub {
    my $env = shift;
    # Fork a child that waits one second and then TERMs the server (its
    # parent).  Note that this unless-block is entered both in the child
    # ($pid == 0) and on fork failure ($pid undefined), hence the
    # defined() check inside.
    unless (my $pid = fork) {
      die "fork failed:$!"
        unless defined $pid;
      # child process
      sleep 1;
      kill 'TERM', getppid();
      exit 0;
    }
    # Parent (the server worker): sleep past the TERM so the response
    # below is only delivered if the shutdown is graceful.
    sleep 5;
    return [ 200, [ 'Content-Type' => 'text/plain' ], [ "hello world" ] ];
  },
  client => sub {
    my $cb = shift;
    my $res = $cb->(GET "/");
    # The full body must still arrive despite the TERM sent mid-request.
    is $res->content, "hello world";
  };

done_testing;
ed085805bffae38f471065bfc0669c476e356ace | 869 | pm | Perl | lib/Albatross/SocialNetwork/Schema/Result/User.pm | jonathancast/social-network-perl | dd382a71302353689ce8c29c78ef24b320579d56 | [
"Apache-2.0"
]
| null | null | null | lib/Albatross/SocialNetwork/Schema/Result/User.pm | jonathancast/social-network-perl | dd382a71302353689ce8c29c78ef24b320579d56 | [
"Apache-2.0"
]
| null | null | null | lib/Albatross/SocialNetwork/Schema/Result/User.pm | jonathancast/social-network-perl | dd382a71302353689ce8c29c78ef24b320579d56 | [
"Apache-2.0"
]
| null | null | null | use Moops;
# DBIx::Class result class for the user table.  EncodedColumn is listed
# ahead of Core so the 'password' column below is stored as a digest
# rather than plain text.
class Albatross::SocialNetwork::Schema::Result::User extends DBIx::Class::EncodedColumn, DBIx::Class::Core {
  use DBIx::Class::Candy -autotable => v1;

  # Auto-incrementing surrogate primary key.
  primary_column id => {
    data_type => 'int',
    is_auto_increment => 1,
  };

  # Login name; uniqueness enforced by the schema.
  unique_column login_id => {
    data_type => 'text',
  };

  # Password stored as a salted SHA-512 hex digest (salt length 12) via
  # DBIx::Class::EncodedColumn; check_password() compares a candidate
  # against the stored digest.
  column password => {
    data_type => 'text',
    encode_column => 1,
    encode_class => 'Digest',
    encode_args => { algorithm => 'SHA-512', format => 'hex', salt_length => 12, },
    encode_check_method => 'check_password',
  };

  # Return a plain hashref mapping each requested accessor name to its
  # value for this row.
  method as_hash(@cols) {
    return { map { $_ => $self->$_ } @cols };
  }

  # Friendships initiated by this user, and friendships pointing at them.
  has_many friends => 'Albatross::SocialNetwork::Schema::Result::UserFriend' => 'user';
  has_many admirers => 'Albatross::SocialNetwork::Schema::Result::UserFriend' => 'friend';
}

1;
ed0967b57d2c4943d2fb312b6ba23e7bd8123c8d | 8,066 | pm | Perl | modules/Bio/EnsEMBL/DBSQL/MiscSetAdaptor.pm | thibauthourlier/ensembl | 5c08d9089451b3ed8e39b5a5a3d2232acb09816c | [
"Apache-2.0"
]
| null | null | null | modules/Bio/EnsEMBL/DBSQL/MiscSetAdaptor.pm | thibauthourlier/ensembl | 5c08d9089451b3ed8e39b5a5a3d2232acb09816c | [
"Apache-2.0"
]
| null | null | null | modules/Bio/EnsEMBL/DBSQL/MiscSetAdaptor.pm | thibauthourlier/ensembl | 5c08d9089451b3ed8e39b5a5a3d2232acb09816c | [
"Apache-2.0"
]
| null | null | null | =head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2018] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=cut
=head1 NAME
Bio::EnsEMBL::DBSQL::MiscSetAdaptor - Provides database interaction for
Bio::EnsEMBL::MiscSet objects.
=head1 SYNOPSIS
my $msa = $registry->get_adaptor( 'Human', 'Core', 'MiscSet' );
my $misc_set = $msa->fetch_by_dbID(1234);
$misc_set = $msa->fetch_by_code('clone');
=head1 DESCRIPTION
This class provides database interactivity for MiscSet objects.
MiscSets are used to classify MiscFeatures into groups.
=head1 METHODS
=cut
package Bio::EnsEMBL::DBSQL::MiscSetAdaptor;
use strict;
use warnings;
use Bio::EnsEMBL::MiscSet;
use Bio::EnsEMBL::DBSQL::BaseAdaptor;
use Bio::EnsEMBL::Utils::Exception qw(throw warning);
use vars qw(@ISA);
@ISA = qw(Bio::EnsEMBL::DBSQL::BaseAdaptor);
=head2 new
Arg [...] : Superclass args. See Bio::EnsEMBL::DBSQL::BaseAdaptor
Description: Instantiates a Bio::EnsEMBL::DBSQL::MiscSetAdaptor and
caches the contents of the MiscSet table.
Returntype : Bio::EnsEMBL::MiscSet
Exceptions : none
Caller : MiscFeatureAdaptor
Status : Stable
=cut
# Construct the adaptor and pre-warm its lookup caches.  The misc_set
# table is tiny, so reading it once up front removes the need for
# repeated queries or joins later on.
sub new {
  my ( $class, @args ) = @_;

  my $self = $class->SUPER::new(@args);

  # Empty caches first, then fill both via a single table scan.
  $self->{$_} = {} for ( '_id_cache', '_code_cache' );
  $self->fetch_all();

  return $self;
}
=head2 fetch_all
Arg [1] : none
Example : foreach my $ms (@{$msa->fetch_all()}) {
print $ms->code(), ' ', $ms->name(), "\n";
}
Description: Retrieves every MiscSet defined in the DB.
NOTE: In a multi-species database, this method will
return all the entries matching the search criteria, not
just the ones associated with the current species.
Returntype : listref of Bio::EnsEMBL::MiscSets
Exceptions : none
Caller : general
Status : Stable
=cut
# Read every row of misc_set, build a Bio::EnsEMBL::MiscSet for each,
# refresh both internal caches, and return a listref of all sets.
sub fetch_all {
  my ($self) = @_;

  my $sth = $self->prepare
    ('SELECT misc_set_id, code, name, description, max_length FROM misc_set');
  $sth->execute();

  my @sets;
  while ( my ( $dbID, $code, $name, $desc, $max_len ) = $sth->fetchrow_array() ) {
    my $set = Bio::EnsEMBL::MiscSet->new(
      -DBID            => $dbID,
      -ADAPTOR         => $self,
      -CODE            => $code,
      -NAME            => $name,
      -DESCRIPTION     => $desc,
      -LONGEST_FEATURE => $max_len
    );

    # Keep both caches in sync with what the database holds.
    $self->{'_id_cache'}->{$dbID}         = $set;
    $self->{'_code_cache'}->{ lc($code) } = $set;

    push @sets, $set;
  }
  $sth->finish();

  return \@sets;
}
=head2 fetch_by_dbID
Arg [1] : int $dbID
The internal identifier of the misc set to retrieve
Example : my $ms = $msa->fetch_by_dbID($dbID);
Description: Retrieves a misc set via its internal identifier
Returntype : Bio::EnsEMBL::MiscSet
Exceptions : none
Caller : general
Status : Stable
=cut
# Look a MiscSet up by internal identifier.  A cache miss may simply
# mean the cache is stale, so the whole (small) table is re-read before
# the final lookup; undef is returned when the id really is unknown.
sub fetch_by_dbID {
  my ( $self, $dbID ) = @_;

  $self->fetch_all() unless $self->{'_id_cache'}->{$dbID};

  return $self->{'_id_cache'}->{$dbID};
}
=head2 fetch_by_code
Arg [1] : string $code
The unique code of the MiscSet to retrieve
Example : my $ms = $msa->fetch_by_code('clone');
Description: Retrieves a MiscSet via its code
Returntype : Bio::EnsEMBL::MiscSet
Exceptions : none
Caller : general
Status : Stable
=cut
# Look a MiscSet up by its (case-insensitive) code.  On a miss the
# whole table is re-read in case the cache is stale; undef is returned
# when the code really is unknown.
sub fetch_by_code {
  my ( $self, $code ) = @_;

  my $key = lc($code);
  $self->fetch_all() unless $self->{'_code_cache'}->{$key};

  return $self->{'_code_cache'}->{$key};
}
=head2 store
Arg [1]    : list of MiscSets @misc_sets
Example : $misc_set_adaptor->store(@misc_sets);
Description: Stores a list of MiscSets in the database, and sets the
dbID and adaptor attributes of the stored sets.
Returntype : none
Exceptions : throw on incorrect arguments
warning if a feature is already stored in this database
Caller : MiscFeatureAdaptor::store
Status : Stable
=cut
sub store {
  my $self = shift;
  my @misc_sets = @_;

  # we use 'insert ignore' so that inserts can occur safely on the farm
  # otherwise 2 processes could try to insert at the same time and one
  # would fail
  my $insert_ignore = $self->insert_ignore_clause();
  my $sth = $self->prepare(
    qq{${insert_ignore} INTO misc_set (
code,
name,
description,
max_length
) VALUES (?, ?, ?, ?)
});

  my $db = $self->db();

 SET:
  foreach my $ms (@misc_sets) {
    # Only genuine MiscSet objects may be stored.
    if(!ref($ms) || !$ms->isa('Bio::EnsEMBL::MiscSet')) {
      throw("List of MiscSet arguments expected.");
    }

    # Skip (with a warning) anything already persisted in this database.
    if($ms->is_stored($db)) {
      warning("MiscSet [".$ms->dbID."] is already stored in this database.");
      next SET;
    }

    $sth->bind_param(1,$ms->code,SQL_VARCHAR);
    $sth->bind_param(2,$ms->name,SQL_VARCHAR);
    $sth->bind_param(3,$ms->description,SQL_LONGVARCHAR);
    $sth->bind_param(4,$ms->longest_feature,SQL_INTEGER);

    my $num_inserted = $sth->execute();

    my $dbID;

    if($num_inserted == 0) {
      # insert failed because set with this code already exists
      # (the 'insert ignore' swallowed the duplicate-key error), so
      # recover the existing row's id instead.
      my $sth2 = $self->prepare("SELECT misc_set_id from misc_set " .
                                "WHERE code = ?");
      $sth2->bind_param(1,$ms->code,SQL_VARCHAR);
      $sth2->execute();

      ($dbID) = $sth2->fetchrow_array();

      # Exactly one row must match; anything else suggests a permissions
      # or consistency problem.
      if($sth2->rows() != 1) {
        throw("Could not retrieve or store MiscSet, code=[".$ms->code."]\n".
              "Wrong database user/permissions?");
      }
    } else {
      $dbID = $self->last_insert_id('misc_set_id', undef, 'misc_set');
    }

    # Mark the object as stored and keep the caches consistent.
    $ms->dbID($dbID);
    $ms->adaptor($self);

    # update the internal caches
    $self->{'_id_cache'}->{$dbID} = $ms;
    $self->{'_code_cache'}->{lc($ms->code())} = $ms;
  }

  return;
}
=head2 update
Arg [1] : Bio::EnsEMBL::MiscSet $miscset
Example : $adaptor->update($miscset)
Description: Updates this misc_set in the database
Returntype : int 1 if update is performed, undef if it is not
Exceptions : throw if arg is not an misc_set object
Caller : ?
Status : Stable
=cut
# Update the database row for an already-stored MiscSet and refresh the
# internal caches.
#
# Arg [1]  : Bio::EnsEMBL::MiscSet
# Returns  : 1 when the update was performed, undef when the object is
#            not stored in this database (matches the POD contract).
# Throws   : when the argument is not a Bio::EnsEMBL::MiscSet.
sub update {
  my $self = shift;
  my $m    = shift;

  if (!ref($m) || !$m->isa('Bio::EnsEMBL::MiscSet')) {
    throw("Expected Bio::EnsEMBL::MiscSet argument.");
  }

  # Nothing to update if the object was never stored here.
  if(!$m->is_stored($self->db())) {
    return undef;
  }

  my $sth = $self->prepare("UPDATE misc_set ".
                           "SET code =?, name =?, description = ?, max_length = ? ".
                           "WHERE misc_set_id = ?");
  $sth->bind_param(1,$m->code,SQL_VARCHAR);
  $sth->bind_param(2,$m->name,SQL_VARCHAR);
  $sth->bind_param(3,$m->description,SQL_VARCHAR);
  $sth->bind_param(4,$m->longest_feature,SQL_INTEGER);
  $sth->bind_param(5,$m->dbID,SQL_INTEGER);
  $sth->execute();
  $sth->finish();

  # update the internal caches
  $self->{'_id_cache'}->{$m->dbID} = $m;
  $self->{'_code_cache'}->{lc($m->code())} = $m;

  # The POD documents "int 1 if update is performed"; previously the sub
  # fell off the end and returned the last cache assignment instead.
  return 1;
}
1;
| 24.36858 | 100 | 0.632284 |
73e4f24645b9ffe611b972978874c21f4528ae72 | 584 | pm | Perl | t/lib/tests/TestsFor/Emitria/Schema/Result/User.pm | jonathanstowe/Emitria | 4bb700de0c4dc931e2dae24d7fb177d83e5a4514 | [
"Artistic-2.0"
]
| null | null | null | t/lib/tests/TestsFor/Emitria/Schema/Result/User.pm | jonathanstowe/Emitria | 4bb700de0c4dc931e2dae24d7fb177d83e5a4514 | [
"Artistic-2.0"
]
| null | null | null | t/lib/tests/TestsFor/Emitria/Schema/Result/User.pm | jonathanstowe/Emitria | 4bb700de0c4dc931e2dae24d7fb177d83e5a4514 | [
"Artistic-2.0"
]
| null | null | null | package TestsFor::Emitria::Schema::Result::User;
use strict;
use warnings;
use lib qw(t/lib);
use Test::Class::Moose;
with qw(
Emitria::Test::Role::Constructor
Emitria::Test::Role::Package
);
use Emitria::Schema::Result::User;
sub test_startup
{
my ( $test ) = @_;
$test->package_name('Emitria::Schema::Result::User');
$test->next::method();
}
sub test_setup
{
my ( $test ) = @_;
$test->next::method();
}
sub test_teardown
{
my ( $test ) = @_;
$test->next::method();
}
sub test_shutdown
{
my ( $test ) = @_;
$test->next::method();
}
1;
| 12.425532 | 54 | 0.601027 |
ed416254064e458fb6361d4dba1603e374afadbc | 1,440 | pm | Perl | storage/ibm/storwize/ssh/plugin.pm | xdrive05/centreon-plugins | 8227ba680fdfd2bb0d8a806ea61ec1611c2779dc | [
"Apache-2.0"
]
| null | null | null | storage/ibm/storwize/ssh/plugin.pm | xdrive05/centreon-plugins | 8227ba680fdfd2bb0d8a806ea61ec1611c2779dc | [
"Apache-2.0"
]
| null | null | null | storage/ibm/storwize/ssh/plugin.pm | xdrive05/centreon-plugins | 8227ba680fdfd2bb0d8a806ea61ec1611c2779dc | [
"Apache-2.0"
]
| null | null | null | #
# Copyright 2020 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package storage::ibm::storwize::ssh::plugin;
use strict;
use warnings;
use base qw(centreon::plugins::script_simple);
sub new {
  my ($class, %options) = @_;
  my $self = $class->SUPER::new(package => __PACKAGE__, %options);
  bless $self, $class;

  # Plugin version and the modes it exposes: each mode name maps to the
  # module that implements it.
  $self->{version} = '1.0';
  %{$self->{modes}} = (
    'components' => 'storage::ibm::storwize::ssh::mode::hardware',
    'eventlog'   => 'storage::ibm::storwize::ssh::mode::eventlog',
    'pool-usage' => 'storage::ibm::storwize::ssh::mode::poolusage',
  );

  return $self;
}
1;
__END__
=head1 PLUGIN DESCRIPTION
Check IBM Storwize (v3700, v5000, v7000, SAN Volume Controller) in SSH.
=cut
| 28.235294 | 74 | 0.693056 |
73e7f7ae005b3cf229be58b5477fabe824c3ff72 | 11,024 | pm | Perl | hxnyclassifierMP/pipelines/lib/file/Chunk.pm | VirusBRC/Clade_Classification | 61727e6da624ff241f702cbe8fb408c263d39b79 | [
"MIT"
]
| 6 | 2019-07-19T05:34:16.000Z | 2021-05-29T20:31:09.000Z | hxnyclassifierMP/pipelines/lib/file/Chunk.pm | DamieFC/Clade_Classification | c2f1cc552675bc34019b346d5b816dff70484abd | [
"MIT"
]
| 1 | 2020-07-27T05:22:13.000Z | 2020-07-27T05:22:13.000Z | hxnyclassifierMP/pipelines/lib/file/Chunk.pm | DamieFC/Clade_Classification | c2f1cc552675bc34019b346d5b816dff70484abd | [
"MIT"
]
| 1 | 2020-12-22T23:30:31.000Z | 2020-12-22T23:30:31.000Z | package file::Chunk;
################################################################################
#
# Required Modules
#
################################################################################
use strict;
use FileHandle;
use Pod::Usage;
use util::Cmd;
use util::Constants;
use util::PathSpecifics;
use file::ErrMsgs;
use xml::Types;
use fields qw (
chunk_index
chunk_prefix
cmds
directory
error_mgr
file_index
file_kind
file_type
files
lines
line_separator
previous_line_separator
size
source_fh
source_file
serializer
);
################################################################################
#
# Constants
#
################################################################################
###
### Size for all file as a single chunk
###
sub ALL_FILE { return '_ALL_'; }
###
### Error Category
###
sub ERR_CAT { return file::ErrMsgs::CHUNK_CAT; }
################################################################################
#
# Public Methods
#
################################################################################
sub new {
  my file::Chunk $this = shift;
  my ( $file_type, $size, $directory, $error_mgr ) = @_;

  # Build the fields-based object unless invoked on an existing instance.
  $this = fields::new($this) unless ref($this);

  $this->{chunk_index}             = 1;                            # next chunk number (1-based)
  $this->{chunk_prefix}            = util::Constants::EMPTY_STR;   # file-name prefix (setChunkPrefix)
  $this->{cmds}                    = new util::Cmd($error_mgr);    # shell-command runner
  $this->{directory}               = getPath($directory);          # where chunk files are written
  $this->{error_mgr}               = $error_mgr;
  $this->{file_index}              = {};                           # chunk file -> zero-padded index
  $this->{file_type}               = $file_type;                   # extension used in chunk names
  $this->{files}                   = [];                           # chunk files written so far
  $this->{line_separator}          = util::Constants::NEWLINE;     # record separator for reading/writing
  $this->{lines}                   = [];                           # buffered lines for the current chunk
  $this->{previous_line_separator} = util::Constants::NEWLINE;     # saved $/ restored on close
  $this->{size}                    = $size;                        # chunk size, or the ALL_FILE sentinel
  $this->{source_fh}               = new FileHandle;
  $this->{source_file}             = undef;
  $this->{serializer}              = undef;

  return $this;
}
# Abstract method: subclasses must override this to split the source
# stream into chunk files.  The base implementation only emits a debug
# notice so an un-overridden call is visible in the logs.
sub chunkFile {
  my ($self) = @_;

  $self->{error_mgr}->printDebug("Abstract Method file::Chunk::chunkFile");
}
################################################################################
#
# Setter Methods
#
################################################################################
# Attach the optional serializer object, later retrievable via serializer().
sub setSerializer {
  my ( $self, $serializer ) = @_;

  $self->{serializer} = $serializer;
}
# Set the chunk size.  Valid values are a positive integer (entities per
# chunk) or the ALL_FILE sentinel ('_ALL_'), which makes the whole source
# file a single chunk; anything else aborts the program via error_mgr.
sub setSize {
  my file::Chunk $this = shift;
  my ($size) = @_;

  $this->{error_mgr}->exitProgram( ERR_CAT, 5, [$size],
    util::Constants::EMPTY_LINE($size)                # undefined/empty
      || ( $size !~ /^\d+$/ && $size ne ALL_FILE )    # non-numeric, not the sentinel
      || ( $size =~ /^\d+$/ && $size == 0 ) );        # numeric but zero

  $this->{size} = $size;
}
# Set the prefix used when naming chunk files.  An undef argument is
# ignored, preserving the current prefix (initially the empty string).
sub setChunkPrefix {
  my ( $self, $chunk_prefix ) = @_;

  return unless defined $chunk_prefix;
  $self->{chunk_prefix} = $chunk_prefix;
}
# Set the index to use for the next chunk file.  The value must be a
# positive integer; the program aborts otherwise.  Defaults to 1 in the
# constructor.
sub setChunkIndex {
  my file::Chunk $this = shift;
  my ($chunk_index) = @_;

  $this->{error_mgr}->exitProgram( ERR_CAT, 6, [$chunk_index],
    !defined($chunk_index) || $chunk_index !~ /^\d+$/ || $chunk_index == 0 );

  $this->{chunk_index} = $chunk_index;
}
# Set the record separator used when reading the source file (via $/) and
# when writing buffered lines.  Must be defined; aborts otherwise.
sub setLineSeparator {
  my file::Chunk $this = shift;
  my ($line_separator) = @_;

  $this->{error_mgr}->exitProgram( ERR_CAT, 10, [], !defined($line_separator) );

  $this->{line_separator} = $line_separator;
}
sub setSourceFile {
  my file::Chunk $this = shift;
  my ($file) = @_;
  ###############################
  ### Re-implementable Method ###
  ###############################

  # Normalise the path and classify the file by its extension so that
  # the right decompression pipeline can be chosen below.
  $this->{source_file} = getPath($file);
  $this->{file_kind} = undef;
  if ( $this->{source_file} =~ /(\.gz|\.Z)$/ ) {
    $this->{file_kind} = xml::Types::GZIP_FILE_TYPE;
  }
  elsif ( $this->{source_file} =~ /(\.zip)$/ ) {
    $this->{file_kind} = xml::Types::ZIP_FILE_TYPE;
  }
  else {
    $this->{file_kind} = xml::Types::PLAIN_FILE_TYPE;
  }

  ###
  ### Open the file only when the size is not the whole file; in the
  ### ALL_FILE case writeChunk() copies the file without reading it here.
  ###
  return if ( $this->{size} eq ALL_FILE );

  # Override the global input-record separator $/ for line-wise reading,
  # remembering the previous value so closeSourceFile() can restore it.
  if ( $/ ne $this->{line_separator} ) {
    $this->{previous_line_separator} = $/;
    $/ = $this->{line_separator};
  }

  # Open the stream, decompressing through a pipe when necessary; any
  # open failure aborts the program via error_mgr.
  if ( $this->{file_kind} eq xml::Types::GZIP_FILE_TYPE ) {
    $this->{error_mgr}->exitProgram(
      ERR_CAT, 3,
      [ $this->{source_file} ],
      !$this->{source_fh}->open( 'gunzip -c ' . $this->{source_file} . '|' )
    );
  }
  elsif ( $this->{file_kind} eq xml::Types::ZIP_FILE_TYPE ) {
    $this->{error_mgr}->exitProgram( ERR_CAT, 3, [ $this->{source_file} ],
      !$this->{source_fh}
        ->open( 'cat ' . $this->{source_file} . ' | gunzip -c |' ) );
  }
  else {
    $this->{error_mgr}->exitProgram(
      ERR_CAT, 4,
      [ $this->{source_file} ],
      !$this->{source_fh}->open( $this->{source_file}, '<' )
    );
  }
}
# Close the source file handle (a no-op when nothing is open) and restore
# the global input-record separator $/ that setSourceFile() may have
# overridden.
#
# NOTE: the original declaration carried a '($)' prototype; prototypes
# are ignored on method calls and only distort direct calls, so it has
# been dropped.  The calling convention is unchanged.
sub closeSourceFile {
  my file::Chunk $this = shift;
  ###############################
  ### Re-implementable Method ###
  ###############################
  return if ( !defined( $this->{source_fh}->fileno ) );
  $this->{source_fh}->close;

  # Restore $/ only if setSourceFile() actually changed it.
  return if ( $/ eq $this->{previous_line_separator} );
  $/ = $this->{previous_line_separator};
  $this->{previous_line_separator} = util::Constants::NEWLINE;
}
sub writeChunk {
  my file::Chunk $this = shift;

  # Nothing to write: in line-buffered mode an empty buffer means no chunk.
  return if ( $this->{size} ne ALL_FILE
    && @{ $this->{lines} } == 0 );

  # The target directory must already exist.
  $this->{error_mgr}->exitProgram(
    ERR_CAT, 7,
    [ $this->{directory} ],
    !-e $this->{directory} || !-d $this->{directory}
  );

  # Zero-pad the chunk index to four digits (indexes >= 10000 are left
  # unpadded) and advance the counter for the next chunk.
  my $chunk_index = $this->{chunk_index};
  if    ( $chunk_index < 10 )   { $chunk_index = '000' . $chunk_index; }
  elsif ( $chunk_index < 100 )  { $chunk_index = '00' . $chunk_index; }
  elsif ( $chunk_index < 1000 ) { $chunk_index = '0' . $chunk_index; }
  $this->{chunk_index}++;

  # <directory>/<chunk_prefix>.<index>.<file_type>.gz
  my $chunk_file = join( util::Constants::SLASH,
    $this->{directory},
    join( util::Constants::DOT,
      $this->{chunk_prefix}, $chunk_index, $this->{file_type}, 'gz'
    )
  );
  push( @{ $this->{files} }, $chunk_file );
  $this->{file_index}->{$chunk_file} = $chunk_index;

  if ( $this->{size} eq ALL_FILE ) {
    my $source_file = $this->{source_file};
    ###
    ### Chunk is the whole file: copy it if already compressed,
    ### otherwise gzip it into place.
    ###
    $this->{error_mgr}->exitProgram( ERR_CAT, 11, [$chunk_file],
      util::Constants::EMPTY_LINE($source_file) );
    my $cmd = undef;
    if ( $this->{file_kind} eq xml::Types::GZIP_FILE_TYPE
      || $this->{file_kind} eq xml::Types::ZIP_FILE_TYPE )
    {
      $cmd = join( util::Constants::SPACE, 'cp', $source_file, $chunk_file );
    }
    else {
      $cmd = join( util::Constants::SPACE,
        'gzip -c ', $source_file, '>', $chunk_file );
    }
    # A non-zero command status aborts the program.
    $this->{error_mgr}->exitProgram(
      ERR_CAT, 12,
      [ $source_file, $chunk_file ],
      $this->{cmds}->executeCommand(
        {
          source => $source_file,
          target => $chunk_file
        },
        $cmd,
        'Copying file...'
      )
    );
  }
  else {
    ###
    ### Chunk is part of a file: stream the buffered lines through gzip.
    ### Note the last buffered line is written WITHOUT a trailing
    ### separator.
    ###
    my $fh = new FileHandle;
    $this->{error_mgr}->exitProgram( ERR_CAT, 1, [$chunk_file],
      !$fh->open("| gzip -c > $chunk_file") );
    $fh->autoflush(util::Constants::TRUE);
    my $last_index = $#{ $this->{lines} };
    foreach my $index ( 0 .. ( $last_index - 1 ) ) {
      $fh->print( $this->{lines}->[$index] . $this->{line_separator} );
    }
    $fh->print( $this->{lines}->[$last_index] );
    $fh->close;
  }

  # Reset the buffer for the next chunk.
  $this->{lines} = [];
}
################################################################################
#
# Getter Methods
#
################################################################################
# Return the (unreferenced) list of chunk files written so far.
sub chunkFiles {
  my ($self) = @_;

  return @{ $self->{files} };
}
# Map a chunk file name back to its zero-padded index; undef when the
# file is not a known chunk.
sub chunkFileIndex {
  my ( $self, $chunk_file ) = @_;

  return $self->{file_index}->{$chunk_file};
}
# Accessor for the optional serializer object set via setSerializer().
sub serializer {
  my ($self) = @_;

  return $self->{serializer};
}
################################################################################
1;
__END__
=head1 NAME
Chunk.pm
=head1 SYNOPSIS
This abstract class provides the mechanism to chunk an input source
into smaller chunks for processing.
=head1 METHODS
The following methods are exported from the class.
=head2 B<new file::Chunk(chunk_prefix, file_type, size, directory, error_mgr)>
This is the constructor of the class and requires the B<chunk_prefix>
that defines the tag for the data and the B<file_type> that defines
file type. Also, the directory is the directory where the chunks is
assumed to be writtern. The size is the default size of the chunks.
Size can be changed by L<"setSize(buffer_size)">.
=head2 B<chunkFile>
This abstract method takes the stream represented by the source_file
(file, database, etc.) and chunks it into size (as defined by the
subclass) chunks making sure integral entity boundaries are not
violated. Each chunk will be generated into a gzipped filename, where
N is the chunk number (N >= 0), B<directory>, B<chunk_prefix>, and
B<file_type> are the attributes of the object:
<directory>/[prefix.]<chunk_prefix>.000N.<file_type>.gz, N < 10
<directory>/[prefix.]<chunk_prefix>.00N.<file_type>.gz, 10 <= N < 100
<directory>/[prefix.]<chunk_prefix>.0N.<file_type>.gz, 100 <= N < 1000
<directory>/[prefix.]<chunk_prefix>.N.<file_type>.gz, 1000 <= N < 10000
This method returns the number of chunks created. The optional
B<prefix> can be added by a subclass.
=head1 SETTER METHODS
The following setter methods are exported from the class.
=head2 B<setSize(buffer_size)>
This method sets the buffer size to buffer_size if it is defined and
positive (or the ALL_FILE sentinel).
=head2 B<setChunkPrefix(chunk_prefix)>
This method sets the chunk prefix for the chunked files. Initially,
this prefix is set to the empty string.
=head2 B<setChunkIndex(chunk_index)>
This method sets the chunk file index to start with for the chunked
files. This number must be positive. By default, it is set to one
(1).
=head2 B<setLineSeparator(line_separator)>
This method sets the line separator. By default, the line separator
is new line.
=head2 B<setSourceFile(file)>
This re-implementable method sets the source file and opens the input
stream for this source file. It manages whether the file is gzipped
or plain.
=head2 B<setSerializer(serializer)>
This optional setter method sets the Perl serializer.
=head2 B<closeSourceFile>
This re-implementable method closes the the source file handle.
=head2 B<writeChunk>
If there are lines in the lines array attribute, then the method
writes the lines to the next chunk file.
=head1 GETTER METHODS
The following getter methods are exported from the class.
=head2 B<chunkFiles>
This method returns the (unreferenced) list of chunk files generated
by L<"chunkFile">.
=head2 B<chunkFileIndex(chunk_file)>
This method returns the index for the chunk_file. If the chunk_file
is not a chunk, then undef is returned.
=head2 B<serializer>
This optional serializer.
=cut
| 27.979695 | 80 | 0.564858 |
ed3d4fab6566eb6a388ab137a044639c355418f0 | 53,614 | pl | Perl | perl/mm_validate_pecan.pl | pgajer/MCclassifier | 5da58744a4cc58b6c854efff63534aae50c72258 | [
"Unlicense"
]
| 6 | 2016-03-05T04:45:16.000Z | 2021-08-07T06:20:07.000Z | perl/mm_validate_pecan.pl | pgajer/MCclassifier | 5da58744a4cc58b6c854efff63534aae50c72258 | [
"Unlicense"
]
| 2 | 2016-09-20T18:30:27.000Z | 2016-11-08T17:35:11.000Z | perl/mm_validate_pecan.pl | pgajer/MCclassifier | 5da58744a4cc58b6c854efff63534aae50c72258 | [
"Unlicense"
]
| 2 | 2016-09-20T16:37:40.000Z | 2017-03-11T23:37:04.000Z | #!/usr/bin/env perl
=head1 NAME
mm_validate_pecan.pl
=head1 DESCRIPTION
A version of mm_validate_pecan.pl that, instead of processing all species in
one big loop, processes only selected species passed to the script in a file.
This script attempts to validate PECAN taxonomic assignment of the M&M
sequences by generating vicut clustering results report and creating
phylogenetic trees of representative sequences to PECAN species together with
ref seq's of of the corresponding phylo group.
=head1 SYNOPSIS
mm_validate_pecan.pl
=head1 OPTIONS
=over
=item B<--spp-file, -i>
Two columns: <species> <phylo-group> file
=item B<--out-dir, -o>
Output directory. Optional parameter. If not specified the output is written to
/Users/pgajer/projects/M_and_M/new_16S_classification_data/mm_validate_reports_dir.
=item B<--max-no-nr-seqs, -n>
Maximal number of non-redundant seq's.
=item B<--perc-coverage, -p>
Percentage coverage: the percentage (say 80%) of the total number of
non-redundant sequences; used to reduce the number of non-redundant seq's.
=item B<--use-vsearch>
Use vsearch instead of usearch.
NOTE: Only the non-redundant fasta file is produced because in RH7 we cannot
create a pdf of a tree. Therefore, when using this option, re-run the script
again without this option to complete processing of the given set of species.
=item B<--build-tree>
Forces build of a tree, even if one already has been build.
=item B<--run-all>
Ignore if ( ! -e ... ) statements.
=item B<--show-tree>
Open the pdf file with the tree used to do clustering.
=item B<--verbose, -v>
Prints content of some output files.
=item B<--debug>
Prints system commands
=item B<--dry-run>
Print commands to be executed, but do not execute them.
=item B<-h|--help>
Print help message and exit successfully.
=back
=head1 EXAMPLE
in ~/projects/M_and_M/new_16S_classification_data
mm_validate_pecan.pl --max-no-nr-seqs 500 --perc-coverage 80 --spp-file mm_valid_spp_part_1.txt -o mm_june25_validate_pecan_dir
=cut
use strict;
use warnings;
use diagnostics;
use Pod::Usage;
use English qw( -no_match_vars );
use Getopt::Long qw(:config no_ignore_case no_auto_abbrev pass_through);
use Cwd 'abs_path';
use List::Util qw( sum min max );
use File::Temp qw/ tempfile /;
#use Parallel::ForkManager;
$OUTPUT_AUTOFLUSH = 1;
####################################################################
## OPTIONS
####################################################################
# Defaults for coverage-based selection of non-redundant sequences;
# both may be overridden on the command line.
my $percCoverage = 80;
my $maxNumNRseqs = 500; # when the number of non-redundant seq's is more
# than maxNumNRseqs, select x number of largest
# cluster non-redundant seq's such that the sum of their
# cluster sizes covers percCoverage% of all seq's classified
# to the given species
my $maxNumCovSeqs = 2000; # x (as def above) cannot be greater than maxNumCovSeqs
GetOptions(
"spp-file|i=s" => \my $sppFile,
"out-dir|o=s" => \my $outDir,
"max-no-nr-seqs|n=i" => \$maxNumNRseqs,
"max-no-cov-seqs|m=i" => \$maxNumCovSeqs,
"build-tree" => \my $buildTree,
"use-vsearch" => \my $useVsearch,
"perc-coverage|p=i" => \$percCoverage,
"igs" => \my $igs,
"run-all" => \my $runAll,
"show-tree" => \my $showTree,
"verbose|v" => \my $verbose,
"debug" => \my $debug,
"dry-run" => \my $dryRun,
"help|h!" => \my $help,
)
or pod2usage(verbose => 0,exitstatus => 1);
if ($help)
{
pod2usage(verbose => 2,exitstatus => 0);
exit 1;
}
# The species list file (--spp-file) is the only required argument.
if ( !$sppFile )
{
warn "\n\n\tERROR: Missing species list file";
print "\n\n";
pod2usage(verbose => 2,exitstatus => 0);
exit 1;
}
# Default (local/macOS) locations of data directories and external tools.
# All of these are overridden below when --igs is given.
my $phGrBaseDir = "/Users/pgajer/projects/PECAN/data/phylo_groups/v0.3/cx_hb_rdp_FL_5500_phGr_dir";
my $mmDir = "/Users/pgajer/projects/M_and_M/MM_june25/";
my $mmSppDir = "/Users/pgajer/projects/M_and_M/MM_june25/mm_spp_dir";
my $nw_labels = "nw_labels";
my $nw_order = "nw_order";
my $nw_condense = "nw_condense";
my $nw_rename = "nw_rename";
my $nw_prune = "nw_prune";
my $nw_reroot = "nw_reroot";
my $uc2clstr2 = "uc2clstr2.pl";
my $extract_seq_IDs = "extract_seq_IDs.pl";
my $select_seqs = "select_seqs.pl";
my $rmGaps = "rmGaps";
my $FastTree = "FastTree";
my $R = "R";
my $fix_fasta_headers = "fix_fasta_headers.pl";
my $mothur = "/Users/pgajer/bin/mothur";
my $usearch6 = "/Users/pgajer/bin/usearch6.0.203_i86osx32";
my $vicut = "vicut";
my $readNewickFile = "/Users/pgajer/organizer/programming/R/libs/read.newick.R";
my $quietStr = "--quiet";
# NOTE(review): $vsearchSORT and $vsearch are only assigned under --igs, so
# the --use-vsearch code path appears to require --igs as well -- TODO confirm.
my $vsearchSORT;
my $vsearch;
my $igsStr = "";
if ( defined $igs )
{
$phGrBaseDir = "/home/pgajer/projects/PECAN/data/phylo_groups/v0.3/cx_hb_rdp_FL_5500_phGr_dir";
$mmDir = "/local/scratch/MM_june25/";
$mmSppDir = "/local/scratch/MM_june25/mm_spp_dir";
$fix_fasta_headers = "/home/pgajer/devel/MCclassifier/perl/fix_fasta_headers.pl";
$nw_labels = "/usr/local/projects/pgajer/bin/nw_labels";
$nw_order = "/usr/local/projects/pgajer/bin/nw_order";
$nw_condense = "/usr/local/projects/pgajer/bin/nw_condense";
$nw_rename = "/usr/local/projects/pgajer/bin/nw_rename";
$nw_prune = "/usr/local/projects/pgajer/bin/nw_prune";
$nw_reroot = "/usr/local/projects/pgajer/bin/nw_reroot";
$uc2clstr2 = "/home/pgajer/devel/MCclassifier/perl/uc2clstr2.pl";
$extract_seq_IDs = "/home/pgajer/devel/MCclassifier/perl/extract_seq_IDs.pl";
$select_seqs = "/home/pgajer/devel/MCclassifier/perl/select_seqs.pl";
$rmGaps = "/usr/local/projects/pgajer/bin/rmGaps";
$FastTree = "/home/pgajer/bin/FastTree_no_openMP";
$R = "/home/pgajer/bin/R";
$mothur = "/usr/local/projects/pgajer/bin/mothur";
$usearch6 = "/local/projects/pgajer/bin/usearch6.0.203_i86linux32";
$vicut = "/usr/local/projects/pgajer/bin/vicut";
$readNewickFile = "/local/projects/pgajer/devel/MCclassifier/R/read.newick.R";
$vsearchSORT = "/usr/local/packages/vsearch/bin/vsearch";
$vsearch = "/usr/local/bin/vsearch";
$quietStr = "";
$igsStr = "--igs";
}
## Export LD_LIBRARY_PATH=/usr/local/packages/readline/lib:/usr/local/packages/gcc-5.3.0/lib64
# local() at file scope keeps this override in effect for the rest of the run.
local $ENV{LD_LIBRARY_PATH} = "/usr/local/packages/gcc/lib64";
# Flag strings propagated to the helper scripts invoked via system().
my $debugStr = "";
if ($debug)
{
$debugStr = "--debug";
$quietStr = "";
}
my $verboseStr = "";
if ($verbose)
{
$verboseStr = "--verbose";
}
####################################################################
## MAIN
####################################################################
my $startRun = time();
my $initStartRun = $startRun;
my $endRun = time();
my $runTime = $endRun - $startRun;
my $timeStr;
my $timeMin = int($runTime / 60);
my $timeSec = $runTime % 60;
##
## Creating output reports directory
##
if ( !defined $outDir )
{
$outDir = $mmDir . "mm_validate_pecan_dir";
}
if ( ! -e $outDir )
{
make_dir( $outDir);
}
my $treesDir = $outDir . "/trees_dir";
if ( ! -e $treesDir )
{
make_dir( $treesDir );
}
my $tmpDir = $outDir . "/temp_dir";
if ( ! -e $tmpDir )
{
make_dir( $tmpDir );
}
##
## MAIN section
##
print "--- Parsing table of species to be processed\n";
my %phGrSppTbl = parse_spp_tbl( $sppFile ); # phGr => ref to array of species from that phylo-group
# Note that $sppFile is a 2 col table
#
# <sp> => <corresponding phGr of that species>
#
# Thus, phGrSppTbl groups all species of the same phylo-group together.
if ( $debug )
{
print "\nphGrSppTbl\n";
for my $phGr ( keys %phGrSppTbl )
{
print "\n$phGr\n";
my @spp = @{ $phGrSppTbl{$phGr} };
for ( @spp )
{
print "\t$_\n";
}
}
print "\n";
}
##
## main loop
##
for my $phGr ( keys %phGrSppTbl )
{
print "\r--- Processing $phGr species \n";
## create mm phylo-group dir
my $phGrDir = $outDir . "/mm_" . $phGr . "_dir/";
## print "\n\nphGrDir: $phGrDir\n"; exit;
make_dir( $phGrDir );
##
## Identifying algn file of the given phylo-group
##
my $phGrAlgnFile = $phGrBaseDir . "/$phGr" . "_dir/$phGr" . "_algn_trimmed_final.fa";
if ( -l $phGrAlgnFile )
{
$phGrAlgnFile = readlink( $phGrAlgnFile );
}
print "phGrAlgnFile: $phGrAlgnFile\n" if $debug;
## Final alignment has OG seq's !!!!
if ( ! -e $phGrAlgnFile )
{
warn "\n\n\tERROR: $phGrAlgnFile does not exist";
print "\n\n";
exit 1;
}
##
## Identifying the final fa file of the given phylo-group
##
my $phGrFaFile = $phGrBaseDir . "/$phGr" . "_dir/$phGr" . "_final.fa";
print "\nphGr: $phGr; phGrFaFile: $phGrFaFile\n" if $debug;
## File with the given phylo-group's fa file of all seq's before curation including outgroup seq's
my $phGrBigFaFile = $phGrFaFile;
$phGrBigFaFile =~ s/_final//;
## print "phGrBigFaFile: $phGrBigFaFile\n";
if ( ! -e $phGrBigFaFile || ! -s $phGrBigFaFile )
{
print "--- $phGrBigFaFile was not found - creating it from the algn file\n";
$cmd = "$rmGaps -i $phGrAlgnFile -o $phGrBigFaFile";
print "\tcmd=$cmd\n" if $dryRun || $debug;
system($cmd) == 0 or die "system($cmd) failed with exit code: $?" if !$dryRun;
}
##
## Identifying the final tx file of the given phylo-group
##
my $phGrTxFile = $phGrBaseDir . "/$phGr" . "_dir/$phGr" . "_final.tx";
## print "\nphGr: $phGr; phGrTxFile: $phGrTxFile\n";
if ( ! -e $phGrTxFile )
{
my $orig = $phGrTxFile;
$phGrTxFile = $phGrBaseDir . "/$phGr" . "_dir/$phGr" . ".tx";
if ( ! -e $phGrTxFile )
{
warn "\n\n\tERROR: Neither $orig nor $phGrTxFile files exist";
print "\n\n";
exit 1;
}
}
##
## Creating a file with the given phylo-group's outgroup seq's
##
my $phGrOGseqIDsFile = $phGrFaFile;
$phGrOGseqIDsFile =~ s/_final\.fa/_outgroup\.seqIDs/;
print "--- Reading $phGrOGseqIDsFile\n" if $debug;
my @ogSeqIDs = read_array( $phGrOGseqIDsFile );
print "\nNo. OG seq's: " . scalar(@ogSeqIDs) . "\n";
my $phGrOGfaFile = $phGrDir . "og.fa";
if ( ! -e $phGrOGfaFile || ! -s $phGrOGfaFile || $runAll ) # checking not only if the file exists, but also if it has non-empty size (-s)
{
print "--- Generating fa file of outgroup seq's of $phGr ";
$cmd = "$select_seqs $quietStr -s $phGrOGseqIDsFile -i $phGrBigFaFile -o $phGrOGfaFile";
print "\tcmd=$cmd\n" if $dryRun || $debug;
system($cmd) == 0 or die "system($cmd) failed with exit code: $?" if !$dryRun;
}
##
## Sanity checks
##
if ( ! exists $phGrSppTbl{$phGr} )
{
warn "\n\n\tERROR: $phGr is not a key of phGrSppTbl";
print "\n\n";
exit;
}
if ( ref( $phGrSppTbl{$phGr} ) ne 'ARRAY' )
{
warn "\n\n\tERROR: phGrSppTbl{$phGr} is a referece to ARRAY";
print "\n\n";
exit;
}
##
## Species loop
##
my @spp = @{ $phGrSppTbl{$phGr} };
for my $spIdx ( 0..$#spp )
{
$startRun = time();
my $sp = $spp[$spIdx];
my $spFaFile = "$mmSppDir/$sp" . ".fa";
if ( ! -e $spFaFile )
{
warn "\n\n\tERROR: $sp fasta file $spFaFile does not exist";
print "\n\n";
exit 1;
}
print "\n--- Processing $sp ($phGr)\n";
# Outline of species processing
# 1. Select representatives of 100% seq identity clusters (called also nr clusters)
# 2. If the number of nr-cluters (or nr-seq's ) is greater than $maxNumNRseqs
# selecte x number of largest nr-seq's such that the sum of their cluster
# sizes covers $percCoverage% of all seq's classified to the given species.
# 3. The number of slected nr-seq's cannot be more than $maxNumCovSeqs
# 4. If $percCoverage% is achieved with fewer than $maxNumCovSeqs, use
# $maxNumCovSeqs non-redundant seq's to increase the coverage.
# 5. Align these seq's to the phylo-group's ginsi alignment
# 6. Generate tree
# 7. Generate a pdf image of the tree
# Creating species dir
my $spDir = $phGrDir . $sp . "_dir";
make_dir( $spDir );
# Creating a report file
my $spReport = $spDir . "/report.txt";
open my $ROUT, ">$spReport" or die "Cannot open $spReport for writing: $OS_ERROR";
#
# Dereplicating species fa file
#
my $spSORTfaFile= "$spDir/$sp" . "_sort.fa";
my $spNRfaFile = "$spDir/$sp" . "_nr.fa";
my $spUCfile = "$spDir/$sp" . ".uc";
my $spUCfilelog = "$spDir/$sp" . "_uc.log";
if ( ! -e $spNRfaFile || ! -s $spNRfaFile || $runAll )
{
print "\r\t\tDereplicating species fasta file";
if ( $useVsearch )
{
my $spNRfaFile0 = "$spDir/$sp" . "_nr0.fa";
$cmd = "$vsearchSORT --sortbylength $spFaFile --output $spSORTfaFile --fasta_width 0; $vsearch --derep_full $spSORTfaFile --output $spNRfaFile0 --sizeout --fasta_width 0 --uc $spUCfile";
print "\tcmd=$cmd\n" if $dryRun || $debug;
system($cmd) == 0 or die "system($cmd) failed:$?" if !$dryRun;
#
# NOTE that the _nr.fa file will have seq headers of the form >seqID;size=\d+;
# This should be fixed here, so the seq headers are of the form >seqID size=\d+
#
$cmd = "$fix_fasta_headers -i $spNRfaFile0 -o $spNRfaFile; rm -f $spNRfaFile0";
print "\tcmd=$cmd\n" if $dryRun || $debug;
system($cmd) == 0 or die "system($cmd) failed:$?" if !$dryRun;
next;
}
else
{
$cmd = "$usearch6 -cluster_fast $spFaFile -id 1.0 -uc $spUCfile -centroids $spNRfaFile";
print "\tcmd=$cmd\n" if $dryRun || $debug;
system($cmd) == 0 or die "system($cmd) failed:$?" if !$dryRun;
}
}
#
# NOTE that the _nr.fa file will have seq headers of the form >seqID;size=\d+;
# tentatively fixing the size string here
#
fix_fasta_headers( $spNRfaFile );
my $nrSeqIDsFile = "$spDir/$sp" . "_nr.seqIDs";
if ( ! -e $nrSeqIDsFile || ! -s $nrSeqIDsFile || $runAll )
{
print "\r\t\tExtracting non-redundant seq IDs ";
## extracting seq IDs from the alignment file and selecting those IDs from the taxon file
$cmd = "$extract_seq_IDs -i $spNRfaFile -o $nrSeqIDsFile";
print "\tcmd=$cmd\n" if $dryRun || $debug;
system($cmd) == 0 or die "system($cmd) failed:$?\n" if !$dryRun;
}
my @nrSeqIDs = read_NR_array( $nrSeqIDsFile ); # s/;size=\d+;// substitution is performed here
my $nnrSp = @nrSeqIDs;
print "\nNo. nr seq IDs: " . commify($nnrSp) . "\n";
my $spClstr2File = "$spDir/$sp" . "_nr.clstr2";
if ( ! -e $spClstr2File || ! -s $spClstr2File || $runAll )
{
print "\r\t\tCreating clstr2 file ";
$cmd = "$uc2clstr2 $igsStr -i $spUCfile -o $spClstr2File";
print "cmd=$cmd\n" if $dryRun || $debug;
system($cmd) == 0 or die "system($cmd) failed:$?\n" if !$dryRun;
}
print "\r\t\tParsing clstr2 file ";
my %cTbl = parseClstr2( $spClstr2File );
# sort cluster reference sequence IDs w/r cluster size
@nrSeqIDs = sort { $cTbl{$b} <=> $cTbl{$a} } keys %cTbl;
# print "\n\nFirst 10 ref seq's and the corresponding cluster sizes\n";
# map { print "\t$_\t$cTbl{$_}\n" } @nrSeqIDs[0..10];
my @clSizes = map { $cTbl{$_} } @nrSeqIDs;
my $nAllSpSeqs = sum ( @clSizes ); # total number of sequences
my @clPercs = map { 100.0 * $_ / $nAllSpSeqs } @clSizes;
my %clSizeTbl = map { $_ => $cTbl{$_} } @nrSeqIDs;
my %clPercsTbl = map { $_ => 100.0 * $clSizeTbl{$_} / $nAllSpSeqs } @nrSeqIDs;
print $ROUT "------------------------------------------------\n\n";
print $ROUT "$sp ($phGr)\n\n";
print $ROUT "n: " . commify($nAllSpSeqs) . "\n";
print $ROUT "n(nr): " . commify($nnrSp) . "\n";
my $covSuffix = "";
if ( @nrSeqIDs > $maxNumNRseqs )
{
## select no more than $maxNumCovSeqs nr-seq's
my $n = $#clPercs; # this is the number of all clusters -1
if ( $n > $maxNumCovSeqs )
{
$n = $maxNumCovSeqs;
}
my $cumPerc = 0;
my $percCovIdx = 0; # index of the sequence in @nrSeqIDs so that sum(clPercs[0..percCovIdx]) gives percCoverage
for my $i ( 0..$n )
{
$cumPerc += $clPercs[$i];
if ( $cumPerc > $percCoverage )
{
$percCovIdx = $i-1;
last;
}
elsif ( $i == $n )
{
$percCovIdx = $i;
}
}
$percCovIdx = 0 if $percCovIdx < 0;
print "\npercCovIdx: $percCovIdx\ncumPerc: $cumPerc\n" if $debug;
# We are happy with taking $maxNumNRseqs (500 default) seq's if it
# covers more than $percCoverage
if ( $percCovIdx < ($maxNumNRseqs-1) && ($maxNumNRseqs-1) <= $#clPercs )
{
$percCovIdx = $maxNumNRseqs-1;
$cumPerc = 0;
for my $j (0..($maxNumNRseqs-1))
{
$cumPerc += $clPercs[$j];
}
print "percCovIdx changed to $percCovIdx\ncumPerc: $cumPerc\n" if $debug;
}
## updating @nrSeqIDs !!!
@nrSeqIDs = @nrSeqIDs[0..$percCovIdx];
$cumPerc = sprintf( "%d", int($cumPerc) );
print $ROUT "no. of nr seq's covering $cumPerc" . "% of seq's classified to $sp: " . commify(scalar(@nrSeqIDs)) . "\n";
print "\nno. of nr seq's covering $cumPerc" . "% of seq's classified to $sp: " . commify(scalar(@nrSeqIDs)) . "\n";
$covSuffix = "_nr_cov" . sprintf( "%d", int($cumPerc) );
$nrSeqIDsFile = "$spDir/$sp" . $covSuffix . ".seqIDs";
writeArray(\@nrSeqIDs, $nrSeqIDsFile);
print "\n\nCurrent number of nr-seq's" . @nrSeqIDs . "\n\n" if $debug;
## Restricting nr fa file to only nr ref seq's covering $percCoverage of all seq's
$spNRfaFile = "$spDir/$sp" . $covSuffix . ".fa";
$cmd = "$select_seqs $quietStr -s $nrSeqIDsFile -i $spFaFile -o $spNRfaFile";
print "\tcmd=$cmd\n" if $dryRun || $debug;
system($cmd) == 0 or die "system($cmd) failed with exit code: $?" if !$dryRun;
} # end of if ( @nrSeqIDs > $maxNumNRseqs )
##
## 2. Generate alignment
##
my $bigAlgnFile = "$spDir/$sp" . $covSuffix . "_algn.fa";
if ( ! -e $bigAlgnFile || ! -s $bigAlgnFile || $runAll || $buildTree )
{
print "\r\t\tAligning phGr ref seq's (includeing OG seq's) and the selected seq's of $sp ";
if ( $debug )
{
my $wcline = qx/ grep -c '>' $spNRfaFile /;
$wcline =~ s/^\s+//;
my ($nQseqs, $qstr) = split /\s+/, $wcline;
$wcline = qx/ grep -c '>' $phGrAlgnFile /;
$wcline =~ s/^\s+//;
my ($nTemptSeqs, $astr) = split /\s+/, $wcline;
print "\n\nAligning spNRfaFile with $nQseqs\n";
print "to phGrAlgnFile with $nTemptSeqs\n\n";
}
my @tmp;
push (@tmp,"align.seqs(candidate=$spNRfaFile, template=$phGrAlgnFile, flip=T)"); # processors=8 on a grid when this is exectuted with allocated one node of the grid, asking for more nodes may cause serious slow down
printArray(\@tmp, "mothur commands") if ($debug || $verbose);
my $scriptFile = create_mothur_script( \@tmp );
$cmd = "$mothur < $scriptFile; rm -f $scriptFile mothur.*.logfile";
print "\tcmd=$cmd\n" if $dryRun || $debug;
system($cmd) == 0 or die "system($cmd) failed:$?" if !$dryRun;
my $mothurAlgnFile = $spNRfaFile; # "$trDir/" . $candBasename . ".align";
$mothurAlgnFile =~ s/fa$/align/;
print "mothurAlgnFile: $mothurAlgnFile\n" if $debug;
$cmd = "rm -f $bigAlgnFile; cat $mothurAlgnFile $phGrAlgnFile > $bigAlgnFile";
print "\tcmd=$cmd\n" if $dryRun || $debug;
system($cmd) == 0 or die "system($cmd) failed:$?\n" if !$dryRun;
if ( $debug )
{
my $wcline = qx/ grep -c '>' $bigAlgnFile /;
$wcline =~ s/^\s+//;
my ($nBAlgSeqs, $str) = split /\s+/, $wcline;
print "\nNumber of seq's in the concatenated alignment $nBAlgSeqs\n\n";
}
}
##
## 3. Generate phylo tree
##
my $bigNotRootedTreeFile = "$spDir/$sp" . $covSuffix . "_not_rooted_with_OGs.tree";
if ( ! -e $bigNotRootedTreeFile || ! -s $bigNotRootedTreeFile || $runAll || $buildTree )
{
print "\r\t\tGenerating phylo tree of the above alignment ";
if ( $debug )
{
my $wcline = qx/ grep -c '>' $bigAlgnFile /;
$wcline =~ s/^\s+//;
my ($nBAlgSeqs, $str) = split /\s+/, $wcline;
print "\nNumber of seq's in the concatenated alignment $nBAlgSeqs\n\n";
}
$cmd = "rm -f $bigNotRootedTreeFile; $FastTree -nt $bigAlgnFile > $bigNotRootedTreeFile";
print "\tcmd=$cmd\n" if $dryRun || $debug;
system($cmd) == 0 or die "system($cmd) failed:$?\n" if !$dryRun;
}
## Rerooting the tree
my $bigTreeWithOGsFile = "$spDir/$sp" . $covSuffix . "_with_OGs.tree";
if ( ! -e $bigTreeWithOGsFile || ! -s $bigTreeWithOGsFile || $runAll || $buildTree )
{
print "\r\t\tRerooting the tree using outgroup sequences ";
$cmd = "rm -f $bigTreeWithOGsFile; $nw_reroot $bigNotRootedTreeFile @ogSeqIDs | $nw_order - > $bigTreeWithOGsFile";
print "\tcmd=$cmd\n" if $dryRun || $debug;
system($cmd) == 0 or die "system($cmd) failed:$?\n" if !$dryRun;
}
## Pruning tree from OG seq's
my $bigTreeFile = "$spDir/$sp" . $covSuffix . ".tree";
if ( ! -e $bigTreeFile || ! -s $bigTreeFile || $runAll || $buildTree )
{
print "\r\t\tPruning the tree from OG seq's ";
$cmd = "rm -f $bigTreeFile; $nw_prune $bigTreeWithOGsFile @ogSeqIDs | $nw_order - > $bigTreeFile";
print "\tcmd=$cmd\n" if $dryRun || $debug;
system($cmd) == 0 or die "system($cmd) failed:$?\n" if !$dryRun;
}
##
## 4. Running vicut on the tree using seq's of $sp as query nodes
##
my $vicutDir = "$spDir/$sp" . $covSuffix . "_vicut_dir";
my $annFile = $phGrTxFile;
my $queryFile = $nrSeqIDsFile;
##my $vicutTxFile = $vicutDir . "/minNodeCut_NAge1_TXge1_querySeqs.taxonomy"; # minNodeCut.cltrs
my $vicutCltrsFile = $vicutDir . "/minNodeCut.cltrs";
if ( ! -e $vicutCltrsFile || ! -s $vicutCltrsFile || $runAll || $buildTree )
{
print "\r\t\tRunning vicut ";
if ( $debug )
{
my $wcline = qx/ wc -l $nrSeqIDsFile /;
$wcline =~ s/^\s+//;
my ($nQseqs, $qstr) = split /\s+/, $wcline;
$wcline = qx/ wc -l $phGrTxFile /;
$wcline =~ s/^\s+//;
my ($nAnnSeqs, $astr) = split /\s+/, $wcline;
my @leaves = get_leaves( $bigTreeFile );
print "\n\n";
print "Number of query seq's: $nQseqs\n";
print "Number of annotation seq's: $nAnnSeqs\n";
print "Number of leaves in the tree: ". @leaves . "\n";
print "annFile: $annFile\n\n";
}
my @query = run_vicut( $bigTreeFile, $annFile, $vicutDir );
}
##
## 5. Reporting results of vicut
##
my ($rvCltrTbl, $rvTxTbl, $rvExtTxTbl) = read_cltrs_tbl( $vicutCltrsFile );
my %vCltrTbl = %{$rvCltrTbl}; # seqID => vicut cluster ID
my %vTxTbl = %{$rvTxTbl}; # seqID => taxonomy (NA for query seq's)
my %vExtTxTbl = %{$rvExtTxTbl}; # seqID => taxonomy of seqID if seqID is a phGr ref seq and c<vicut cluster ID of seqID> if seqID is a query seq
my $vExtTxTblFile = "$spDir/$sp" . $covSuffix . "_ext.tx";
write_tbl(\%vExtTxTbl, $vExtTxTblFile);
## vicut-cltr/tx frequency table
my %vCltrvTxFreq;
my %vCltrvTxIds; # $vCltrvTxIds{cltr}{tx} = ref to seqID of the cluster's, cltr, taxon, tx.
my %vCltrIds;
for my $id ( keys %vCltrTbl )
{
$vCltrvTxFreq{$vCltrTbl{$id}}{$vTxTbl{$id}}++;
push @{$vCltrvTxIds{$vCltrTbl{$id}}{$vTxTbl{$id}}}, $id;
push @{$vCltrIds{$vCltrTbl{$id}}}, $id;
}
## Identifying clusters that contain query sequences
my @nrSeqCltrs; # = @vCltrTbl{@nrSeqIDs};
##print "\n\nvCltrTbl\n";
for (@nrSeqIDs)
{
if ( exists $vCltrTbl{$_} )
{
push @nrSeqCltrs, $vCltrTbl{$_};
}
else
{
print "\n\nWARNING: $_ undefined in vCltrTbl\n";
}
}
my @nrCltrs = unique(\@nrSeqCltrs);
print "\nnrCltrs: @nrCltrs\n";
## size of each cluster
my %vicutCltrSize;
for my $cl ( @nrCltrs )
{
if (exists $vCltrvTxFreq{$cl})
{
my @txs = keys %{$vCltrvTxFreq{$cl}};
my $size = 0;
for my $tx (@txs)
{
$size += $vCltrvTxFreq{$cl}{$tx};
}
$vicutCltrSize{$cl} = $size;
}
else
{
warn "\nWARNING $cl not found in vCltrvTxFreq";
print "\n";
}
}
#print "\nFrequency table of vicut taxonomic assignments on selected nr seq's of $sp\n";
my @nrSortedCltrs = sort { $vicutCltrSize{$b} <=> $vicutCltrSize{$a} } @nrCltrs;
for my $cl (@nrSortedCltrs)
{
print "\nCluster $cl (" . $vicutCltrSize{$cl} . ")\n";
print $ROUT "\n\nCluster $cl (" . $vicutCltrSize{$cl} . ")\n\n";
## Generating a list of species present in $cl sorted by size and with NA
## at the end (ignoring the size of NA when sorting)
my @txs = keys %{$vCltrvTxFreq{$cl}};
@txs = sort { $vCltrvTxFreq{$cl}{$b} <=> $vCltrvTxFreq{$cl}{$a} } @txs;
## putting NA at the end
my @na = ("NA");
@txs = diff(\@txs, \@na);
push @txs, "NA";
my %txSizes;
for my $tx ( @txs )
{
$txSizes{$tx} = $vCltrvTxFreq{$cl}{$tx};
#print "\t$tx\t" . $vCltrvTxFreq{$cl}{$tx} . "\n";
}
#print "\n";
printFormatedTbl(\%txSizes, \@txs);
printFormatedTblToFile(\%txSizes, \@txs, $ROUT);
## Reporting some characteristics of query seq's
## Coverage: percentage of seq's of the 100% identity clusters of NAs
## within all NAs' clusters
if ( ! exists $vCltrvTxIds{$cl}{"NA"} )
{
warn "\n\n\tERROR: NA is not a key of vCltrvTxIds{$cl}";
my @k = keys %{ $vCltrvTxIds{$cl} };
printArray(\@k, "keys vCltrvTxIds{$cl}\n");
print "\n";
exit 1;
}
my @clNRids = @{$vCltrvTxIds{$cl}{"NA"}};
my $nCov = sum( @cTbl{ @clNRids } );
my $pCov = sprintf( "%.1f%%", 100.0 * $nCov/ $nAllSpSeqs );
print "Coverage: $pCov (" . commify($nCov) . " out of " . commify($nAllSpSeqs) . " seq's)\n";
print $ROUT "Coverage: $pCov (" . commify($nCov) . " out of " . commify($nAllSpSeqs) . " seq's)\n";
## Size ranks
my %clNRidsTbl = map { $_ => 1 } @clNRids;
my @sizeRanks = grep { exists $clNRidsTbl{$nrSeqIDs[$_ - 1]} } 1..($#nrSeqIDs+1);
my @sizeRanks0 = grep { exists $clNRidsTbl{$nrSeqIDs[$_]} } 0..$#nrSeqIDs;
## Size percentage
my @clSizePercs = @clPercsTbl{ @nrSeqIDs[ @sizeRanks0 ] };
@clSizePercs = map { sprintf("%.2f", $_) } @clSizePercs;
my $maxSize = 15; # there is no need to see more than the first 15 sizes and ranks
if ( @sizeRanks < $maxSize )
{
print "Size ranks: @sizeRanks\n";
print "Size %'s: @clSizePercs\n";
print $ROUT "Size ranks: @sizeRanks\n";
print $ROUT "Size %'s: @clSizePercs\n";
}
else
{
my @trSizeRanks = @sizeRanks[0..($maxSize-1)];
my @trClSizePercs = @clSizePercs[0..($maxSize-1)];
print "Size ranks: @trSizeRanks ...\n";
print "Size %'s: @trClSizePercs ...\n";
print $ROUT "Size ranks: @trSizeRanks ...\n";
print $ROUT "Size %'s: @trClSizePercs ...\n";
}
}
print "\n";
print $ROUT "\n";
##
## 6. Generating and maybe viewing the tree
##
## Collapsing the tree using ref tx and vicut tx on query seq's
print "\r\t\tGenerating a condensed tree of ref seq's species and vicut tx clades collapsed to a single node ";
my $condTreeFile2 = "$spDir/$sp" . $covSuffix . "_spp_cond2.tree";
if ( (! $useVsearch && ! -e $condTreeFile2) || $runAll )
{
print "\r\t\tGenerating a tree with species names at leaves ";
my $sppTreeFile = "$spDir/$sp" . $covSuffix . "_spp.tree";
$cmd = "rm -f $sppTreeFile; $nw_rename $bigTreeFile $vExtTxTblFile | $nw_order -c n - > $sppTreeFile";
print "\tcmd=$cmd\n" if $dryRun || $debug;
system($cmd) == 0 or die "system($cmd) failed with exit code: $?" if !$dryRun;
my $condTreeFile = "$spDir/$sp" . $covSuffix . "_spp_cond1.tree";
$cmd = "rm -f $condTreeFile; $nw_condense $sppTreeFile > $condTreeFile";
print "\tcmd=$cmd\n" if $dryRun || $debug;
system($cmd) == 0 or die "system($cmd) failed with exit code: $?" if !$dryRun;
## Relabeling tree so that only $sp and vicut tx taxonomy is left
my $condSppLeavesFile = "$spDir/$sp" . $covSuffix . "_cond_spp.leaves";
$cmd = "rm -f $condSppLeavesFile; $nw_labels -I $condTreeFile > $condSppLeavesFile";
print "\tcmd=$cmd\n" if $dryRun || $debug;
system($cmd) == 0 or die "system($cmd) failed with exit code: $?" if !$dryRun;
my @condSppTreeLeaves = read_array($condSppLeavesFile);
##print "condSppTreeLeaves: @condSppTreeLeaves\n";
my %spThatMingleWithQuery;
for my $cl (@nrCltrs)
{
my @txs = keys %{$vCltrvTxFreq{$cl}};
for my $tx (@txs)
{
if ( $tx ne "NA" )
{
$spThatMingleWithQuery{$tx} = 1;
}
}
}
##my @matches = grep { /pattern/ } @condSppTreeLeaves;
my %newLeafNames;
for my $l (@condSppTreeLeaves)
{
if ( exists $spThatMingleWithQuery{$l} || $l =~ /^c\d+/ )
{
$newLeafNames{$l} = $l;
}
else
{
$newLeafNames{$l} = "*";
}
}
my $condSppLeavesFile2 = "$spDir/$sp" . $covSuffix . "_spp_cond.leaves2";
write_tbl(\%newLeafNames, $condSppLeavesFile2);
$cmd = "rm -f $sppTreeFile; $nw_rename $condTreeFile $condSppLeavesFile2 | $nw_order -c n - > $condTreeFile2";
print "\tcmd=$cmd\n" if $dryRun || $debug;
system($cmd) == 0 or die "system($cmd) failed with exit code: $?" if !$dryRun;
}
## Producing pdf file of the tree sending it to either dir of species where
## vicut taxonomy agrees with PECAN's or to dir with spp for which there is
## a disagreement.
my $pdfTreeFile = "$spDir/$sp" . $covSuffix . "_tree.pdf";
if ( ! -e $pdfTreeFile || ! -s $pdfTreeFile || $runAll )
{
print "\r\t\tGenerating pdf of the condensed tree";
my $treeAbsPath = abs_path( $condTreeFile2 );
plot_tree($treeAbsPath, $pdfTreeFile, $sp);
my $pdfTreeLink = $treesDir . "/$sp" . $covSuffix . "__$phGr" . "__tree.pdf";
my $ap = abs_path( $pdfTreeFile );
$cmd = "rm -f $pdfTreeLink; ln -s $ap $pdfTreeLink";
print "\tcmd=$cmd\n" if $dryRun || $debug;
system($cmd) == 0 or die "system($cmd) failed with exit code: $?" if !$dryRun;
}
if ( $showTree && $OSNAME eq "darwin")
{
$cmd = "open $pdfTreeFile";
print "\tcmd=$cmd\n" if $dryRun || $debug;
system($cmd) == 0 or die "system($cmd) failed:$?\n" if !$dryRun;
}
$endRun = time();
$runTime = $endRun - $startRun;
if ( $runTime > 60 )
{
$timeMin = int($runTime / 60);
$timeSec = sprintf("%02d", $runTime % 60);
print "\rCompleted processing of $sp in $timeMin:$timeSec\n";
print $ROUT "\rCompleted processing of $sp in $timeMin:$timeSec\n";
}
else
{
print "\rCompleted processing of $sp in $runTime seconds\n";
print $ROUT "\rCompleted processing of $sp in $runTime seconds\n";
}
close $ROUT;
} ## end of for my $spIdx (0..
} ## end of for my $phGr ( keys %phGrSppTbl )
## report timing
$endRun = time();
$runTime = $endRun - $initStartRun;
if ( $runTime > 60 )
{
$timeMin = int($runTime / 60);
$timeSec = sprintf("%02d", $runTime % 60);
print "\rCompleted in $timeMin:$timeSec\n"
}
else
{
print "\rCompleted in $runTime seconds\n"
}
print "\n\n\tOutput written to $outDir\n\n";
####################################################################
## SUBS
####################################################################
# Parse a clstr2 file: one cluster per line, comma-separated, with the
# cluster's reference sequence ID first, e.g. "refId,member1,member2,...".
# Returns a table: refId -> number of member IDs listed after the reference
# (NOTE: the reference itself is not counted -- behavior preserved from the
# original implementation; confirm against callers before changing).
sub parseClstr2
{
    my $inFile = shift;

    my %tbl;
    # Three-arg open with a lexical filehandle; read line by line instead of
    # slurping the whole file into memory.
    open my $in, '<', $inFile or die "Cannot open $inFile for reading: $OS_ERROR\n";
    while ( my $rec = <$in> )
    {
        chomp $rec;
        next if $rec =~ /^\s*$/;    # skip blank lines (would create a "" key)
        my @ids   = split ",", $rec;
        my $refId = shift @ids;
        $tbl{$refId} = @ids; # we are only interested in the size of the cluster
    }
    close $in;

    return %tbl;
}
##
## Parse a 3-column PECAN classification table:
##
##   <seqID>  <species>  <posterior probability>
##
## e.g.
##   1642.V1_0 Lactobacillus_iners 0.93
##   1119.V2_6 BVAB1 0.79
##
## Returns two hash refs:
##   \%spIDsTbl : species => ref to array of seq IDs classified to it
##   \%ppTbl    : seq ID  => posterior probability
##
sub parse_pecan_tbl
{
    my $file = shift;

    if ( ! -f $file )
    {
        # Fixed: the original message blamed the wrong sub ("readQtxTbl").
        warn "\n\n\tERROR in parse_pecan_tbl(): $file does not exist";
        print "\n\n";
        exit 1;
    }

    my %spIDsTbl;
    my %ppTbl;

    open my $in, '<', $file or die "Cannot open $file for reading: $OS_ERROR";
    my $count = 1;
    while ( my $line = <$in> )
    {
        next if $line =~ /^$/;
        # Lightweight progress indicator for large tables (debug mode only).
        print "\r$count" if $debug && $count % 500 == 0;
        $count++;
        chomp $line;
        my ($id, $sp, $pp) = split /\s+/, $line;
        push @{ $spIDsTbl{$sp} }, $id;
        $ppTbl{$id} = $pp;
    }
    close $in;

    return ( \%spIDsTbl, \%ppTbl );
}
##
## Parse a 2-column species table:
##
##   <species>  <phylo-group>
##
## e.g.
##   Segniliparus_rotundus Actinobacteria_group_0_V3V4
##   Azospirillum_sp_1 Proteobacteria_group_6_V3V4
##
## Returns: phylo-group => ref to array of species in that group.
##
sub parse_spp_tbl
{
    my $file = shift;

    if ( ! -e $file )
    {
        warn "\n\n\tERROR in parse_spp_tbl(): $file does not exist";
        print "\n\n";
        exit 1;
    }

    my %phGrSppTbl;
    open my $in, '<', $file or die "Cannot open $file for reading: $OS_ERROR";
    while ( my $line = <$in> )
    {
        next if $line =~ /^\s*$/;    # skip empty / whitespace-only lines
        chomp $line;
        my ($sp, $gr) = split /\s+/, $line;
        push @{ $phGrSppTbl{$gr} }, $sp;
    }
    close $in;

    return %phGrSppTbl;
}
# Read a 3-column vicut clusters table: <seqID> <clusterNo> <taxonomy>.
# Returns three hash refs:
#   \%vCltrTbl : seqID => cluster ID, prefixed with "c" (e.g. "c12")
#   \%txTbl    : seqID => taxonomy as given (may be "NA" for query seq's)
#   \%txTbl2   : seqID => taxonomy, or the "c<cl>" cluster ID when
#                taxonomy is "NA"
sub read_cltrs_tbl
{
    my $file = shift;

    if ( ! -f $file )
    {
        warn "\n\n\tERROR in read_cltrs_tbl(): $file does not exist";
        print "\n\n";
        exit 1;
    }

    my %vCltrTbl;
    my %txTbl;
    my %txTbl2;

    open my $in, '<', $file or die "Cannot open $file for reading: $OS_ERROR";
    while ( my $line = <$in> )
    {
        chomp $line;
        my ($id, $cl, $tx) = split /\s+/, $line;
        next unless defined $id;    # ignore empty lines
        $vCltrTbl{$id} = "c" . $cl;
        $txTbl{$id}    = $tx;
        $txTbl2{$id}   = ( $tx ne "NA" ) ? $tx : "c" . $cl;
    }
    close $in;

    return ( \%vCltrTbl, \%txTbl, \%txTbl2 );
}
# Read a whitespace-separated table and map each line's first column to its
# second column.  Any additional columns are ignored.
sub read_tbl
{
    my $file = shift;

    if ( ! -e $file )
    {
        warn "\n\n\tERROR: $file does not exist";
        print "\n\n";
        exit 1;
    }

    my %tbl;
    open my $in, '<', $file or die "Cannot open $file for reading: $OS_ERROR";
    while ( my $line = <$in> )
    {
        chomp $line;
        my ($id, $t) = split /\s+/, $line;
        next unless defined $id;    # ignore empty lines
        $tbl{$id} = $t;
    }
    close $in;

    return %tbl;
}
# Read a 2-column lineage table: seqID => lineage string.
# Dies if any lineage contains a '/' character (lineages are later used in
# file paths, where '/' would be unsafe).
sub readLineageTbl
{
    my $file = shift;

    if ( ! -f $file )
    {
        warn "\n\n\tERROR in readLineageTbl(): $file does not exist";
        print "\n\n";
        exit 1;
    }

    my %tbl;
    open my $in, '<', $file or die "Cannot open $file for reading: $OS_ERROR";
    while ( my $line = <$in> )
    {
        chomp $line;
        my ($id, $t) = split /\s+/, $line;
        next unless defined $id;    # ignore empty lines
        $tbl{$id} = $t;
        ## test for '/' characters
        if ( $t =~ /\// )
        {
            warn "\n\n\tERROR: Discovered '/' for id: $id\t$t";
            print "\n\n";
            exit 1;
        }
    }
    close $in;

    return %tbl;
}
# Extract all sequence IDs from a fasta file.  The "ID" is the full header
# line (everything after '>' up to the first newline).
# Fixes vs. the original: 3-arg open with a lexical filehandle; the input
# record separator is overridden with local() so it is restored to the
# caller's value even on early exit (the original assigned $/ globally and
# reset it to a hard-coded "\n"); dead progress-timer code behind a
# hard-coded $quiet = 1 has been removed.
sub get_seqIDs_from_fa
{
    my $file = shift;

    open my $in, '<', $file or die "Cannot open $file for reading: $OS_ERROR";

    # Read the file in fasta-record chunks delimited by '>'.
    local $/ = ">";
    <$in>;    # discard the empty leading record before the first '>'

    my @seqIDs;
    while ( my $rec = <$in> )
    {
        chomp $rec;    # strip the trailing '>' record separator, if any
        my ($id) = split /\n/, $rec;
        push @seqIDs, $id;
    }
    close $in;

    return @seqIDs;
}
# Intersection of two arrays (passed as refs): returns the unique elements
# present in both, in order of first appearance in the first array.
# BUG FIX: the original counted occurrences across both arrays and kept
# elements with count == 2, which misclassifies duplicates (an element
# appearing twice in ONE array was reported as common; an element in both
# arrays plus a duplicate was dropped).  Membership is now tested explicitly.
sub comm
{
    my ($a1, $a2) = @_;

    my %in2 = map { $_ => 1 } @{$a2};
    my %seen;
    return grep { exists $in2{$_} && !$seen{$_}++ } @{$a1};
}
# Read a one-column file into an array (one element per line, chomped).
# If $hasHeader is defined, the first line is discarded.
sub read_array
{
    my ($file, $hasHeader) = @_;

    if ( ! -f $file )
    {
        warn "\n\n\tERROR in read_array(): $file does not exist";
        print "\n\n";
        exit 1;
    }

    my @rows;
    open my $in, '<', $file or die "Cannot open $file for reading: $OS_ERROR";
    if ( defined $hasHeader )
    {
        <$in>;    # discard the header line
    }
    while ( my $line = <$in> )
    {
        chomp $line;
        push @rows, $line;
    }
    close $in;

    return @rows;
}
# fisher_yates_shuffle( \@array ) : randomly permute @array in place using
# the Fisher-Yates (Knuth) shuffle.  Same elements, uniformly random order.
sub fisher_yates_shuffle {
    my $array = shift;
    my $idx = scalar @$array;
    while ( --$idx ) {
        my $swap = int rand( $idx + 1 );
        next if $swap == $idx;
        @$array[ $idx, $swap ] = @$array[ $swap, $idx ];
    }
}
# Set difference of two arrays (passed as refs): the unique elements of
# @$a1 that do not occur in @$a2.  Order of the result is unspecified
# (hash-key order), as in the original.
sub diff {
    my ($a1, $a2) = @_;
    my %exclude = map { $_ => 1 } @{$a2};
    my %keep    = map { $_ => 1 } @{$a1};
    return grep { !exists $exclude{$_} } keys %keep;
}
# Return the elements of the given array ref with duplicates removed,
# preserving first-seen order.
sub unique {
    my $aRef = shift;
    my %seen;
    my @result;
    for my $elt ( @{$aRef} ) {
        push @result, $elt unless $seen{$elt}++;
    }
    return @result;
}
# Print "key<TAB>value" lines of a hash table (ref) to the currently
# selected output handle.  Iteration order is Perl's hash order, i.e.
# unspecified.  (Idiom fix: the original used map in void context.)
sub printTbl
{
    my $rTbl = shift;
    for my $key ( keys %{$rTbl} )
    {
        print "$key\t$rTbl->{$key}\n";
    }
}
# Print elements of a hash table with the values vertically aligned.
# $rSub (optional) is an array ref selecting and ordering the keys to print;
# default is all keys in hash order.  Each output line has the form
# "\t<key>:<padding> <value>" where the padding aligns the values.
# (Idiom fix: pad built with the repetition operator instead of a loop.)
sub printFormatedTbl
{
    my ($rTbl, $rSub) = @_;

    my @args = $rSub ? @{$rSub} : keys %{$rTbl};

    # Width of the longest key determines the padding.
    my $maxStrLen = 0;
    for my $arg (@args)
    {
        $maxStrLen = length($arg) if length($arg) > $maxStrLen;
    }

    for my $arg (@args)
    {
        my $pad = ": " . ( " " x ( $maxStrLen - length($arg) ) );
        print "\t$arg$pad" . $rTbl->{$arg} . "\n";
    }
}
# printFormatedTblToFile( \%table, \@keys?, $fh )
# Same aligned key/value listing as printFormatedTbl(), but written to the
# supplied filehandle, without the leading tab, and followed by a trailing
# blank line.
sub printFormatedTblToFile{
	my ($rTbl, $rSub, $fh) = @_; # the second argument is a subarray of the keys of the table

	my @args = $rSub ? @{$rSub} : keys %{$rTbl};

	# Width of the widest key, so the values line up.
	my $maxStrLen = 0;
	for (@args) {
		$maxStrLen = length($_) if length($_) > $maxStrLen;
	}

	for (@args)
	{
		# Repetition operator replaces the old character-by-character
		# padding loop; output is byte-identical.
		my $pad = ": " . ( " " x ( $maxStrLen - length($_) ) );
		print $fh "$_$pad" . $rTbl->{$_} . "\n";
	}
	print $fh "\n";
}
# writeArray( \@array, $outFile )
# Write the array to $outFile, one element per line.
# Uses a lexical filehandle with three-arg open (the old bareword 2-arg form
# was unsafe) and checks close() so buffered write errors are not lost.
sub writeArray
{
	my ($a, $outFile) = @_;
	open my $out, '>', $outFile or die "Cannot open $outFile for writing: $OS_ERROR";
	print $out "$_\n" for @{$a};
	close $out or die "Cannot close $outFile: $OS_ERROR";
}
## put commas in numbers for better readability
## lifted from
## http://www.perlmonks.org/?node_id=2145
##
## commify( $string )
## Returns a copy of $string in which every run of 4 or more digits has
## thousands separators inserted ("1234567" -> "1,234,567").  The negative
## lookbehind skips runs preceded by a digit or a dot, so decimal fractions
## such as the "14159" in "3.14159" are left untouched.
sub commify {
local $_ = shift;
s{(?<!\d|\.)(\d{4,})}
{my $n = $1;
$n=~s/(?<=.)(?=(?:.{3})+$)/,/g;
$n;
}eg;
return $_;
}
## plot tree with clade colors
##
## plot_tree( $treeFile, $pdfFile, $title? )
## Render a Newick tree as a phylogram PDF via an R script (phytools).
## Figure height/width grow linearly once the tree has 50+ leaves.
## NOTE(review): relies on the file-scoped global $readNewickFile and on
## run_R_script() (which exits the program on an R error).  $showBoostrapVals
## is assigned but never interpolated into the R code here -- apparently
## leftover from plot_tree2(); confirm before removing.
sub plot_tree
{
my ($treeFile, $pdfFile, $title) = @_;
my $showBoostrapVals = "T";
if (!defined $title)
{
$title = "";
}
my $Rscript = qq~
source(\"$readNewickFile\")
require(phytools)
tr <- read.newick(file=\"$treeFile\")
tr <- collapse.singles(tr)
(nLeaves <- length(tr\$tip.label))
figH <- 8
figW <- 6
if ( nLeaves >= 50 )
{
figH <- 6.0/50.0 * ( nLeaves - 50) + 10
figW <- 6.0/50.0 * ( nLeaves - 50) + 6
}
pdf(\"$pdfFile\", width=figW, height=figH)
op <- par(mar=c(0,0,1.5,0), mgp=c(2.85,0.6,0),tcl = -0.3)
plot(tr, type=\"phylogram\", no.margin=FALSE, show.node.label=F, cex=0.8, main=\"$title\")
par(op)
dev.off()
~;
# Hand the generated script to R in batch mode; dies/exits on failure.
run_R_script( $Rscript );
}
## plot_tree2( $treeFile, $clFile, $pdfFile, $title? )
## Like plot_tree(), but colors the tips by cluster membership read from
## $clFile (two columns: leaf name, cluster id).  Consecutive tips whose
## cluster changes advance a color index that cycles 1..8; index 7 is
## remapped to "brown" inside the generated R code.  Bootstrap labels are
## suppressed ($showBoostrapVals = "F" is interpolated into the R plot call).
## NOTE(review): relies on the file-scoped global $readNewickFile and on
## run_R_script().
sub plot_tree2
{
my ($treeFile, $clFile, $pdfFile, $title) = @_;
my $showBoostrapVals = "F";
if (!defined $title)
{
$title = "";
}
my $Rscript = qq~
clTbl <- read.table(\"$clFile\", header=F)
str(clTbl)
cltr <- clTbl[,2]
names(cltr) <- clTbl[,1]
source(\"$readNewickFile\")
require(phytools)
tr1 <- read.newick(file=\"$treeFile\")
tr1 <- collapse.singles(tr1)
tip.cltr <- cltr[tr1\$tip.label]
colIdx <- 1
tip.colors <- c()
tip.colors[1] <- colIdx
for ( i in 2:length(tip.cltr) )
{
if ( tip.cltr[i] != tip.cltr[i-1] )
{
colIdx <- colIdx + 1
if ( colIdx==9 )
{
colIdx <- 1
}
}
tip.colors[i] <- colIdx
if ( colIdx==7 )
{
tip.colors[i] <- "brown"
}
}
(nLeaves <- length(tr1\$tip.label))
figH <- 8
figW <- 6
if ( nLeaves >= 50 )
{
figH <- 6.0/50.0 * ( nLeaves - 50) + 10
figW <- 6.0/50.0 * ( nLeaves - 50) + 6
}
pdf(\"$pdfFile\", width=figW, height=figH)
op <- par(mar=c(0,0,1.5,0), mgp=c(2.85,0.6,0),tcl = -0.3)
plot(tr1,type=\"phylogram\", no.margin=FALSE, show.node.label=$showBoostrapVals, cex=0.8, tip.color=tip.colors, main=\"$title\")
par(op)
dev.off()
~;
# Hand the generated script to R in batch mode; dies/exits on failure.
run_R_script( $Rscript );
}
# run_R_script( $Rscript )
# Write $Rscript to a temp file under the global $tmpDir, execute it with
# "$R CMD BATCH", then scan the resulting .Rout file; any line containing
# "Error" aborts the entire program with a pointer to the output file.
# NOTE(review): relies on file-scoped globals $R and $tmpDir.
sub run_R_script
{
	my $Rscript = shift;

	my ($fh, $inFile) = tempfile("rTmpXXXX", SUFFIX => '.R', OPEN => 1, DIR => $tmpDir);
	print $fh "$Rscript";
	close $fh;

	# R CMD BATCH writes its transcript to $inFile . "out" (the .Rout file).
	my $outFile = $inFile . "out";
	my $cmd = "$R CMD BATCH --no-save --no-restore-data $inFile $outFile";
	system($cmd) == 0 or die "system($cmd) failed:$?\n";

	# Lexical filehandle + three-arg open (was a bareword 2-arg open).
	# The old $exitStatus flag was dead code -- the loop exits the program
	# directly on the first error line -- so it has been removed.
	open my $in, '<', $outFile or die "Cannot open $outFile for reading: $OS_ERROR";
	while ( my $line = <$in> )
	{
		if ( $line =~ /Error/ )
		{
			print "R script crashed at\n$line";
			print "check $outFile for details\n";
			exit 1;
		}
	}
	close $in;
}
# read_part_tbl( $file )
# Parse a CSV partition table and return a hash mapping leaf name -> cluster
# id.  The first line is a header and is discarded.  Only the first three
# columns (clustername, bootstrap, leafname) are examined; the bootstrap
# value is currently unused.  Exits the program if $file does not exist.
#
# Expected columns (from the generating R code):
#   clustername, bootstrap, leafname, branchPath, medianOfDistances,
#   sequencesperCluster
sub read_part_tbl
{
	my $file = shift;

	if ( ! -e $file )
	{
		warn "\n\n\tERROR: $file does not exist";
		print "\n\n";
		exit 1;
	}

	my %tbl;
	# Lexical filehandle + three-arg open (was a bareword 2-arg open).
	open my $in, '<', $file or die "Cannot open $file for reading: $OS_ERROR";
	my $headerStr = <$in>;   # discard CSV header
	while ( my $line = <$in> )
	{
		chomp $line;
		my ($cltrId, $boot, $leafId) = split ",", $line;
		$tbl{ $leafId } = $cltrId;
	}
	close $in;

	return %tbl;
}
# write_tbl( \%table, $outFile )
# Write the hash to $outFile as "key<TAB>value" lines, sorted by key.
# Works directly on the reference (the old code made a full copy of the
# hash first) and checks close() so buffered write errors are reported.
sub write_tbl
{
	my ($rTbl, $outFile) = @_;
	open my $out, '>', $outFile or die "Cannot open $outFile for writing: $OS_ERROR";
	print $out $_ . "\t" . $rTbl->{$_} . "\n" for sort keys %{$rTbl};
	close $out or die "Cannot close $outFile: $OS_ERROR";
}
## Testing if two arrays are identical in a set-theoretic sense. That is that
## they have exactly the same set of elements.
##
## setEqual( \@a, \@b )
## Returns 1 when the two arrays hold the same set of elements, 0 otherwise.
## On mismatch it prints a diagnostic report listing counts and the elements
## present in one array but not the other (via comm() and diff()).
## NOTE(review): the equality test compares the common-element count against
## the raw array lengths, so arrays containing duplicates can be reported as
## unequal even when their underlying sets match -- confirm callers only pass
## duplicate-free arrays.
sub setEqual
{
my ($rA, $rB) = @_;

my @a = @{$rA};
my @b = @{$rB};
my @c = comm(\@a, \@b);

my $ret = 1;

if (@c != @a || @c != @b)
{
warn "\n\n\tERROR: Elements of the two arrays do not match";
print "\n\tNumber of elements in the first array: " . @a . "\n";
print "\tNumber of elements in the second array: " . @b . "\n";
print "\tNumber of common elements: " . @c . "\n";

# writeArray(\@a, "a.txt");
# writeArray(\@b, "b.txt");
#print "\n\tNew taxon keys and fasta IDs written to a.txt and b.txt, respectively\n\n";

# Show the one-sided differences (only the larger side is reported).
if (@a > @b)
{
my @d = diff(\@a, \@b);
print "\nElements a but not b:\n";
for (@d)
{
print "\t$_\n";
}
print "\n\n";
}

if (@b > @a)
{
my @d = diff(\@b, \@a);
print "\nElements in b that are not a:\n";
for (@d)
{
print "\t$_\n";
}
print "\n\n";
}

$ret = 0;
}

return $ret;
}
# create_mothur_script( \@commands )
# Write the given mothur commands to a fresh temp file under $tmpDir, one
# per line, append a terminating "quit()", and return the file's path.
sub create_mothur_script
{
	my (@arr) = @{$_[0]};

	my ($fh, $inFile) = tempfile("mothur.XXXX", SUFFIX => '', OPEN => 1, DIR => $tmpDir);
	print $fh "$_\n" for @arr;
	print $fh "quit()\n";
	close $fh;

	return $inFile;
}
# printArray( \@array, $header? )
# Print the array to STDOUT, one element per line, optionally preceded by
# a blank line and the header.
sub printArray
{
	my ($a, $header) = @_;
	print "\n$header\n" if $header;
	print "$_\n" for @{$a};
}
# read_NR_array( $file, $hasHeader )
# Read a one-column file of non-redundant sequence IDs, stripping any
# ";size=N;" abundance annotation from each line.  When $hasHeader is
# defined the first line is discarded.  Exits the program if $file does
# not exist.
sub read_NR_array{
	my ($file, $hasHeader) = @_;
	my @rows;

	if ( ! -f $file )
	{
		# BUG FIX: the old message blamed read_array(); report this sub.
		warn "\n\n\tERROR in read_NR_array(): $file does not exist";
		print "\n\n";
		exit 1;
	}

	# Three-arg open with a lexical filehandle (was a bareword 2-arg open).
	open my $in, '<', $file or die "Cannot open $file for reading: $OS_ERROR";
	<$in> if defined $hasHeader;   # discard header line
	while ( my $line = <$in> )
	{
		chomp $line;
		$line =~ s/;size=\d+;//;   # strip abundance annotation
		push @rows, $line;
	}
	close $in;

	return @rows;
}
# get_leaves( $treeFile )
# Return the leaf labels of a Newick tree by running the external
# $nw_labels tool (-I: leaf labels only) into a temp file and reading it
# back with read_array().
# NOTE(review): relies on file-scoped globals $nw_labels, $tmpDir, $dryRun
# and $debug; with $dryRun set the command is only printed, so read_array()
# will then be called on a file that was never written -- confirm intended.
sub get_leaves
{
my $treeFile = shift;

my ($fh, $leavesFile) = tempfile("leaves.XXXX", SUFFIX => '', OPEN => 0, DIR => $tmpDir);
my $cmd = "$nw_labels -I $treeFile > $leavesFile";
print "\tcmd=$cmd\n" if $dryRun || $debug;
system($cmd) == 0 or die "system($cmd) failed with exit code: $?" if !$dryRun;

my @a = read_array($leavesFile);
return @a;
}
# run_vicut( $treeFile, $annFile, $vicutDir )
# Run the external vicut clustering tool on a tree with taxonomic
# annotations.  Leaves present in the tree but absent from the annotation
# file become "query" sequences, written to a temp file and passed via -q.
# Returns the list of query leaf IDs (possibly empty).
# NOTE(review): relies on file-scoped globals $vicut, $quietStr, $tmpDir,
# $dryRun, $debug and on the sibling subs read_tbl(), get_leaves(), diff().
sub run_vicut
{
my ($treeFile, $annFile, $vicutDir) = @_;

my %annTbl = read_tbl( $annFile );
my @ann = keys %annTbl;

my @leaves = get_leaves( $treeFile );

# Tree leaves with no annotation entry are treated as query sequences.
my @query = diff( \@leaves, \@ann );

if ( @query )
{
my ($fh, $qFile) = tempfile("query.XXXX", SUFFIX => 'txt', OPEN => 1, DIR => $tmpDir);
for ( @query )
{
print $fh "$_\n";
}
close $fh;

my $cmd = "$vicut $quietStr -t $treeFile -a $annFile -q $qFile -o $vicutDir";
print "\tcmd=$cmd\n" if $dryRun || $debug;
system($cmd) == 0 or die "system($cmd) failed:$?\n" if !$dryRun;
}
else
{
# No unannotated leaves: run vicut without a query file.
my $cmd = "$vicut $quietStr -t $treeFile -a $annFile -o $vicutDir";
print "\tcmd=$cmd\n" if $dryRun || $debug;
system($cmd) == 0 or die "system($cmd) failed:$?\n" if !$dryRun;
}

return @query;
}
# make_dir( $dir )
# Create a directory (and any missing parents) by shelling out to
# "mkdir -p"; dies on failure.  Honors the file-scoped $dryRun/$debug
# globals: with $dryRun set the command is only printed.
sub make_dir
{
my $dir = shift;

my $cmd = "mkdir -p $dir";
print "\tcmd=$cmd\n" if $dryRun || $debug;
system($cmd) == 0 or die "system($cmd) failed with exit code: $?" if !$dryRun;
}
# fix_fasta_headers( $spNRfaFile )
# Run the external header-fixing tool (path in the scalar global
# $fix_fasta_headers, which shadows this sub's name) on a species fasta
# file in place: write to a temp file, then move it over the original.
# NOTE(review): relies on file-scoped globals $spDir, $sp, $dryRun, $debug
# and $fix_fasta_headers -- confirm they are set before calling.
sub fix_fasta_headers
{
	my $spNRfaFile = shift;

	my $spNRfaFileTmp = "$spDir/$sp" . "_nrTmp.fa";
	# BUG FIX: $cmd was assigned without "my", leaking into (or colliding
	# with) a package global used elsewhere in the script; make it lexical.
	my $cmd = "$fix_fasta_headers -i $spNRfaFile -o $spNRfaFileTmp; mv $spNRfaFileTmp $spNRfaFile";
	print "\tcmd=$cmd\n" if $dryRun || $debug;
	system($cmd) == 0 or die "system($cmd) failed:$?" if !$dryRun;
}
# get_seqIDs_from_fa( $file )
# Return the sequence IDs (full '>' header lines, minus the '>') of every
# record in a fasta file.  Reads record-at-a-time by setting the input
# record separator to '>'.  A progress timer exists but is disabled
# ($quiet = 1), matching the original behavior.
sub get_seqIDs_from_fa
{
	my $file = shift;

	my $quiet = 1;
	my $startRun = time();
	my $timeStr = "";

	# Lexical filehandle + three-arg open (was a bareword 2-arg open).
	open my $in, '<', $file or die "Cannot open $file for reading: $OS_ERROR";

	# BUG FIX: local() restores $/ automatically on scope exit, including
	# via die(); the old code assigned $/ = ">" and reset it to "\n" by
	# hand, which skipped restoration on error and clobbered any custom
	# separator the caller had set.
	local $/ = ">";
	my $junkFirstOne = <$in>;   # discard the empty chunk before the first '>'

	my $count = 1;
	my @seqIDs;
	while (<$in>)
	{
		if ( !$quiet && ($count % 500 == 0) )
		{
			# Elapsed-time display, refreshed in place every 500 records.
			my $runTime = time() - $startRun;
			if ( $runTime > 60 )
			{
				my $timeMin = int($runTime / 60);
				my $timeSec = sprintf("%02d", $runTime % 60);
				$timeStr = "$timeMin:$timeSec";
			}
			else
			{
				$timeStr = sprintf("0:%02d", $runTime);
			}
			print "\r$timeStr";
		}
		chomp;                             # strip the trailing '>' separator
		my ($id) = split /\n/, $_;         # first line of the record is the ID
		push @seqIDs, $id;
		$count++;
	}
	close $in;

	return @seqIDs;
}
exit 0;
| 29.265284 | 227 | 0.511284 |
ed1dbac3f143b787de3813fc87fb77138296cd08 | 3,771 | pm | Perl | network/freebox/restapi/mode/dslusage.pm | alenorcy/centreon-plugins | d7603030c24766935ed07e6ebe1082e16d6fdb4a | [
"Apache-2.0"
]
| null | null | null | network/freebox/restapi/mode/dslusage.pm | alenorcy/centreon-plugins | d7603030c24766935ed07e6ebe1082e16d6fdb4a | [
"Apache-2.0"
]
| 2 | 2016-07-28T10:18:20.000Z | 2017-04-11T14:16:48.000Z | network/freebox/restapi/mode/dslusage.pm | alenorcy/centreon-plugins | d7603030c24766935ed07e6ebe1082e16d6fdb4a | [
"Apache-2.0"
]
| 1 | 2018-03-20T11:05:05.000Z | 2018-03-20T11:05:05.000Z | #
# Copyright 2019 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Centreon plugin mode: monitor DSL line usage on a Freebox via its REST API.
# Built on the counter-template base class: set_counters() declares what is
# measured, manage_selection() fetches the raw values.
package network::freebox::restapi::mode::dslusage;

use base qw(centreon::plugins::templates::counter);

use strict;
use warnings;

# Declare the four global counters (no per-instance breakdown: type => 0):
# upload/download sync rates in bits per second and the signal/noise ratio
# in each direction in dB.  Each entry wires a metric name to its human
# output template and its perfdata definition.
sub set_counters {
    my ($self, %options) = @_;

    $self->{maps_counters_type} = [
        { name => 'global', type => 0 },
    ];

    $self->{maps_counters}->{global} = [
        { label => 'rate-up', set => {
                key_values => [ { name => 'rate_up' } ],
                output_template => 'Dsl available upload bandwidth : %.2f %s/s',
                output_change_bytes => 2,
                perfdatas => [
                    { label => 'rate_up', value => 'rate_up_absolute', template => '%.2f',
                      unit => 'b/s', min => 0 },
                ],
            }
        },
        { label => 'rate-down', set => {
                key_values => [ { name => 'rate_down' } ],
                output_template => 'Dsl available download bandwidth : %.2f %s/s',
                output_change_bytes => 2,
                perfdatas => [
                    { label => 'rate_down', value => 'rate_down_absolute', template => '%.2f',
                      unit => 'b/s', min => 0 },
                ],
            }
        },
        { label => 'snr-up', set => {
                key_values => [ { name => 'snr_up' } ],
                output_template => 'Dsl upload signal/noise ratio : %.2f dB',
                output_change_bytes => 2,
                perfdatas => [
                    { label => 'snr_up', value => 'snr_up_absolute', template => '%.2f',
                      unit => 'dB' },
                ],
            }
        },
        { label => 'snr-down', set => {
                key_values => [ { name => 'snr_down' } ],
                output_template => 'Dsl download signal/noise ratio : %.2f dB',
                output_change_bytes => 2,
                perfdatas => [
                    { label => 'snr_down', value => 'snr_down_absolute', template => '%.2f',
                      unit => 'dB' },
                ],
            }
        },
    ];
}

# Standard Centreon mode constructor; this mode adds no options of its own.
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;

    $options{options}->add_options(arguments =>
                                {
                                });

    return $self;
}

# Pull the dsl RRD figures from the Freebox API (via the custom connector)
# and expose them as the 'global' counter instance.  The raw snr values are
# scaled by 10 -- presumably the API reports tenths of a dB; confirm against
# the Freebox API documentation.
sub manage_selection {
    my ($self, %options) = @_;

    my $result = $options{custom}->get_performance(db => 'dsl', path => 'rrd/');
    $result->{snr_up} *= 10 if (defined($result->{snr_up}));
    $result->{snr_down} *= 10 if (defined($result->{snr_down}));
    $self->{global} = { %{$result} };
}

1;
__END__
=head1 MODE
Check dsl usage.
=over 8
=item B<--filter-counters>
Only display some counters (regexp can be used).
Example: --filter-counters='^rate-up$'
=item B<--warning-*>
Threshold warning.
Can be: 'rate-up', 'rate-down', 'snr-up', 'snr-down'.
=item B<--critical-*>
Threshold critical.
Can be: 'rate-up', 'rate-down', 'snr-up', 'snr-down'.
=back
=cut
| 29.460938 | 94 | 0.520286 |
ed333f0bc1cbb5cb96f2f4f26bf1a500aa6b5a25 | 16,544 | pm | Perl | perl5/lib/perl5/Lexical/Persistence.pm | jinnks/printevolve | 8c54f130000cd6ded290f5905bdc2093d9f264da | [
"Apache-2.0"
]
| null | null | null | perl5/lib/perl5/Lexical/Persistence.pm | jinnks/printevolve | 8c54f130000cd6ded290f5905bdc2093d9f264da | [
"Apache-2.0"
]
| null | null | null | perl5/lib/perl5/Lexical/Persistence.pm | jinnks/printevolve | 8c54f130000cd6ded290f5905bdc2093d9f264da | [
"Apache-2.0"
]
| null | null | null | =head1 NAME
Lexical::Persistence - Persistent lexical variable values for arbitrary calls.
=head1 VERSION
version 1.023
=head1 SYNOPSIS
#!/usr/bin/perl
use Lexical::Persistence;
my $persistence = Lexical::Persistence->new();
foreach my $number (qw(one two three four five)) {
$persistence->call(\&target, number => $number);
}
exit;
sub target {
my $arg_number; # Argument.
my $narf_x++; # Persistent.
my $_i++; # Dynamic.
my $j++; # Persistent.
print "arg_number = $arg_number\n";
print "\tnarf_x = $narf_x\n";
print "\t_i = $_i\n";
print "\tj = $j\n";
}
=head1 DESCRIPTION
Lexical::Persistence does a few things, all related. Note that all
the behaviors listed here are the defaults. Subclasses can override
nearly every aspect of Lexical::Persistence's behavior.
Lexical::Persistence lets your code access persistent data through
lexical variables. This example prints "some value" because the value
of $x persists in the $lp object between setter() and getter().
use Lexical::Persistence;
my $lp = Lexical::Persistence->new();
$lp->call(\&setter);
$lp->call(\&getter);
sub setter { my $x = "some value" }
sub getter { print my $x, "\n" }
Lexicals with leading underscores are not persistent.
By default, Lexical::Persistence supports accessing data from multiple
sources through the use of variable prefixes. The set_context()
member sets each data source. It takes a prefix name and a hash of
key/value pairs. By default, the keys must have sigils representing
their variable types.
use Lexical::Persistence;
my $lp = Lexical::Persistence->new();
$lp->set_context( pi => { '$member' => 3.141 } );
$lp->set_context( e => { '@member' => [ 2, '.', 7, 1, 8 ] } );
$lp->set_context(
animal => {
'%member' => { cat => "meow", dog => "woof" }
}
);
$lp->call(\&display);
sub display {
my ($pi_member, @e_member, %animal_member);
print "pi = $pi_member\n";
print "e = @e_member\n";
while (my ($animal, $sound) = each %animal_member) {
print "The $animal goes... $sound!\n";
}
}
And the corresponding output:
pi = 3.141
e = 2 . 7 1 8
The cat goes... meow!
The dog goes... woof!
By default, call() takes a single subroutine reference and an optional
list of named arguments. The arguments will be passed directly to the
called subroutine, but Lexical::Persistence also makes the values
available from the "arg" prefix.
use Lexical::Persistence;
my %animals = (
snake => "hiss",
plane => "I'm Cartesian",
);
my $lp = Lexical::Persistence->new();
while (my ($animal, $sound) = each %animals) {
$lp->call(\&display, animal => $animal, sound => $sound);
}
sub display {
my ($arg_animal, $arg_sound);
print "The $arg_animal goes... $arg_sound!\n";
}
And the corresponding output:
The plane goes... I'm Cartesian!
The snake goes... hiss!
Sometimes you want to call functions normally. The wrap() method will
wrap your function in a small thunk that does the call() for you,
returning a coderef.
use Lexical::Persistence;
my $lp = Lexical::Persistence->new();
my $thunk = $lp->wrap(\&display);
$thunk->(animal => "squirrel", sound => "nuts");
sub display {
my ($arg_animal, $arg_sound);
print "The $arg_animal goes... $arg_sound!\n";
}
And the corresponding output:
The squirrel goes... nuts!
Prefixes are the characters leading up to the first underscore in a
lexical variable's name. However, there's also a default context
named underscore. It's literally "_" because the underscore is not
legal in a context name by default. Variables without prefixes, or
with prefixes that have not been previously defined by set_context(),
are stored in that context.
The get_context() member returns a hash for a named context. This
allows your code to manipulate the values within a persistent context.
use Lexical::Persistence;
my $lp = Lexical::Persistence->new();
$lp->set_context(
_ => {
'@mind' => [qw(My mind is going. I can feel it.)]
}
);
while (1) {
$lp->call(\&display);
my $mind = $lp->get_context("_")->{'@mind'};
splice @$mind, rand(@$mind), 1;
last unless @$mind;
}
sub display {
my @mind;
print "@mind\n";
}
Displays something like:
My mind is going. I can feel it.
My is going. I can feel it.
My is going. I feel it.
My going. I feel it.
My going. I feel
My I feel
My I
My
It's possible to create multiple Lexical::Persistence objects, each
with a unique state.
use Lexical::Persistence;
my $lp_1 = Lexical::Persistence->new();
$lp_1->set_context( _ => { '$foo' => "context 1's foo" } );
my $lp_2 = Lexical::Persistence->new();
$lp_2->set_context( _ => { '$foo' => "the foo in context 2" } );
$lp_1->call(\&display);
$lp_2->call(\&display);
sub display {
print my $foo, "\n";
}
Gets you this output:
context 1's foo
the foo in context 2
You can also compile and execute perl code contained in plain strings in a
a lexical environment that already contains the persisted variables.
use Lexical::Persistence;
my $lp = Lexical::Persistence->new();
$lp->do( 'my $message = "Hello, world" );
$lp->do( 'print "$message\n"' );
Which gives the output:
Hello, world
If you come up with other fun uses, let us know.
=cut
package Lexical::Persistence;
use warnings;
use strict;
our $VERSION = '1.020';
use Devel::LexAlias qw(lexalias);
use PadWalker qw(peek_sub);
=head2 new
Create a new lexical persistence object. This object will store one
or more persistent contexts. When called by this object, lexical
variables will take on the values kept in this object.
=cut
# Construct a fresh Lexical::Persistence object holding an empty set of
# contexts, then let initialize_contexts() (overridable by subclasses)
# declare the defaults.
sub new {
	my $class = shift;

	my %initial_state = ( context => {} );
	my $self = bless \%initial_state, $class;
	$self->initialize_contexts();

	return $self;
}
=head2 initialize_contexts
This method is called by new() to declare the initial contexts for a
new Lexical::Persistence object. The default implementation declares
the default "_" context.
Override or extend it to create others as needed.
=cut
# Declare the default "_" context on a newly built object; subclasses
# override/extend this to add their own contexts (see POD above).
sub initialize_contexts {
my $self = shift;
$self->set_context( _ => { } );
}
=head2 set_context NAME, HASH
Store a context HASH within the persistence object, keyed on a NAME.
Members of the context HASH are unprefixed versions of the lexicals
they'll persist, including the sigil. For example, this set_context()
call declares a "request" context with predefined values for three
variables: $request_foo, @request_foo, and %request_foo:
$lp->set_context(
request => {
'$foo' => 'value of $request_foo',
'@foo' => [qw( value of @request_foo )],
'%foo' => { key => 'value of $request_foo{key}' }
}
);
See parse_variable() for information about how Lexical::Persistence
decides which context a lexical belongs to and how you can change
that.
=cut
# Store (or replace) the named context hash.  Keys are sigil-prefixed
# variable names, e.g. '$foo' / '@foo' / '%foo' (see POD above).
sub set_context {
my ($self, $context_name, $context_hash) = @_;
$self->{context}{$context_name} = $context_hash;
}
=head2 get_context NAME
Returns a context hash associated with a particular context name.
Autovivifies the context if it doesn't already exist, so be careful
there.
=cut
# Return the named context hash, autovivifying an empty one if it does
# not exist yet (note: ||= also replaces a defined-but-false value, which
# cannot occur here since contexts are always hashrefs).
sub get_context {
my ($self, $context_name) = @_;
$self->{context}{$context_name} ||= { };
}
=head2 call CODEREF, ARGUMENT_LIST
Call CODEREF with lexical persistence and an optional ARGUMENT_LIST,
consisting of name => value pairs. Unlike with set_context(),
however, argument names do not need sigils. This may change in the
future, however, as it's easy to access an argument with the wrong
variable type.
The ARGUMENT_LIST is passed to the called CODEREF through @_ in the
usual way. They're also available as $arg_name variables for
convenience.
See push_arg_context() for information about how $arg_name works, and
what you can do to change that behavior.
=cut
# call( $coderef, @name_value_args )
# Invoke $coderef with its lexicals aliased to this object's persistent
# storage, propagating the caller's context (void/list/scalar) and making
# the named arguments available both via @_ and as the "arg" context.
sub call {
my ($self, $sub, @args) = @_;

# Swap in a fresh "arg" context for the duration of the call.
my $old_arg_context = $self->push_arg_context(@args);

# Walk the sub's lexical pad; every variable parse_variable() recognizes
# gets aliased (Devel::LexAlias) to its slot in the matching context.
my $pad = peek_sub($sub);
while (my ($var, $ref) = each %$pad) {
next unless my ($sigil, $context, $member) = $self->parse_variable($var);
lexalias(
$sub, $var, $self->get_member_ref($sigil, $context, $member)
);
}

# Three-way dispatch so the target runs in the same context the caller
# used, and the "arg" context is restored before returning.
unless (defined wantarray) {
$sub->(@args);
$self->pop_arg_context($old_arg_context);
return;
}

if (wantarray) {
my @return = $sub->(@args);
$self->pop_arg_context($old_arg_context);
return @return;
}

my $return = $sub->(@args);
$self->pop_arg_context($old_arg_context);
return $return;
}
=head2 invoke OBJECT, METHOD, ARGUMENT_LIST
Invoke OBJECT->METHOD(ARGUMENT_LIST) while maintaining state for the
METHOD's lexical variables. Written in terms of call(), except that
it takes OBJECT and METHOD rather than CODEREF. See call() for more
details.
May have issues with methods invoked via AUTOLOAD, as invoke() uses
can() to find the method's CODEREF for call().
=cut
# invoke( $object, $method, @args )
# Resolve $object->can($method) and run it through call() so the method's
# lexicals persist.  Returns nothing if the method cannot be resolved
# (note: AUTOLOAD-only methods are therefore not reachable; see POD).
sub invoke {
	my ($self, $object, $method, @args) = @_;
	my $sub = $object->can($method);
	return unless defined $sub;
	return $self->call($sub, @args);
}
=head2 wrap CODEREF
Wrap a function or anonymous CODEREF so that it's transparently called
via call(). Returns a coderef which can be called directly. Named
arguments to the call will automatically become available as $arg_name
lexicals within the called CODEREF.
See call() and push_arg_context() for more details.
=cut
# wrap( $coderef )  or  wrap( $object, $method )
# Return a closure that routes its arguments through call() (for a
# coderef) or invoke() (for an object/method pair), so callers can use a
# plain function interface while still getting lexical persistence.
sub wrap {
	my ($self, $invocant, $method) = @_;

	return sub { $self->call($invocant, @_) }
		if ref($invocant) eq 'CODE';

	# FIXME - Experimental method wrapper.
	# TODO - Make it resolve the method at call time.
	# TODO - Possibly make it generate dynamic facade classes.
	return sub { $self->invoke($invocant, $method, @_) };
}
=head2 prepare CODE
Wrap a CODE string in a subroutine definition, and prepend
declarations for all the variables stored in the Lexical::Persistence
default context. This avoids having to declare variables explicitly
in the code using 'my'. Returns a new code string ready for Perl's
built-in eval(). From there, a program may $lp->call() the code or
$lp->wrap() it.
Also see L</compile()>, which is a convenient wrapper for prepare()
and Perl's built-in eval().
Also see L</do()>, which is a convenient way to prepare(), eval() and
call() in one step.
=cut
# prepare( $code_string )
# Wrap $code_string in "sub { ... }" prefixed with "my" declarations for
# every variable in the default "_" context, so the string can be eval()ed
# without declaring its persistent lexicals itself.
sub prepare {
	my ($self, $code) = @_;

	# Values are irrelevant here -- call() aliases them in later.
	my @declarations = map { "my $_;" } keys %{ $self->get_context('_') };

	# Declarations sit OUTSIDE the generated sub; the compiler closes over
	# only the ones $code actually mentions and drops the rest.
	my $preamble = join ' ', @declarations;
	return "$preamble sub { $code }";
}
=head2 compile CODE
compile() is a convenience method to prepare() a CODE string, eval()
it, and then return the resulting coderef. If it fails, it returns
false, and $@ will explain why.
=cut
# compile( $code_string )
# prepare() the code and string-eval it into a coderef; returns false on
# failure with the reason left in $@ (see POD above).
sub compile {
my ($self, $code) = @_;
return eval($self->prepare($code));
}
=head2 do CODE
do() is a convenience method to compile() a CODE string and execute
it. It returns the result of CODE's execution, or it throws an
exception on failure.
This example prints the numbers 1 through 10. Note, however, that
do() compiles the same code each time.
use Lexical::Persistence;
my $lp = Lexical::Persistence->new();
$lp->do('my $count = 0');
$lp->do('print ++$count, "\\n"') for 1..10;
Lexical declarations are preserved across do() invocations, such as
with $count in the surrounding examples. This behavior is part of
prepare(), which do() uses via compile().
The previous example may be rewritten in terms of compile() and call()
to avoid recompiling code every iteration. Lexical declarations are
preserved between do() and compile() as well:
use Lexical::Persistence;
my $lp = Lexical::Persistence->new();
$lp->do('my $count = 0');
my $coderef = $lp->compile('print ++$count, "\\n"');
$lp->call($coderef) for 1..10;
do() inherits some limitations from PadWalker's peek_sub(). For
instance, it cannot alias lexicals within sub() definitions in the
supplied CODE string. However, Lexical::Persistence can do this with
careful use of eval() and some custom CODE preparation.
=cut
# do( $code_string )
# compile() the code (dying with $@ on compile failure) and immediately
# call() it with persistence.  Recompiles on every invocation; use
# compile() + call() to reuse a compiled snippet (see POD above).
sub do {
my ($self, $code) = @_;

my $sub = $self->compile( $code ) or die $@;
$self->call( $sub );
}
=head2 parse_variable VARIABLE_NAME
This method determines whether VARIABLE_NAME should be persistent. If
it should, parse_variable() will return three values: the variable's
sigil ('$', '@' or '%'), the context name in which the variable
persists (see set_context()), and the name of the member within that
context where the value is stored. parse_variable() returns nothing
if VARIABLE_NAME should not be persistent.
parse_variable() also determines whether the member name includes its
sigil. By default, the "arg" context is the only one with members
that have no sigils. This is done to support the unadorned argument
names used by call().
This method implements a default behavior. It's intended to be
overridden or extended by subclasses.
=cut
# parse_variable( $var_name )
# Decide whether a pad variable is persistent.  Returns (sigil, context
# name, member key) for persistent variables, or nothing for ones that
# should be left alone (leading underscore after the sigil = dynamic).
sub parse_variable {
my ($self, $var) = @_;

# Sigil, then an optional prefix up to the first underscore, then the
# rest of the name.  (?!_) rejects names like $_foo entirely.
return unless (
my ($sigil, $context, $member) = (
$var =~ /^([\$\@\%])(?!_)(?:([^_]*)_)?(\S+)/
)
);

if (defined $context) {
if (exists $self->{context}{$context}) {
# Known context: "arg" members are stored without sigils (to match
# the bare names given to call()); all others keep the sigil.
return $sigil, $context, $member if $context eq "arg";
return $sigil, $context, "$sigil$member";
}
# Unknown prefix: fold the whole name into the default "_" context.
return $sigil, "_", "$sigil$context\_$member";
}
return $sigil, "_", "$sigil$member";
}
=head2 get_member_ref SIGIL, CONTEXT, MEMBER
This method fetches a reference to the named MEMBER of a particular
named CONTEXT. The returned value type will be governed by the given
SIGIL.
Scalar values are stored internally as scalars to be consistent with
how most people store scalars.
The persistent value is created if it doesn't exist. The initial
value is undef or empty, depending on its type.
This method implements a default behavior. It's intended to be
overridden or extended by subclasses.
=cut
# get_member_ref( $sigil, $context, $member )
# Return a reference suitable for lexalias(): a scalar ref for '$'
# (scalars are stored as plain values), or the stored array/hash ref for
# '@'/'%'.  Missing members are created empty/undef on first access.
sub get_member_ref {
	my ($self, $sigil, $context, $member) = @_;

	my $slot = $self->{context}{$context};

	if ($sigil eq '$') {
		$slot->{$member} = undef unless exists $slot->{$member};
		return \$slot->{$member};
	}

	unless (exists $slot->{$member}) {
		if    ($sigil eq '@') { $slot->{$member} = [ ] }
		elsif ($sigil eq '%') { $slot->{$member} = { } }
	}

	return $slot->{$member};
}
=head2 push_arg_context ARGUMENT_LIST
Convert a named ARGUMENT_LIST into members of an argument context, and
call set_context() to declare that context. This is how $arg_foo
variables are supported. This method returns the previous context,
fetched by get_context() before the new context is set.
This method implements a default behavior. It's intended to be
overridden or extended by subclasses. For example, to redefine the
parameters as $param_foo.
See pop_arg_context() for the other side of this coin.
=cut
# push_arg_context( @name_value_pairs )
# Replace the "arg" context with the given name/value pairs (this is what
# makes $arg_foo lexicals work inside call() targets) and return the
# previous "arg" context so pop_arg_context() can restore it.
sub push_arg_context {
	my ($self, @pairs) = @_;
	my $old_arg_context = $self->get_context("arg");
	$self->set_context( arg => { @pairs } );
	return $old_arg_context;
}
=head2 pop_arg_context OLD_ARG_CONTEXT
Restores OLD_ARG_CONTEXT after a target function has returned. The
OLD_ARG_CONTEXT is the return value from the push_arg_context() call
just prior to the target function's call.
This method implements a default behavior. It's intended to be
overridden or extended by subclasses.
=cut
# pop_arg_context( $old_context )
# Restore the "arg" context saved by push_arg_context() once the target
# function has returned.
sub pop_arg_context {
my ($self, $old_context) = @_;
$self->set_context( arg => $old_context );
}
=head1 SEE ALSO
L<POE::Stage>, L<Devel::LexAlias>, L<PadWalker>,
L<Catalyst::Controller::BindLex>.
=head2 BUG TRACKER
https://rt.cpan.org/Dist/Display.html?Status=Active&Queue=Lexical-Persistence
=head2 REPOSITORY
http://github.com/rcaputo/lexical-persistence
http://gitorious.org/lexical-persistence
=head2 OTHER RESOURCES
http://search.cpan.org/dist/Lexical-Persistence/
=head1 COPYRIGHT
Lexical::Persistence in copyright 2006-2013 by Rocco Caputo. All
rights reserved. Lexical::Persistence is free software. It is
released under the same terms as Perl itself.
=head1 ACKNOWLEDGEMENTS
Thanks to Matt Trout and Yuval Kogman for lots of inspiration. They
were the demon and the other demon sitting on my shoulders.
Nick Perez convinced me to make this a class rather than persist with
the original, functional design. While Higher Order Perl is fun for
development, I have to say the move to OO was a good one.
Paul "LeoNerd" Evans contributed the compile() and eval() methods.
The South Florida Perl Mongers, especially Jeff Bisbee and Marlon
Bailey, for documentation feedback.
irc://irc.perl.org/poe for support and feedback.
=cut
1;
| 25.570325 | 78 | 0.704304 |
ed3643e3999410f06c6ae98dbf578af1e64f3235 | 2,495 | pm | Perl | tools/test_modules/m16500.pm | davidbolvansky/hashcat | f05ea114a911852233a3e95c719dd690e9c631c6 | [
"MIT"
]
| 13,663 | 2015-12-04T16:08:29.000Z | 2022-03-31T23:43:17.000Z | tools/test_modules/m16500.pm | JusticeRage/hashcat | 0ba77fe7618ab48ddb2c0990baad918bb7cd2824 | [
"MIT"
]
| 2,014 | 2015-12-04T16:45:36.000Z | 2022-03-31T21:02:58.000Z | tools/test_modules/m16500.pm | JusticeRage/hashcat | 0ba77fe7618ab48ddb2c0990baad918bb7cd2824 | [
"MIT"
]
| 2,555 | 2015-12-04T16:09:31.000Z | 2022-03-31T11:34:38.000Z | #!/usr/bin/env perl
##
## Author......: See docs/credits.txt
## License.....: MIT
##
use strict;
use warnings;
use Digest::SHA qw (sha256 sha384 sha512);
use Digest::HMAC qw (hmac);
use MIME::Base64 qw (encode_base64url decode_base64url);
use JSON qw (encode_json decode_json);
# hashcat test-module constraints; the first pair bounds the password length
# (0..64), the remaining four pairs are unconstrained (-1) -- presumably the
# salt/combination limits per the hashcat test framework; confirm against
# tools/test_modules conventions.
sub module_constraints { [[0, 64], [-1, -1], [-1, -1], [-1, -1], [-1, -1]] }
# module_generate_hash( $word, $salt? )
# Build a JWT (mode 16500) for the given password.  The salt is the
# "header.payload" portion; its base64url-decoded header names the HMAC
# algorithm (HS256/HS384/HS512).  Returns "header.payload.signature".
sub module_generate_hash
{
  my $word = shift;
  my $salt = shift || get_random_jwt_salt ();

  # The algorithm lives in the JSON header (first dot-separated segment).
  my ($header_base64) = split (/\./, $salt);
  my $header = decode_json (decode_base64url ($header_base64));
  my $alg = $header->{"alg"};

  # Dispatch table: algorithm -> [ digest function, HMAC block size ].
  my %hmac_args_for =
  (
    "HS256" => [ \&sha256,  64 ],
    "HS384" => [ \&sha384, 128 ],
    "HS512" => [ \&sha512, 128 ],
  );

  my $hmac_args = $hmac_args_for{$alg};

  die "not supported hash\n" unless defined $hmac_args;

  my ($sha_func, $block_size) = @{$hmac_args};

  # Sign the "header.payload" string itself, per RFC 7519.
  my $digest = hmac ($salt, $word, $sha_func, $block_size);

  return sprintf ("%s.%s", $salt, encode_base64url ($digest, ""));
}
# module_verify_hash( $line )
# Verify one "hash:password" line from a cracked-hashes file: split the JWT
# into its three dot-separated parts, re-sign "header.payload" with the
# candidate password via module_generate_hash(), and return the recomputed
# hash plus the word for the framework to compare.
sub module_verify_hash
{
my $line = shift;

my ($hash, $word) = split (':', $line);

return unless defined $hash;
return unless defined $word;

my @data = split (/\./, $hash);

# A well-formed JWT has exactly header, payload and signature segments.
return unless scalar @data == 3;

my ($header, $payload, $signature) = @data;

my $salt = $header . "." . $payload;

# pack_if_HEX_notation() is a shared test-suite helper ($HEX[...] words).
my $word_packed = pack_if_HEX_notation ($word);

my $new_hash = module_generate_hash ($word_packed, $salt);

return ($new_hash, $word);
}
# get_random_jwt_salt()
# Build a random JWT "header.payload" salt: pick a supported HMAC
# algorithm, JSON-encode a header naming it and a payload with one random
# key/value pair, and base64url-encode both parts.  random_number() is a
# shared test-suite helper.
sub get_random_jwt_salt
{
my @hashes =
(
"HS256",
#"HS384", #this is support in hashcat, but commented out here to prevent mixed hash output files in single mode
#"HS512", #this is support in hashcat, but commented out here to prevent mixed hash output files in single mode
#"RS256", #not supported by hashcat
#"RS384",
#"RS512",
#"PS256",
#"PS384",
#"PS512",
#"ES256",
#"ES384",
#"ES512",
);

my $rnd = random_number (0, scalar @hashes - 1);

my $hash = $hashes[$rnd];

my $header =
{
"alg" => $hash
};

my $random_key = random_number (1, 100000000);
my $random_val = random_number (1, 100000000);

my $payload =
{
$random_key => $random_val
};

my $header_json  = encode_json ($header);
my $payload_json = encode_json ($payload);

my $header_base64  = encode_base64url ($header_json, "");
my $payload_base64 = encode_base64url ($payload_json, "");

return $header_base64 . "." . $payload_base64;
}
1;
| 20.120968 | 115 | 0.602806 |
73f579867512ec2ae7046c3b35deede2eaa7185d | 11,594 | pm | Perl | lib/Yancy/Plugin/Auth.pm | flash548/Yancy | adc944e09d17d86d8682de323501d283f4cdb6db | [
"Artistic-1.0"
]
| null | null | null | lib/Yancy/Plugin/Auth.pm | flash548/Yancy | adc944e09d17d86d8682de323501d283f4cdb6db | [
"Artistic-1.0"
]
| null | null | null | lib/Yancy/Plugin/Auth.pm | flash548/Yancy | adc944e09d17d86d8682de323501d283f4cdb6db | [
"Artistic-1.0"
]
| null | null | null | package Yancy::Plugin::Auth;
our $VERSION = '1.073';
# ABSTRACT: Add one or more authentication plugins to your site
=head1 SYNOPSIS
use Mojolicious::Lite;
plugin Yancy => {
backend => 'sqlite://myapp.db',
schema => {
users => {
properties => {
id => { type => 'integer', readOnly => 1 },
plugin => {
type => 'string',
enum => [qw( password token )],
},
username => { type => 'string' },
# Optional password for Password auth
password => { type => 'string' },
},
},
},
};
app->yancy->plugin( 'Auth' => {
schema => 'users',
username_field => 'username',
password_field => 'password',
plugin_field => 'plugin',
plugins => [
[
Password => {
password_digest => {
type => 'SHA-1',
},
},
],
'Token',
],
} );
=head1 DESCRIPTION
B<Note:> This module is C<EXPERIMENTAL> and its API may change before
Yancy v2.000 is released.
This plugin adds authentication to your site.
Multiple authentication plugins can be added with this plugin. If you
only ever want to have one type of auth, you can use that auth plugin
directly if you want.
This module composes the L<Yancy::Plugin::Auth::Role::RequireUser> role
to provide the
L<require_user|Yancy::Plugin::Auth::Role::RequireUser/require_user>
authorization method.
=head1 CONFIGURATION
This plugin has the following configuration options.
=head2 schema
The name of the Yancy schema that holds users. Required.
=head2 username_field
The name of the field in the schema which is the user's identifier.
This can be a user name, ID, or e-mail address, and is provided by the
user during login.
=head2 password_field
The name of the field to use for the password or secret.
=head2 plugin_field
The field to store which plugin the user is using to authenticate. This
field is only used if two auth plugins have the same username field.
=head2 plugins
An array of auth plugins to configure. Each plugin can be either a name
(in the C<Yancy::Plugin::Auth::> namespace) or an array reference with
two elements: The name (in the C<Yancy::Plugin::Auth::> namespace) and a
hash reference of configuration.
Each of this module's configuration keys will be used as the default for
all the other auth plugins. Other plugins can override this
configuration individually. For example, users and tokens can be stored
in different schemas:
app->yancy->plugin( 'Auth' => {
plugins => [
[
'Password',
{
schema => 'users',
username_field => 'username',
password_field => 'password',
password_digest => { type => 'SHA-1' },
},
],
[
'Token',
{
schema => 'tokens',
token_field => 'token',
},
],
],
} );
=head2 Single User / Multiple Auth
To allow a single user to configure multiple authentication mechanisms, do not
configure a C<plugin_field>. Instead, give every authentication plugin its own
C<username_field>. Then, once a user has registered with one auth method, they
can log in and register with another auth method to link to the same account.
=head2 Sessions
This module uses L<Mojolicious
sessions|https://mojolicious.org/perldoc/Mojolicious/Controller#session>
to store the login information in a secure, signed cookie.
To configure the default expiration of a session, use
L<Mojolicious::Sessions
default_expiration|https://mojolicious.org/perldoc/Mojolicious/Sessions#default_expiration>.
use Mojolicious::Lite;
# Expire a session after 1 day of inactivity
app->sessions->default_expiration( 24 * 60 * 60 );
=head1 HELPERS
This plugin has the following helpers.
=head2 yancy.auth.current_user
Get the current user from one of the configured plugins, if any. Returns
C<undef> if no user was found in the session.
my $user = $c->yancy->auth->current_user
|| return $c->render( status => 401, text => 'Unauthorized' );
=head2 yancy.auth.require_user
Validate there is a logged-in user and optionally that the user data has
certain values. See L<Yancy::Plugin::Auth::Role::RequireUser/require_user>.
# Display the user dashboard, but only to logged-in users
my $auth_route = $app->routes->under( '/user', $app->yancy->auth->require_user );
$auth_route->get( '' )->to( 'user#dashboard' );
=head2 yancy.auth.login_form
Return an HTML string containing the rendered login forms for all
configured auth plugins, in order.
%# Display a login form to an unauthenticated visitor
% if ( !$c->yancy->auth->current_user ) {
%= $c->yancy->auth->login_form
% }
=head2 yancy.auth.logout
Log out any current account from any auth plugin. Use this in your own
route handlers to perform a logout.
=head1 ROUTES
This plugin creates the following L<named
routes|https://mojolicious.org/perldoc/Mojolicious/Guides/Routing#Named-routes>.
Use named routes with helpers like
L<url_for|Mojolicious::Plugin::DefaultHelpers/url_for>,
L<link_to|Mojolicious::Plugin::TagHelpers/link_to>, and
L<form_for|Mojolicious::Plugin::TagHelpers/form_for>.
=head2 yancy.auth.login_form
Display all of the login forms for the configured auth plugins. This route handles C<GET>
requests and can be used with the L<redirect_to|https://mojolicious.org/perldoc/Mojolicious/Plugin/DefaultHelpers#redirect_to>,
L<url_for|https://mojolicious.org/perldoc/Mojolicious/Plugin/DefaultHelpers#url_for>,
and L<link_to|https://mojolicious.org/perldoc/Mojolicious/Plugin/TagHelpers#link_to> helpers.
%= link_to Login => 'yancy.auth.login_form'
<%= link_to 'yancy.auth.login_form', begin %>Login<% end %>
<p>Login here: <%= url_for 'yancy.auth.login_form' %></p>
=head2 yancy.auth.logout
Log out of all configured auth plugins. This route handles C<GET>
requests and can be used with the L<redirect_to|https://mojolicious.org/perldoc/Mojolicious/Plugin/DefaultHelpers#redirect_to>,
L<url_for|https://mojolicious.org/perldoc/Mojolicious/Plugin/DefaultHelpers#url_for>,
and L<link_to|https://mojolicious.org/perldoc/Mojolicious/Plugin/TagHelpers#link_to> helpers.
%= link_to Logout => 'yancy.auth.logout'
<%= link_to 'yancy.auth.logout', begin %>Logout<% end %>
<p>Logout here: <%= url_for 'yancy.auth.logout' %></p>
=head1 TEMPLATES
To override these templates, add your own at the designated path inside
your app's C<templates/> directory.
=head2 yancy/auth/login_form.html.ep
This displays all of the login forms for all of the configured plugins
(if the plugin has a login form).
=head2 yancy/auth/login_page.html.ep
This displays the login form on a page directing the user to log in.
=head2 layouts/yancy/auth.html.ep
The layout that Yancy uses when displaying the login page, the
unauthorized error message, and other auth-related pages.
=head1 SEE ALSO
=head2 Multiplex Plugins
These are possible Auth plugins that can be used with this plugin (or as
standalone, if desired).
=over
=item * L<Yancy::Plugin::Auth::Password>
=item * L<Yancy::Plugin::Auth::Token>
=item * L<Yancy::Plugin::Auth::OAuth2>
=item * L<Yancy::Plugin::Auth::Github>
=back
=cut
use Mojo::Base 'Mojolicious::Plugin';
use Role::Tiny::With;
with 'Yancy::Plugin::Auth::Role::RequireUser';
use Mojo::Loader qw( load_class );
use Yancy::Util qw( currym match );
has _plugins => sub { [] };
has route =>;
has logout_route =>;
# Register the multiplexing Auth plugin: load, configure, and initialize
# every sub-plugin listed in $config->{plugins}, install the yancy.auth.*
# helpers, and create the shared login/logout routes.
# NOTE(review): assumes $app->yancy->routify is available from the main
# Yancy plugin -- confirm Yancy is loaded before this plugin.
sub register {
    my ( $self, $app, $config ) = @_;
    for my $plugin_conf ( @{ $config->{plugins} } ) {
        my $name;
        # Entries are either a bare plugin name or [ $name, \%conf ]
        if ( !ref $plugin_conf ) {
            $name = $plugin_conf;
            $plugin_conf = {};
        }
        else {
            ( $name, $plugin_conf ) = @$plugin_conf;
        }
        # If we got a route config, we need to customize the plugin
        # routes as well. If this plugin got its own "route" config,
        # use it. Otherwise, build a route from the auth route and the
        # plugin's moniker.
        if ( my $route = $app->yancy->routify( $config->{route} ) ) {
            $plugin_conf->{route} = $app->yancy->routify(
                $plugin_conf->{route},
                $route->any( $plugin_conf->{moniker} || lc $name ),
            );
        }
        # Per-plugin settings override the shared Auth settings
        my %merged_conf = ( %$config, %$plugin_conf );
        if ( $plugin_conf->{username_field} ) {
            # If this plugin has a unique username field, we don't need
            # to specify a plugin field. This means a single user can
            # have multiple auth mechanisms.
            delete $merged_conf{ plugin_field };
        }
        my $class = join '::', 'Yancy::Plugin::Auth', $name;
        if ( my $e = load_class( $class ) ) {
            die sprintf 'Unable to load auth plugin %s: %s', $name, $e;
        }
        my $plugin = $class->new( \%merged_conf );
        push @{ $self->_plugins }, $plugin;
        # Plugin hashref overrides config from main Auth plugin
        $plugin->init( $app, \%merged_conf );
    }
    # Helpers delegate to this object's methods (currym binds $self)
    $app->helper(
        'yancy.auth.current_user' => currym( $self, 'current_user' ),
    );
    $app->helper(
        'yancy.auth.plugins' => currym( $self, 'plugins' ),
    );
    $app->helper(
        'yancy.auth.logout' => currym( $self, 'logout' ),
    );
    $app->helper(
        'yancy.auth.login_form' => currym( $self, 'login_form' ),
    );
    # Make this route after all the plugin routes so that it matches
    # last.
    $self->route( $app->yancy->routify(
        $config->{route},
        $app->routes->get( '/yancy/auth' ),
    ) );
    $self->logout_route(
        $self->route->get( '/logout' )->to( cb => currym( $self, '_handle_logout' ) )->name( 'yancy.auth.logout' )
    );
    $self->route->get( '' )->to( cb => currym( $self, '_login_page' ) )->name( 'yancy.auth.login_form' );
}
=method current_user
Returns the currently logged-in user, if any.
=cut
# Ask each configured auth plugin, in order, for the logged-in user.
# Returns the first user found, or undef when no plugin reports one.
sub current_user {
    my ( $self, $c ) = @_;
    for my $auth_plugin ( @{ $self->_plugins } ) {
        my $found = $auth_plugin->current_user( $c );
        return $found if $found;
    }
    return undef;
}
=method plugins
Returns the list of configured auth plugins.
=cut
# Return the list of configured auth plugin objects.
sub plugins {
    my ( $self, $c ) = @_;
    my $plugin_list = $self->_plugins;
    return @$plugin_list;
}
=method login_form
%= $c->yancy->auth->login_form
Return the rendered login form template.
=cut
# Render the combined login form for all configured plugins and return
# the resulting HTML string (does not write a response).
sub login_form {
    my ( $self, $c ) = @_;
    my %stash = (
        template => 'yancy/auth/login_form',
        plugins => $self->_plugins,
    );
    return $c->render_to_string( %stash );
}
# Route handler: display the standalone login page listing the login
# forms of all configured plugins.
sub _login_page {
    my ( $self, $c ) = @_;
    my @render_args = (
        template => 'yancy/auth/login_page',
        plugins => $self->_plugins,
    );
    $c->render( @render_args );
}
=method logout
Log out the current user. Will call the C<logout> method on all configured auth plugins.
=cut
# Log the current user out of every configured auth plugin.
sub logout {
    my ( $self, $c ) = @_;
    for my $auth_plugin ( $self->plugins ) {
        $auth_plugin->logout( $c );
    }
}
# Route handler for GET /logout: clear the session in every plugin and
# send a 303 redirect back to where the visitor came from.
sub _handle_logout {
    my ( $self, $c ) = @_;
    $self->logout( $c );
    # 303 See Other: the browser must follow up with a GET
    $c->res->code( 303 );
    my $destination = $c->param( 'redirect_to' )
        // $c->req->headers->referrer
        // '/';
    # Never redirect straight back to the logout URL itself
    $destination = '/' if $destination eq $c->req->url->path;
    return $c->redirect_to( $destination );
}
1;
| 29.576531 | 127 | 0.625496 |
73ff0eca03e82ae7abbb3a77d69afbb1f9270522 | 36 | pl | Perl | probes/original/table/compare.pl | sauloal/projects | 79068b20251fd29cadbc80a5de550fc77332788d | [
"MIT"
]
| null | null | null | probes/original/table/compare.pl | sauloal/projects | 79068b20251fd29cadbc80a5de550fc77332788d | [
"MIT"
]
| null | null | null | probes/original/table/compare.pl | sauloal/projects | 79068b20251fd29cadbc80a5de550fc77332788d | [
"MIT"
]
| 1 | 2018-10-26T05:13:42.000Z | 2018-10-26T05:13:42.000Z | #!/usr/bin/perl -w
use strict;
1;
| 6 | 18 | 0.583333 |
ed1ff4bb7f7a292c1f34dbc56f69bb43a7e91d38 | 7,737 | pm | Perl | os/solaris/local/mode/cpu.pm | dalfo77/centreon-plugins | 3cb2011c46a45b5e4a785ca6bab439142f882d45 | [
"Apache-2.0"
]
| null | null | null | os/solaris/local/mode/cpu.pm | dalfo77/centreon-plugins | 3cb2011c46a45b5e4a785ca6bab439142f882d45 | [
"Apache-2.0"
]
| null | null | null | os/solaris/local/mode/cpu.pm | dalfo77/centreon-plugins | 3cb2011c46a45b5e4a785ca6bab439142f882d45 | [
"Apache-2.0"
]
| null | null | null | #
# Copyright 2020 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package os::solaris::local::mode::cpu;
use base qw(centreon::plugins::mode);
use strict;
use warnings;
use centreon::plugins::misc;
use centreon::plugins::statefile;
sub new {
my ($class, %options) = @_;
my $self = $class->SUPER::new(package => __PACKAGE__, %options);
bless $self, $class;
$options{options}->add_options(arguments => {
'hostname:s' => { name => 'hostname' },
'remote' => { name => 'remote' },
'ssh-option:s@' => { name => 'ssh_option' },
'ssh-path:s' => { name => 'ssh_path' },
'ssh-command:s' => { name => 'ssh_command', default => 'ssh' },
'timeout:s' => { name => 'timeout', default => 30 },
'sudo' => { name => 'sudo' },
'command:s' => { name => 'command', default => 'kstat' },
'command-path:s' => { name => 'command_path' },
'command-options:s' => { name => 'command_options', default => '-n sys 2>&1' },
'warning:s' => { name => 'warning' },
'critical:s' => { name => 'critical' },
});
$self->{statefile_cache} = centreon::plugins::statefile->new(%options);
$self->{hostname} = undef;
return $self;
}
sub check_options {
my ($self, %options) = @_;
$self->SUPER::init(%options);
if (($self->{perfdata}->threshold_validate(label => 'warning', value => $self->{option_results}->{warning})) == 0) {
$self->{output}->add_option_msg(short_msg => "Wrong warning threshold '" . $self->{option_results}->{warning} . "'.");
$self->{output}->option_exit();
}
if (($self->{perfdata}->threshold_validate(label => 'critical', value => $self->{option_results}->{critical})) == 0) {
$self->{output}->add_option_msg(short_msg => "Wrong critical threshold '" . $self->{option_results}->{critical} . "'.");
$self->{output}->option_exit();
}
$self->{statefile_cache}->check_options(%options);
$self->{hostname} = $self->{option_results}->{hostname};
if (!defined($self->{hostname})) {
$self->{hostname} = 'me';
}
}
sub run {
my ($self, %options) = @_;
my $stdout = centreon::plugins::misc::execute(
output => $self->{output},
options => $self->{option_results},
sudo => $self->{option_results}->{sudo},
command => $self->{option_results}->{command},
command_path => $self->{option_results}->{command_path},
command_options => $self->{option_results}->{command_options}
);
$self->{statefile_cache}->read(statefile => 'cache_solaris_local_' . $self->{hostname} . '_' . $self->{mode});
my $old_timestamp = $self->{statefile_cache}->get(name => 'last_timestamp');
my $datas = {};
$datas->{last_timestamp} = time();
$self->{output}->output_add(severity => 'OK',
short_msg => "CPUs usages are ok.");
my @output_cpu_instance = split("instance", $stdout);
shift @output_cpu_instance;
foreach (@output_cpu_instance) {
/:\s.*?(\d+)/;
my $cpu_number = $1;
/.*?cpu_ticks_idle\s.*?(\d+).*?cpu_ticks_kernel\s.*?(\d+).*?cpu_ticks_user\s.*?(\d+)/ms;
$datas->{'cpu_idle_' . $cpu_number} = $1;
$datas->{'cpu_system_' . $cpu_number} = $2;
$datas->{'cpu_user_' . $cpu_number} = $3;
if (!defined($old_timestamp)) {
next;
}
my $old_cpu_idle = $self->{statefile_cache}->get(name => 'cpu_idle_' . $cpu_number);
my $old_cpu_system = $self->{statefile_cache}->get(name => 'cpu_system_' . $cpu_number);
my $old_cpu_user = $self->{statefile_cache}->get(name => 'cpu_user_' . $cpu_number);
if (!defined($old_cpu_system) || !defined($old_cpu_idle) || !defined($old_cpu_user)) {
next;
}
if ($datas->{'cpu_idle_' . $cpu_number} < $old_cpu_idle) {
# We set 0. Has reboot.
$old_cpu_user = 0;
$old_cpu_idle = 0;
$old_cpu_system = 0;
}
my $total_elapsed = ($datas->{'cpu_idle_' . $cpu_number} + $datas->{'cpu_user_' . $cpu_number} + $datas->{'cpu_system_' . $cpu_number}) - ($old_cpu_user + $old_cpu_idle + $old_cpu_system);
my $idle_elapsed = $datas->{'cpu_idle_' . $cpu_number} - $old_cpu_idle;
my $cpu_ratio_usetime = 100 * $idle_elapsed / $total_elapsed;
$cpu_ratio_usetime = 100 - $cpu_ratio_usetime;
my $exit_code = $self->{perfdata}->threshold_check(value => $cpu_ratio_usetime,
threshold => [ { label => 'critical', 'exit_litteral' => 'critical' }, { label => 'warning', exit_litteral => 'warning' } ]);
$self->{output}->output_add(long_msg => sprintf("CPU %d %.2f%%", $cpu_number, $cpu_ratio_usetime));
if (!$self->{output}->is_status(litteral => 1, value => $exit_code, compare => 'ok')) {
$self->{output}->output_add(severity => $exit_code,
short_msg => sprintf("CPU %d %.2f%%", $cpu_number, $cpu_ratio_usetime));
}
$self->{output}->perfdata_add(
label => 'cpu_' . $cpu_number, unit => '%',
value => sprintf("%.2f", $cpu_ratio_usetime),
warning => $self->{perfdata}->get_perfdata_for_output(label => 'warning'),
critical => $self->{perfdata}->get_perfdata_for_output(label => 'critical'),
min => 0, max => 100
);
}
$self->{statefile_cache}->write(data => $datas);
if (!defined($old_timestamp)) {
$self->{output}->output_add(severity => 'OK',
short_msg => "Buffer creation...");
}
$self->{output}->display();
$self->{output}->exit();
}
1;
__END__
=head1 MODE
Check system CPUs (need 'kstat' command).
=over 8
=item B<--warning>
Threshold warning in percent.
=item B<--critical>
Threshold critical in percent.
=item B<--remote>
Execute command remotely in 'ssh'.
=item B<--hostname>
Hostname to query (need --remote).
=item B<--ssh-option>
Specify multiple options like the user (example: --ssh-option='-l=centreon-engine" --ssh-option='-p=52").
=item B<--ssh-path>
Specify ssh command path (default: none)
=item B<--ssh-command>
Specify ssh command (default: 'ssh'). Useful to use 'plink'.
=item B<--timeout>
Timeout in seconds for the command (Default: 30).
=item B<--sudo>
Use 'sudo' to execute the command.
=item B<--command>
Command to get information (Default: 'kstat').
Can be changed if you have output in a file.
=item B<--command-path>
Command path (Default: none).
=item B<--command-options>
Command options (Default: '-n sys 2>&1').
=back
=cut
| 36.154206 | 197 | 0.564172 |
ed241f7e7e80ab94e8a6a42c043c8883308a1d20 | 5,595 | pl | Perl | src/lib/atts.pl | malbarbo/scryer-prolog | 176daeec0314571b4aca3973601af7c65dc8b869 | [
"BSD-3-Clause"
]
| 1 | 2021-11-08T07:02:44.000Z | 2021-11-08T07:02:44.000Z | src/lib/atts.pl | logicmoo/scryer-prolog | a29f1eb0c244894b30c79c752ded153ff906e510 | [
"BSD-3-Clause"
]
| null | null | null | src/lib/atts.pl | logicmoo/scryer-prolog | a29f1eb0c244894b30c79c752ded153ff906e510 | [
"BSD-3-Clause"
]
| null | null | null | :- module(atts, [op(1199, fx, attribute),
call_residue_vars/2,
term_attributed_variables/2]).
:- use_module(library(dcgs)).
:- use_module(library(terms)).
/* represent the list of attributes belonging to a variable,
of a particular module, as a list of terms of the form
Module:put_atts(V, ListOfAtts). */
%% '$default_attr_list'(+Module, +Var)//
%% DCG: emit one Module:put_atts(Var, Att) goal per attribute that Module
%% currently associates with Var (used to reconstruct residual goals).
'$default_attr_list'(Module, V) -->
    ( { Module:get_atts(V, Attributes) } ->
       '$default_attr_list'(Attributes, Module, V)
    ; []
    ).
'$default_attr_list'([PG | PGs], Module, AttrVar) -->
    [Module:put_atts(AttrVar, PG)],
    '$default_attr_list'(PGs, Module, AttrVar).
'$default_attr_list'([], _, _) --> [].
%% '$absent_attr'(+Var, +Attr): succeeds iff Attr is not on Var's
%% (partial, open-ended) attribute list.
'$absent_attr'(V, Attr) :-
    '$get_attr_list'(V, Ls),
    '$absent_from_list'(Ls, Attr).
%% The attribute list has an unbound tail; reaching the var tail means
%% the attribute is absent.
'$absent_from_list'(X, Attr) :-
    ( var(X) -> true
    ; X = [L|Ls], L \= Attr -> '$absent_from_list'(Ls, Attr)
    ).
%% '$get_attr'(+Var, ?Attr): unify Attr with a matching entry of Var's
%% attribute list; on success the variable is enqueued for the
%% verify-attributes machinery.
'$get_attr'(V, Attr) :-
    '$get_attr_list'(V, Ls), nonvar(Ls), '$get_from_list'(Ls, V, Attr).
'$get_from_list'([L|Ls], V, Attr) :-
    nonvar(L),
    ( L \= Attr -> nonvar(Ls), '$get_from_list'(Ls, V, Attr)
    ; L = Attr, '$enqueue_attr_var'(V)
    ).
%% '$put_attr'(+Var, +Attr): append Attr to Var's open-ended attribute
%% list by binding the list's unbound tail.
'$put_attr'(V, Attr) :-
    '$get_attr_list'(V, Ls), '$add_to_list'(Ls, V, Attr).
'$add_to_list'(Ls, V, Attr) :-
    ( var(Ls) ->
      Ls = [Attr | _], '$enqueue_attr_var'(V)
    ; Ls = [_ | Ls0], '$add_to_list'(Ls0, V, Attr)
    ).
%% '$del_attr'(+Ls0, +Var, +Attr): remove every occurrence of Attr from
%% the attribute list Ls0 of Var. Uses backtrackable destructive updates
%% ('$del_attr_head'/'$del_attr_non_head') so deletion is undone on
%% backtracking.
'$del_attr'(Ls0, _, _) :-
    var(Ls0), !.
'$del_attr'(Ls0, V, Attr) :-
    Ls0 = [Att | Ls1],
    nonvar(Att),
    ( Att \= Attr ->
      '$del_attr_buried'(Ls0, Ls1, V, Attr)
    ; '$enqueue_attr_var'(V),
      '$del_attr_head'(V),
      '$del_attr'(Ls1, V, Attr)
    ).
%% Continue a buried deletion one cell further down the list.
'$del_attr_step'(Ls1, V, Attr) :-
    ( nonvar(Ls1) ->
      Ls1 = [_ | Ls2],
      '$del_attr_buried'(Ls1, Ls2, V, Attr)
    ; true
    ).
%% assumptions: Ls0 is a list, Ls1 is its tail;
%% the head of Ls0 can be ignored.
'$del_attr_buried'(Ls0, Ls1, V, Attr) :-
    ( var(Ls1) -> true
    ; Ls1 = [Att | Ls2] ->
      ( Att \= Attr ->
        '$del_attr_buried'(Ls1, Ls2, V, Attr)
      ; '$enqueue_attr_var'(V),
        '$del_attr_non_head'(Ls0), %% set tail of Ls0 = tail of Ls1. can be undone by backtracking.
        '$del_attr_step'(Ls1, V, Attr)
      )
    ).
%% '$copy_attr_list'(+Ls, +Module, -Copied): project the mixed
%% Module:Att attribute list Ls onto the plain attributes belonging to
%% Module only; the unbound tail of Ls terminates the copy.
'$copy_attr_list'(L, _Module, []) :- var(L), !.
'$copy_attr_list'([Module0:Att|Atts], Module, CopiedAtts) :-
    ( Module0 == Module ->
      CopiedAtts = [Att|CopiedAtts0],
      '$copy_attr_list'(Atts, Module, CopiedAtts0)
    ; '$copy_attr_list'(Atts, Module, CopiedAtts)
    ).
%% Expand a (:- attribute f/N, g/M, ...) declaration in the loading
%% module into generated put_atts/2 and get_atts/2 clauses for that
%% module.
user:term_expansion(Term0, Terms) :-
    nonvar(Term0),
    Term0 = (:- attribute Atts),
    nonvar(Atts),
    prolog_load_context(module, Module),
    phrase(expand_terms(Atts, Module), Terms).
%% DCG assembling the full clause list: argument checks first, then one
%% clause group per declared attribute.
expand_terms(Atts, Module) -->
    put_attrs_var_check,
    put_attrs(Atts, Module),
    get_attrs_var_check(Module),
    get_attrs(Atts, Module).
%% Guard clauses: put_atts/2 requires an unbound first and a bound
%% second argument.
put_attrs_var_check -->
    [(put_atts(Var, Attr) :- nonvar(Var),
                             throw(error(type_error(variable, Var), put_atts/2))),
     (put_atts(Var, Attr) :- var(Attr),
                             throw(error(instantiation_error, put_atts/2)))].
%% Guard clauses for get_atts/2; an unbound Attr retrieves all of the
%% module's attributes for Var.
get_attrs_var_check(Module) -->
    [(get_atts(Var, Attr) :- nonvar(Var),
                             throw(error(type_error(variable, Var), get_atts/2))),
     (get_atts(Var, Attr) :- var(Attr),
                             !,
                             '$get_attr_list'(Var, Ls),
                             nonvar(Ls),
                             atts:'$copy_attr_list'(Ls, Module, Attr))].
%% put_attrs//2 walks the comma-list of Name/Arity declarations; the
%% final entry also gets the list-of-attributes convenience clause.
put_attrs(Name/Arity, Module) -->
    put_attr(Name, Arity, Module),
    [(put_atts(Var, Attr) :- lists:maplist(Module:put_atts(Var), Attr), !)].
put_attrs((Name/Arity, Atts), Module) -->
    { nonvar(Atts) },
    put_attr(Name, Arity, Module),
    put_attrs(Atts, Module).
get_attrs(Name/Arity, Module) -->
    get_attr(Name, Arity, Module).
get_attrs((Name/Arity, Atts), Module) -->
    { nonvar(Atts) },
    get_attr(Name, Arity, Module),
    get_attrs(Atts, Module).
%% Generate the three put_atts/2 clauses for one declared attribute:
%% +Attr and bare Attr replace any same-functor attribute, -Attr removes
%% it. The attribute is stored on the variable as Module:Attr.
put_attr(Name, Arity, Module) -->
    { functor(Attr, Name, Arity) },
    [(put_atts(V, +Attr) :-
          !,
          functor(Attr, Head, Arity),
          functor(AttrForm, Head, Arity),
          '$get_attr_list'(V, Ls),
          atts:'$del_attr'(Ls, V, Module:AttrForm),
          atts:'$put_attr'(V, Module:Attr)),
     (put_atts(V, Attr) :-
          !,
          functor(Attr, Head, Arity),
          functor(AttrForm, Head, Arity),
          '$get_attr_list'(V, Ls),
          atts:'$del_attr'(Ls, V, Module:AttrForm),
          atts:'$put_attr'(V, Module:Attr)),
     (put_atts(V, -Attr) :-
          !,
          functor(Attr, _, _),
          '$get_attr_list'(V, Ls),
          atts:'$del_attr'(Ls, V, Module:Attr))].
%% Generate the three get_atts/2 clauses for one declared attribute:
%% +Attr and bare Attr query presence, -Attr queries absence.
get_attr(Name, Arity, Module) -->
    { functor(Attr, Name, Arity) },
    [(get_atts(V, +Attr) :-
          !,
          functor(Attr, _, _),
          atts:'$get_attr'(V, Module:Attr)),
     (get_atts(V, Attr) :-
          !,
          functor(Attr, _, _),
          atts:'$get_attr'(V, Module:Attr)),
     (get_atts(V, -Attr) :-
          !,
          functor(Attr, _, _),
          atts:'$absent_attr'(V, Module:Attr))].
%% Rewrite the internal 3-argument forms put_atts(V, M, A) /
%% get_atts(V, M, A) into module-qualified 2-argument calls.
user:goal_expansion(Term, M:put_atts(Var, Attr)) :-
    nonvar(Term),
    Term = put_atts(Var, M, Attr).
user:goal_expansion(Term, M:get_atts(Var, Attr)) :-
    nonvar(Term),
    Term = get_atts(Var, M, Attr).
:- meta_predicate call_residue_vars(0, ?).
%% call_residue_vars(:Goal, -Vars): run Goal and unify Vars with the
%% attributed variables that gained attributes during its execution
%% (delimited by a marker on the global attr-var queue).
call_residue_vars(Goal, Vars) :-
    '$get_attr_var_queue_delim'(B),
    call(Goal),
    '$get_attr_var_queue_beyond'(B, Vars).
%% term_attributed_variables(+Term, -Vars): all attributed variables
%% occurring in Term.
term_attributed_variables(Term, Vars) :-
    '$term_attributed_variables'(Term, Vars).
ed0d09b6c1d0e4179b68c0bb8897a88a7ae8d6e1 | 1,610 | t | Perl | dotnet/Example.t | jordanqin/cos-snippets | 6176eaa55e62bd660aa5d55e4ac6525bad91cf81 | [
"MIT"
]
| 21 | 2020-08-25T01:08:57.000Z | 2022-03-09T08:10:35.000Z | dotnet/Example.t | jordanqin/cos-snippets | 6176eaa55e62bd660aa5d55e4ac6525bad91cf81 | [
"MIT"
]
| 7 | 2020-08-18T17:06:51.000Z | 2022-01-12T02:57:11.000Z | dotnet/Example.t | jordanqin/cos-snippets | 6176eaa55e62bd660aa5d55e4ac6525bad91cf81 | [
"MIT"
]
| 43 | 2020-08-10T01:12:55.000Z | 2022-03-27T12:42:02.000Z | using COSXML.Common;
using COSXML.CosException;
using COSXML.Model;
using COSXML.Model.Object;
using COSXML.Model.Tag;
using COSXML.Model.Bucket;
using COSXML.Model.Service;
using COSXML.Utils;
using COSXML.Auth;
using COSXML.Transfer;
using System;
using COSXML;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace COSSnippet
{
public class {{name}}Model {
private CosXml cosXml;
{{name}}Model() {
CosXmlConfig config = new CosXmlConfig.Builder()
.SetRegion("COS_REGION") // 设置默认的区域, COS 地域的简称请参照 https://cloud.tencent.com/document/product/436/6224
.Build();
string secretId = "COS_SECRETID"; //云 API 密钥 SecretId
string secretKey = "COS_SECRETKEY"; //云 API 密钥 SecretKey
long durationSecond = 600; //每次请求签名有效时长,单位为秒
QCloudCredentialProvider qCloudCredentialProvider = new DefaultQCloudCredentialProvider(secretId,
secretKey, durationSecond);
this.cosXml = new CosXmlServer(config, qCloudCredentialProvider);
}
{{#methods}}
/// {{description}}
public void {{name}}()
{
{{{startTag}}}
{{{snippet}}}
{{{endTag}}}
}
{{/methods}}
// .cssg-methods-pragma
static void Main(string[] args)
{
{{name}}Model m = new {{name}}Model();
{{#methods}}
/// {{description}}
m.{{name}}();
{{/methods}}
// .cssg-methods-pragma
}
}
} | 25.15625 | 111 | 0.620497 |
ed3c21436f169214c59b772f54575194cba723f8 | 8,398 | pl | Perl | tests/CIF/descriptors/run_test.pl | qinwang13/CIAO | e69add1b5da8e9602bcc85d581ecbf1bd41c49a3 | [
"DOC"
]
| 10 | 2016-07-20T00:55:50.000Z | 2020-10-04T19:07:10.000Z | tests/CIF/descriptors/run_test.pl | qinwang13/CIAO | e69add1b5da8e9602bcc85d581ecbf1bd41c49a3 | [
"DOC"
]
| 13 | 2016-09-27T14:08:27.000Z | 2020-11-11T10:45:56.000Z | tests/CIF/descriptors/run_test.pl | qinwang13/CIAO | e69add1b5da8e9602bcc85d581ecbf1bd41c49a3 | [
"DOC"
]
| 12 | 2016-04-20T09:57:02.000Z | 2021-12-24T17:23:45.000Z | eval '(exit $?0)' && eval 'exec perl -S $0 ${1+"$@"}'
& eval 'exec perl -S $0 $argv:q'
if 0;
# -*- perl -*-
# Locate the PerlACE test harness shipped with ACE
use lib "$ENV{'ACE_ROOT'}/bin";
use PerlACE::TestTarget;
$CIAO_ROOT = "$ENV{'CIAO_ROOT'}";
$TAO_ROOT = "$ENV{'TAO_ROOT'}";
$DANCE_ROOT = "$ENV{'DANCE_ROOT'}";
# Flags recording which services were started, so cleanup only kills
# what is actually running
$daemons_running = 0;
$em_running = 0;
$ns_running = 0;
# One node daemon (with its own IOR file, port, and node name) per node
$nr_daemon = 2;
@ports = ( 60001, 60002 );
@iorbases = ( "Provider.ior", "User.ior" );
@iorfiles = 0;
@nodenames = ( "ProviderNode", "UserNode" );
# ior files other than daemon
$ior_nsbase = "ns.ior";
$ior_nsfile = 0;
$ior_embase = "EM.ior";
$ior_emfile = 0;
# Test clients exercising the deployed components
$nr_clients = 3;
@clients = ("../Navigation/Navigation_Test_Client",
            "../Receptacle/Receptacle_Test_Client",
            "../Events/Events_Test_Client");
# Processes
$E = 0;
$EM = 0;
$NS = 0;
$C = 0;
@DEAMONS = 0;
# targets
@tg_daemons = 0;
$tg_naming = 0;
$tg_exe_man = 0;
$tg_executor = 0;
$tg_client = 0;
$status = 0;
# Deployment plan handed to the plan launcher
$cdp_file = "Plan.cdp";
sub create_targets {
    # Create one PerlACE test target per logical process and put the local
    # lib directory on each target's library path.  Targets 1..$nr_daemon
    # host the node daemons; the naming service, execution manager, executor
    # and client all run on target 1.
    my $make_target = sub {
        my ($index, $what) = @_;
        my $target = PerlACE::TestTarget::create_target ($index)
            || die "Create target for $what failed\n";
        $target->AddLibPath ('../lib');
        return $target;
    };

    $tg_naming = $make_target->(1, 'ns');
    foreach my $i (0 .. $nr_daemon - 1) {
        $tg_daemons[$i] = $make_target->($i + 1, "daemon $i");
    }
    $tg_exe_man  = $make_target->(1, 'EM');
    $tg_executor = $make_target->(1, 'executor');
    # NOTE: the client target historically reuses the "executor" failure
    # message; kept for byte-identical diagnostics.
    $tg_client   = $make_target->(1, 'executor');
}
sub init_ior_files {
    # Resolve each IOR basename to a path local to the target that owns it,
    # then remove any stale IOR files left over from a previous run.
    $ior_nsfile = $tg_naming->LocalFile ($ior_nsbase);
    $ior_emfile = $tg_exe_man->LocalFile ($ior_embase);
    foreach my $i (0 .. $nr_daemon - 1) {
        $iorfiles[$i] = $tg_daemons[$i]->LocalFile ($iorbases[$i]);
    }
    delete_ior_files ();
}
# Delete if there are any .ior files.
sub delete_ior_files {
    # Delete the IOR files belonging to the node daemons, the naming service
    # and the execution manager, then refresh the cached target-local daemon
    # IOR paths.
    foreach my $i (0 .. $nr_daemon - 1) {
        $tg_daemons[$i]->DeleteFile ($iorbases[$i]);
    }
    $tg_naming->DeleteFile ($ior_nsbase);
    $tg_exe_man->DeleteFile ($ior_embase);
    foreach my $i (0 .. $nr_daemon - 1) {
        $iorfiles[$i] = $tg_daemons[$i]->LocalFile ($iorbases[$i]);
    }
}
sub kill_node_daemon {
    # Kill every node daemon process, giving each up to one second to exit.
    foreach my $i (0 .. $nr_daemon - 1) {
        $DEAMONS[$i]->Kill ();
        $DEAMONS[$i]->TimedWait (1);
    }
}
sub kill_open_processes {
    # Tear down whatever is still running: node daemons, execution manager
    # and naming service (each only if its "running" flag is set), then any
    # stray locality managers and the client process.
    kill_node_daemon () if $daemons_running == 1;

    if ($em_running == 1) {
        $EM->Kill ();
        $EM->TimedWait (1);
    }
    if ($ns_running == 1) {
        $NS->Kill ();
        $NS->TimedWait (1);
    }

    # in case shutdown did not perform as expected
    $tg_executor->KillAll ('dance_locality_manager');

    $C->Kill ();
    $C->TimedWait (1);
}
sub run_node_daemons {
    # Spawn one dance_node_manager per configured node and block until each
    # has written its IOR file.  If any daemon fails to come up within the
    # start interval, every daemon spawned so far is killed and -1 is
    # returned; 0 means all daemons are running.
    # ($i stays a package global: the rollback loop below reuses it.)
    for ($i = 0; $i < $nr_daemon; ++$i) {
        my $iorbase  = $iorbases[$i];
        my $iorfile  = $iorfiles[$i];
        my $nodename = $nodenames[$i];
        my $endpoint = "iiop://localhost:$ports[$i]";
        my $node_app = $tg_daemons[$i]->GetArchDir("$DANCE_ROOT/bin/") . "dance_locality_manager";
        my $d_cmd    = "$DANCE_ROOT/bin/dance_node_manager";
        my $d_param  = "-ORBEndpoint $endpoint -s $node_app -n $nodename=$iorfile -t 30 --domain-nc corbaloc:rir:/NameService";

        print "Run dance_node_manager with $d_param\n";
        $DEAMONS[$i] = $tg_daemons[$i]->CreateProcess ($d_cmd, $d_param);
        $DEAMONS[$i]->Spawn ();

        next if $tg_daemons[$i]->WaitForFileTimed($iorbase,
                    $tg_daemons[$i]->ProcessStartWaitInterval ()) != -1;

        print STDERR
            "ERROR: The ior $iorfile file of node daemon $i could not be found\n";
        # Roll back: kill this daemon and every one started before it.
        for (; $i >= 0; --$i) {
            $DEAMONS[$i]->Kill ();
            $DEAMONS[$i]->TimedWait (1);
        }
        return -1;
    }
    return 0;
}
# ---------------------------------------------------------------------------
# Main test driver.  For each client test binary: start a fresh naming
# service, node daemons and execution manager, deploy the plan with the plan
# launcher, run the client against the deployed application, then stop the
# plan and tear all support processes down again before the next iteration.
# ---------------------------------------------------------------------------
create_targets ();
init_ior_files ();
for ($client_nr = 0; $client_nr < $nr_clients; ++$client_nr) {
print "================ Start $clients[$client_nr] ================\n";
# Invoke naming service
$NS = $tg_naming->CreateProcess ("$TAO_ROOT/orbsvcs/Naming_Service/tao_cosnaming", "-ORBEndpoint iiop://localhost:60003 -o $ior_nsfile");
$ns_status = $NS->Spawn ();
if ($ns_status != 0) {
print STDERR "ERROR: Unable to execute the naming service\n";
kill_open_processes ();
exit 1;
}
print "Starting Naming Service with -ORBEndpoint iiop://localhost:60003 -o ns.ior\n";
if ($tg_naming->WaitForFileTimed ($ior_nsbase,
$tg_naming->ProcessStartWaitInterval ()) == -1) {
print STDERR "ERROR: cannot find naming service IOR file\n";
$NS->Kill (); $NS->TimedWait (1);
exit 1;
}
$ns_running = 1;
# Set up NamingService environment
$ENV{"NameServiceIOR"} = "corbaloc:iiop:localhost:60003/NameService";
# Invoke node daemon.
print "Invoking node daemon\n";
$status = run_node_daemons ();
if ($status != 0) {
print STDERR "ERROR: Unable to execute the node daemon\n";
kill_open_processes ();
exit 1;
}
$daemons_running = 1;
# Invoke execution manager.
print "Invoking execution manager (dance_execution_manager.exe) with -e$ior_emfile\n";
$EM = $tg_exe_man->CreateProcess ("$DANCE_ROOT/bin/dance_execution_manager",
"-e$ior_emfile --domain-nc corbaloc:rir:/NameService");
$em_status = $EM->Spawn ();
if ($em_status != 0) {
# NOTE(review): unlike the other failure paths this message has no trailing
# "\n" and kill_open_processes() is not called before exiting.
print STDERR "ERROR: dance_execution_manager returned $em_status";
exit 1;
}
if ($tg_exe_man->WaitForFileTimed ($ior_embase,
$tg_exe_man->ProcessStartWaitInterval ()) == -1) {
print STDERR
"ERROR: The ior file of execution manager could not be found\n";
kill_open_processes ();
exit 1;
}
$em_running = 1;
# Invoke executor - start the application -.
print "Invoking executor - launch the application -\n";
print "Start dance_plan_launcher.exe with -x $cdp_file -k file://$ior_emfile\n";
$E = $tg_executor->CreateProcess ("$DANCE_ROOT/bin/dance_plan_launcher",
"-x $cdp_file -k file://$ior_emfile");
$pl_status = $E->SpawnWaitKill (2 * $tg_executor->ProcessStartWaitInterval ());
if ($pl_status != 0) {
print STDERR "ERROR: dance_plan_launcher returned $pl_status\n";
kill_open_processes ();
exit 1;
}
# Verify every node daemon still publishes its IOR file after deployment.
for ($i = 0; $i < $nr_daemon; ++$i) {
if ($tg_daemons[$i]->WaitForFileTimed ($iorbases[$i],
$tg_daemons[$i]->ProcessStopWaitInterval ()) == -1) {
print STDERR "ERROR: The ior file of daemon $i could not be found\n";
kill_open_processes ();
exit 1;
}
}
# Run the actual test client against the deployed application; a failing
# client is reported but does not abort the remaining iterations.
$C = $tg_client->CreateProcess ($clients[$client_nr],
"-n corbaloc:rir:/NameService");
$client_status = $C->SpawnWaitKill ($tg_client->ProcessStartWaitInterval ());
if ($client_status != 0) {
print STDERR "ERROR: client $clients[$client_nr] returned $client_status\n";
}
print "Sleeping 2 seconds to allow task $clients[$client_nr] to complete\n";
sleep (2);
# Invoke executor - stop the application -.
print "Invoking executor - stop the application -\n";
print "by running dance_plan_launcher.exe with -k file://$ior_emfile -x $cdp_file\n";
$E = $tg_executor->CreateProcess ("$DANCE_ROOT/bin/dance_plan_launcher",
"-k file://$ior_emfile -x $cdp_file -s");
$pl_status = $E->SpawnWaitKill ($tg_executor->ProcessStartWaitInterval ());
if ($pl_status != 0) {
print STDERR "ERROR: dance_plan_launcher returned $pl_status\n";
kill_open_processes ();
exit 1;
}
print "Executor returned.\n";
print "Shutting down rest of the processes.\n";
delete_ior_files ();
kill_open_processes ();
print "================ End $clients[$client_nr] ================\n\n\n";
}
exit $status;
| 31.219331 | 141 | 0.582758 |
ed19196d15845351b2ca5a4cc59a8f15d84fd0e7 | 6,998 | pl | Perl | hype/mytest.pl | LRHammond/pv4dsrl | 7053ea392f9ddbf3672a0ac348d61abe6b7264d9 | [
"MIT"
]
| null | null | null | hype/mytest.pl | LRHammond/pv4dsrl | 7053ea392f9ddbf3672a0ac348d61abe6b7264d9 | [
"MIT"
]
| 2 | 2020-03-24T16:31:16.000Z | 2020-03-31T00:56:57.000Z | hype/mytest.pl | LRHammond/sdsrl | 7053ea392f9ddbf3672a0ac348d61abe6b7264d9 | [
"MIT"
]
| null | null | null | :- use_module(library(planning)).
:- use_module(library(lists)).
:- use_module(library(distributionalclause)).
:- use_module(library(dcpf)).
:- use_module(library(sst)).
:- use_module(library(system)).
% Options
:- set_options(default),
set_query_propagation(true),
set_inference(backward(lazy)).
:- set_current2nextcopy(false).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
get_pos(X):t <- pos:t ~= distribution(val(X)).
get_pos(X):t <- pos:t ~= X.
reward:t ~ val(R) <- stop:t, R is 10.0.
reward:t ~ val(R) <- \+stop:t, R is -1.0.
stop:t <- get_pos(X):t, X>4.
adm(action(move(A,B))):t <-
member((A,B),[(1.0,0.0),(-1.0,0.0)]).
pos:t+1 ~ val(X) <-
\+pos:t ~= _,
observation(pos) ~= X.
pos:t+1 ~ val(X) <-
observation(pos) ~= X.
pos:t+1 ~ val(NX) <-
action(move(DX,DY)),
pos:t ~= X,
NX is X+DX.
pos:t+1 ~ val(X) <-
pos:t ~= X.
observation(pos):t+1 ~ val(_) <-
pos:t+1 ~= _.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Upper bound used for the value function at any depth D.
maxV(D,100):t <- true.

% Experiment parameters for the left_right instance.
% N    = max number of samples (for the entire plan),
% UsedD = planner horizon,
% End  = index of the first episode;
% also installs the planner settings via getparam/1.
par(left_right,N,UsedD,End) :-
End=1,
N=200, % max number of samples (for the entire plan)
UsedD=12, % planner horizon
getparam(left_right).
% getparam/1: reset the span counter and install the full planner
% configuration for the left_right domain via setparam/21.  Each positional
% argument is annotated inline below.
getparam(left_right) :-
bb_put(user:spant,0),
setparam(
% enable abstraction
false,
% ratio of the samples reserved for the first action
1.0,
% use correct formula (leave true)
true,
% strategy to store V function
max,
% ExecAction
best,
% most,
% Domain
propfalse,
% relfalse,
% Discount
0.95,
% probability to explore in the beginning (first sample)
0.0,
% probability to explore in the end (last sample)
0.0,
% number of previous samples to use to estimate Q. Larger is better but slower
100,
% max horizon span
100,
% lambda init
0.9,
% lambda final
0.9,
% UCBV
false,
% decay
0.015,
% action selection: softmax | egreedy
softmax,
% egreedy,
% Pruning
0,
% WHeuInit
-0.1,
% WHeuFinal
-0.1),
!.
% getparam2/2: sample budget for the replanning repetitions; reuses the N
% configured in par/4 (which also reinstalls the planner settings).
getparam2(left_right,N) :-
par(left_right,N,_,_).

% score/3: the reported score is simply the average reward, unrescaled.
score(_,Avg,Avg).

% search_query/2: prove query Q backward in sampled interpretation I.
search_query(I,Q) :-
distributionalclause:proof_query_backward(I,Q).
% plotepisode(E,N) :-
% % dcpf:bb_get(offset,Offset),
% open('dataE.txt','write',S),
% NN is N+1,
% term_to_atom(p(NN,E),I),
% % writeln('datatest1.txt'),
% % I is Offset+1,
% (
% search_query(I,next(object(ID)) ~= (X,Y,Z)),
% dim(ID,(DX,DY,DZ)),
% rgbcolor(ID,(Rc,Gc,Bc)),
% write(S,ID),write(S,' '),
% write(S,X),write(S,' '),write(S,Y),write(S,' '),write(S,Z),write(S,' '),
% write(S,Rc),write(S,' '),write(S,Gc),write(S,' '),write(S,Bc),nl(S),
% fail;
% true
% ),
%
% forall( between(0,N,T),
% (
% D is N-T,
% term_to_atom(p(D,E),Key),
% search_query(Key,next(object(ID)) ~= (X,Y,Z)),
% search_query(Key,next(greedy(GR))),
% rgbcolor(ID,(Rc,Gc,Bc)),
% write(S,ID),write(S,' '),
% write(S,X),write(S,' '),write(S,Y),write(S,' '),write(S,Z),write(S,' '),
% write(S,Rc),write(S,' '),write(S,Gc),write(S,' '),write(S,Bc),write(S,' '),write(S,GR),nl(S)
% ;true
% )
% ),
% nl(S),
% close(S),!.
% myplotV(MinE,MaxE,Depth): for every depth 1..Depth and episode MinE..MaxE,
% print (via writeln) the depth, the sampled object position (X,Y,Z) and a
% grey level derived from the average proposal likelihood recorded under the
% p(Depth,Episode) key (0 when nothing was recorded).  Implemented as a
% failure-driven loop, so the predicate always succeeds.
myplotV(MinE,MaxE,Depth) :-
% dcpf:bb_get(offset,Offset),
% open('dataV.txt','write',S),
abolish_all_tables,
(
between(1,Depth,T),
%T is Depth-2, % to remove
between(MinE,MaxE,E),
term_to_atom(p(T,E),Key),
search_query(Key,next(object(ID)) ~= (X,Y,Z)),
search_query(Key,v(next,V)),
% average proposal likelihood for this key, or 0 when never recorded
(recorded(Key,proposallikelihood(NumValues,SumPropLikelihood,PiProp),_) ->
PropLikelihood is SumPropLikelihood/NumValues
;
(PropLikelihood is 0)
),
%Temp is sign(V)*sqrt(abs(V))/100+0.99,
Color is PropLikelihood,%min(1,max(0,Temp)),% min(1,max(0,V)),
%Color2 is V,
%rgbcolor(ID,(Color,Color,Color)),
writeln(T),writeln(' '),
writeln(X),writeln(' '),writeln(Y),writeln(' '),writeln(Z),writeln(' '),
writeln(Color),writeln(' '),writeln(Color),writeln(' '),writeln(Color),nl,
fail;
true
).
% plotV(MinE,MaxE,Depth) :-
% dcpf:bb_get(offset,Offset),
% open('dataV.txt','write',S),
% abolish_all_tables,
% (
% between(1,Depth,T),
% %T is Depth-2, % to remove
% between(MinE,MaxE,E),
% term_to_atom(p(T,E),Key),
% search_query(Key,next(object(ID)) ~= (X,Y,Z)),
% search_query(Key,v(next,V)),
% ( ->
% PropLikelihood is SumPropLikelihood/NumValues
% ;
% (PropLikelihood is 0)
% ),
% %Temp is sign(V)*sqrt(abs(V))/100+0.99,
% Color is PropLikelihood,%min(1,max(0,Temp)),% min(1,max(0,V)),
% %Color2 is V,
% %rgbcolor(ID,(Color,Color,Color)),
% write(S,T),write(S,' '),
% write(S,X),write(S,' '),write(S,Y),write(S,' '),write(S,Z),write(S,' '),
% write(S,Color),write(S,' '),write(S,Color),write(S,' '),write(S,Color),nl(S),
% fail;
% true
% ),
% nl(S),
% close(S).
% myfullplan(File,Init,AVG,Instance,D,Times1,Notes):
% run one resampling-plan experiment (budget N from par/4) followed by
% Times1-1 repetitions with the replanning budget from getparam2/2, then
% aggregate average reward, score, variance and a 95% confidence interval,
% count the runs that reached a terminal state, and append a summary row to
% the CSV File.  AVG is bound to the average reward of the first run;
% D is the plan depth, Init the initial evidence, Notes a free-text tag.
myfullplan(File,Init,AVG,Instance,D,Times1,Notes) :-
statistics(runtime,_),
par(Instance,N,UsedD,Startp),
% Init=[],
bb_get(user:abstraction,Abstract),
% first run, with the full sample budget N
resamplingplan(0,Abstract,Init,[],N,D,AVG,T1,UsedD,Startp,Endp),
writeln(resamplingplan(0,Abstract,Init,[],N,D,AVG,T1,UsedD,Startp,Endp)),
(T1==true -> bb_put(numterminatedexplans,1);bb_put(numterminatedexplans,0)),
% setparam(1,0,0,200,0,0.9,0.9,false,0.015,egreedy,200,0.00000001,-0.0001),
getparam2(Instance,D2),
% D2 is D*2,
bb_put(currentepisode,Endp),
Times is Times1-1,
% Notes1='withoutrestart+100',
Notes1='withrestart',
% Times additional runs; each result AVG2 is collected into L and the
% terminal-state counter is updated on the blackboard.
findall(AVG2,
(between(1,Times,X),
bb_get(currentepisode,Start2),
Start3 is Startp+100*Times,
Start4 is 1, %Start3,% Start3,%Start2,%
resamplingplan(0,Abstract,Init,[],D2,D,AVG2,T2,UsedD,Start4,End),
writeln(resamplingplan(times(Times),0,Abstract,Init,[],D2,D,AVG2,T2,UsedD,Start4,End)),
EndEpisode is Start2+End-Start4,
bb_put(currentepisode,EndEpisode),
bb_get(numterminatedexplans,OldNumT),
(T2==true -> NewNumT is OldNumT+1;NewNumT is OldNumT),
bb_put(numterminatedexplans,NewNumT)
),L),
statistics(runtime,[_,Time]),
writeparam,nl,
bb_get(currentepisode,TotalN),
% aggregate reward statistics over all runs [AVG|L]
sum_list([AVG|L],Sum),
length([AVG|L],Length),
AVGFinal is Sum/Length,
variance([AVG|L],AVGFinal,Var),
STD is sqrt(Var),
getparam(Instance),
writeparam,
% per-run scores and their 95% confidence interval
findall(Score,(member(Elem,[AVG|L]),score(Instance,Elem,Score)),L2),
sum_list(L2,SumScore),
length(L2,LengthScore),
AVGScore is SumScore/LengthScore,
variance(L2,AVGScore,VarScore),
Conf95 is 1.96*sqrt(VarScore)/sqrt(LengthScore),
bb_get(numterminatedexplans,Terminated),
% average wall time per run, in seconds
T is round(Time/10)/100/Length,
writeln(seconds(T)),
score(Instance,AVGFinal,Score2),
writeln(([AVG|L],AVGFinal,Score2,AVGScore+Conf95,Length,VarScore,Terminated)),
(host_name(Host)->true;Host=unknown),
writetofile(File,Instance,AVGFinal,AVGScore,Conf95,Startp+N+D2*Times=TotalN,D,UsedD,T,([AVG|L],Terminated,Host,Notes,Notes1)),
!.
% plotV(1,100,10) :- !.
% plotepisode(100,100) :- !.
% Smoke test: full experiment on the left_right domain starting at position
% 0, plan depth 10, 10 runs, results appended to resultspush.csv.
test:- myfullplan('resultspush.csv',[observation(pos)~=(0)],AVG,left_right,10,10,' ').
| 26.608365 | 127 | 0.610889 |
ed20e3a2e8a8b77119fcd293450e0587f64837eb | 191 | t | Perl | packages/app/_templates/component/new/style.ejs.t | tglatt/emjpm | 27c9337eb17f561ec671cd7526493ca314269fd2 | [
"Apache-2.0"
]
| null | null | null | packages/app/_templates/component/new/style.ejs.t | tglatt/emjpm | 27c9337eb17f561ec671cd7526493ca314269fd2 | [
"Apache-2.0"
]
| null | null | null | packages/app/_templates/component/new/style.ejs.t | tglatt/emjpm | 27c9337eb17f561ec671cd7526493ca314269fd2 | [
"Apache-2.0"
]
| null | null | null | ---
to: src/components-v2/<%= h.inflection.camelize(name) %>/style.js
---
const <%= h.inflection.camelize(name)%>Style = {
bg: "blue"
};
export { <%= h.inflection.camelize(name)%>Style };
| 21.222222 | 65 | 0.628272 |
73d965433270d99a772d7e8c0fd041cde7706d04 | 8,183 | pm | Perl | modules/Bio/EnsEMBL/Compara/RunnableDB/PairAligner/ImportNets.pm | manuelcarbajo/ensembl-compara | 0ffe653215a20e6921c5f4983ea9e4755593a491 | [
"Apache-2.0"
]
| null | null | null | modules/Bio/EnsEMBL/Compara/RunnableDB/PairAligner/ImportNets.pm | manuelcarbajo/ensembl-compara | 0ffe653215a20e6921c5f4983ea9e4755593a491 | [
"Apache-2.0"
]
| null | null | null | modules/Bio/EnsEMBL/Compara/RunnableDB/PairAligner/ImportNets.pm | manuelcarbajo/ensembl-compara | 0ffe653215a20e6921c5f4983ea9e4755593a491 | [
"Apache-2.0"
]
| null | null | null | =head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2020] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=head1 NAME
Bio::EnsEMBL::Compara::RunnableDB::PairAligner::ImportNets
=head1 DESCRIPTION
Reads a Net file and imports the data into a compara database, saving the results in the
genomic_align_block and genomic_align tables with a given method_link_species_set_id. Needs the
presence of the corresponding Chain data already in the database.
Download from:
http://hgdownload.cse.ucsc.edu/downloads.html
Choose reference species
Choose Pairwise Alignments
wget http://hgdownload.cse.ucsc.edu/goldenPath/hg19/vsSelf/hg19.hg19.net.gz
=cut
package Bio::EnsEMBL::Compara::RunnableDB::PairAligner::ImportNets;
use strict;
use warnings;
use base ('Bio::EnsEMBL::Compara::Production::Analysis::AlignmentNets');
############################################################
=head2 fetch_input
Title : fetch_input
Usage : $self->fetch_input
Returns : nothing
Args : none
=cut
sub fetch_input {
    ## Gather everything needed to net the chains over one reference dnafrag:
    ##  - the Chain GenomicAlignBlocks of the configured input MLSS, grouped
    ##    by chain (group_id) and sorted by reference start position,
    ##  - dnafrag length caches for both genomes,
    ##  - the net structure parsed from the UCSC net file.
    ## Results are stashed in params for the downstream stages:
    ##   'chains', 'chains_sorted', 'query_length_hash', 'target_length_hash',
    ##   'query_DnaFrag_hash', 'target_DnaFrag_hash',
    ##   'output_MethodLinkSpeciesSet'.
    ## Fixes vs the previous version: "$self-param('net_file')" (arithmetic
    ## minus instead of a method arrow) and a bare throw() call in the open
    ## error path; unused locals ($dafa, $self_alignment, $self_gabs) removed.
    my ($self) = @_;

    $self->SUPER::fetch_input;

    my $mlssa      = $self->compara_dba->get_MethodLinkSpeciesSetAdaptor;
    my $gaba       = $self->compara_dba->get_GenomicAlignBlockAdaptor;
    my $genome_dba = $self->compara_dba->get_GenomeDBAdaptor;

    # Optional restriction to a single reference dnafrag.
    my $ref_dnafrag;
    if (defined($self->param('dnafrag_id'))) {
        $ref_dnafrag = $self->compara_dba->get_DnaFragAdaptor->fetch_by_dbID($self->param('dnafrag_id'));
    }

    ################################################################
    # get the compara data: MethodLinkSpeciesSet, reference DnaFrag,
    # and GenomicAlignBlocks
    ################################################################

    # Reference genome; for self-alignments the non-reference species
    # defaults to the reference species.
    my $ref_gdb = $genome_dba->fetch_by_name_assembly($self->param('ref_species'));
    if (!$self->param('non_ref_species')) {
        $self->param('non_ref_species', $self->param('ref_species'));
    }
    my $non_ref_gdb = $genome_dba->fetch_by_name_assembly($self->param('non_ref_species'));

    # MethodLinkSpeciesSet of the input Chains; a self-alignment MLSS is
    # keyed on the single genome only.
    my $mlss;
    if ($ref_gdb->dbID == $non_ref_gdb->dbID) {
        $mlss = $mlssa->fetch_by_method_link_type_GenomeDBs($self->param('input_method_link_type'), [$ref_gdb]);
    } else {
        $mlss = $mlssa->fetch_by_method_link_type_GenomeDBs($self->param('input_method_link_type'), [$ref_gdb, $non_ref_gdb]);
    }
    $self->throw("No MethodLinkSpeciesSet for method_link_type". $self->param('input_method_link_type') . " and species " . $ref_gdb->name . " and " . $non_ref_gdb->name)
        if not $mlss;

    # MethodLinkSpeciesSet the resulting Nets are written under.
    my $out_mlss = $mlssa->fetch_by_dbID($self->param('output_mlss_id'));
    $self->throw("No MethodLinkSpeciesSet for method_link_species_set_id".$self->param('output_mlss_id'))
        if not $out_mlss;

    ######## needed for output ####################
    $self->param('output_MethodLinkSpeciesSet', $out_mlss);

    # Remove partial results from a previous attempt.  This shouldn't be
    # needed if using transactions.
    if ($self->input_job->retry_count > 0) {
        $self->warning("Deleting alignments as it is a rerun");
        $self->delete_alignments($out_mlss,
                                 $ref_dnafrag,
                                 $self->param('start'),
                                 $self->param('end'));
    }

    # Chain GenomicAlignBlocks overlapping the reference dnafrag region.
    my $gabs = $gaba->fetch_all_by_MethodLinkSpeciesSet_DnaFrag($mlss,
                                                                $ref_dnafrag,
                                                                $self->param('start'),
                                                                $self->param('end'));

    ###################################################################
    # get the target slices and bin the GenomicAlignBlocks by group id
    ###################################################################
    my (%features_by_group, %query_lengths, %target_lengths);

    while (my $gab = shift @{$gabs}) {
        # The reference genomic_align of a chain is the one flagged visible.
        my $ga1 = $gab->genomic_align_array->[0];
        my $ga2 = $gab->genomic_align_array->[1];
        my ($ref_ga, $non_ref_ga) = $ga1->visible ? ($ga1, $ga2) : ($ga2, $ga1);

        # The gabs were fetched by reference dnafrag, so skip blocks whose
        # visible genomic_align is not on that dnafrag (i.e. this block's
        # reference side belongs to a different job).
        next if ($ref_ga->dnafrag_id != $gab->reference_genomic_align->dnafrag_id);

        # Set the gab reference ga
        $gab->reference_genomic_align($ref_ga);

        if (not exists($self->param('query_DnaFrag_hash')->{$ref_ga->dnafrag->name})) {
            ######### needed for output ######################################
            $self->param('query_DnaFrag_hash')->{$ref_ga->dnafrag->name} = $ref_ga->dnafrag;
        }
        if (not exists($self->param('target_DnaFrag_hash')->{$non_ref_ga->dnafrag->name})) {
            ######### needed for output ######################################
            $self->param('target_DnaFrag_hash')->{$non_ref_ga->dnafrag->name} = $non_ref_ga->dnafrag;
        }

        push @{$features_by_group{$gab->group_id()}}, $gab;
    }

    # Sort each chain by reference start position.
    foreach my $group_id (keys %features_by_group) {
        $features_by_group{$group_id} = [ sort {$a->reference_genomic_align->dnafrag_start <=> $b->reference_genomic_align->dnafrag_start} @{$features_by_group{$group_id}} ];
    }

    foreach my $nm (keys %{$self->param('query_DnaFrag_hash')}) {
        $query_lengths{$nm} = $self->param('query_DnaFrag_hash')->{$nm}->length;
    }
    foreach my $nm (keys %{$self->param('target_DnaFrag_hash')}) {
        $target_lengths{$nm} = $self->param('target_DnaFrag_hash')->{$nm}->length;
    }

    # Chains must be stored in an array indexed by [group_id-1] so that the
    # AlignmentNets code uses the correct genomic_align_block chain.
    my $features_array;
    foreach my $group_id (keys %features_by_group) {
        $features_array->[$group_id-1] = $features_by_group{$group_id};
    }

    if (!defined $features_array) {
        print "No features found for " . $ref_dnafrag->name . "\n";
        $self->param('chains', []);
        return;
    }

    $self->param('query_length_hash', \%query_lengths);
    $self->param('target_length_hash', \%target_lengths);
    $self->param('chains', $features_array);
    $self->param('chains_sorted', 1);

    ##################################
    # read the net file
    ##################################
    open my $fh, '<', $self->param('net_file')
        or $self->throw("Could not open net file '" . $self->param('net_file') . "' for reading\n");
    my $res_chains = $self->parse_Net_file($fh);
    close($fh);

    $self->cleanse_output($res_chains);
    $self->param('chains', $res_chains);
}
1;
| 36.048458 | 301 | 0.649028 |
ed37d02ecb0dbf2f7cb366e7f35911eed0f1d1a6 | 7,028 | pm | Perl | modules/EnsEMBL/Draw/GlyphSet/Videogram_legend.pm | at7/backup-ensembl-webcode | 4c8c30f2ba9e0eebc3dd07e068fb6e02c388d086 | [
"Apache-2.0",
"MIT"
]
| null | null | null | modules/EnsEMBL/Draw/GlyphSet/Videogram_legend.pm | at7/backup-ensembl-webcode | 4c8c30f2ba9e0eebc3dd07e068fb6e02c388d086 | [
"Apache-2.0",
"MIT"
]
| null | null | null | modules/EnsEMBL/Draw/GlyphSet/Videogram_legend.pm | at7/backup-ensembl-webcode | 4c8c30f2ba9e0eebc3dd07e068fb6e02c388d086 | [
"Apache-2.0",
"MIT"
]
| null | null | null | =head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
package EnsEMBL::Draw::GlyphSet::Videogram_legend;
### Module for drawing "highlights" (aka pointers) on
### vertical ideogram images, including user data
###
### (Note that despite its name, this module is not currently
### used to draw a legend for vertical ideogram tracks)
use strict;
use base qw(EnsEMBL::Draw::GlyphSet);
sub _init {
    ### Entry point: build a "highlight_<container>" method name from the
    ### container and dispatch to it; nothing is drawn when no such method
    ### exists.
    my ($self) = @_;
    my $Container = $self->{'container'};
    my $fn = "highlight_$Container";
    # Bug fix: the previous code called $self->fn(), i.e. a literal method
    # named "fn", which can never be the method the can($fn) guard checked
    # for.  Use a dynamic method call on the computed name instead.
    $self->push($self->$fn()) if $self->can($fn);
}
sub highlight_box {
    ## Plain filled rectangle spanning the feature's extent.
    my ($self, $details) = @_;
    my %rect = (
        x         => $details->{'start'},
        y         => $details->{'h_offset'},
        width     => $details->{'end'} - $details->{'start'},
        height    => $details->{'wid'},
        colour    => $details->{'col'},
        absolutey => 1,
        href      => $details->{'href'},
        zmenu     => $details->{'zmenu'},
    );
    return $self->Rect(\%rect);
}
sub highlight_filledwidebox {
    ## Filled rectangle like highlight_box, but padded vertically on both
    ## sides so it stands proud of the chromosome outline.
    my ($self, $details) = @_;
    my $pad = $details->{'padding'};
    return $self->Rect({
        x         => $details->{'start'},
        y         => $details->{'h_offset'} - $pad,
        width     => $details->{'end'} - $details->{'start'},
        height    => $details->{'wid'} + 2 * $pad,
        colour    => $details->{'col'},
        absolutey => 1,
        href      => $details->{'href'},
        zmenu     => $details->{'zmenu'},
    });
}
sub highlight_widebox {
    ## Outlined (unfilled) vertically-padded rectangle around the feature.
    my ($self, $details) = @_;
    my $pad = $details->{'padding'};
    return $self->Rect({
        x            => $details->{'start'},
        y            => $details->{'h_offset'} - $pad,
        width        => $details->{'end'} - $details->{'start'},
        height       => $details->{'wid'} + 2 * $pad,
        bordercolour => $details->{'col'},
        absolutey    => 1,
        href         => $details->{'href'},
        zmenu        => $details->{'zmenu'},
    });
}
sub highlight_outbox {
    ## Outlined rectangle drawn further out than highlight_widebox:
    ## expanded by 1.5 * padding2 horizontally and 1.5 * padding vertically.
    my ($self, $details) = @_;
    my $pad  = $details->{'padding'};
    my $pad2 = $details->{'padding2'};
    return $self->Rect({
        x            => $details->{'start'} - $pad2 * 1.5,
        y            => $details->{'h_offset'} - $pad * 1.5,
        width        => $details->{'end'} - $details->{'start'} + $pad2 * 3,
        height       => $details->{'wid'} + $pad * 3,
        bordercolour => $details->{'col'},
        absolutey    => 1,
        href         => $details->{'href'},
        zmenu        => $details->{'zmenu'},
    });
}
sub highlight_labelline {
    ## Vertical line through the feature midpoint, wrapped in a Composite
    ## glyph so further elements could be attached alongside it.
    my ($self, $details) = @_;
    my $composite = $self->Composite();
    my $line = $self->Line({
        x         => $details->{'mid'},
        y         => $details->{'h_offset'} - $details->{'padding'},
        width     => 0,
        height    => $details->{'wid'} + $details->{'padding'} * 2,
        colour    => $details->{'col'},
        absolutey => 1,
    });
    $composite->push($line);
    return $composite;
}
sub highlight_wideline {
    ## Bare vertical line through the feature midpoint (no composite).
    my ($self, $details) = @_;
    my %line = (
        x         => $details->{'mid'},
        y         => $details->{'h_offset'} - $details->{'padding'},
        width     => 0,
        height    => $details->{'wid'} + $details->{'padding'} * 2,
        colour    => $details->{'col'},
        absolutey => 1,
    );
    return $self->Line(\%line);
}
sub highlight_text {
    ## Outlined box around the feature plus a tiny text label underneath,
    ## combined into one Composite (the clickable area for the graphic).
    my ($self, $details) = @_;
    my $composite = $self->Composite();

    # Vertically padded bounding box, outline only.
    $composite->push($self->Rect({
        x            => $details->{'start'},
        y            => $details->{'h_offset'} - $details->{'padding'},
        width        => $details->{'end'} - $details->{'start'},
        height       => $details->{'wid'} + $details->{'padding'} * 2,
        bordercolour => $details->{'col'},
        absolutey    => 1,
    }));

    # Text label for the feature, in the smallest font.
    $composite->push($self->Text({
        x         => $details->{'mid'} - $details->{'padding2'},
        y         => $details->{'wid'} + $details->{'padding'} * 3,
        width     => 0,
        height    => $details->{'wid'},
        font      => 'Tiny',
        colour    => $details->{'col'},
        text      => $details->{'id'},
        absolutey => 1,
    }));

    return $composite;
}
# Direction of arrows is rotated because the image is vertical
# Left-hand arrow: points down, placed at the top edge (h_offset).
# (shift removes $self, so $_[0] below is the $details hashref.)
sub highlight_lharrow { return shift->highlight_arrow('down', $_[0]{'h_offset'}, @_); }
# Right-hand arrow: points up, placed at the bottom edge (h_offset + wid).
sub highlight_rharrow { return shift->highlight_arrow('up', $_[0]{'h_offset'} + $_[0]->{'wid'}, @_); }
# Bowtie: both arrows at once, returned as a two-glyph list.
sub highlight_bowtie { my $self = shift; return ($self->highlight_lharrow(@_), $self->highlight_rharrow(@_)); }
sub highlight_arrow {
    ## Triangle pointer at the feature midpoint.  $direction is 'up' or
    ## 'down'; $mid_y is the y coordinate of the triangle's midpoint.
    my ($self, $direction, $mid_y, $details) = @_;
    my %triangle = (
        width     => $details->{'padding2'} * 2,
        height    => $details->{'padding'},
        direction => $direction,
        mid_point => [ $details->{'mid'}, $mid_y ],
        colour    => $details->{'col'},
        href      => $details->{'href'},
        zmenu     => $details->{'zmenu'},
        id        => $details->{'html_id'},
        absolutey => 1,
    );
    return $self->Triangle(\%triangle);
}
sub highlight_rhbox {
    ## Forward-strand box: tag the details and delegate to the shared
    ## stranded-box renderer.
    my ($self, $details) = @_;
    $details->{strand} = '+';
    return $self->highlight_strandedbox($details);
}
sub highlight_lhbox {
    ## Reverse-strand box: tag the details and delegate to the shared
    ## stranded-box renderer.
    my ($self, $details) = @_;
    $details->{strand} = '-';
    return $self->highlight_strandedbox($details);
}
sub highlight_strandedbox {
    ## Thin box bumped into strand-specific rows beside the chromosome:
    ## forward-strand features ("+") bump in one direction, reverse-strand
    ## ("-") in the other, each strand using its own bump state key.
    my ($self, $details) = @_;
    my $strand      = $details->{'strand'} || "";
    my $draw_length = $details->{'end'} - $details->{'start'};

    # Bump interval in pixel coordinates.
    my $bump_start = int($details->{'start'} * $self->{'pix_per_bp'});
    my $bump_end   = $bump_start + int($draw_length * $self->{'pix_per_bp'}) + 1;

    my $ori = ($strand eq "-") ? -1 : 1;
    my $key = ($strand eq "-") ? "_bump_reverse" : "_bump_forward";
    my $row = $self->bump_row($bump_start, $bump_end, 0, $key);

    # Row 0 sits 12px from the baseline offset of 7; each further row is
    # pushed out by padding+2, mirrored for the reverse strand.
    my $pos = 7 + $ori * 12 + $ori * $row * ($details->{'padding'} + 2);

    # Honour the configured maximum bump depth: features pushed beyond it
    # are dropped.  (Bug fix: the previous ternary was inverted and drew
    # ONLY the rows that exceeded the depth limit.)
    my $dep = $self->my_config('dep');
    return () if $dep && $row > $dep - 1;

    return $self->Rect({
        'x'         => $details->{'start'},
        'y'         => $pos,
        'width'     => $draw_length,
        'height'    => $details->{'padding'},
        'colour'    => $details->{'col'},
        'absolutey' => 1,
        'href'      => $details->{'href'},
        'zmenu'     => $details->{'zmenu'},
    });
}
1;
| 32.537037 | 112 | 0.533295 |
73d55c7b67a9c04bb780c4ffaffd5fcb2551c2f2 | 1,967 | t | Perl | code/tests/theano.t | dcrossey/automl | 104fa22885f547c9eb3beeb45cc9a9291a5dd00a | [
"Apache-2.0"
]
| 23 | 2020-04-30T01:22:46.000Z | 2021-12-17T10:42:10.000Z | code/tests/theano.t | dcrossey/automl | 104fa22885f547c9eb3beeb45cc9a9291a5dd00a | [
"Apache-2.0"
]
| 3 | 2020-04-24T17:04:23.000Z | 2021-02-08T20:26:11.000Z | code/tests/theano.t | dcrossey/automl | 104fa22885f547c9eb3beeb45cc9a9291a5dd00a | [
"Apache-2.0"
]
| 14 | 2020-04-24T17:31:46.000Z | 2021-07-11T02:53:46.000Z | \l automl.q
.automl.loadfile`:init.q
.automl.loadfile`:code/tests/utils.q
// don't run tests if theano can't be loaded
if[not 0~.automl.checkimport[5];exit 0];
// Create feature and target data
nGeneral:100
featureDataNormal:([]nGeneral?1f;asc nGeneral?1f;nGeneral?`a`b`c)
targetBinary :asc 100?0b
// language agnostic function for moving a file to a new location
// Move a file from one location to another, independent of the host OS.
// filePaths: a pair of paths relative to .automl.path -> (source;destination).
// Uses the Windows `move` command (with backslash separators) when running
// on Windows, `mv` otherwise.
.test.moveFiles:{[filePaths]
  // .z.o starts with "w" on Windows builds
  os:.z.o like "w*";
  // prepend the automl path to both components and join them with a space
  filePaths:{" "sv raze each x,/:y}[.automl.path;filePaths];
  // Windows shell needs backslash path separators
  if[os;filePaths:ssr[filePaths;"/";"\\"]];
  system $[os;"move ";"mv "],filePaths;
  }
// file paths to allow theano model tests to be added and reverted to original
.test.filePaths:(("/code/customization/models/libSupport/theano.q";"/code/customization/models/libSupport/oldTheano.q");
("/code/customization/models/libSupport/theano.p";"/code/customization/models/libSupport/oldTheano.p");
("/code/customization/models/modelConfig/models.json";"/code/customization/models/modelConfig/oldModels.json");
("/code/tests/files/theano/theano.q";"/code/customization/models/libSupport/theano.q");
("/code/tests/files/theano/theano.p";"/code/customization/models/libSupport/theano.p");
("/code/tests/files/theano/models.json";"/code/customization/models/modelConfig/models.json"));
.test.moveFiles each .test.filePaths;
// reload the library contents to ensure the correct theano files are used
.automl.loadfile`:init.q
//Create function to ensure fit runs correctly
// Apply .automl.fit to the given parameter list, split the returned
// dictionary into its key and value lists, and return the type codes:
// the type of the key followed by the type of each value.
.test.checkFit:{[params]fitReturn:(key;value)@\:.automl.fit . params;type[first fitReturn],type each last fitReturn}
-1"\nTesting appropriate inputs to fit function for with theano models loaded\n";
passingTest[.test.checkFit;(featureDataNormal;targetBinary;`normal;`class;::);1b;11 99 104h]
// Revert to the default theano setup
.test.moveFiles each reverse each .test.filePaths rotate[3;til 6];
// Revert the automl library version to use
.automl.loadfile`:init.q
| 40.142857 | 121 | 0.732588 |
73d9771a0a145847ee7941e4a78832c1c79e40cf | 4,529 | pm | Perl | lib/App/Netdisco/Util/Node.pm | DimaRU/netdisco | 596986d190e1803c6fde5aaa58c14252a7bb2b12 | [
"BSD-3-Clause"
]
| 376 | 2017-01-05T00:05:53.000Z | 2022-03-30T18:56:44.000Z | lib/App/Netdisco/Util/Node.pm | DimaRU/netdisco | 596986d190e1803c6fde5aaa58c14252a7bb2b12 | [
"BSD-3-Clause"
]
| 597 | 2017-01-05T22:51:23.000Z | 2022-03-30T08:52:17.000Z | lib/App/Netdisco/Util/Node.pm | DimaRU/netdisco | 596986d190e1803c6fde5aaa58c14252a7bb2b12 | [
"BSD-3-Clause"
]
| 84 | 2017-01-06T23:36:27.000Z | 2022-03-01T09:31:32.000Z | package App::Netdisco::Util::Node;
use Dancer qw/:syntax :script/;
use Dancer::Plugin::DBIC 'schema';
use NetAddr::MAC;
use App::Netdisco::Util::Permission qw/check_acl_no check_acl_only/;
use base 'Exporter';
our @EXPORT = ();
our @EXPORT_OK = qw/
check_mac
is_nbtstatable
store_arp
/;
our %EXPORT_TAGS = (all => \@EXPORT_OK);
=head1 NAME
App::Netdisco::Util::Node
=head1 DESCRIPTION
A set of helper subroutines to support parts of the Netdisco application.
There are no default exports, however the C<:all> tag will export all
subroutines.
=head1 EXPORT_OK
=head2 check_mac( $node, $device?, $port_macs? )
Given a MAC address, perform various sanity checks which need to be done
before writing an ARP/Neighbor entry to the database storage.
Returns false, and might log a debug level message, if the checks fail.
Returns a true value (the MAC address in IEEE format) if these checks pass:
=over 4
=item *
MAC address is well-formed (according to common formats)
=item *
MAC address is not all-zero, broadcast, CLIP, VRRP or HSRP
=back
Optionally pass a Device instance or IP to use in logging.
Optionally pass a cached set of Device port MAC addresses as the third
argument, in which case an additional check is added:
=over 4
=item *
MAC address does not belong to an interface on any known Device
=back
=cut
sub check_mac {
  my ($node, $device, $port_macs) = @_;

  # nothing to check at all
  return 0 unless $node;

  my $candidate = NetAddr::MAC->new(mac => ($node || ''));
  my $dev_label = $device ? (ref $device ? $device->ip : $device) : '';
  $port_macs ||= {};

  # incomplete MAC addresses (BayRS frame relay DLCI, etc)
  unless (defined $candidate and not $candidate->errstr) {
      debug sprintf ' [%s] check_mac - mac [%s] malformed - skipping',
        $dev_label, $node;
      return 0;
  }

  # canonical form: lower case, hex, colon delimited, 8-bit groups
  my $canon = lc $candidate->as_ieee;

  # broadcast, all-zero and CLIP addresses are never real nodes
  return 0 if $candidate->is_broadcast;
  return 0 if $canon eq '00:00:00:00:00:00';
  return 0 if $canon eq '00:00:00:00:00:01';

  # multicast (MS NLB excepted)
  if ($candidate->is_multicast and not $candidate->is_msnlb) {
      debug sprintf ' [%s] check_mac - multicast mac [%s] - skipping',
        $dev_label, $canon;
      return 0;
  }

  # VRRP virtual addresses
  if ($candidate->is_vrrp) {
      debug sprintf ' [%s] check_mac - VRRP mac [%s] - skipping',
        $dev_label, $canon;
      return 0;
  }

  # HSRP (v1 or v2) virtual addresses
  if ($candidate->is_hsrp or $candidate->is_hsrp2) {
      debug sprintf ' [%s] check_mac - HSRP mac [%s] - skipping',
        $dev_label, $canon;
      return 0;
  }

  # the address belongs to a port on a known device
  if ($port_macs and exists $port_macs->{$canon}) {
      debug sprintf ' [%s] check_mac - mac [%s] is device port - skipping',
        $dev_label, $canon;
      return 0;
  }

  # all checks passed: hand back the canonical IEEE form
  return $canon;
}
=head2 is_nbtstatable( $ip )
Given an IP address, returns C<true> if Netdisco on this host is permitted by
the local configuration to nbtstat the node.
The configuration items C<nbtstat_no> and C<nbtstat_only> are checked
against the given IP.
Returns false if the host is not permitted to nbtstat the target node.
=cut
sub is_nbtstatable {
  my ($ip) = @_;

  # explicitly denied -> false; in the allow list -> true; otherwise false
  return if check_acl_no($ip, 'nbtstat_no');
  return 1 if check_acl_only($ip, 'nbtstat_only');
  return;
}
=head2 store_arp( \%host, $now? )
Stores a new entry to the C<node_ip> table with the given MAC, IP (v4 or v6)
and DNS host name. Host details are provided in a Hash ref:
{
ip => '192.0.2.1',
node => '00:11:22:33:44:55',
dns => 'myhost.example.com',
}
The C<dns> entry is optional. The update will mark old entries for this IP as
no longer C<active>.
Optionally a literal string can be passed in the second argument for the
C<time_last> timestamp, otherwise the current timestamp (C<now()>) is used.
=cut
sub store_arp {
  my ($entry, $now) = @_;
  $now ||= 'now()';

  my $ip  = $entry->{'ip'};
  my $mac = NetAddr::MAC->new(mac => ($entry->{'node'} || ''));
  my $dns = $entry->{'dns'};

  # bail out when the MAC does not parse
  return unless defined $mac and not $mac->errstr;

  schema('netdisco')->txn_do(sub {
    my $rs = schema('netdisco')->resultset('NodeIp');

    # retire any still-active rows for this IP
    $rs->search(
        { ip => $ip, -bool => 'active' },
        { columns => [qw/mac ip/] })
      ->update({ active => \'false' });

    # then insert or refresh the current entry
    $rs->update_or_create(
      {
        mac       => $mac->as_ieee,
        ip        => $ip,
        dns       => $dns,
        active    => \'true',
        time_last => \$now,
      },
      {
        key => 'primary',
        for => 'update',
      });
  });
}
1;
| 22.758794 | 77 | 0.633252 |
73d5168071406906311ecd6658a593203eb90ab3 | 2,621 | t | Perl | t/SublimePackages.t | alexander95015/zeroclickinfo-spice | f690084c237d4482e6d31256e0f145b55e0dec15 | [
"Apache-2.0"
]
| 9 | 2018-04-02T10:10:06.000Z | 2021-07-07T04:51:46.000Z | t/SublimePackages.t | alexander95015/zeroclickinfo-spice | f690084c237d4482e6d31256e0f145b55e0dec15 | [
"Apache-2.0"
]
| null | null | null | t/SublimePackages.t | alexander95015/zeroclickinfo-spice | f690084c237d4482e6d31256e0f145b55e0dec15 | [
"Apache-2.0"
]
| 1 | 2018-10-02T06:37:27.000Z | 2018-10-02T06:37:27.000Z | #!/usr/bin/env perl
# Spice trigger tests for the DDG::Spice::SublimePackages instant answer.
# Each query string maps either to the expected spice callback path
# (via test_spice) or to undef when the query must NOT trigger the answer.
use strict;
use warnings;
use Test::More;
use DDG::Test::Spice;
ddg_spice_test(
    [qw( DDG::Spice::SublimePackages )],
    # Basic tests
    'sublimetext package code' => test_spice(
        '/js/spice/sublime_packages/code',
        call_type => 'include',
        caller => 'DDG::Spice::SublimePackages'
    ),
    'sublime text php' => test_spice(
        '/js/spice/sublime_packages/php',
        call_type => 'include',
        caller => 'DDG::Spice::SublimePackages'
    ),
    'sublime text package for json' => test_spice(
        '/js/spice/sublime_packages/json',
        call_type => 'include',
        caller => 'DDG::Spice::SublimePackages'
    ),
    # Operating system filtering tests
    # (the ':linux' / ':osx' / ':win' filter is URL-encoded as %3A...)
    'sublimetext package html linux' => test_spice(
        '/js/spice/sublime_packages/html%20%3Alinux',
        call_type => 'include',
        caller => 'DDG::Spice::SublimePackages'
    ),
    'sublimetext package javascript mac os x' => test_spice(
        '/js/spice/sublime_packages/javascript%20%3Aosx',
        call_type => 'include',
        caller => 'DDG::Spice::SublimePackages'
    ),
    'sublimetext package auto osx' => test_spice(
        '/js/spice/sublime_packages/auto%20%3Aosx',
        call_type => 'include',
        caller => 'DDG::Spice::SublimePackages'
    ),
    'sublime text package javascript win' => test_spice(
        '/js/spice/sublime_packages/javascript%20%3Awin',
        call_type => 'include',
        caller => 'DDG::Spice::SublimePackages'
    ),
    # Version filtering tests (':st2' / ':st3' select the Sublime version)
    'sublimetext package text version 2' => test_spice(
        '/js/spice/sublime_packages/text%20%3Ast2',
        call_type => 'include',
        caller => 'DDG::Spice::SublimePackages'
    ),
    'sublimetext package text version 3' => test_spice(
        '/js/spice/sublime_packages/text%20%3Ast3',
        call_type => 'include',
        caller => 'DDG::Spice::SublimePackages'
    ),
    'sublime text 2 yml' => test_spice(
        '/js/spice/sublime_packages/%3Ast2%20yml',
        call_type => 'include',
        caller => 'DDG::Spice::SublimePackages'
    ),
    'sublime text v3 yml' => test_spice(
        '/js/spice/sublime_packages/%3Ast3%20yml',
        call_type => 'include',
        caller => 'DDG::Spice::SublimePackages'
    ),
    # Queries that must not trigger the spice
    'about sublime text' => undef,
    'sublimetext download' => undef,
    # Skip word triggers
    'sublime text download' => undef,
    'sublime text purchase' => undef,
    'sublime text buy' => undef,
    'sublime text about' => undef
);
done_testing;
| 27.882979 | 60 | 0.594048 |
73e0439c483df18a39bdccf9e8017330dd44eace | 1,039 | pm | Perl | lib/Catmandu/Fix/clean_preselects.pm | datango/LibreCat | d59a09af0ccaeef4c5fab82c2171510990c8d004 | [
"Artistic-1.0"
]
| 42 | 2015-09-15T12:02:11.000Z | 2022-02-14T07:47:33.000Z | lib/Catmandu/Fix/clean_preselects.pm | datango/LibreCat | d59a09af0ccaeef4c5fab82c2171510990c8d004 | [
"Artistic-1.0"
]
| 887 | 2015-10-08T11:38:41.000Z | 2021-12-09T15:51:26.000Z | lib/Catmandu/Fix/clean_preselects.pm | datango/LibreCat | d59a09af0ccaeef4c5fab82c2171510990c8d004 | [
"Artistic-1.0"
]
| 20 | 2016-05-18T09:15:59.000Z | 2021-10-30T13:31:27.000Z | package Catmandu::Fix::clean_preselects;
=pod
=head1 NAME
Catmandu::Fix::clean_preselects - cleans empty abstract and related_material.link
=cut
use Catmandu::Sane;
use Moo;
sub fix {
    my ($self, $pub) = @_;

    # Abstracts: default a missing language to English, then keep only
    # entries that still carry both a language and a text. Remove the
    # key entirely when nothing survives.
    if ($pub->{abstract}) {
        my @kept;
        foreach my $entry (@{ $pub->{abstract} }) {
            $entry->{lang} ||= "eng";
            next unless $entry->{lang} && $entry->{text};
            push @kept, $entry;
        }
        if (@kept) {
            $pub->{abstract} = \@kept;
        }
        else {
            delete $pub->{abstract};
        }
    }

    # Related material links: keep only links that have both a relation
    # and a url; drop the link key when none remain.
    if ($pub->{related_material} and $pub->{related_material}->{link}) {
        my @links = grep { $_->{relation} && $_->{url} }
                    @{ $pub->{related_material}->{link} };
        if (@links) {
            $pub->{related_material}->{link} = \@links;
        }
        else {
            delete $pub->{related_material}->{link};
        }
    }

    return $pub;
}
1;
| 19.980769 | 81 | 0.493744 |
ed1507f8c0e472dc5dc94d797de9e32cd2de0c9f | 175 | t | Perl | v5.22/t/extra/unop_aux.t | perl11/p5-coretests | 65f340f49aea59bd666f1bf5c077a66004b51731 | [
"Artistic-2.0"
]
| 1 | 2015-12-07T12:45:44.000Z | 2015-12-07T12:45:44.000Z | v5.22/t/extra/unop_aux.t | perl11/p5-coretests | 65f340f49aea59bd666f1bf5c077a66004b51731 | [
"Artistic-2.0"
]
| null | null | null | v5.22/t/extra/unop_aux.t | perl11/p5-coretests | 65f340f49aea59bd666f1bf5c077a66004b51731 | [
"Artistic-2.0"
]
| null | null | null | #!perl
# Raw TAP test (plan of 2) for hash element access: one single-level
# lookup and one deeply nested, autovivified lookup. Presumably this
# exercises the OP_MULTIDEREF / UNOP_AUX optimisation in the op tree
# (per the file name unop_aux.t) -- TODO confirm against the harness.
# The stored values are the literal "ok N" lines, so printing them back
# out is itself the assertion.
print "1..2\n";
# one level hash
my %a; $a{b} = qq{ok 1\n};
print $a{b};
# multi levels hash: each intermediate level autovivifies on assignment
my %foo; $foo{b}{c}{d}{e}{f} = qq{ok 2\n};
print $foo{b}{c}{d}{e}{f};
| 14.583333 | 43 | 0.508571 |
ed40c9b1df3226a27ed85c12b61d7d9955f219cb | 6,187 | pm | Perl | tools/test_modules/m19600.pm | vbalyasnyy/hashcat | 84d3981e7b6bc4efe3c1bcbaa6f4198f4c523204 | [
"MIT"
]
| 3 | 2019-02-08T11:12:44.000Z | 2021-05-09T13:43:50.000Z | tools/test_modules/m19600.pm | e-ago/hashcat | dbbdb7e5ac4f7db2dd503c1820b91fbbcfc37c92 | [
"MIT"
]
| null | null | null | tools/test_modules/m19600.pm | e-ago/hashcat | dbbdb7e5ac4f7db2dd503c1820b91fbbcfc37c92 | [
"MIT"
]
| 1 | 2021-05-01T09:38:27.000Z | 2021-05-01T09:38:27.000Z | #!/usr/bin/env perl
##
## Author......: See docs/credits.txt
## License.....: MIT
##
use strict;
use warnings;
use Digest::SHA qw (hmac_sha1);
use Crypt::Mode::CBC;
use Crypt::PBKDF2;
use Encode;
use POSIX qw (strftime);
# Hex-encode a raw byte string (lowercase, no separators).
sub byte2hex
{
  my ($raw) = @_;

  return unpack ('H*', $raw);
}
# Decode a hex string back into its raw bytes (inverse of byte2hex).
sub hex2byte
{
  my ($hex) = @_;

  return pack ('H*', $hex);
}
# Number of bytes needed to pad $len up to the next multiple of
# $blocksize ($blocksize must be a power of two); 0 when already aligned.
sub pad
{
  my ($len, $blocksize) = @_;

  # two's-complement negate, then mask to the block size
  return (~$len + 1) & ($blocksize - 1);
}
# Input constraints: [word min/max, salt min/max, then three unused
# (-1, -1) slots] as expected by the test harness.
sub module_constraints
{
  return [ [0, 256], [16, 16], [-1, -1], [-1, -1], [-1, -1] ];
}
# Construct (or, for verification, reconstruct) a hashcat mode 19600
# hash: Kerberos 5 TGS-REP etype 17 (aes128-cts-hmac-sha1-96).
#
# In:  $word     password candidate
#      $salt     unused here (the Kerberos salt is realm+user instead)
#      $user     principal name (default "user")
#      $realm    Kerberos realm (default "realm")
#      $checksum optional hex HMAC-SHA1-96 taken from an existing hash
#      $edata2   optional hex encrypted ticket from an existing hash
# Out: '$krb5tgs$17$user$realm$checksum$edata2' string
sub module_generate_hash
{
  my $word = shift;
  my $salt = shift;
  my $user = shift // "user";
  my $realm = shift // "realm";
  my $checksum = shift;
  my $edata2 = shift;
  # AES etype salt: upper-cased realm concatenated with the user name
  my $mysalt = uc $realm;
  $mysalt = $mysalt . $user;
  # first we generate the 'seed': PBKDF2-HMAC-SHA1, 4096 iterations,
  # 16 bytes of output (AES-128 key size)
  my $iter = 4096;
  my $pbkdf2 = Crypt::PBKDF2->new
  (
    hash_class => 'HMACSHA1',
    iterations => $iter,
    output_len => 16
  );
  my $b_seed = $pbkdf2->PBKDF2 ($mysalt, $word);
  # we can precompute this: the constant's first 8 bytes are the ASCII
  # string "kerberos" (n-folded derivation input)
  my $b_kerberos_nfolded = hex2byte('6b65726265726f737b9b5b2b93132b93');
  my $b_iv = hex2byte('0' x 32);
  # 'key_bytes' will be the AES key used to generate 'ki' (for final hmac-sha1)
  # and 'ke' (AES key to decrypt/encrypt the ticket)
  my $cbc = Crypt::Mode::CBC->new ('AES', 0);
  my $b_key_bytes = $cbc->encrypt ($b_kerberos_nfolded, $b_seed, $b_iv);
  # precomputed stuff: key-usage derivation constants
  # (presumably the n-folded usage inputs from RFC 3961 -- TODO confirm)
  my $b_nfolded1 = hex2byte('62dc6e371a63a80958ac562b15404ac5');
  my $b_nfolded2 = hex2byte('b5b0582c14b6500aad56ab55aa80556a');
  my $b_ki = $cbc->encrypt ($b_nfolded1, $b_key_bytes, $b_iv);
  my $b_ke = $cbc->encrypt ($b_nfolded2, $b_key_bytes, $b_iv);
  # hard-coded ASN.1 ticket skeleton used when no real ticket is given
  # (the hex embeds the literal realm "MYKBRTEST.CONTOSO.COM")
  my $cleartext_ticket = '6381b03081ada00703050050a00000a11b3019a003020117a1'.
  '12041058e0d77776e8b8e03991f2966939222aa2171b154d594b5242544553542e434f4e5'.
  '44f534f2e434f4da3133011a003020102a10a30081b067472616e6365a40b3009a0030201'.
  '01a1020400a511180f32303136303231353134343735305aa611180f32303136303231353'.
  '134343735305aa711180f32303136303231363030343735305aa811180f32303136303232'.
  '323134343735305a';
  if (defined $edata2)
  {
    # A real encrypted ticket was supplied: decrypt it under $word's key.
    # The ciphertext uses ciphertext stealing, so the last (short) block
    # and block n-1 need special handling below.
    my $len_last_block = length($edata2) % 32;
    my $tmp = $len_last_block + 32;
    my $b_truncated_enc_ticket = hex2byte (substr $edata2, 0, -$tmp);
    my $b_last_block = hex2byte (substr $edata2, -$len_last_block);
    my $b_n_1_block = hex2byte (substr(substr($edata2, -$tmp), 0, 32));
    my $b_truncated_ticket_decrypted = $cbc->decrypt ($b_truncated_enc_ticket, $b_ke, $b_iv);
    my $truncated_ticket_decrypted = byte2hex($b_truncated_ticket_decrypted);
    # quick ASN.1 sanity check on the decrypted prefix: detects a wrong
    # decryption key (i.e. wrong password) before re-encrypting
    my $check_correct = ((substr ($truncated_ticket_decrypted, 32, 4) eq "6381" && substr ($truncated_ticket_decrypted, 38, 2) eq "30") ||
      (substr ($truncated_ticket_decrypted, 32, 4) eq "6382")) &&
      ((substr ($truncated_ticket_decrypted, 48, 6) eq "030500") ||
      (substr ($truncated_ticket_decrypted, 48, 8) eq "050307A0"));
    if ($check_correct == 1)
    {
      # undo the ciphertext-stealing block swap: recover the plaintext of
      # the short final block and the stolen tail of block n-1
      my $b_n_2 = substr $b_truncated_enc_ticket, -16;
      my $b_n_1_decrypted = $cbc->decrypt ($b_n_1_block, $b_ke, $b_iv);
      my $b_last_plain = substr $b_n_1_decrypted, 0, $len_last_block/2;
      $b_last_plain = $b_last_plain ^ $b_last_block;
      my $omitted = substr $b_n_1_decrypted, -(16 - $len_last_block/2);
      my $b_n_1 = $b_last_block . $omitted;
      $b_n_1 = $cbc->decrypt ($b_n_1, $b_ke, $b_iv);
      $b_n_1 = $b_n_1 ^ $b_n_2;
      my $b_cleartext_ticket = $b_truncated_ticket_decrypted . $b_n_1 . $b_last_plain;
      $cleartext_ticket = byte2hex($b_cleartext_ticket);
    }
    else # validation failed
    {
      # fake/wrong ticket (otherwise if we just decrypt/encrypt we end
      # up with false positives all the time)
      $cleartext_ticket = "0" x (length ($cleartext_ticket) + 32);
    }
  }
  if (defined $checksum)
  {
    $checksum = pack ("H*", $checksum);
  }
  else
  {
    # fully synthetic hash: randomize the skeleton ticket with a nonce
    if (!defined $edata2)
    {
      my $nonce = unpack ("H*", random_bytes (16));
      $cleartext_ticket = $nonce . $cleartext_ticket;
    }
    # we have what is required to compute checksum
    # (HMAC-SHA1 truncated to 96 bits = 12 bytes)
    $checksum = hmac_sha1 (hex2byte($cleartext_ticket), $b_ki);
    $checksum = substr $checksum, 0, 12;
  }
  # Re-encrypt the (possibly rebuilt) plaintext ticket, reproducing the
  # ciphertext-stealing layout: pad the short last block with zeros ...
  my $len_cleartext_last_block = length($cleartext_ticket) % 32;
  my $cleartext_last_block = substr $cleartext_ticket, -$len_cleartext_last_block;
  my $padding = pad(length($cleartext_ticket), 32);
  my $b_cleartext_last_block_padded = hex2byte($cleartext_last_block . '0' x $padding);
  # we will encrypt until n-1 block (included)
  my $truncated_cleartext_ticket = substr $cleartext_ticket, 0, -$len_cleartext_last_block;
  my $b_truncated_enc_ticket = $cbc->encrypt (hex2byte($truncated_cleartext_ticket), $b_ke, $b_iv);
  my $b_enc_ticket_n_1_block= substr $b_truncated_enc_ticket, -16;
  my $b_enc_last_block = substr $b_enc_ticket_n_1_block, 0, $len_cleartext_last_block/2;
  # we now craft the new n-1 block (XOR in the padded last plaintext
  # block, re-encrypt, then swap it with the stolen tail)
  my $tmp = $b_enc_ticket_n_1_block ^ $b_cleartext_last_block_padded;
  $b_enc_ticket_n_1_block = $cbc->encrypt ($tmp, $b_ke, $b_iv);
  $tmp = substr $b_truncated_enc_ticket, 0, -16;
  $edata2 = $tmp . $b_enc_ticket_n_1_block . $b_enc_last_block;
  my $tmp_hash = sprintf ('$krb5tgs$17$%s$%s$%s$%s', $user, $realm, unpack ("H*", $checksum), unpack ("H*", $edata2));
  return $tmp_hash;
}
# Verify driver: take a "hash:password" line, re-generate the hash from
# the password and the hash's own user/realm/checksum/edata2 fields, and
# return ($recomputed_hash, $word). Returns empty list on malformed input.
sub module_verify_hash
{
  my ($line) = @_;

  my ($hash, $word) = split (':', $line);

  return unless defined $hash;
  return unless defined $word;

  # expected layout: $krb5tgs$17$user$realm$checksum$edata2
  # (leading '$' yields an empty first field, hence 7 fields in total)
  my @fields = split ('\$', $hash);

  return unless scalar @fields == 7;

  my (undef, $signature, $algorithm, $user, $realm, $checksum, $edata2) = @fields;

  return unless $signature eq "krb5tgs";
  return unless $algorithm eq "17";
  return unless length ($checksum) == 24; # 12-byte HMAC in hex
  return unless length ($edata2) >= 64;   # at least two AES blocks in hex

  my $word_packed = pack_if_HEX_notation ($word);

  my $new_hash = module_generate_hash ($word_packed, undef, $user, $realm, $checksum, $edata2);

  return ($new_hash, $word);
}
1; | 28.251142 | 139 | 0.65476 |
ed3917a642e491ab0aba139c0a507ea9c06ce05e | 4,555 | pm | Perl | modules/Bio/EnsEMBL/Production/Pipeline/GVF/JobForEachSeqRegion.pm | nicklangridge/ensembl-production | a00614922ba51b3065fd7b57463ab626f4747b1e | [
"Apache-2.0"
]
| null | null | null | modules/Bio/EnsEMBL/Production/Pipeline/GVF/JobForEachSeqRegion.pm | nicklangridge/ensembl-production | a00614922ba51b3065fd7b57463ab626f4747b1e | [
"Apache-2.0"
]
| null | null | null | modules/Bio/EnsEMBL/Production/Pipeline/GVF/JobForEachSeqRegion.pm | nicklangridge/ensembl-production | a00614922ba51b3065fd7b57463ab626f4747b1e | [
"Apache-2.0"
]
| null | null | null | =head1 LICENSE
Copyright [1999-2016] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=head1 NAME
Bio::EnsEMBL::Production::Pipeline::GVF::JobForEachSeqRegion;
=head1 DESCRIPTION
=head1 MAINTAINER
ckong@ebi.ac.uk
=cut
package Bio::EnsEMBL::Production::Pipeline::GVF::JobForEachSeqRegion;
use strict;
use Data::Dumper;
use Hash::Util qw( lock_hash );
#use base ('EGVar::FTP::RunnableDB::GVF::Base');
use base qw/Bio::EnsEMBL::Production::Pipeline::Common::Base/;
# Seed the GVF dump pipeline for one species: batch all toplevel seq
# regions, then dataflow a species registration (branch 3), one
# gvf_merge row per requested GVF type (branch 4), and one dump job per
# batch/type pair (branch 2). 'species', 'division' and 'gvf_type'
# (arrayref) come from the hive job parameters.
sub run {
    my $self = shift @_;

    my $species = $self->param('species');
    # FIX: this statement previously ended with a comma (comma
    # operator), not a semicolon -- harmless at runtime but a typo.
    my $division = $self->param('division');
    my $gvf_type = $self->param('gvf_type');

    confess('Type error!') unless (ref $gvf_type eq 'ARRAY');

    my $slice_adaptor = Bio::EnsEMBL::Registry->get_adaptor( $species, 'Core', 'Slice' );
    confess('Type error!') unless ($slice_adaptor->isa('Bio::EnsEMBL::DBSQL::SliceAdaptor'));

    # Fetch all toplevel sequences.
    # This may consume a lot of memory, so if there are memory issues, they
    # are likely to be here.
    print "\nFetching all toplevel slices.\n" if($self->debug);
    my $slices = $slice_adaptor->fetch_all('toplevel', undef, 1);
    print "\nDone fetching all toplevel slices. Got ".@$slices." slices.\n" if($self->debug);

    # 'use' runs at compile time regardless of its position; kept here to
    # document which helper this method depends on.
    use Bio::EnsEMBL::Production::Pipeline::GVF::BatchSeqRegions;
    my $batch_seq_regions = Bio::EnsEMBL::Production::Pipeline::GVF::BatchSeqRegions->new();

    my @batch_to_dump;
    my $callback = sub {
        my $batch = shift;
        # We can't create the jobs here right away, because they rely on the
        # gvf_species and gvf_merge tables. In order to create these tables
        # we need to know the total number of files and a species id.
        #
        # So the batches are collected here first and dump jobs are created
        # at the end.
        push @batch_to_dump, $batch;
    };

    print "\nCreating batches.\n" if($self->debug);
    $batch_seq_regions->batch_slices({
        slices => $slices,
        callback => $callback
    });
    print "\nDone creating batches.\n" if($self->debug);

    #
    # Write to gvf_species table
    #
    my $job_parameters = {
        species => $species,
        total_files => scalar @batch_to_dump,
        division => $division,
    };
    print "\nFlowing to gvf_species table:\n" . Dumper($job_parameters) if($self->debug);
    $self->dataflow_output_id($job_parameters, 3);

    my $species_entry = $self->fetch_species($species);
    my $species_id = $species_entry->{species_id};

    #
    # Write to gvf_merge table
    #
    foreach my $current_gvf_type (@$gvf_type) {
        my $job_parameters = {
            species_id => $species_id,
            type => $current_gvf_type,
            created => undef,
        };
        if ($self->debug) {
            print "\nWriting gvf_merge entry for '$current_gvf_type':\n";
            print Dumper($job_parameters);
        }
        $self->dataflow_output_id($job_parameters, 4);
    }
    print "\nDone writing gvf_merge entries.\n" if($self->debug);

    #
    # Create the dump jobs.
    #
    # Important:
    # The jobs we create here rely on data being in the
    # gvf_merge merge table. Therefore the jobs must be
    # created only after the loop above has run.
    foreach my $current_batch_to_dump (@batch_to_dump) {
        foreach my $current_gvf_type (@$gvf_type) {
            #
            # Create job to dump this batch of seq regions in the specified
            # gvf_type.
            #
            $self->dataflow_output_id( {
                batch => $current_batch_to_dump,
                species => $species,
                gvf_type => $current_gvf_type,
            }, 2);
        }
    }
}
#### TODO: Can move to GVF/Base.pm
=head1 fetch_species
Returns a has for the species like this:
$VAR1 = {
'total_files' => '13',
'species' => 'plasmodium_falciparum',
'species_id' => '1'
};
=cut
sub fetch_species {
    my ($self, $species_name) = @_;

    # look the species up in the hive's 'gvf_species' table
    my $table = $self->db->get_NakedTableAdaptor();
    $table->table_name( 'gvf_species' );

    my $row = $table->fetch_by_species( $species_name );

    # freeze the key set so mistyped keys fail loudly downstream
    lock_hash(%$row);
    return $row;
}
1;
| 28.647799 | 100 | 0.670472 |
ed1e5a659aa20abc6a1f5b0a71c55d2b959b1929 | 10,302 | pl | Perl | webapp/perl/local/lib/perl5/auto/share/dist/DateTime-Locale/uk-UA.pl | AK-10/AK-10-isucon8-preliminary-revenge | f390710721b2f2e3d9f60120394ec37c9c96b975 | [
"MIT"
]
| 2 | 2019-04-15T04:28:23.000Z | 2019-04-16T12:45:51.000Z | webapp/perl/local/lib/perl5/auto/share/dist/DateTime-Locale/uk-UA.pl | AK-10/AK-10-isucon8-preliminary-revenge | f390710721b2f2e3d9f60120394ec37c9c96b975 | [
"MIT"
]
| 16 | 2019-08-28T23:45:01.000Z | 2019-12-20T02:12:13.000Z | webapp/perl/local/lib/perl5/auto/share/dist/DateTime-Locale/uk-UA.pl | AK-10/AK-10-isucon8-preliminary-revenge | f390710721b2f2e3d9f60120394ec37c9c96b975 | [
"MIT"
]
| 1 | 2019-04-14T01:11:20.000Z | 2019-04-14T01:11:20.000Z | {
am_pm_abbreviated => [
"\N{U+0434}\N{U+043f}",
"\N{U+043f}\N{U+043f}",
],
available_formats => {
Bh => "h B",
Bhm => "h:mm B",
Bhms => "h:mm:ss B",
E => "ccc",
EBhm => "E h:mm B",
EBhms => "E h:mm:ss B",
EHm => "E HH:mm",
EHms => "E HH:mm:ss",
Ed => "E, d",
Ehm => "E h:mm a",
Ehms => "E h:mm:ss a",
Gy => "y G",
GyMMM => "LLL y G",
GyMMMEd => "E, d MMM y G",
GyMMMd => "d MMM y G",
H => "HH",
Hm => "HH:mm",
Hms => "HH:mm:ss",
Hmsv => "HH:mm:ss v",
Hmv => "HH:mm v",
M => "LL",
MEd => "E, dd.MM",
MMM => "LLL",
MMMEd => "E, d MMM",
MMMMEd => "E, d MMMM",
"MMMMW-count-few" => "W-'\N{U+0439}' '\N{U+0442}\N{U+0438}\N{U+0436}'. MMMM",
"MMMMW-count-many" => "W-'\N{U+0439}' '\N{U+0442}\N{U+0438}\N{U+0436}'. MMMM",
"MMMMW-count-one" => "W-'\N{U+0439}' '\N{U+0442}\N{U+0438}\N{U+0436}'. MMMM",
"MMMMW-count-other" => "W-'\N{U+0439}' '\N{U+0442}\N{U+0438}\N{U+0436}'. MMMM",
MMMMd => "d MMMM",
MMMd => "d MMM",
Md => "dd.MM",
d => "d",
h => "h a",
hm => "h:mm a",
hms => "h:mm:ss a",
hmsv => "h:mm:ss a v",
hmv => "h:mm a v",
ms => "mm:ss",
y => "y",
yM => "MM.y",
yMEd => "E, dd.MM.y",
yMMM => "LLL y",
yMMMEd => "E, d MMM y",
yMMMM => "LLLL y",
yMMMd => "d MMM y",
yMd => "dd.MM.y",
yQQQ => "QQQ y",
yQQQQ => "QQQQ y '\N{U+0440}'.",
"yw-count-few" => "w-'\N{U+0439}' '\N{U+0442}\N{U+0438}\N{U+0436}'. Y '\N{U+0440}'.",
"yw-count-many" => "w-'\N{U+0439}' '\N{U+0442}\N{U+0438}\N{U+0436}'. Y '\N{U+0440}'.",
"yw-count-one" => "w-'\N{U+0439}' '\N{U+0442}\N{U+0438}\N{U+0436}'. Y '\N{U+0440}'.",
"yw-count-other" => "w-'\N{U+0439}' '\N{U+0442}\N{U+0438}\N{U+0436}'. Y '\N{U+0440}'.",
},
code => "uk-UA",
date_format_full => "EEEE, d MMMM y '\N{U+0440}'.",
date_format_long => "d MMMM y '\N{U+0440}'.",
date_format_medium => "d MMM y '\N{U+0440}'.",
date_format_short => "dd.MM.yy",
datetime_format_full => "{1} '\N{U+043e}' {0}",
datetime_format_long => "{1} '\N{U+043e}' {0}",
datetime_format_medium => "{1}, {0}",
datetime_format_short => "{1}, {0}",
day_format_abbreviated => [
"\N{U+043f}\N{U+043d}",
"\N{U+0432}\N{U+0442}",
"\N{U+0441}\N{U+0440}",
"\N{U+0447}\N{U+0442}",
"\N{U+043f}\N{U+0442}",
"\N{U+0441}\N{U+0431}",
"\N{U+043d}\N{U+0434}",
],
day_format_narrow => [
"\N{U+041f}",
"\N{U+0412}",
"\N{U+0421}",
"\N{U+0427}",
"\N{U+041f}",
"\N{U+0421}",
"\N{U+041d}",
],
day_format_wide => [
"\N{U+043f}\N{U+043e}\N{U+043d}\N{U+0435}\N{U+0434}\N{U+0456}\N{U+043b}\N{U+043e}\N{U+043a}",
"\N{U+0432}\N{U+0456}\N{U+0432}\N{U+0442}\N{U+043e}\N{U+0440}\N{U+043e}\N{U+043a}",
"\N{U+0441}\N{U+0435}\N{U+0440}\N{U+0435}\N{U+0434}\N{U+0430}",
"\N{U+0447}\N{U+0435}\N{U+0442}\N{U+0432}\N{U+0435}\N{U+0440}",
"\N{U+043f}\N{U+02bc}\N{U+044f}\N{U+0442}\N{U+043d}\N{U+0438}\N{U+0446}\N{U+044f}",
"\N{U+0441}\N{U+0443}\N{U+0431}\N{U+043e}\N{U+0442}\N{U+0430}",
"\N{U+043d}\N{U+0435}\N{U+0434}\N{U+0456}\N{U+043b}\N{U+044f}",
],
day_stand_alone_abbreviated => [
"\N{U+043f}\N{U+043d}",
"\N{U+0432}\N{U+0442}",
"\N{U+0441}\N{U+0440}",
"\N{U+0447}\N{U+0442}",
"\N{U+043f}\N{U+0442}",
"\N{U+0441}\N{U+0431}",
"\N{U+043d}\N{U+0434}",
],
day_stand_alone_narrow => [
"\N{U+041f}",
"\N{U+0412}",
"\N{U+0421}",
"\N{U+0427}",
"\N{U+041f}",
"\N{U+0421}",
"\N{U+041d}",
],
day_stand_alone_wide => [
"\N{U+043f}\N{U+043e}\N{U+043d}\N{U+0435}\N{U+0434}\N{U+0456}\N{U+043b}\N{U+043e}\N{U+043a}",
"\N{U+0432}\N{U+0456}\N{U+0432}\N{U+0442}\N{U+043e}\N{U+0440}\N{U+043e}\N{U+043a}",
"\N{U+0441}\N{U+0435}\N{U+0440}\N{U+0435}\N{U+0434}\N{U+0430}",
"\N{U+0447}\N{U+0435}\N{U+0442}\N{U+0432}\N{U+0435}\N{U+0440}",
"\N{U+043f}\N{U+02bc}\N{U+044f}\N{U+0442}\N{U+043d}\N{U+0438}\N{U+0446}\N{U+044f}",
"\N{U+0441}\N{U+0443}\N{U+0431}\N{U+043e}\N{U+0442}\N{U+0430}",
"\N{U+043d}\N{U+0435}\N{U+0434}\N{U+0456}\N{U+043b}\N{U+044f}",
],
era_abbreviated => [
"\N{U+0434}\N{U+043e} \N{U+043d}. \N{U+0435}.",
"\N{U+043d}. \N{U+0435}.",
],
era_narrow => [
"\N{U+0434}\N{U+043e} \N{U+043d}.\N{U+0435}.",
"\N{U+043d}.\N{U+0435}.",
],
era_wide => [
"\N{U+0434}\N{U+043e} \N{U+043d}\N{U+0430}\N{U+0448}\N{U+043e}\N{U+0457} \N{U+0435}\N{U+0440}\N{U+0438}",
"\N{U+043d}\N{U+0430}\N{U+0448}\N{U+043e}\N{U+0457} \N{U+0435}\N{U+0440}\N{U+0438}",
],
first_day_of_week => 1,
glibc_date_1_format => "%A, %-d %B %Y %X %z",
glibc_date_format => "%d.%m.%y",
glibc_datetime_format => "%a, %d-%b-%Y %X %z",
glibc_time_12_format => "%I:%M:%S %p",
glibc_time_format => "%T",
language => "Ukrainian",
month_format_abbreviated => [
"\N{U+0441}\N{U+0456}\N{U+0447}.",
"\N{U+043b}\N{U+044e}\N{U+0442}.",
"\N{U+0431}\N{U+0435}\N{U+0440}.",
"\N{U+043a}\N{U+0432}\N{U+0456}\N{U+0442}.",
"\N{U+0442}\N{U+0440}\N{U+0430}\N{U+0432}.",
"\N{U+0447}\N{U+0435}\N{U+0440}\N{U+0432}.",
"\N{U+043b}\N{U+0438}\N{U+043f}.",
"\N{U+0441}\N{U+0435}\N{U+0440}\N{U+043f}.",
"\N{U+0432}\N{U+0435}\N{U+0440}.",
"\N{U+0436}\N{U+043e}\N{U+0432}\N{U+0442}.",
"\N{U+043b}\N{U+0438}\N{U+0441}\N{U+0442}.",
"\N{U+0433}\N{U+0440}\N{U+0443}\N{U+0434}.",
],
month_format_narrow => [
"\N{U+0441}",
"\N{U+043b}",
"\N{U+0431}",
"\N{U+043a}",
"\N{U+0442}",
"\N{U+0447}",
"\N{U+043b}",
"\N{U+0441}",
"\N{U+0432}",
"\N{U+0436}",
"\N{U+043b}",
"\N{U+0433}",
],
month_format_wide => [
"\N{U+0441}\N{U+0456}\N{U+0447}\N{U+043d}\N{U+044f}",
"\N{U+043b}\N{U+044e}\N{U+0442}\N{U+043e}\N{U+0433}\N{U+043e}",
"\N{U+0431}\N{U+0435}\N{U+0440}\N{U+0435}\N{U+0437}\N{U+043d}\N{U+044f}",
"\N{U+043a}\N{U+0432}\N{U+0456}\N{U+0442}\N{U+043d}\N{U+044f}",
"\N{U+0442}\N{U+0440}\N{U+0430}\N{U+0432}\N{U+043d}\N{U+044f}",
"\N{U+0447}\N{U+0435}\N{U+0440}\N{U+0432}\N{U+043d}\N{U+044f}",
"\N{U+043b}\N{U+0438}\N{U+043f}\N{U+043d}\N{U+044f}",
"\N{U+0441}\N{U+0435}\N{U+0440}\N{U+043f}\N{U+043d}\N{U+044f}",
"\N{U+0432}\N{U+0435}\N{U+0440}\N{U+0435}\N{U+0441}\N{U+043d}\N{U+044f}",
"\N{U+0436}\N{U+043e}\N{U+0432}\N{U+0442}\N{U+043d}\N{U+044f}",
"\N{U+043b}\N{U+0438}\N{U+0441}\N{U+0442}\N{U+043e}\N{U+043f}\N{U+0430}\N{U+0434}\N{U+0430}",
"\N{U+0433}\N{U+0440}\N{U+0443}\N{U+0434}\N{U+043d}\N{U+044f}",
],
month_stand_alone_abbreviated => [
"\N{U+0441}\N{U+0456}\N{U+0447}",
"\N{U+043b}\N{U+044e}\N{U+0442}",
"\N{U+0431}\N{U+0435}\N{U+0440}",
"\N{U+043a}\N{U+0432}\N{U+0456}",
"\N{U+0442}\N{U+0440}\N{U+0430}",
"\N{U+0447}\N{U+0435}\N{U+0440}",
"\N{U+043b}\N{U+0438}\N{U+043f}",
"\N{U+0441}\N{U+0435}\N{U+0440}",
"\N{U+0432}\N{U+0435}\N{U+0440}",
"\N{U+0436}\N{U+043e}\N{U+0432}",
"\N{U+043b}\N{U+0438}\N{U+0441}",
"\N{U+0433}\N{U+0440}\N{U+0443}",
],
month_stand_alone_narrow => [
"\N{U+0421}",
"\N{U+041b}",
"\N{U+0411}",
"\N{U+041a}",
"\N{U+0422}",
"\N{U+0427}",
"\N{U+041b}",
"\N{U+0421}",
"\N{U+0412}",
"\N{U+0416}",
"\N{U+041b}",
"\N{U+0413}",
],
month_stand_alone_wide => [
"\N{U+0441}\N{U+0456}\N{U+0447}\N{U+0435}\N{U+043d}\N{U+044c}",
"\N{U+043b}\N{U+044e}\N{U+0442}\N{U+0438}\N{U+0439}",
"\N{U+0431}\N{U+0435}\N{U+0440}\N{U+0435}\N{U+0437}\N{U+0435}\N{U+043d}\N{U+044c}",
"\N{U+043a}\N{U+0432}\N{U+0456}\N{U+0442}\N{U+0435}\N{U+043d}\N{U+044c}",
"\N{U+0442}\N{U+0440}\N{U+0430}\N{U+0432}\N{U+0435}\N{U+043d}\N{U+044c}",
"\N{U+0447}\N{U+0435}\N{U+0440}\N{U+0432}\N{U+0435}\N{U+043d}\N{U+044c}",
"\N{U+043b}\N{U+0438}\N{U+043f}\N{U+0435}\N{U+043d}\N{U+044c}",
"\N{U+0441}\N{U+0435}\N{U+0440}\N{U+043f}\N{U+0435}\N{U+043d}\N{U+044c}",
"\N{U+0432}\N{U+0435}\N{U+0440}\N{U+0435}\N{U+0441}\N{U+0435}\N{U+043d}\N{U+044c}",
"\N{U+0436}\N{U+043e}\N{U+0432}\N{U+0442}\N{U+0435}\N{U+043d}\N{U+044c}",
"\N{U+043b}\N{U+0438}\N{U+0441}\N{U+0442}\N{U+043e}\N{U+043f}\N{U+0430}\N{U+0434}",
"\N{U+0433}\N{U+0440}\N{U+0443}\N{U+0434}\N{U+0435}\N{U+043d}\N{U+044c}",
],
name => "Ukrainian Ukraine",
native_language => "\N{U+0443}\N{U+043a}\N{U+0440}\N{U+0430}\N{U+0457}\N{U+043d}\N{U+0441}\N{U+044c}\N{U+043a}\N{U+0430}",
native_name => "\N{U+0443}\N{U+043a}\N{U+0440}\N{U+0430}\N{U+0457}\N{U+043d}\N{U+0441}\N{U+044c}\N{U+043a}\N{U+0430} \N{U+0423}\N{U+043a}\N{U+0440}\N{U+0430}\N{U+0457}\N{U+043d}\N{U+0430}",
native_script => undef,
native_territory => "\N{U+0423}\N{U+043a}\N{U+0440}\N{U+0430}\N{U+0457}\N{U+043d}\N{U+0430}",
native_variant => undef,
quarter_format_abbreviated => [
"1-\N{U+0439} \N{U+043a}\N{U+0432}.",
"2-\N{U+0439} \N{U+043a}\N{U+0432}.",
"3-\N{U+0439} \N{U+043a}\N{U+0432}.",
"4-\N{U+0439} \N{U+043a}\N{U+0432}.",
],
quarter_format_narrow => [
1,
2,
3,
4,
],
quarter_format_wide => [
"1-\N{U+0439} \N{U+043a}\N{U+0432}\N{U+0430}\N{U+0440}\N{U+0442}\N{U+0430}\N{U+043b}",
"2-\N{U+0439} \N{U+043a}\N{U+0432}\N{U+0430}\N{U+0440}\N{U+0442}\N{U+0430}\N{U+043b}",
"3-\N{U+0439} \N{U+043a}\N{U+0432}\N{U+0430}\N{U+0440}\N{U+0442}\N{U+0430}\N{U+043b}",
"4-\N{U+0439} \N{U+043a}\N{U+0432}\N{U+0430}\N{U+0440}\N{U+0442}\N{U+0430}\N{U+043b}",
],
quarter_stand_alone_abbreviated => [
"1-\N{U+0439} \N{U+043a}\N{U+0432}.",
"2-\N{U+0439} \N{U+043a}\N{U+0432}.",
"3-\N{U+0439} \N{U+043a}\N{U+0432}.",
"4-\N{U+0439} \N{U+043a}\N{U+0432}.",
],
quarter_stand_alone_narrow => [
1,
2,
3,
4,
],
quarter_stand_alone_wide => [
"1-\N{U+0439} \N{U+043a}\N{U+0432}\N{U+0430}\N{U+0440}\N{U+0442}\N{U+0430}\N{U+043b}",
"2-\N{U+0439} \N{U+043a}\N{U+0432}\N{U+0430}\N{U+0440}\N{U+0442}\N{U+0430}\N{U+043b}",
"3-\N{U+0439} \N{U+043a}\N{U+0432}\N{U+0430}\N{U+0440}\N{U+0442}\N{U+0430}\N{U+043b}",
"4-\N{U+0439} \N{U+043a}\N{U+0432}\N{U+0430}\N{U+0440}\N{U+0442}\N{U+0430}\N{U+043b}",
],
script => undef,
territory => "Ukraine",
time_format_full => "HH:mm:ss zzzz",
time_format_long => "HH:mm:ss z",
time_format_medium => "HH:mm:ss",
time_format_short => "HH:mm",
variant => undef,
version => 33,
}
| 37.057554 | 191 | 0.500388 |
73f720be95b68e6ee3fc32fcd8c39b7392694888 | 20,127 | pl | Perl | bin/parse_bowtie2_output_realtime_includemultifamily_pe.pl | mr-c/eclip | 833a389b773e12492d316e61db802dd353404f4f | [
"BSD-3-Clause"
]
| null | null | null | bin/parse_bowtie2_output_realtime_includemultifamily_pe.pl | mr-c/eclip | 833a389b773e12492d316e61db802dd353404f4f | [
"BSD-3-Clause"
]
| null | null | null | bin/parse_bowtie2_output_realtime_includemultifamily_pe.pl | mr-c/eclip | 833a389b773e12492d316e61db802dd353404f4f | [
"BSD-3-Clause"
]
| null | null | null | #!/usr/bin/env perl
use warnings;
use strict;
# Parse bowtie2 paired-end output in real time, keeping for each read pair the
# best-scoring alignment per transcript "family" and flagging multi-family hits.
# Usage: script.pl <R1.fastq> <R2.fastq> <bowtie2_index> <output_sam> <filelist>
# Outputs: <output_sam> (filtered SAM), .bowtieout (bowtie2 stderr),
#          .multimapping_deleted (family-combination tally), .done (sentinel).
# Lookup tables populated by read_in_filelists(): transcript id -> gene id,
# and transcript id -> "family_label:priority".
my %enst2gene;
my %convert_enst2type;
# Tally of multi-family combinations seen, written out at end of run.
my %multimapping_hash;
# rRNA special-casing: a hit to the canonical repeats (RNA28S/18S/5-8S) should
# override a hit to the catch-all "rRNA_extra" family, and vice versa.
my %rRNA_extra_hash;
my %rRNA_extra_hash_rev;
$rRNA_extra_hash{"RNA28S"} = "rRNA_extra";
$rRNA_extra_hash{"RNA18S"} = "rRNA_extra";
$rRNA_extra_hash{"RNA5-8S"} = "rRNA_extra";
$rRNA_extra_hash{"antisense_RNA28S"} = "antisense_rRNA_extra";
$rRNA_extra_hash{"antisense_RNA18S"} = "antisense_rRNA_extra";
$rRNA_extra_hash{"antisense_RNA5-8S"} = "antisense_rRNA_extra";
# Reverse mapping: catch-all label -> set of canonical rRNA labels it shadows.
$rRNA_extra_hash_rev{"rRNA_extra"}{"RNA28S"} = 1;
$rRNA_extra_hash_rev{"rRNA_extra"}{"RNA18S"} = 1;
$rRNA_extra_hash_rev{"rRNA_extra"}{"RNA5-8S"} = 1;
$rRNA_extra_hash_rev{"antisense_rRNA_extra"}{"antisense_RNA28S"} = 1;
$rRNA_extra_hash_rev{"antisense_rRNA_extra"}{"antisense_RNA18S"} = 1;
$rRNA_extra_hash_rev{"antisense_rRNA_extra"}{"antisense_RNA5-8S"} = 1;
my $fastq_file1 = $ARGV[0];
my $fastq_file2 = $ARGV[1];
my $bowtie_db = $ARGV[2];
my $output = $ARGV[3];
my $filelist_file = $ARGV[4];
&read_in_filelists($filelist_file);
# Flush/clear the per-read buffer every $print_batch distinct read names.
my $print_batch = 10000;
my $read_counter = 0;
my $bowtie_out = $output.".bowtieout";
# NOTE(review): 2-arg opens with bareword handles and no error check; a failed
# open here would be silent — consider 3-arg lexical opens with `or die`.
open(SAMOUT,">$output");
my $multimapping_out = $output.".multimapping_deleted";
open(MULTIMAP,">$multimapping_out");
my $done_file = $output.".done";
###########################################################################################################################################################
# print("stdbuf -oL bowtie2 -q --sensitive -a -p 3 --no-mixed --reorder -x $bowtie_db -1 $fastq_file1 -2 $fastq_file2 2> $bowtie_out")
###########################################################################################################################################################
# changed in 0.0.2 parameter -p 1 instead of -p 3
# Stream bowtie2 SAM output through a pipe; stdbuf keeps it line-buffered.
# -a reports all alignments; --reorder keeps mates on adjacent lines.
my $pid = open(BOWTIE, "-|", "stdbuf -oL bowtie2 -q --sensitive -a -p 1 --no-mixed --reorder -x $bowtie_db -1 $fastq_file1 -2 $fastq_file2 2> $bowtie_out");
# NOTE(review): the four variables below are never read anywhere in this file.
my %fragment_hash;
my $duplicate_count=0;
my $unique_count=0;
my $all_count=0;
# Name of the previous R1 record, used to detect read-name boundaries.
my $prev_r1name = "";
# Buffer of best alignments per read name, keyed by family ("ensttype").
my %read_hash;
# Main streaming loop: reads the bowtie2 SAM stream two lines at a time
# (R1 then R2 of the same pair, guaranteed adjacent by --reorder), and for
# each read name accumulates the best alignment per transcript family in
# %read_hash. Ties on alignment score are resolved by family priority and
# by the spliced/genome-flank special cases below.
if ($pid) {
    while (<BOWTIE>) {
        my $r1 = $_;
        chomp($r1);
        # Pass SAM header lines (@HD/@SQ/@PG) straight through to the output.
        if ($r1 =~ /^\@/) {
            print SAMOUT "$r1\n";
            next;
        }
        # The mate record is the very next line of the stream.
        my $r2 = <BOWTIE>;
        chomp($r2);
        my @tmp_r1 = split(/\t/,$r1);
        my @tmp_r2 = split(/\t/,$r2);
        # Read names may carry a whitespace-separated barcode suffix.
        my ($r1name,$r1bc) = split(/\s+/,$tmp_r1[0]);
        my ($r2name,$r2bc) = split(/\s+/,$tmp_r2[0]);
        unless ($r1name eq $r2name) {
            print STDERR "paired end mismatch error: r1 $tmp_r1[0] r2 $tmp_r2[0]\n";
        }
        my $debug_flag = 0;
        print STDERR "read1 $r1\n" if ($debug_flag == 1);
        # SAM FLAG fields (column 2) for each mate.
        my $r1sam_flag = $tmp_r1[1];
        my $r2sam_flag = $tmp_r2[1];
        unless ($r1sam_flag) {
            print STDERR "error $r1 $r2\n";
        }
        # 77/141 = both mates unmapped; skip the pair entirely.
        next if ($r1sam_flag == 77 || $r1sam_flag == 141);
        my $frag_strand;
        ### This section is for only properly paired reads
        if ($r1sam_flag == 99 || $r1sam_flag == 355) {
            $frag_strand = "-";
        } elsif ($r1sam_flag == 83 || $r1sam_flag == 339) {
            $frag_strand = "+";
        } elsif ($r1sam_flag == 147 || $r1sam_flag == 403) {
            $frag_strand = "-";
        } elsif ($r1sam_flag == 163 || $r1sam_flag == 419) {
            $frag_strand = "+";
        } else {
            # NOTE(review): the print below is unreachable — `next` exits the
            # iteration first; the two statements were likely meant to be swapped.
            next;
            print STDERR "R1 strand error $r1sam_flag\n";
        }
        ###
        # 77 = R1, unmapped
        # 141 = R2, unmapped
        # 99 = R1, mapped, fwd strand --- frag on rev strand -> 355 = not primary
        # 147 = R2, mapped, rev strand -- frag on rev strand -> 403 = not primary
        # 101 = R1 unmapped, R2 mapped rev strand -- frag on rev strand
        # 73 = R1, mapped, fwd strand --- frag on rev strand
        # 153 = R2 mapped (R1 unmapped), rev strand -- frag on rev strand
        # 133 = R2 unmapped, R1 mapped fwd strand -- frag on rev strand
        # 83 = R1, mapped, rev strand --- frag on fwd strand -> 339 = not primary
        # 163 = R2, mapped, fwd strand -- frag on fwd strand -> 419 = not primary
        # 69 = R1 unmapped, R2 mapped fwd strand -- frag on fwd strand
        # 89 = R1 mapped rev strand, R2 unmapped -- frag on fwd strand
        # 137 = R2 mapped (R1 unmapped), fwd strand -- frag on fwd strand
        # 165 = R2 unmapped, R1 rev strand -- frag on fwd strand
        # Optional SAM tags start at column 12; extract the AS:i alignment
        # score for each mate (bowtie2: higher = better) and sum them.
        my $flags_r1 = join("\t",@tmp_r1[11..$#tmp_r1]);
        my $flags_r2 = join("\t",@tmp_r2[11..$#tmp_r2]);
        my ($mismatch_score_r1,$mismatch_score_r2);
        if ($flags_r1 =~ /AS\:i\:(\S+?)\s/) {
            $mismatch_score_r1 = $1;
        }
        if ($flags_r2 =~ /AS\:i\:(\S+?)\s/) {
            $mismatch_score_r2 = $1;
        }
        my $paired_mismatch_score = $mismatch_score_r1 + $mismatch_score_r2;
        # Reference name (column 3): keep the full name, and a stripped base
        # name without the _spliced / _withgenomeflank suffix for lookups.
        my $mapped_enst = $tmp_r1[2];
        my $mapped_enst_full = $tmp_r1[2];
        if ($mapped_enst =~ /^(.+)\_spliced/) {
            $mapped_enst = $1;
        }
        if ($mapped_enst =~ /^(.+)\_withgenomeflank/) {
            $mapped_enst = $1;
        }
        unless (exists $convert_enst2type{$mapped_enst}) {
            print STDERR "enst2type is missing for $mapped_enst $r1\n";
        }
        # Family label and its numeric priority (lower = higher priority).
        my ($ensttype,$enstpriority) = split(/\:/,$convert_enst2type{$mapped_enst});
        # Minus-strand fragments count as antisense hits to the transcript.
        if ($frag_strand eq "-") {
            $ensttype = "antisense_".$ensttype;
            $mapped_enst_full = "antisense_".$mapped_enst_full;
        }
        print "mapped $mapped_enst ensttype $ensttype priority $enstpriority\n" if ($debug_flag == 1);
        if ($r1name eq $prev_r1name) {
            # current read is same as previous - do nothing
        } else {
            # current read is different than previous - flush and clear the
            # buffer every $print_batch (10,000) distinct read names
            $read_counter++;
            if ($read_counter > $print_batch) {
                &print_output();
                %read_hash = ();
                $read_counter = 0;
            }
            $prev_r1name = $r1name;
        }
        unless (exists $read_hash{$r1name}) {
            # if read has never been seen before, keep first mapping
            $read_hash{$r1name}{R1}{$ensttype} = $r1;
            $read_hash{$r1name}{R2}{$ensttype} = $r2;
            $read_hash{$r1name}{flags}{$ensttype} = $enstpriority;
            $read_hash{$r1name}{quality} = $paired_mismatch_score;
            # push @{$read_hash{$r1name}{mult_ensts}{$ensttype}},$mapped_enst;
            push @{$read_hash{$r1name}{mult_ensts}{$ensttype}},$mapped_enst_full;
            $read_hash{$r1name}{enst}{$mapped_enst} = $mapped_enst_full;
            $read_hash{$r1name}{master_enst}{$ensttype} = $mapped_enst_full;
            print "read never seen before quality $paired_mismatch_score $ensttype\n" if ($debug_flag == 1);
        } else {
            # is score better than previous mapping?
            if ($paired_mismatch_score < $read_hash{$r1name}{quality}) {
                # new one is worse, skip
                print "new match has worse score than previous - skip $paired_mismatch_score\n" if ($debug_flag == 1);
            } elsif ($paired_mismatch_score > $read_hash{$r1name}{quality}) {
                # new one is better match than previous - old should all be discarded
                delete($read_hash{$r1name});
                $read_hash{$r1name}{R1}{$ensttype} = $r1;
                $read_hash{$r1name}{R2}{$ensttype} = $r2;
                $read_hash{$r1name}{flags}{$ensttype} = $enstpriority;
                $read_hash{$r1name}{quality} = $paired_mismatch_score;
                push @{$read_hash{$r1name}{mult_ensts}{$ensttype}},$mapped_enst_full;
                $read_hash{$r1name}{enst}{$mapped_enst} = $mapped_enst_full;
                $read_hash{$r1name}{master_enst}{$ensttype} = $mapped_enst_full;
                print "new match has better quality score - discard all previous $paired_mismatch_score\n" if ($debug_flag == 1);
            } elsif ($paired_mismatch_score == $read_hash{$r1name}{quality}) {
                # equal quality, both are good - now do family analysis
                print "has equal quality, now doing family mapping... \n" if ($debug_flag == 1);
                if (exists $read_hash{$r1name}{flags}{$ensttype}) {
                    # if mapping within family exists before...
                    print "mapping exists within family before... \n" if ($debug_flag == 1);
                    # first - did it already map to this transcript before?
                    if (exists $read_hash{$r1name}{enst}{$mapped_enst}) {
                        # is it spliced vs unspliced or tRNA flank vs whole genome? if yes ok
                        if ($read_hash{$r1name}{enst}{$mapped_enst}."_withgenomeflank" eq $mapped_enst_full || $read_hash{$r1name}{enst}{$mapped_enst}."_spliced" eq $mapped_enst_full) {
                            # original entry is to shorter transcript - keep that one, skip new one entirely
                            print "original was to original (non-genome flank version for tRNA or non-spliced for others) - keep old, skip this one\n" if ($debug_flag == 1);
                        } elsif ($read_hash{$r1name}{enst}{$mapped_enst} eq $mapped_enst_full."_withgenomeflank" || $read_hash{$r1name}{enst}{$mapped_enst} eq $mapped_enst_full."_spliced") {
                            # original entry is to longer transcript - replace with shorter
                            $read_hash{$r1name}{R1}{$ensttype} = $r1;
                            $read_hash{$r1name}{R2}{$ensttype} = $r2;
                            $read_hash{$r1name}{flags}{$ensttype} = $enstpriority;
                            unless (exists $read_hash{$r1name}{"mult_ensts"}{$ensttype}) {
                                print STDERR "error doesn't exist? $r1name $ensttype \n";
                            }
                            # Rewrite the recorded full name in place inside the mult_ensts list.
                            for (my $i=0;$i<@{$read_hash{$r1name}{"mult_ensts"}{$ensttype}};$i++) {
                                if ($read_hash{$r1name}{mult_ensts}{$ensttype}[$i] eq $read_hash{$r1name}{enst}{$mapped_enst}) {
                                    $read_hash{$r1name}{mult_ensts}{$ensttype}[$i] = $mapped_enst_full;
                                }
                            }
                            $read_hash{$r1name}{enst}{$mapped_enst} = $mapped_enst_full;
                            $read_hash{$r1name}{master_enst}{$ensttype} = $mapped_enst_full;
                            print "new mapping is the one i want - discard old and keep newer annotation \n" if ($debug_flag == 1);
                        } else {
                            # maps to two places in the same transcript - this is probably actually ok for counting purposes, but for now I'm going to flag these as bad
                            # 7/20/16 - going to comment this out for now - basically just skip the second entry but don't flag as bad
                            # $read_hash{$r1name}{flags}{"double_maps"} = 1;
                            print "double maps - $ensttype prev length of mult_ensts array is ".scalar(@{$read_hash{$r1name}{mult_ensts}{$ensttype}})."\n" if ($debug_flag == 1);
                            # Tag the stored name with _DOUBLEMAP instead of discarding the read.
                            for (my $i=0;$i<@{$read_hash{$r1name}{"mult_ensts"}{$ensttype}};$i++) {
                                if ($read_hash{$r1name}{mult_ensts}{$ensttype}[$i] eq $read_hash{$r1name}{enst}{$mapped_enst}) {
                                    $read_hash{$r1name}{mult_ensts}{$ensttype}[$i] = $mapped_enst_full."_DOUBLEMAP";
                                }
                            }
                            $read_hash{$r1name}{master_enst}{$ensttype} = $mapped_enst_full."_DOUBLEMAP";
                            print "double maps - $ensttype length of mult_ensts array is ".scalar(@{$read_hash{$r1name}{mult_ensts}{$ensttype}})."\n" if ($debug_flag == 1);
                        }
                    } elsif ($enstpriority < $read_hash{$r1name}{flags}{$ensttype}) {
                        # priority of new mapping is better than old - replace old
                        print "priority of new mapping is better than old - replace old mapping\n" if ($debug_flag == 1);
                        $read_hash{$r1name}{R1}{$ensttype} = $r1;
                        $read_hash{$r1name}{R2}{$ensttype} = $r2;
                        $read_hash{$r1name}{flags}{$ensttype} = $enstpriority;
                        unshift @{$read_hash{$r1name}{mult_ensts}{$ensttype}},$mapped_enst_full;
                        $read_hash{$r1name}{enst}{$mapped_enst} = $mapped_enst_full;
                        $read_hash{$r1name}{master_enst}{$ensttype} = $mapped_enst_full;
                    } else {
                        # Mapping is equal quality, but priority of new mapping is worse than old - keep new enst_full but otherwise discard
                        push @{$read_hash{$r1name}{mult_ensts}{$ensttype}},$mapped_enst_full;
                    }
                } elsif (exists $rRNA_extra_hash{$ensttype} && exists $read_hash{$r1name}{R1}{$rRNA_extra_hash{$ensttype}}) {
                    # If old mapping was to rRNA_extra but new mapping is RNA18S, RNA28S, or RNA5-8S, keep new mapping and discard old
                    my $old_rRNA_label = $rRNA_extra_hash{$ensttype};
                    delete($read_hash{$r1name}{R1}{$old_rRNA_label});
                    delete($read_hash{$r1name}{R2}{$old_rRNA_label});
                    delete($read_hash{$r1name}{flags}{$old_rRNA_label});
                    delete($read_hash{$r1name}{master_enst}{$old_rRNA_label});
                    delete($read_hash{$r1name}{mult_ensts}{$old_rRNA_label});
                    # NOTE(review): hard-coded transcript id assumed to be the
                    # rRNA_extra reference name — confirm against the index.
                    delete($read_hash{$r1name}{enst}{"NR_046235.1"});
                    $read_hash{$r1name}{R1}{$ensttype} = $r1;
                    $read_hash{$r1name}{R2}{$ensttype} = $r2;
                    $read_hash{$r1name}{flags}{$ensttype} = $enstpriority;
                    $read_hash{$r1name}{quality} = $paired_mismatch_score;
                    $read_hash{$r1name}{enst}{$mapped_enst} = $mapped_enst_full;
                    $read_hash{$r1name}{master_enst}{$ensttype} = $mapped_enst_full;
                    push @{$read_hash{$r1name}{mult_ensts}{$ensttype}},$mapped_enst_full;
                    print "new mapping is the one i want - discard old and keep newer annotation \n" if ($debug_flag == 1);
                } elsif (exists $rRNA_extra_hash_rev{$ensttype}) {
                    # new mapping is to rRNA_extra
                    my $rRNA_flag = 0;
                    for my $rRNA_elements (keys %{$rRNA_extra_hash_rev{$ensttype}}) {
                        if (exists $read_hash{$r1name}{R1}{$rRNA_elements}) {
                            # new mapping is to rRNA_extra, but old is to RNA28S, RNA18S, or RNA5-8S - discard new mapping
                            $rRNA_flag = 1;
                        }
                    }
                    if ($rRNA_flag == 1) {} else {
                        # new mapping is to rRNA_extra, but old is to something non-rRNA - do same as multi-family below
                        $read_hash{$r1name}{R1}{$ensttype} = $r1;
                        $read_hash{$r1name}{R2}{$ensttype} = $r2;
                        $read_hash{$r1name}{flags}{$ensttype} = $enstpriority;
                        $read_hash{$r1name}{quality} = $paired_mismatch_score;
                        $read_hash{$r1name}{enst}{$mapped_enst} = $mapped_enst_full;
                        push @{$read_hash{$r1name}{mult_ensts}{$ensttype}},$mapped_enst_full;
                        $read_hash{$r1name}{master_enst}{$ensttype} = $mapped_enst_full;
                    }
                } else {
                    # Equal score in a brand-new family: record it alongside the
                    # existing one(s); print_output() reports it as multi-family.
                    print "maps to two families - $ensttype\n" if ($debug_flag == 1);
                    $read_hash{$r1name}{R1}{$ensttype} = $r1;
                    $read_hash{$r1name}{R2}{$ensttype} = $r2;
                    $read_hash{$r1name}{flags}{$ensttype} = $enstpriority;
                    $read_hash{$r1name}{quality} = $paired_mismatch_score;
                    $read_hash{$r1name}{enst}{$mapped_enst} = $mapped_enst_full;
                    push @{$read_hash{$r1name}{mult_ensts}{$ensttype}},$mapped_enst_full;
                    #adds new master enst to save for below
                    $read_hash{$r1name}{master_enst}{$ensttype} = $mapped_enst_full;
                }
            } else {
                # I don't think this should ever be hit
                print STDERR "this shouldn't be hit - $paired_mismatch_score $read_hash{$r1name}{quality} $r1name $r1 $r2\n";
            }
        }
        # end of line processing
    }
}
# Flush whatever is still buffered in %read_hash, then close the outputs.
print_output();    # drop legacy '&' call sigil (bare &sub would reuse caller @_)
close(SAMOUT) or die "cannot close SAM output: $!";
# Write the tally of multi-family mapping combinations observed during the run.
for my $multi_key (keys %multimapping_hash) {
    print MULTIMAP "$multi_key\t".$multimapping_hash{$multi_key}."\n";
}
close(MULTIMAP) or die "cannot close multimapping output: $!";
# Create the sentinel file so downstream pipeline steps know this job finished;
# previously an unchecked 2-arg open with a bareword handle.
open(my $done_fh, '>', $done_file) or die "cannot open $done_file: $!";
print $done_fh "jobs done\n";
close($done_fh) or die "cannot close $done_file: $!";
# Flush the buffered reads in the file-scoped %read_hash to SAMOUT.
# For each read: families are sorted alphabetically and the first one supplies
# the SAM lines; column 3 of both mates is rewritten to
# "family1|family2||master_enst1|master_enst2" and a ZZ:Z: tag listing every
# candidate transcript is appended. Reads hitting >1 family are tallied into
# the file-scoped %multimapping_hash. Reads/writes globals: %read_hash,
# %multimapping_hash, SAMOUT filehandle.
sub print_output {
    # NOTE(review): %count and %count_enst are populated below but never read
    # before the sub returns — effectively dead bookkeeping.
    my %count;
    my %count_enst;
    for my $read (keys %read_hash) {
        # Alphabetical sort makes the choice of "primary" family deterministic.
        my @ensttype_array = sort {$a cmp $b} keys %{$read_hash{$read}{flags}};
        my $ensttype = $ensttype_array[0];
        my @masterenst_array;
        for my $type (@ensttype_array) {
            push @masterenst_array,$read_hash{$read}{master_enst}{$type};
        }
        my $ensttype_join = join("|",@ensttype_array);
        my $masterenst_join = join("|",@masterenst_array);
        $count{$ensttype_join}++;
        if (scalar(keys %{$read_hash{$read}{flags}}) == 1) {
            # Single-family read: sanity-check there is exactly one stored R1.
            print STDERR "this shouldn't happen this should be 1 ".scalar(keys %{$read_hash{$read}{R1}})."\n" unless (scalar(keys %{$read_hash{$read}{R1}}) == 1);
            my @r1_split = split(/\t/,$read_hash{$read}{R1}{$ensttype});
            my @r2_split = split(/\t/,$read_hash{$read}{R2}{$ensttype});
            # Overwrite the SAM RNAME column with the family||master summary.
            $r1_split[2] = $ensttype_join."||".$masterenst_join;
            $r2_split[2] = $ensttype_join."||".$masterenst_join;
            my $r1_line = join("\t",@r1_split);
            my $r2_line = join("\t",@r2_split);
            print SAMOUT "".$r1_line."\tZZ:Z:".join("|",@{$read_hash{$read}{mult_ensts}{$ensttype}})."\n".$r2_line."\tZZ:Z:".join("|",@{$read_hash{$read}{mult_ensts}{$ensttype}})."\n";
            my @blah = split(/\t/,$read_hash{$read}{R1}{$ensttype});
            $count_enst{$ensttype."|".$blah[2]}++;
        } else {
            # Multi-family read: the ZZ tag concatenates candidates from every
            # family, but the printed SAM alignment comes from the first family.
            my @all_mult_ensts;
            for my $key (@ensttype_array) {
                push @all_mult_ensts,join("|",@{$read_hash{$read}{mult_ensts}{$key}});
            }
            my $final_mult_ensts = join("|",@all_mult_ensts);
            unless (exists $read_hash{$read}{R1}{$ensttype} && $read_hash{$read}{R1}{$ensttype}) {
                print "weird error - $read $ensttype readhash doesn't exist ? ".$read_hash{$read}{flags}{$ensttype}."\n";
            }
            my @r1_split = split(/\t/,$read_hash{$read}{R1}{$ensttype});
            my @r2_split = split(/\t/,$read_hash{$read}{R2}{$ensttype});
            $r1_split[2] = $ensttype_join."||".$masterenst_join;
            $r2_split[2] = $ensttype_join."||".$masterenst_join;
            my $r1_line = join("\t",@r1_split);
            my $r2_line = join("\t",@r2_split);
            print SAMOUT "".$r1_line."\tZZ:Z:".$final_mult_ensts."\n".$r2_line."\tZZ:Z:".$final_mult_ensts."\n";
            # Tally the family combination for the .multimapping_deleted report.
            my $multimapping_type = join("|",keys %{$read_hash{$read}{flags}});
            $multimapping_hash{$multimapping_type}++;
        }
    }
}
# Load the transcript annotation filelist. Each tab-separated line holds:
# pipe-joined transcript ids, pipe-joined gene ids, a gene id, a family label,
# and a per-family file name. Populates the file-scoped lookups:
#   %enst2gene          transcript id -> gene id
#   %convert_enst2type  transcript id -> "family_label:priority"
# Priority increases with position in the file, so earlier entries rank higher.
# Dies if the filelist cannot be opened (previously a silent 2-arg open).
sub read_in_filelists {
    my $fi = shift;
    my $priority_n = 0;
    # 3-arg open with a lexical handle and an error check, replacing open(F,$fi).
    open(my $fh, '<', $fi) or die "cannot open filelist $fi: $!";
    # Read line-by-line instead of slurping the whole file via for(<F>).
    while (my $line = <$fh>) {
        chomp($line);
        my ($allenst,$allensg,$gid,$type_label,$typefile) = split(/\t/,$line);
        $type_label =~ s/\_$//;    # strip a trailing underscore from the label
        unless ($allenst) {
            print STDERR "error missing enst $line $fi\n";
        }
        my @ensts = split(/\|/,$allenst);
        for my $enst (@ensts) {
            $enst2gene{$enst} = $gid;
            # Stored as "label:priority"; split apart again by the main loop.
            $convert_enst2type{$enst} = $type_label.":".$priority_n;
            $priority_n++;
        }
    }
    close($fh) or die "cannot close filelist $fi: $!";
}
| 47.807601 | 190 | 0.535649 |
73d2c2affab4e4671ced0dbdf4f47cb32118a0f6 | 8,067 | al | Perl | benchmark/benchmarks/FASP-benchmarks/data/random-oriented/randomoriented-1335-370-1813.al | krzysg/FaspHeuristic | 1929c40e3fbc49e68b04acfc5522539a18758031 | [
"MIT"
]
| null | null | null | benchmark/benchmarks/FASP-benchmarks/data/random-oriented/randomoriented-1335-370-1813.al | krzysg/FaspHeuristic | 1929c40e3fbc49e68b04acfc5522539a18758031 | [
"MIT"
]
| null | null | null | benchmark/benchmarks/FASP-benchmarks/data/random-oriented/randomoriented-1335-370-1813.al | krzysg/FaspHeuristic | 1929c40e3fbc49e68b04acfc5522539a18758031 | [
"MIT"
]
| null | null | null | 1 80 128 163 311 316
2 75 116 163 291 295
3 17 126 204
4 80 164 169 255
5 84 131
6 75 103 169 276 347
7 33 130 303 365
8 15 118 213 258 304
9 124 154 242 263 351
10 84 100 197 199 233 273 304
11 4 7 78 81 98 196 300 313
12 32 352
13 16 18 197 256 269 301 310 367 370
14 17 88 101 149 168 190 271 281 312
15 9 23 71 163
16 29 264 326 350
17 84 172 327
18 43 69 81
19 2 86 218 245 352
20 70 105 268 274 324 329
21 124 131 254 326 348
22 96 196 229 352
23 46 76 129 162 203 216 259
24 66 237 264 306
25 16 44 295
26 39 82 294 359
27
28 37 64 152 200 210
29 13 228 232 293 360
30 57 93 172 354
31 29 144 315
32 117 158 248
33 39 174
34 55 89 127 178 261 345
35 6 69 74 183
36 13 236 290 304 355
37 63 161 165 187 245 281 298
38 158 279 291 355
39 94 137 147 240 268 338
40 288
41 62 71 147 182 207 271 283 333 334 370
42 66 105 111 138 169 175 208 353
43 25 70 79 95 177 254 305
44 21 105 269 275
45 71 179 288
46 43 71 205 230 254
47 15 17 215 369
48 58 76
49 38 126 148 160 200 225 366
50 192 213 282 295 365
51 90 130 320
52 73 203 237 304 305
53 146 199 235 341
54 117 270 322 355
55 12 107 129 142
56 11 14 263 300
57 163 189 285 341
58 87 100 273 314 320
59 19 24 209 303 309 360
60 103 145 287 346
61 59 83
62 23 202
63 36 131 190 225 229 281 325 341
64 104 269 277 344
65 124 169 246 257 321 355
66 8 39 142
67 164 213 287 293
68 164 188 247 292 314
69 77 183 268 309 332
70 135 241 295
71 54 203 206 265
72 33 91 108 249 319
73 3 168 221 239 289 290 306 333
74 7 21 44 66 159 289
75 162 173 230 249
76 16 43 79 157 162 332
77 136 144 236 241 244 246
78 31 47 63 142 201 353
79 23 50 368
80 31 318
81 168 325 326
82 59 60 92 169 180 275 347
83 10 85 245 256 272 324
84 27 39 62 219 249 349
85 160 170 191
86 4 5 167 198 204 256
87 88 312 314
88 18 307
89 25 218 366
90 86 107 195 204 215 253 266 292
91 22 88 151 326 338
92 157 189 216 316 361
93 140 290 298
94 30 87 134 203 245 259
95 154 251 288
96 75 90 154 239 283
97 58 355 357 369
98 139 212 217 266
99 128
100 241 324
101 20 173 268 296 300
102 67 222 239 329
103 137 204
104 65 80 169 197 206 367
105 48 73 162 268 312
106 105 120 210 239 252 346
107 65 100 176 285
108 23 50 65 226 286 336
109 26 213 235 250 278 281 331 352
110 6 23 115 196 273 296 344
111 320
112 290
113 6 71 72 368
114 67 77 97
115 54 71 282 312
116 72 79 144 213 261 293 328
117 156 310 334
118 64 88 207 232 266 330
119 43 51 77 139 163 174 269 327 330
120 12 124 230 251 348
121 44 52 97 215 297 313
122 78 142 144 166 206 213 240 260
123 3 11 196 296 305 336
124 24 35 39 96 122 130
125 20 50
126 30 87 210 222
127 26 65 109 272
128 141 211 293 364
129 99 186 212 280 340
130 15 35 85 139 181 202 235 272
131 61 216 349 365 370
132 218 230 241
133 41 44 53 59 254 261 285 314
134 12 44 79 333 351 368
135 168 196 347 365
136 49 193 233 247 267
137 61 163
138 132
139 23 24 116 253 260 328
140 75 150 158 265
141 15 27 110 156 225 321 348
142 115 237 245 273 300 309
143 58 100 114 282 293 294 340
144 44 98 183 216 263 276 307
145 110 308
146 4 24 49 176 242 270 324
147 191 235 352 368 370
148 40 58 222 294
149 4 12 59 161 186 250 261
150 80 117 146 216 263 286
151 21 118 227 338 351
152 157 163 231 309
153 7 10 57 92 127 189 348
154 120 152 164 165 168 297 329
155 18 113 241
156 44 314
157 140 339
158 80 118 172 182 221 295 316 321 352
159 46 137 214 275 297
160 117 149 307
161 196 239 294
162 87 140 347 354
163 77 97
164 27 46 79 109 142 157
165 42 118 196 359
166 70 76 176 188 274 302
167 14 24 40 48 126 136 142 193 306 351
168 33 62 89
169 59 97 124 312 317 331
170 173 235 249 276 282 356
171 6 37 85 93 119 204 297 361
172 141 229 262 264 321
173 171 208
174 43 145 277 361
175 21 155 162 251 363
176 62 190 242 303
177 38 75 109 159 165
178 122 190 203 207
179 49 62 86 116 256 315
180 31 40 244 298 303 361
181 1 8 30 33 88 224 260 305 340
182 83 301 316 318 366
183 26 83 85 93 196
184 2 24 196 209 216 232
185 22 279 294 354 360
186 48 139 147 184 287 328 348
187 159 306
188 117 316
189 98 144 283 334 369
190 101 183 228 232 367
191 24 29 47 251 302 334 367
192 40 87 366
193 2 8 37 71 72 110
194 32 36 74 81 126 191 259 338
195 34 229 246
196 144 172 221 232 251 298
197 86 296 337
198 67 117 225
199 12 28 165 247 275
200 117 198 347
201 110 114 156 176 346
202 28 276 327 362
203 44 184 311 330 340
204 62 69 150 185 220 284 303 304 352
205 44 97 213 218 302
206 16 57 62 187 210 218 355
207 77 97 155 212 311 337 341 346
208 108 222 279
209 188 201 275 347
210 3 81 245 266 299 320 331
211 99 222 237
212 18 101 133 150 219 283
213 60 210 366
214 17 50 222 230
215 14 312 332
216 198 366
217 37 53 140 290
218 93 103 196 300 336
219 35 106 140 336
220 63 80 87 230 268 318
221 154 218 289
222 129 136 235 354 361
223 21 105 149
224 40 162 214 333 339
225 10 117 129 149 293
226 5 14 38 111 184 295 297
227 115
228 273 301
229 82 251 324
230 22 57 62 111 119 155 170 331 356 358
231 27 84 94 177 204 236 291 297
232 73 97 107 145 269 332 357
233 14 42 64 199 251 268 269 270 310
234 116 171 200 329
235 34 85 97 188 202 220 276
236 30 152 238 260 288
237 119 135 166 175 200 263
238 67 69 97 105 183 345
239 44 124 193 270 272
240 32 73 206 228
241 6 28 157 201 242 318
242 67 79 94 119 120 256 367 369
243 49 91 102 225 275
244 225 275 298 346
245 9 231 239 256 264 369
246 84 139 350
247 66 112 131 232 340
248 155 184 199 243 299 363
249 9 13 47 147 331 357
250 24 40 77 122 151 155 224 323
251 123 204 223 227 326
252 19 159 196 242 286 352
253 14 93 137 339 353
254 11 17 18 100 103 118 269 273
255 156 193 218 360
256 9 55 132 189 330 338
257 71 131 206 221 252
258 128 168 229 282
259 7 36 41 50 118 156 262 308 328 358
260 219 244 285 315 328
261 54 136 155 306 334
262 151 226 254 285 319 345
263 148 213 331 336 357
264 51 122 188 205
265 37 195 241 326
266 143
267 17 59 80 92 161 200 231 312
268 56 120 182 211 221 222 303 321 331
269 77 223
270 82 93 210 307 316 323 363
271 122 135 230
272 8 20 71 100 169 186 232 237
273 202 348
274 175 253 326
275 27 40 50 61 222 256 311 346
276 157 201 311
277 63 91 173 214 233 296
278 83 168 234 250 252 264 357
279 19 63 72 77 86 115 182 218
280 246 349
281 8 42 90 204 214 302 348
282 13 191 224
283 16 48 152 206 248 289
284 24 226 270
285 2 93 193 208 267
286 33 138 224 241 242 278 314 343
287 43 210
288 66 152 196
289 124 230 239 245 250 327
290 86 165
291 56 79 221 254 263 271 315
292 160 196 248
293 4 58 73 186 250 325 352
294 209
295 48 85 108 133 139 199 234
296 16 71 152 274 343
297 10 60 125 150 190
298 23 147 179 216 226
299 9 11 50 211 347
300 7 184 303 335 341 342
301 16 158 230 291
302 16 136 349
303 78 164 201 239 299 331 342 349
304 14 22 115 254 267 285
305 20 44 110 232
306 175 213 240 283 295
307 189 194 258 311
308 299
309 72
310 6 22 136 138 167 332
311 29 80 91 104 245
312 110 134 172 297
313 234 240 277 302 342
314 75 116 192 249 268 329 335 358
315 6 45 70 207 235 267 338 340
316 79 185 198 355
317 45 76 225 237 269 315
318 313 330
319 164 258 318
320 1 16 175 184 195 208
321 6 43 88 96
322 9 45 202 216 367
323 172 294 324
324 93 186 203 204 281 329 337 344
325 8 13 14 22 153 287
326 1 4 65 126 154 220 248 297
327 210 222 319
328 137
329 2 36 160 226 341
330 2 172 195 220 353
331 8 26 36 146 203
332 38 47 185 219 280
333 33 152 198 264 299 336
334 64 102 118 153 207
335 43 189 190 253 269 295 352
336 17 71 133 293 326 359 367
337 15 126 144 195 330
338 212 233 337
339 90 135 141 158 340 350
340 47 96 111 124 134 238 241 344
341 19 65 69 135 175 246 327
342 55 85 114 150
343 38 103 139 191 202 236 295
344 31 85 100 129 150 183 290 299 356
345 33 35 155 239 300 342 359
346 123 265
347 76 132 330
348 184 195 258 290 328 353 361
349 69 83 232 321 346
350 8 30 63 139 207 242 247 323
351 291 305 309 340
352 140 267
353 117 160
354 24 44 169 364
355 6 75 139 149 155 173 295 343
356 19 251 354
357 7 36 38 164 251 362
358 35 61 100 213 249 331
359 8 122 193 234 268 335 365
360 140 145 283 352
361 5 176 177 232 270 328
362 15 210 291 352
363 4 64 169 342
364 1 15 78 249 288 312 328
365 283 293 335
366 71 195 252 312 350
367 251 271 353
368 4 142 302
369 57 71 140 153 317
370 152 187 221 229 242 259 | 21.802703 | 40 | 0.729515 |
Subsets and Splits